From eb1fd744668b8eea5afa88c13b4aaef56ccb7871 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Tue, 24 Sep 2024 01:43:26 +0530 Subject: [PATCH 01/17] fixing experiment type in create exp Signed-off-by: Shekhar Saxena --- migrations/kruize_local_ddl.sql | 3 +- .../exceptions/InvalidExperimentType.java | 26 +++++++++++++++ .../analyzer/kruizeObject/KruizeObject.java | 12 +++++++ .../engine/RecommendationEngine.java | 9 +++--- .../analyzer/serviceObjects/Converters.java | 31 ++++++------------ .../CreateExperimentAPIObject.java | 11 +++++++ .../serviceObjects/KubernetesAPIObject.java | 11 ------- .../ListRecommendationsAPIObject.java | 10 ++++++ .../analyzer/services/CreateExperiment.java | 32 ++++++++++++++++--- .../utils/AnalyzerErrorConstants.java | 1 + .../autotune/common/k8sObjects/K8sObject.java | 9 ------ .../autotune/database/helper/DBHelpers.java | 16 +++++++--- .../database/table/KruizeExperimentEntry.java | 10 ++++++ .../table/KruizeRecommendationEntry.java | 9 ++++++ .../com/autotune/utils/KruizeConstants.java | 1 + 15 files changed, 135 insertions(+), 56 deletions(-) create mode 100644 src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql index cd6453371..48781f500 100644 --- a/migrations/kruize_local_ddl.sql +++ b/migrations/kruize_local_ddl.sql @@ -1,4 +1,5 @@ create table IF NOT EXISTS kruize_datasources (version varchar(255), name varchar(255), provider varchar(255), serviceName varchar(255), namespace varchar(255), url varchar(255), authentication jsonb, primary key (name)); create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), datasource_name varchar(255), cluster_name varchar(255), namespace varchar(255), workload_type varchar(255), workload_name varchar(255), container_name varchar(255), container_image_name varchar(255), primary key (id)); -alter table kruize_experiments add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); +alter table kruize_experiments add column experiment_type varchar(255), add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); +alter table kruize_recommendations add column experiment_type varchar(255); diff --git a/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java b/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java new file mode 100644 index 000000000..9de8c23bb --- /dev/null +++ b/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2020, 2021 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ +package com.autotune.analyzer.exceptions; + +public class InvalidExperimentType extends Throwable +{ + public InvalidExperimentType() { + } + + public InvalidExperimentType(String message) { + super(message); + } +} diff --git a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java index 41670ac8f..3352e8ed9 100644 --- a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java +++ b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java @@ -47,6 +47,8 @@ public final class KruizeObject { private String clusterName; @SerializedName("datasource") private String datasource; + @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future + private String experiment_type; private String namespace; // TODO: Currently adding it at this level with an assumption that there is only one entry in k8s object needs to be changed private String mode; //Todo convert into Enum @SerializedName("target_cluster") @@ -297,6 +299,15 @@ public void setDataSource(String datasource) { this.datasource = datasource; } + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experiment_type) { + this.experiment_type = experiment_type; + } + + @Override public String toString() { // Creating a temporary cluster name as we allow null for cluster name now @@ -309,6 +320,7 @@ public String toString() { ", experimentName='" + experimentName + '\'' + ", clusterName=" + tmpClusterName + '\'' + ", datasource=" + datasource + '\'' + + ", experimentType=" + experiment_type + '\'' + ", mode='" + mode + '\'' + ", targetCluster='" + targetCluster + '\'' + ", hpoAlgoImpl=" + hpoAlgoImpl + diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index 9abb232c5..89fa32d6f 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -312,12 +312,12 @@ public KruizeObject prepareRecommendations(int calCount) { public void generateRecommendations(KruizeObject kruizeObject) { for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { - if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { String namespaceName = k8sObject.getNamespace(); NamespaceData namespaceData = k8sObject.getNamespaceData(); LOGGER.info("Generating recommendations for namespace: {}", namespaceName); generateRecommendationsBasedOnNamespace(namespaceData, kruizeObject); - } else { + } else if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)){ for (String containerName : k8sObject.getContainerDataMap().keySet()) { ContainerData containerData = k8sObject.getContainerDataMap().get(containerName); @@ -2004,7 +2004,7 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T String workload_type = 
k8sObject.getType(); HashMap containerDataMap = k8sObject.getContainerDataMap(); // check if containerDataMap is not empty - if (!containerDataMap.isEmpty()) { + if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { // Iterate over containers for (Map.Entry entry : containerDataMap.entrySet()) { ContainerData containerData = entry.getValue(); @@ -2170,10 +2170,9 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date } - } else { + } else if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)){ // fetch namespace related metrics if containerDataMap is empty NamespaceData namespaceData = k8sObject.getNamespaceData(); - // determine the max date query for namespace String namespaceMaxDateQuery = null; for (Metric metric: metrics) { diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java index b06ccad7c..667ee304a 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java @@ -61,19 +61,11 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp List kubernetesAPIObjectsList = createExperimentAPIObject.getKubernetesObjects(); for (KubernetesAPIObject kubernetesAPIObject : kubernetesAPIObjectsList) { K8sObject k8sObject = null; - // Verify the experiment type. - // If the experiment type is null, default is container type experiment. - // TODO: Update to make this field mandatory and validate if it is a container type. 
- if (null == kubernetesAPIObject.getExperimentType() || kubernetesAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + // check if exp type is null to support remote monitoring experiments + if (null == createExperimentAPIObject.getExperimentType() || createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { // container recommendations experiment type k8sObject = new K8sObject(kubernetesAPIObject.getName(), kubernetesAPIObject.getType(), kubernetesAPIObject.getNamespace()); - k8sObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); - // check if namespace data is also set for container-type experiments - if (null != kubernetesAPIObject.getNamespaceAPIObjects()) { - throw new Exception(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP); - } else { - k8sObject.setNamespaceData(new NamespaceData()); - } + k8sObject.setNamespaceData(new NamespaceData()); List containerAPIObjects = kubernetesAPIObject.getContainerAPIObjects(); HashMap containerDataHashMap = new HashMap<>(); for (ContainerAPIObject containerAPIObject : containerAPIObjects) { @@ -82,21 +74,16 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp containerDataHashMap.put(containerData.getContainer_name(), containerData); } k8sObject.setContainerDataMap(containerDataHashMap); - } else if (kubernetesAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + } else if (null != createExperimentAPIObject.getExperimentType() && createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { // namespace recommendations experiment type k8sObject = new K8sObject(); k8sObject.setNamespace(kubernetesAPIObject.getNamespaceAPIObjects().getnamespace_name()); - k8sObject.setExperimentType(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT); - if (null != kubernetesAPIObject.getContainerAPIObjects()) { - throw new Exception(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP); - } else { - HashMap containerDataHashMap = new HashMap<>(); - k8sObject.setContainerDataMap(containerDataHashMap); - } + HashMap containerDataHashMap = new HashMap<>(); + k8sObject.setContainerDataMap(containerDataHashMap); NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects(); k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(), new NamespaceRecommendations(), null)); } - LOGGER.info("Experiment Type: " + k8sObject.getExperimentType()); + LOGGER.debug("Experiment Type: " + createExperimentAPIObject.getExperimentType()); k8sObjectList.add(k8sObject); } kruizeObject.setKubernetes_objects(k8sObjectList); @@ -107,6 +94,7 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp kruizeObject.setMode(createExperimentAPIObject.getMode()); kruizeObject.setPerformanceProfile(createExperimentAPIObject.getPerformanceProfile()); kruizeObject.setDataSource(createExperimentAPIObject.getDatasource()); + kruizeObject.setExperimentType(createExperimentAPIObject.getExperimentType()); kruizeObject.setSloInfo(createExperimentAPIObject.getSloInfo()); kruizeObject.setTrial_settings(createExperimentAPIObject.getTrialSettings()); kruizeObject.setRecommendation_settings(createExperimentAPIObject.getRecommendationSettings()); @@ -135,13 
+123,14 @@ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendati listRecommendationsAPIObject.setApiVersion(AnalyzerConstants.VersionConstants.APIVersionConstants.CURRENT_LIST_RECOMMENDATIONS_VERSION); listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName()); listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName()); + listRecommendationsAPIObject.setExperimentType(kruizeObject.getExperimentType()); List kubernetesAPIObjects = new ArrayList<>(); KubernetesAPIObject kubernetesAPIObject; for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); // namespace recommendations experiment type - if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { NamespaceAPIObject namespaceAPIObject; NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java index 6bd68e576..ac8b033d7 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java @@ -47,6 +47,8 @@ public class CreateExperimentAPIObject extends BaseSO { private RecommendationSettings recommendationSettings; @SerializedName(KruizeConstants.JSONKeys.DATASOURCE) //TODO: to be used in future private String datasource; + @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future + private String experiment_type; private AnalyzerConstants.ExperimentStatus status; private String experiment_id; // this id is UUID and getting set at createExperiment API private ValidationOutputData validationData; // This object indicates if this API object is valid or invalid @@ -147,6 +149,14 @@ public void setDatasource(String datasource) { this.datasource = datasource; } + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experiment_type) { + this.experiment_type = experiment_type; + } + @Override public String toString() { return "CreateExperimentAPIObject{" + @@ -159,6 +169,7 @@ public String toString() { ", targetCluster='" + targetCluster + '\'' + ", kubernetesAPIObjects=" + kubernetesAPIObjects.toString() + ", trialSettings=" + trialSettings + + ", experimentType=" + experiment_type + ", recommendationSettings=" + recommendationSettings + '}'; } diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java index e3ab41b62..0a6d52ecf 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java @@ -28,9 +28,6 @@ public class KubernetesAPIObject { private String type; private String name; private String namespace; - // Optional field to determine if the experiment type is 'container' or 'namespace'. - // TODO: Update to make this field mandatory in the future. 
- private String experiment_type; @SerializedName(KruizeConstants.JSONKeys.CONTAINERS) private List containerAPIObjects; @SerializedName(KruizeConstants.JSONKeys.NAMESPACES) @@ -60,10 +57,6 @@ public String getNamespace() { return namespace; } - public String getExperimentType() { - return experiment_type; - } - @JsonProperty(KruizeConstants.JSONKeys.CONTAINERS) public List getContainerAPIObjects() { return containerAPIObjects; @@ -82,10 +75,6 @@ public void setNamespaceAPIObject(NamespaceAPIObject namespaceAPIObject) { this.namespaceAPIObject = namespaceAPIObject; } - public void setExperimentType(String experimentType) { - this.experiment_type = experimentType; - } - @Override public String toString() { return "KubernetesObject{" + diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java index b5b796a95..a171286bc 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java @@ -23,6 +23,8 @@ public class ListRecommendationsAPIObject extends BaseSO{ @SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME) private String clusterName; + @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) + private String experiment_type; @SerializedName(KruizeConstants.JSONKeys.KUBERNETES_OBJECTS) private List kubernetesObjects; @@ -42,4 +44,12 @@ public List getKubernetesObjects() { public void setKubernetesObjects(List kubernetesObjects) { this.kubernetesObjects = kubernetesObjects; } + + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experiment_type) { + this.experiment_type = experiment_type; + } } diff --git a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java index e938b9d06..8195cadcc 100644 --- a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java +++ b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java @@ -16,14 +16,19 @@ package com.autotune.analyzer.services; +import com.autotune.analyzer.exceptions.InvalidExperimentType; import com.autotune.analyzer.exceptions.KruizeResponse; import com.autotune.analyzer.experiment.ExperimentInitiator; import com.autotune.analyzer.kruizeObject.KruizeObject; import com.autotune.analyzer.serviceObjects.Converters; import com.autotune.analyzer.serviceObjects.CreateExperimentAPIObject; +import com.autotune.analyzer.serviceObjects.KubernetesAPIObject; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; import com.autotune.common.data.ValidationOutputData; +import com.autotune.common.data.result.ContainerData; +import com.autotune.common.data.result.NamespaceData; +import com.autotune.common.k8sObjects.K8sObject; import com.autotune.database.dao.ExperimentDAO; import com.autotune.database.dao.ExperimentDAOImpl; import com.autotune.database.service.ExperimentDBService; @@ -42,10 +47,7 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; @@ -98,6 +100,26 @@ protected void doPost(HttpServletRequest request, HttpServletResponse 
response) for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { createExperimentAPIObject.setExperiment_id(Utils.generateID(createExperimentAPIObject.toString())); createExperimentAPIObject.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS); + // updating experiment type to container if not passed + if (null == createExperimentAPIObject.getExperimentType() || createExperimentAPIObject.getExperimentType().isEmpty()) { + createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); + } + // validating the kubernetes objects and experiment type + for (KubernetesAPIObject kubernetesAPIObject: createExperimentAPIObject.getKubernetesObjects()) { + if (createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + // check if namespace data is also set for container-type experiments + if (null != kubernetesAPIObject.getNamespaceAPIObjects()) { + throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP); + } + } else if (createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != kubernetesAPIObject.getContainerAPIObjects()) { + throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP); + } + if (createExperimentAPIObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.REMOTE)) { + throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE); + } + } + } KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject); if (null != kruizeObject) kruizeExpList.add(kruizeObject); @@ -130,6 +152,8 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) e.printStackTrace(); LOGGER.error("Unknown exception caught: " + e.getMessage()); sendErrorResponse(inputData, response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal Server Error: " + e.getMessage()); + } catch (InvalidExperimentType e) { + sendErrorResponse(inputData, response, null, HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); } finally { if (null != timerCreateExp) { MetricsConfig.timerCreateExp = MetricsConfig.timerBCreateExp.tag("status", statusValue).register(MetricsConfig.meterRegistry()); diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java index 3bfaec3ba..07c42f313 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java @@ -165,6 +165,7 @@ public static final class CreateExperimentAPI { public static final String NAMESPACE_AND_CONTAINER_NOT_NULL = "Only one of Namespace or Container information can be specified."; public static final String CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP = "Can not specify container data for namespace experiment"; public static final String NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP = "Can not specify namespace data for container experiment"; + public static final String NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE = "Namespace experiment type is not supported for remote monitoring use case."; private CreateExperimentAPI() { diff --git 
a/src/main/java/com/autotune/common/k8sObjects/K8sObject.java b/src/main/java/com/autotune/common/k8sObjects/K8sObject.java index 5771f80c0..27db1b190 100644 --- a/src/main/java/com/autotune/common/k8sObjects/K8sObject.java +++ b/src/main/java/com/autotune/common/k8sObjects/K8sObject.java @@ -13,7 +13,6 @@ public class K8sObject { private String type; // TODO: Change to ENUM private String name; private String namespace; - private String experiment_type; @SerializedName(KruizeConstants.JSONKeys.CONTAINERS) private HashMap containerDataMap; @SerializedName(KruizeConstants.JSONKeys.NAMESPACES) @@ -31,10 +30,6 @@ public String getType() { return type; } - public String getExperimentType() { - return experiment_type; - } - public void setType(String type) { this.type = type; } @@ -47,10 +42,6 @@ public void setName(String name) { this.name = name; } - public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; - } - public String getNamespace() { return namespace; } diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java index 4a8b96322..b1c1d2c50 100644 --- a/src/main/java/com/autotune/database/helper/DBHelpers.java +++ b/src/main/java/com/autotune/database/helper/DBHelpers.java @@ -300,6 +300,9 @@ public static KruizeExperimentEntry convertCreateAPIObjToExperimentDBObj(CreateE kruizeExperimentEntry.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS); kruizeExperimentEntry.setMeta_data(null); kruizeExperimentEntry.setDatasource(null); + if (apiObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.LOCAL)) { + kruizeExperimentEntry.setExperimentType(apiObject.getExperimentType()); + } ObjectMapper objectMapper = new ObjectMapper(); try { kruizeExperimentEntry.setExtended_data( @@ -377,14 +380,13 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { if (null == k8sObject) continue; - if (null == k8sObject.getContainerDataMap() && k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) + if (null == k8sObject.getContainerDataMap() && (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))) continue; - if (k8sObject.getContainerDataMap().isEmpty() && k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) + if (k8sObject.getContainerDataMap().isEmpty() && (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))) continue; KubernetesAPIObject kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); boolean matchFound = false; - kubernetesAPIObject.setExperimentType(k8sObject.getExperimentType()); - if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { // saving namespace recommendations NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); if (null == clonedNamespaceData) @@ -456,6 +458,7 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K 
listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName()); listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName()); listRecommendationsAPIObject.setKubernetesObjects(kubernetesAPIObjectList); + listRecommendationsAPIObject.setExperimentType(kruizeObject.getExperimentType()); } return listRecommendationsAPIObject; } @@ -482,10 +485,12 @@ public static KruizeRecommendationEntry convertKruizeObjectTORecommendation(Krui kruizeRecommendationEntry.setVersion(KruizeConstants.KRUIZE_RECOMMENDATION_API_VERSION.LATEST.getVersionNumber()); kruizeRecommendationEntry.setExperiment_name(listRecommendationsAPIObject.getExperimentName()); kruizeRecommendationEntry.setCluster_name(listRecommendationsAPIObject.getClusterName()); + kruizeRecommendationEntry.setExperimentType(listRecommendationsAPIObject.getExperimentType()); + Timestamp endInterval = null; // todo : what happens if two k8 objects or Containers with different timestamp for (KubernetesAPIObject k8sObject : listRecommendationsAPIObject.getKubernetesObjects()) { - if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != listRecommendationsAPIObject.getExperimentType() && listRecommendationsAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { endInterval = k8sObject.getNamespaceAPIObjects().getnamespaceRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); } else { for (ContainerAPIObject containerAPIObject : k8sObject.getContainerAPIObjects()) { @@ -528,6 +533,7 @@ public static List convertExperimentEntryToCreateExpe CreateExperimentAPIObject apiObj = new Gson().fromJson(extended_data_rawJson, CreateExperimentAPIObject.class); apiObj.setExperiment_id(entry.getExperiment_id()); apiObj.setStatus(entry.getStatus()); + apiObj.setExperimentType(entry.getExperimentType()); createExperimentAPIObjects.add(apiObj); } catch (Exception e) { LOGGER.error("Error in converting to apiObj from db object due to : {}", e.getMessage()); diff --git a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java index 935254a34..522bcbe3f 100644 --- a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java @@ -55,6 +55,7 @@ public class KruizeExperimentEntry { private String mode; private String target_cluster; private String performance_profile; + private String experiment_type; @Enumerated(EnumType.STRING) private AnalyzerConstants.ExperimentStatus status; @JdbcTypeCode(SqlTypes.JSON) @@ -154,4 +155,13 @@ public JsonNode getDatasource() { public void setDatasource(JsonNode datasource) { this.datasource = datasource; } + + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experiment_type) { + this.experiment_type = experiment_type; + } + } diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java index 9dfd076b1..902cc4b7a 100644 --- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java @@ -27,6 +27,7 @@ public class KruizeRecommendationEntry { private String cluster_name; @JdbcTypeCode(SqlTypes.JSON) private JsonNode extended_data; + private String 
experiment_type; public String getExperiment_name() { return experiment_name; @@ -67,4 +68,12 @@ public String getVersion() { public void setVersion(String version) { this.version = version; } + + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experiment_type) { + this.experiment_type = experiment_type; + } } diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java index 4180fe902..15779cdae 100644 --- a/src/main/java/com/autotune/utils/KruizeConstants.java +++ b/src/main/java/com/autotune/utils/KruizeConstants.java @@ -159,6 +159,7 @@ public static final class JSONKeys { // Metadata Section public static final String EXPERIMENT_ID = "experiment_id"; public static final String EXPERIMENT_NAME = "experiment_name"; + public static final String EXPERIMENT_TYPE = "experiment_type"; // Deployments Section public static final String DEPLOYMENTS = "deployments"; public static final String NAMESPACE = "namespace"; From 3e25c60d5f1336138a6c113c76892e75ac919aee Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Tue, 24 Sep 2024 18:34:03 +0530 Subject: [PATCH 02/17] fixing rm use case Signed-off-by: Shekhar Saxena --- migrations/kruize_experiments_ddl.sql | 4 ++-- migrations/kruize_local_ddl.sql | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/migrations/kruize_experiments_ddl.sql b/migrations/kruize_experiments_ddl.sql index 8d9002b71..88ebec4f2 100644 --- a/migrations/kruize_experiments_ddl.sql +++ b/migrations/kruize_experiments_ddl.sql @@ -1,6 +1,6 @@ -create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), version varchar(255), primary key (experiment_id)); +create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), experiment_type varchar(255) null default null, version varchar(255), primary key (experiment_id)); create table IF NOT EXISTS kruize_performance_profiles (name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); -create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); +create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, experiment_type varchar(255) null default null, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) , duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time, interval_start_time)) PARTITION BY 
RANGE (interval_end_time); alter table if exists kruize_experiments add constraint UK_experiment_name unique (experiment_name); create index IF NOT EXISTS idx_recommendation_experiment_name on kruize_recommendations (experiment_name); diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql index 48781f500..cd6453371 100644 --- a/migrations/kruize_local_ddl.sql +++ b/migrations/kruize_local_ddl.sql @@ -1,5 +1,4 @@ create table IF NOT EXISTS kruize_datasources (version varchar(255), name varchar(255), provider varchar(255), serviceName varchar(255), namespace varchar(255), url varchar(255), authentication jsonb, primary key (name)); create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), datasource_name varchar(255), cluster_name varchar(255), namespace varchar(255), workload_type varchar(255), workload_name varchar(255), container_name varchar(255), container_image_name varchar(255), primary key (id)); -alter table kruize_experiments add column experiment_type varchar(255), add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); +alter table kruize_experiments add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); -alter table kruize_recommendations add column experiment_type varchar(255); From be947040027f9030569ad15c978eaebad23931f2 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 12:46:25 +0530 Subject: [PATCH 03/17] code-refactor Signed-off-by: Shekhar Saxena --- .../analyzer/kruizeObject/KruizeObject.java | 10 +- .../engine/RecommendationEngine.java | 654 ++++++++++-------- .../analyzer/serviceObjects/Converters.java | 53 +- .../ListRecommendationsAPIObject.java | 6 +- .../analyzer/services/CreateExperiment.java | 14 +- .../autotune/database/helper/DBHelpers.java | 14 +- .../database/table/KruizeExperimentEntry.java | 10 +- .../table/KruizeRecommendationEntry.java | 8 +- 8 files changed, 431 insertions(+), 338 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java index 3352e8ed9..bab99c4fc 100644 --- a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java +++ b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java @@ -48,7 +48,7 @@ public final class KruizeObject { @SerializedName("datasource") private String datasource; @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future - private String experiment_type; + private String experimentType; private String namespace; // TODO: Currently adding it at this level with an assumption that there is only one entry in k8s object needs to be changed private String mode; //Todo convert into Enum @SerializedName("target_cluster") @@ -300,11 +300,11 @@ public void setDataSource(String datasource) { } public String getExperimentType() { - return experiment_type; + return experimentType; } - public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; + public void setExperimentType(String experimentType) { + this.experimentType = experimentType; } @@ -320,7 +320,7 @@ public String toString() { ", experimentName='" + experimentName + '\'' + ", clusterName=" + 
tmpClusterName + '\'' + ", datasource=" + datasource + '\'' + - ", experimentType=" + experiment_type + '\'' + + ", experimentType=" + experimentType + '\'' + ", mode='" + mode + '\'' + ", targetCluster='" + targetCluster + '\'' + ", hpoAlgoImpl=" + hpoAlgoImpl + diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index 89fa32d6f..ccb9286b0 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -312,12 +312,13 @@ public KruizeObject prepareRecommendations(int calCount) { public void generateRecommendations(KruizeObject kruizeObject) { for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { - if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + String experimentType = kruizeObject.getExperimentType(); + if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { String namespaceName = k8sObject.getNamespace(); NamespaceData namespaceData = k8sObject.getNamespaceData(); LOGGER.info("Generating recommendations for namespace: {}", namespaceName); generateRecommendationsBasedOnNamespace(namespaceData, kruizeObject); - } else if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)){ + } else if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)){ for (String containerName : k8sObject.getContainerDataMap().keySet()) { ContainerData containerData = k8sObject.getContainerDataMap().get(containerName); @@ -1970,12 +1971,6 @@ private String getResults(Map mainKruizeExperimentMAP, Kru */ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception { try { - long interval_end_time_epoc = 0; - long interval_start_time_epoc = 0; - SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT); - // Create the client - GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo); - String metricProfileName = kruizeObject.getPerformanceProfile(); PerformanceProfile metricProfile = MetricProfileCollection.getInstance().getMetricProfileCollection().get(metricProfileName); if (null == metricProfile) { @@ -1984,212 +1979,255 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T } String maxDateQuery = null; - Listmetrics = metricProfile.getSloInfo().getFunctionVariables(); - for (Metric metric: metrics) { - String name = metric.getName(); - if(name.equals("maxDate")){ - String query = metric.getAggregationFunctionsMap().get("max").getQuery(); - maxDateQuery = query; - break; - } + + if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + maxDateQuery = getMaxDateQueryForContainers(metricProfile); + fetchContainerMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); + } else if (null != kruizeObject.getExperimentType() && 
kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + maxDateQuery = getMaxDateQueryForNamespace(metricProfile); + fetchNamespaceMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); } + } catch (Exception e) { + e.printStackTrace(); + throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage()); + } + } + + /** + * Fetches namespace metrics based on the specified datasource using queries from the metricProfile for the given time interval. + * + * @param kruizeObject KruizeObject + * @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ + * @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ. + * @param dataSourceInfo DataSource object + * @param metricProfile performance profile to be used + * @param maxDateQuery max date query for namespace + * @throws Exception + */ + private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception{ + try { + long interval_end_time_epoc = 0; + long interval_start_time_epoc = 0; + SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT); + // Create the client + GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo); Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble(); List kubernetes_objects = kruizeObject.getKubernetes_objects(); - // Iterate over Kubernetes objects for (K8sObject k8sObject : kubernetes_objects) { String namespace = k8sObject.getNamespace(); - String workload = k8sObject.getName(); - String workload_type = k8sObject.getType(); - HashMap containerDataMap = k8sObject.getContainerDataMap(); - // check if containerDataMap is not empty - if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { - // Iterate over containers - for (Map.Entry entry : containerDataMap.entrySet()) { - ContainerData containerData = entry.getValue(); - String containerName = containerData.getContainer_name(); - if (null == interval_end_time) { - LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO); - String queryToEncode = null; - if (null == maxDateQuery || maxDateQuery.isEmpty()) { - throw new NullPointerException("maxDate query cannot be empty or null"); - } - + // fetch namespace related metrics if containerDataMap is empty + NamespaceData namespaceData = k8sObject.getNamespaceData(); + // determine the max date query for namespace + String namespaceMaxDateQuery = maxDateQuery.replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace); + + if (null == interval_end_time) { + LOGGER.info(KruizeConstants.APIMessages.NAMESPACE_USAGE_INFO); + String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY, + dataSourceInfo.getUrl(), + URLEncoder.encode(namespaceMaxDateQuery, CHARACTER_ENCODING) + ); + LOGGER.info(dateMetricsUrl); + client.setBaseURL(dateMetricsUrl); + JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); + JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), 
JsonObject.class); + JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); + // Process fetched metrics + if (null != resultArray && !resultArray.isEmpty()) { + resultArray = resultArray.get(0) + .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE); + long epochTime = resultArray.get(0).getAsLong(); + String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); + Date date = sdf.parse(timestamp); + Timestamp dateTS = new Timestamp(date.getTime()); + interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC + - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); + int maxDay = Terms.getMaxDays(kruizeObject.getTerms()); + LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay); + Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay)); + interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC + - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); + } + } else { + // Convert timestamps to epoch time + interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC + - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); + interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC + - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); + } - LOGGER.debug("maxDateQuery: {}", maxDateQuery); - queryToEncode = maxDateQuery - .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) - .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName) - .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload) - .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type); + HashMap namespaceDataResults = new HashMap<>(); + IntervalResults namespaceIntervalResults; + HashMap namespaceResMap; + HashMap namespaceResultMap; + MetricResults namespaceMetricResults; + MetricAggregationInfoResults namespaceMetricAggregationInfoResults; + + if (null == namespaceData) { + namespaceData = new NamespaceData(); + namespaceData.setNamespace_name(namespace); + k8sObject.setNamespaceData(namespaceData); + } - String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY, - dataSourceInfo.getUrl(), - URLEncoder.encode(queryToEncode, CHARACTER_ENCODING) - ); - LOGGER.info(dateMetricsUrl); - client.setBaseURL(dateMetricsUrl); - JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); - JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); - JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); - // Process fetched metrics - if (null != resultArray && !resultArray.isEmpty()) { - resultArray = resultArray.get(0) - .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE); - long epochTime = resultArray.get(0).getAsLong(); - String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); - Date date = sdf.parse(timestamp); - Timestamp dateTS = new 
Timestamp(date.getTime()); - interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC - - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - int maxDay = Terms.getMaxDays(kruizeObject.getTerms()); - LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay); - Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay)); - interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC - - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); + List metricList = metricProfile.getSloInfo().getFunctionVariables(); + + // Iterate over metrics and aggregation functions + for (Metric metricEntry : metricList) { + if (metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate")) { + HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); + for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) { + String promQL = aggregationFunctionsEntry.getValue().getQuery(); + String format = null; + + // Determine format based on metric type + List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString()); + List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString()); + if (cpuFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.CORES; + } else if (memFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.BYTES; } - } else { - // Convert timestamps to epoch time - interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC - - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC - - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); - } - HashMap containerDataResults = new HashMap<>(); - IntervalResults intervalResults; - HashMap resMap; - HashMap resultMap; - MetricResults metricResults; - MetricAggregationInfoResults metricAggregationInfoResults; - - List metricList = metricProfile.getSloInfo().getFunctionVariables(); - - // Iterate over metrics and aggregation functions - for (Metric metricEntry : metricList) { - HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); - for (Map.Entry aggregationFunctionsEntry: aggregationFunctions.entrySet()) { - // Determine promQL query on metric type - String promQL = aggregationFunctionsEntry.getValue().getQuery(); - String format = null; - - - // Determine format based on metric type - Todo move this metric profile - List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.cpuUsage.toString(), AnalyzerConstants.MetricName.cpuThrottle.toString(), AnalyzerConstants.MetricName.cpuLimit.toString(), AnalyzerConstants.MetricName.cpuRequest.toString()); - List memFunction = Arrays.asList(AnalyzerConstants.MetricName.memoryLimit.toString(), 
AnalyzerConstants.MetricName.memoryRequest.toString(), AnalyzerConstants.MetricName.memoryRSS.toString(), AnalyzerConstants.MetricName.memoryUsage.toString()); - if (cpuFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.CORES; - } else if (memFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.BYTES; - } - promQL = promQL - .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) - .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName) - .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())) - .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload) - .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type); - - // If promQL is determined, fetch metrics from the datasource - if (promQL != null) { - LOGGER.info(promQL); - String podMetricsUrl; - try { - podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, - dataSourceInfo.getUrl(), - URLEncoder.encode(promQL, CHARACTER_ENCODING), - interval_start_time_epoc, - interval_end_time_epoc, - measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - LOGGER.info(podMetricsUrl); - client.setBaseURL(podMetricsUrl); - JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); - JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); - JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); - // Process fetched metrics - if (null != resultArray && !resultArray.isEmpty()) { - resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( - KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) - .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants - .DataSourceQueryJSONKeys.VALUES); - sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); - - // Iterate over fetched metrics - Timestamp sTime = new Timestamp(interval_start_time_epoc); - for (JsonElement element : resultArray) { - JsonArray valueArray = element.getAsJsonArray(); - long epochTime = valueArray.get(0).getAsLong(); - double value = valueArray.get(1).getAsDouble(); - String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); - Date date = sdf.parse(timestamp); - Timestamp eTime = new Timestamp(date.getTime()); - - // Prepare interval results - if (containerDataResults.containsKey(eTime)) { - intervalResults = containerDataResults.get(eTime); - resMap = intervalResults.getMetricResultsMap(); - } else { - intervalResults = new IntervalResults(); - resMap = new HashMap<>(); - } - AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); - if (resMap.containsKey(metricName)) { - metricResults = resMap.get(metricName); - metricAggregationInfoResults = metricResults.getAggregationInfoResult(); - } else { - metricResults = new MetricResults(); - metricAggregationInfoResults = new MetricAggregationInfoResults(); - } - - Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); - method.invoke(metricAggregationInfoResults, value); - 
metricAggregationInfoResults.setFormat(format); - metricResults.setAggregationInfoResult(metricAggregationInfoResults); - metricResults.setName(metricEntry.getName()); - metricResults.setFormat(format); - resMap.put(metricName, metricResults); - intervalResults.setMetricResultsMap(resMap); - intervalResults.setIntervalStartTime(sTime); //Todo this will change - intervalResults.setIntervalEndTime(eTime); - intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) - / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE - * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); - containerDataResults.put(eTime, intervalResults); - sTime = eTime; + promQL = promQL + .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) + .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())); + + // If promQL is determined, fetch metrics from the datasource + if (promQL != null) { + LOGGER.info(promQL); + String namespaceMetricsUrl; + try { + namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, + dataSourceInfo.getUrl(), + URLEncoder.encode(promQL, CHARACTER_ENCODING), + interval_start_time_epoc, + interval_end_time_epoc, + measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); + client.setBaseURL(namespaceMetricsUrl); + JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); + JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); + JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); + // Process fetched metrics + if (null != resultArray && !resultArray.isEmpty()) { + resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( + KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) + .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants + .DataSourceQueryJSONKeys.VALUES); + sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); + + // Iterate over fetched metrics + Timestamp sTime = new Timestamp(interval_start_time_epoc); + for (JsonElement element : resultArray) { + JsonArray valueArray = element.getAsJsonArray(); + long epochTime = valueArray.get(0).getAsLong(); + double value = valueArray.get(1).getAsDouble(); + String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); + Date date = sdf.parse(timestamp); + Timestamp eTime = new Timestamp(date.getTime()); + + // Prepare interval results + if (namespaceDataResults.containsKey(eTime)) { + namespaceIntervalResults = namespaceDataResults.get(eTime); + namespaceResMap = namespaceIntervalResults.getMetricResultsMap(); + } else { + namespaceIntervalResults = new IntervalResults(); + namespaceResMap = new HashMap<>(); + } + AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); + if (namespaceResMap.containsKey(metricName)) { + namespaceMetricResults = namespaceResMap.get(metricName); + namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult(); + } else { + namespaceMetricResults = new MetricResults(); + namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults(); } + + Method method = 
MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); + method.invoke(namespaceMetricAggregationInfoResults, value); + namespaceMetricAggregationInfoResults.setFormat(format); + namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults); + namespaceMetricResults.setName(metricEntry.getName()); + namespaceMetricResults.setFormat(format); + namespaceResMap.put(metricName, namespaceMetricResults); + namespaceIntervalResults.setMetricResultsMap(namespaceResMap); + namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change + namespaceIntervalResults.setIntervalEndTime(eTime); + namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) + / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE + * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); + namespaceDataResults.put(eTime, namespaceIntervalResults); + sTime = eTime; } - } catch (Exception e) { - throw new RuntimeException(e); } + } catch (Exception e) { + throw new RuntimeException(e); } } } - - containerData.setResults(containerDataResults); - if (!containerDataResults.isEmpty()) - setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date - - } - } else if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)){ - // fetch namespace related metrics if containerDataMap is empty - NamespaceData namespaceData = k8sObject.getNamespaceData(); - // determine the max date query for namespace - String namespaceMaxDateQuery = null; - for (Metric metric: metrics) { - String name = metric.getName(); - if(name.equals("namespaceMaxDate")){ - namespaceMaxDateQuery = metric.getAggregationFunctionsMap().get("max").getQuery(); - break; + namespaceData.setResults(namespaceDataResults); + if (!namespaceDataResults.isEmpty()) { + setInterval_end_time(Collections.max(namespaceDataResults.keySet())); } } + } + } + } catch (Exception e) { + e.printStackTrace(); + throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage()); + } + } + + + /** + * Fetches namespace metrics based on the specified datasource using queries from the metricProfile for the given time interval. + * + * @param kruizeObject KruizeObject + * @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ + * @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ. 
+ * @param dataSourceInfo DataSource object + * @param metricProfile performance profile to be used + * @param maxDateQuery max date query for containers + * @throws Exception + */ + private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception{ + try { + long interval_end_time_epoc = 0; + long interval_start_time_epoc = 0; + SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT); + // Create the client + GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo); - namespaceMaxDateQuery = namespaceMaxDateQuery.replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace); + Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble(); + List kubernetes_objects = kruizeObject.getKubernetes_objects(); + + for (K8sObject k8sObject : kubernetes_objects) { + String namespace = k8sObject.getNamespace(); + String workload = k8sObject.getName(); + String workload_type = k8sObject.getType(); + HashMap containerDataMap = k8sObject.getContainerDataMap(); + for (Map.Entry entry : containerDataMap.entrySet()) { + ContainerData containerData = entry.getValue(); + String containerName = containerData.getContainer_name(); if (null == interval_end_time) { - LOGGER.info(KruizeConstants.APIMessages.NAMESPACE_USAGE_INFO); + LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO); + String queryToEncode = null; + if (null == maxDateQuery || maxDateQuery.isEmpty()) { + throw new NullPointerException("maxDate query cannot be empty or null"); + } + + + LOGGER.debug("maxDateQuery: {}", maxDateQuery); + queryToEncode = maxDateQuery + .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) + .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName) + .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload) + .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type); + String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY, dataSourceInfo.getUrl(), - URLEncoder.encode(namespaceMaxDateQuery, CHARACTER_ENCODING) + URLEncoder.encode(queryToEncode, CHARACTER_ENCODING) ); LOGGER.info(dateMetricsUrl); client.setBaseURL(dateMetricsUrl); @@ -2219,121 +2257,119 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); } - - HashMap namespaceDataResults = new HashMap<>(); - IntervalResults namespaceIntervalResults; - HashMap namespaceResMap; - HashMap namespaceResultMap; - MetricResults namespaceMetricResults; - MetricAggregationInfoResults namespaceMetricAggregationInfoResults; - - if (null == namespaceData) { - namespaceData = new NamespaceData(); - namespaceData.setNamespace_name(namespace); - k8sObject.setNamespaceData(namespaceData); - } + HashMap containerDataResults = new HashMap<>(); + IntervalResults intervalResults; + HashMap resMap; + HashMap resultMap; + MetricResults metricResults; + MetricAggregationInfoResults metricAggregationInfoResults; List metricList = metricProfile.getSloInfo().getFunctionVariables(); // Iterate over metrics and aggregation functions for (Metric metricEntry : metricList) { - if 
(metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate")) { - HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); - for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) { - String promQL = aggregationFunctionsEntry.getValue().getQuery(); - String format = null; - - // Determine format based on metric type - List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString()); - List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString()); - if (cpuFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.CORES; - } else if (memFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.BYTES; - } + HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); + for (Map.Entry aggregationFunctionsEntry: aggregationFunctions.entrySet()) { + // Determine promQL query on metric type + String promQL = aggregationFunctionsEntry.getValue().getQuery(); + String format = null; + + + // Determine format based on metric type - Todo move this metric profile + List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.cpuUsage.toString(), AnalyzerConstants.MetricName.cpuThrottle.toString(), AnalyzerConstants.MetricName.cpuLimit.toString(), AnalyzerConstants.MetricName.cpuRequest.toString()); + List memFunction = Arrays.asList(AnalyzerConstants.MetricName.memoryLimit.toString(), AnalyzerConstants.MetricName.memoryRequest.toString(), AnalyzerConstants.MetricName.memoryRSS.toString(), AnalyzerConstants.MetricName.memoryUsage.toString()); + if (cpuFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.CORES; + } else if (memFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.BYTES; + } + + promQL = promQL + .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) + .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName) + .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())) + .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload) + .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type); - promQL = promQL - .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) - .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())); - - // If promQL is determined, fetch metrics from the datasource - if (promQL != null) { - LOGGER.info(promQL); - String namespaceMetricsUrl; - try { - namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, - dataSourceInfo.getUrl(), - URLEncoder.encode(promQL, CHARACTER_ENCODING), - interval_start_time_epoc, - interval_end_time_epoc, - measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - client.setBaseURL(namespaceMetricsUrl); - JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); - JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), 
JsonObject.class); - JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); - // Process fetched metrics - if (null != resultArray && !resultArray.isEmpty()) { - resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( - KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) - .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants - .DataSourceQueryJSONKeys.VALUES); - sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); - - // Iterate over fetched metrics - Timestamp sTime = new Timestamp(interval_start_time_epoc); - for (JsonElement element : resultArray) { - JsonArray valueArray = element.getAsJsonArray(); - long epochTime = valueArray.get(0).getAsLong(); - double value = valueArray.get(1).getAsDouble(); - String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); - Date date = sdf.parse(timestamp); - Timestamp eTime = new Timestamp(date.getTime()); - - // Prepare interval results - if (namespaceDataResults.containsKey(eTime)) { - namespaceIntervalResults = namespaceDataResults.get(eTime); - namespaceResMap = namespaceIntervalResults.getMetricResultsMap(); - } else { - namespaceIntervalResults = new IntervalResults(); - namespaceResMap = new HashMap<>(); - } - AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); - if (namespaceResMap.containsKey(metricName)) { - namespaceMetricResults = namespaceResMap.get(metricName); - namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult(); - } else { - namespaceMetricResults = new MetricResults(); - namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults(); - } - - Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); - method.invoke(namespaceMetricAggregationInfoResults, value); - namespaceMetricAggregationInfoResults.setFormat(format); - namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults); - namespaceMetricResults.setName(metricEntry.getName()); - namespaceMetricResults.setFormat(format); - namespaceResMap.put(metricName, namespaceMetricResults); - namespaceIntervalResults.setMetricResultsMap(namespaceResMap); - namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change - namespaceIntervalResults.setIntervalEndTime(eTime); - namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) - / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE - * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); - namespaceDataResults.put(eTime, namespaceIntervalResults); - sTime = eTime; + // If promQL is determined, fetch metrics from the datasource + if (promQL != null) { + LOGGER.info(promQL); + String podMetricsUrl; + try { + podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, + dataSourceInfo.getUrl(), + URLEncoder.encode(promQL, CHARACTER_ENCODING), + interval_start_time_epoc, + interval_end_time_epoc, + measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); + LOGGER.info(podMetricsUrl); + client.setBaseURL(podMetricsUrl); + JSONObject genericJsonObject = 
client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); + JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); + JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); + // Process fetched metrics + if (null != resultArray && !resultArray.isEmpty()) { + resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( + KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) + .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants + .DataSourceQueryJSONKeys.VALUES); + sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); + + // Iterate over fetched metrics + Timestamp sTime = new Timestamp(interval_start_time_epoc); + for (JsonElement element : resultArray) { + JsonArray valueArray = element.getAsJsonArray(); + long epochTime = valueArray.get(0).getAsLong(); + double value = valueArray.get(1).getAsDouble(); + String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); + Date date = sdf.parse(timestamp); + Timestamp eTime = new Timestamp(date.getTime()); + + // Prepare interval results + if (containerDataResults.containsKey(eTime)) { + intervalResults = containerDataResults.get(eTime); + resMap = intervalResults.getMetricResultsMap(); + } else { + intervalResults = new IntervalResults(); + resMap = new HashMap<>(); + } + AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); + if (resMap.containsKey(metricName)) { + metricResults = resMap.get(metricName); + metricAggregationInfoResults = metricResults.getAggregationInfoResult(); + } else { + metricResults = new MetricResults(); + metricAggregationInfoResults = new MetricAggregationInfoResults(); } + + Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); + method.invoke(metricAggregationInfoResults, value); + metricAggregationInfoResults.setFormat(format); + metricResults.setAggregationInfoResult(metricAggregationInfoResults); + metricResults.setName(metricEntry.getName()); + metricResults.setFormat(format); + resMap.put(metricName, metricResults); + intervalResults.setMetricResultsMap(resMap); + intervalResults.setIntervalStartTime(sTime); //Todo this will change + intervalResults.setIntervalEndTime(eTime); + intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) + / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE + * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); + containerDataResults.put(eTime, intervalResults); + sTime = eTime; } - } catch (Exception e) { - throw new RuntimeException(e); } + } catch (Exception e) { + throw new RuntimeException(e); } } - namespaceData.setResults(namespaceDataResults); - if (!namespaceDataResults.isEmpty()) { - setInterval_end_time(Collections.max(namespaceDataResults.keySet())); - } } } + + containerData.setResults(containerDataResults); + if (!containerDataResults.isEmpty()) + setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date + } } } catch (Exception e) { @@ -2341,4 +2377,38 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject 
kruizeObject, T throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage()); } } + + /** + * Fetches max date query for containers from performance profile + * @param metricProfile performance profile to be used + */ + private String getMaxDateQueryForContainers (PerformanceProfile metricProfile) { + List metrics = metricProfile.getSloInfo().getFunctionVariables(); + String query = null; + for (Metric metric: metrics) { + String name = metric.getName(); + if(name.equals("maxDate")){ + query = metric.getAggregationFunctionsMap().get("max").getQuery(); + break; + } + } + return query; + } + + /** + * Fetches max date query for namespace from performance profile + * @param metricProfile performance profile to be used + */ + private String getMaxDateQueryForNamespace (PerformanceProfile metricProfile) { + List metrics = metricProfile.getSloInfo().getFunctionVariables(); + String query = null; + for (Metric metric: metrics) { + String name = metric.getName(); + if(name.equals("namespaceMaxDate")){ + query = metric.getAggregationFunctionsMap().get("max").getQuery(); + break; + } + } + return query; + } } diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java index 667ee304a..d8508d2e4 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java @@ -61,27 +61,14 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp List kubernetesAPIObjectsList = createExperimentAPIObject.getKubernetesObjects(); for (KubernetesAPIObject kubernetesAPIObject : kubernetesAPIObjectsList) { K8sObject k8sObject = null; + String experimentType = createExperimentAPIObject.getExperimentType(); // check if exp type is null to support remote monitoring experiments - if (null == createExperimentAPIObject.getExperimentType() || createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)) { // container recommendations experiment type - k8sObject = new K8sObject(kubernetesAPIObject.getName(), kubernetesAPIObject.getType(), kubernetesAPIObject.getNamespace()); - k8sObject.setNamespaceData(new NamespaceData()); - List containerAPIObjects = kubernetesAPIObject.getContainerAPIObjects(); - HashMap containerDataHashMap = new HashMap<>(); - for (ContainerAPIObject containerAPIObject : containerAPIObjects) { - ContainerData containerData = new ContainerData(containerAPIObject.getContainer_name(), - containerAPIObject.getContainer_image_name(), new ContainerRecommendations(), null); - containerDataHashMap.put(containerData.getContainer_name(), containerData); - } - k8sObject.setContainerDataMap(containerDataHashMap); - } else if (null != createExperimentAPIObject.getExperimentType() && createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + k8sObject = createContainerExperiment(kubernetesAPIObject); + } else if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { // namespace recommendations experiment type - k8sObject = new K8sObject(); - k8sObject.setNamespace(kubernetesAPIObject.getNamespaceAPIObjects().getnamespace_name()); - HashMap containerDataHashMap = 
new HashMap<>(); - k8sObject.setContainerDataMap(containerDataHashMap); - NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects(); - k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(), new NamespaceRecommendations(), null)); + k8sObject = createNamespaceExperiment(kubernetesAPIObject); } LOGGER.debug("Experiment Type: " + createExperimentAPIObject.getExperimentType()); k8sObjectList.add(k8sObject); @@ -113,6 +100,33 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp return kruizeObject; } + // Generates K8sObject for container type experiments from KubernetesAPIObject + public static K8sObject createContainerExperiment(KubernetesAPIObject kubernetesAPIObject) { + K8sObject k8sObject = new K8sObject(kubernetesAPIObject.getName(), kubernetesAPIObject.getType(), kubernetesAPIObject.getNamespace()); + k8sObject.setNamespaceData(new NamespaceData()); + List containerAPIObjects = kubernetesAPIObject.getContainerAPIObjects(); + HashMap containerDataHashMap = new HashMap<>(); + for (ContainerAPIObject containerAPIObject : containerAPIObjects) { + ContainerData containerData = new ContainerData(containerAPIObject.getContainer_name(), + containerAPIObject.getContainer_image_name(), new ContainerRecommendations(), null); + containerDataHashMap.put(containerData.getContainer_name(), containerData); + } + k8sObject.setContainerDataMap(containerDataHashMap); + return k8sObject; + } + + // Generates K8sObject for namespace type experiments from KubernetesAPIObject + public static K8sObject createNamespaceExperiment(KubernetesAPIObject kubernetesAPIObject) { + K8sObject k8sObject = new K8sObject(); + k8sObject.setNamespace(kubernetesAPIObject.getNamespaceAPIObjects().getnamespace_name()); + HashMap containerDataHashMap = new HashMap<>(); + k8sObject.setContainerDataMap(containerDataHashMap); + NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects(); + k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(), new NamespaceRecommendations(), null)); + return k8sObject; + } + + public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendationSO( KruizeObject kruizeObject, boolean getLatest, @@ -130,7 +144,8 @@ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendati for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); // namespace recommendations experiment type - if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + String experimentType = kruizeObject.getExperimentType(); + if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { NamespaceAPIObject namespaceAPIObject; NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java index a171286bc..eb859c192 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java @@ -24,7 +24,7 @@ public class ListRecommendationsAPIObject 
extends BaseSO{ @SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME) private String clusterName; @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) - private String experiment_type; + private String experimentType; @SerializedName(KruizeConstants.JSONKeys.KUBERNETES_OBJECTS) private List kubernetesObjects; @@ -46,10 +46,10 @@ public void setKubernetesObjects(List kubernetesObjects) { } public String getExperimentType() { - return experiment_type; + return experimentType; } public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; + this.experimentType = experiment_type; } } diff --git a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java index 8195cadcc..ba1449008 100644 --- a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java +++ b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java @@ -98,24 +98,30 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) } else { List kruizeExpList = new ArrayList<>(); for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { + String experimentType = createExperimentAPIObject.getExperimentType(); createExperimentAPIObject.setExperiment_id(Utils.generateID(createExperimentAPIObject.toString())); createExperimentAPIObject.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS); + boolean isContainerExperiment = true; + boolean isNamespaceExperiment = false; // updating experiment type to container if not passed - if (null == createExperimentAPIObject.getExperimentType() || createExperimentAPIObject.getExperimentType().isEmpty()) { + if (null == experimentType || experimentType.isEmpty()) { createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); + } else if (AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { + isNamespaceExperiment = true; + isContainerExperiment = false; } // validating the kubernetes objects and experiment type for (KubernetesAPIObject kubernetesAPIObject: createExperimentAPIObject.getKubernetesObjects()) { - if (createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + if (isContainerExperiment) { // check if namespace data is also set for container-type experiments if (null != kubernetesAPIObject.getNamespaceAPIObjects()) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP); } - } else if (createExperimentAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + } else if (isNamespaceExperiment) { if (null != kubernetesAPIObject.getContainerAPIObjects()) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP); } - if (createExperimentAPIObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.REMOTE)) { + if (AnalyzerConstants.REMOTE.equalsIgnoreCase(createExperimentAPIObject.getTargetCluster())) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE); } } diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java index b1c1d2c50..5e0ba9e68 100644 --- a/src/main/java/com/autotune/database/helper/DBHelpers.java +++ 
b/src/main/java/com/autotune/database/helper/DBHelpers.java @@ -300,9 +300,8 @@ public static KruizeExperimentEntry convertCreateAPIObjToExperimentDBObj(CreateE kruizeExperimentEntry.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS); kruizeExperimentEntry.setMeta_data(null); kruizeExperimentEntry.setDatasource(null); - if (apiObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.LOCAL)) { - kruizeExperimentEntry.setExperimentType(apiObject.getExperimentType()); - } + kruizeExperimentEntry.setExperimentType(apiObject.getExperimentType()); + ObjectMapper objectMapper = new ObjectMapper(); try { kruizeExperimentEntry.setExtended_data( @@ -377,16 +376,17 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K if (kruizeObject.getKubernetes_objects().isEmpty()) return null; List kubernetesAPIObjectList = new ArrayList<>(); + String experimentType = kruizeObject.getExperimentType(); for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { if (null == k8sObject) continue; - if (null == k8sObject.getContainerDataMap() && (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))) + if (null == k8sObject.getContainerDataMap() && (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType))) continue; - if (k8sObject.getContainerDataMap().isEmpty() && (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))) + if (k8sObject.getContainerDataMap().isEmpty() && (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType))) continue; KubernetesAPIObject kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); boolean matchFound = false; - if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { // saving namespace recommendations NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); if (null == clonedNamespaceData) @@ -490,7 +490,7 @@ public static KruizeRecommendationEntry convertKruizeObjectTORecommendation(Krui Timestamp endInterval = null; // todo : what happens if two k8 objects or Containers with different timestamp for (KubernetesAPIObject k8sObject : listRecommendationsAPIObject.getKubernetesObjects()) { - if (null != listRecommendationsAPIObject.getExperimentType() && listRecommendationsAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + if (null != listRecommendationsAPIObject.getExperimentType() && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(listRecommendationsAPIObject.getExperimentType())) { endInterval = k8sObject.getNamespaceAPIObjects().getnamespaceRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); } else { for (ContainerAPIObject containerAPIObject : k8sObject.getContainerAPIObjects()) { diff --git a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java index 522bcbe3f..5929b2ea3 100644 --- 
a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java @@ -17,7 +17,9 @@ import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.database.helper.GenerateExperimentID; +import com.autotune.utils.KruizeConstants; import com.fasterxml.jackson.databind.JsonNode; +import com.google.gson.annotations.SerializedName; import jakarta.persistence.*; import org.hibernate.annotations.JdbcTypeCode; import org.hibernate.type.SqlTypes; @@ -55,7 +57,7 @@ public class KruizeExperimentEntry { private String mode; private String target_cluster; private String performance_profile; - private String experiment_type; + private String experimentType; @Enumerated(EnumType.STRING) private AnalyzerConstants.ExperimentStatus status; @JdbcTypeCode(SqlTypes.JSON) @@ -157,11 +159,11 @@ public void setDatasource(JsonNode datasource) { } public String getExperimentType() { - return experiment_type; + return experimentType; } - public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; + public void setExperimentType(String experimentType) { + this.experimentType = experimentType; } } diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java index 902cc4b7a..1d11f00db 100644 --- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java @@ -27,7 +27,7 @@ public class KruizeRecommendationEntry { private String cluster_name; @JdbcTypeCode(SqlTypes.JSON) private JsonNode extended_data; - private String experiment_type; + private String experimentType; public String getExperiment_name() { return experiment_name; @@ -70,10 +70,10 @@ public void setVersion(String version) { } public String getExperimentType() { - return experiment_type; + return experimentType; } - public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; + public void setExperimentType(String experimentType) { + this.experimentType = experimentType; } } From 2ad15a333c6845873ff332993ace9d7f563aeb7f Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 12:56:13 +0530 Subject: [PATCH 04/17] code-refactoring Signed-off-by: Shekhar Saxena --- .../recommendations/engine/RecommendationEngine.java | 6 +++--- .../com/autotune/database/table/KruizeExperimentEntry.java | 6 +++--- .../autotune/database/table/KruizeRecommendationEntry.java | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index ccb9286b0..19611d78c 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -1979,11 +1979,11 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T } String maxDateQuery = null; - - if (null == kruizeObject.getExperimentType() || kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) { + String experimentType = kruizeObject.getExperimentType(); + if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)) { maxDateQuery = 
getMaxDateQueryForContainers(metricProfile); fetchContainerMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); - } else if (null != kruizeObject.getExperimentType() && kruizeObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) { + } else if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { maxDateQuery = getMaxDateQueryForNamespace(metricProfile); fetchNamespaceMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); } diff --git a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java index 5929b2ea3..49fbe37cb 100644 --- a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java @@ -57,7 +57,7 @@ public class KruizeExperimentEntry { private String mode; private String target_cluster; private String performance_profile; - private String experimentType; + private String experiment_type; @Enumerated(EnumType.STRING) private AnalyzerConstants.ExperimentStatus status; @JdbcTypeCode(SqlTypes.JSON) @@ -159,11 +159,11 @@ public void setDatasource(JsonNode datasource) { } public String getExperimentType() { - return experimentType; + return experiment_type; } public void setExperimentType(String experimentType) { - this.experimentType = experimentType; + this.experiment_type = experimentType; } } diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java index 1d11f00db..513f4b216 100644 --- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java @@ -27,7 +27,7 @@ public class KruizeRecommendationEntry { private String cluster_name; @JdbcTypeCode(SqlTypes.JSON) private JsonNode extended_data; - private String experimentType; + private String experiment_type; public String getExperiment_name() { return experiment_name; @@ -70,10 +70,10 @@ public void setVersion(String version) { } public String getExperimentType() { - return experimentType; + return experiment_type; } public void setExperimentType(String experimentType) { - this.experimentType = experimentType; + this.experiment_type = experimentType; } } From f00f178ad2c6f497b4c35462fba07d8eedf39aec Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 14:00:46 +0530 Subject: [PATCH 05/17] adding docs for createExp API Signed-off-by: Shekhar Saxena --- design/KruizeLocalAPI.md | 88 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 87 insertions(+), 1 deletion(-) diff --git a/design/KruizeLocalAPI.md b/design/KruizeLocalAPI.md index 69ceb02b3..1896d1c41 100644 --- a/design/KruizeLocalAPI.md +++ b/design/KruizeLocalAPI.md @@ -2155,6 +2155,88 @@ see [Create Experiment](/design/CreateExperiment.md) +**Request with `experiment_type` field** + +The `experiment_type` field in the JSON is optional and can be used to +indicate whether the experiment is of type `namespace` or `container`. +If no experiment type is specified, it will default to `container`. + +
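This default is applied on the server side when the experiment is created. A condensed sketch of that behaviour, simplified from the CreateExperiment changes earlier in this series (not the exact source), is shown here; full example requests follow.

```java
// Condensed sketch of the create-time defaulting added in this series (simplified).
String experimentType = createExperimentAPIObject.getExperimentType();
if (null == experimentType || experimentType.isEmpty()) {
    // No experiment_type in the request JSON: treat it as a container experiment.
    createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT);
}
```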
+ Example Request with `experiment_type` - `namespace` + The `experiment_type` field in the JSON is optional and can be used to +indicate whether the experiment is of type `namespace` or `container`. +If no experiment type is specified, it will default to `container`. + +### EXAMPLE REQUEST +```json +[{ + "version": "v2.0", + "experiment_name": "default|namespace-demo", + "cluster_name": "default", + "performance_profile": "resource-optimization-local-monitoring", + "mode": "monitor", + "target_cluster": "local", + "datasource": "prometheus-1", + "experiment_type": "namespace", + "kubernetes_objects": [ + { + "namespaces": { + "namespace_name": "test-multiple-import" + } + } + ], + "trial_settings": { + "measurement_duration": "15min" + }, + "recommendation_settings": { + "threshold": "0.1" + } +}] +``` +
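A namespace-type request such as the one above carries no `containers` list; internally it is converted into a `K8sObject` that holds only namespace-level data. A condensed sketch, mirroring the `createNamespaceExperiment` helper added to `Converters` in this series (generic type parameters are assumed):

```java
// Condensed sketch of the namespace conversion added in this series (Converters).
public static K8sObject createNamespaceExperiment(KubernetesAPIObject kubernetesAPIObject) {
    K8sObject k8sObject = new K8sObject();
    NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects();
    k8sObject.setNamespace(namespaceAPIObject.getnamespace_name());
    // Namespace experiments carry no container entries, only namespace-level data.
    k8sObject.setContainerDataMap(new HashMap<>());
    k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(),
            new NamespaceRecommendations(), null));
    return k8sObject;
}
```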
+ +
+ Example Request with `experiment_type` - `container` + +### EXAMPLE REQUEST +```json +[ + { + "version": "v2.0", + "experiment_name": "default|default|deployment|tfb-qrh-deployment", + "cluster_name": "default", + "performance_profile": "resource-optimization-openshift", + "mode": "monitor", + "target_cluster": "local", + "experiment_type": "container", + "kubernetes_objects": [ + { + "type": "deployment", + "name": "tfb-qrh-deployment", + "namespace": "default", + "containers": [ + { + "container_image_name": "kruize/tfb-db:1.15", + "container_name": "tfb-server-0" + }, + { + "container_image_name": "kruize/tfb-qrh:1.13.2.F_et17", + "container_name": "tfb-server-1" + } + ] + } + ], + "trial_settings": { + "measurement_duration": "15min" + }, + "recommendation_settings": { + "threshold": "0.1" + }, + "datasource": "prometheus-1" + } +] +``` +
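On the server side the experiment type is validated against the `kubernetes_objects` in the request: container experiments must not include `namespaces`, and namespace experiments must not include `containers` and are rejected for remote target clusters. A condensed sketch of those checks, simplified from the CreateExperiment changes in this series (not the exact source):

```java
// Condensed sketch of the experiment-type validation added in this series (simplified).
static void validateExperimentType(CreateExperimentAPIObject apiObject) throws InvalidExperimentType {
    boolean isNamespaceExperiment = AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT
            .equalsIgnoreCase(apiObject.getExperimentType());
    for (KubernetesAPIObject kubernetesAPIObject : apiObject.getKubernetesObjects()) {
        if (!isNamespaceExperiment && null != kubernetesAPIObject.getNamespaceAPIObjects()) {
            // Container experiments must not carry namespace data.
            throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP);
        }
        if (isNamespaceExperiment) {
            if (null != kubernetesAPIObject.getContainerAPIObjects()) {
                // Namespace experiments must not carry container data.
                throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP);
            }
            if (AnalyzerConstants.REMOTE.equalsIgnoreCase(apiObject.getTargetCluster())) {
                // Namespace experiments are only supported for local monitoring.
                throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE);
            }
        }
    }
}
```

Requests that violate these checks fail with an `InvalidExperimentType` error instead of registering the experiment.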
**Response** @@ -2172,6 +2254,8 @@ see [Create Experiment](/design/CreateExperiment.md) } ``` + + @@ -2210,6 +2294,7 @@ Returns the latest recommendations of all the experiments "experiment_name": "default|default_0|deployment|tfb-qrh-deployment_0", "cluster_name": "default", "datasource": "prometheus-1", + "experiment_type": "container", "mode": "monitor", "target_cluster": "local", "status": "IN_PROGRESS", @@ -2380,6 +2465,7 @@ Returns the latest recommendations of all the experiments "experiment_name": "default|default_1|deployment|tfb-qrh-deployment_1", "cluster_name": "default", "datasource": "prometheus-1", + "experiment_type": "container", "mode": "monitor", "target_cluster": "local", "status": "IN_PROGRESS", @@ -2439,7 +2525,7 @@ Returns the latest recommendations of all the experiments } } ] - } + }, ] ``` From f7d88752e093b282d1d500247d3ab3788774162e Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 16:39:49 +0530 Subject: [PATCH 06/17] adding sample response for namespace exp Signed-off-by: Shekhar Saxena --- design/KruizeLocalAPI.md | 202 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 202 insertions(+) diff --git a/design/KruizeLocalAPI.md b/design/KruizeLocalAPI.md index 1896d1c41..ebcc011ae 100644 --- a/design/KruizeLocalAPI.md +++ b/design/KruizeLocalAPI.md @@ -3393,6 +3393,208 @@ When `interval_end_time` is not specified, Kruize will determine the latest time +**Request for `namespace` experiment** + +`POST /generateRecommendations?experiment_name=?` + +example + +`curl --location --request POST 'http://:/generateRecommendations?experiment_name=temp_1'` + +success status code : 201 + +**Response for `namespace` Experiment** + +The response will contain an array of JSON object with the recommendations for the specified experiment. + +When `interval_end_time` is not specified, Kruize will determine the latest timestamp from the specified datasource +(E.g. Prometheus) by checking the latest active container CPU usage. + +
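For a namespace experiment, that latest-timestamp lookup and the metric collection are driven by the namespace-level queries in the metric profile (`namespaceMaxDate` and the `namespace*` function variables) rather than the container-level ones. A condensed sketch of the branching, mirroring the RecommendationEngine changes later in this series (method bodies elided):

```java
// Condensed sketch of how the engine picks the metric path per experiment type.
String maxDateQuery;
if (kruizeObject.isContainerExperiment()) {
    maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.maxDate.name());
    fetchContainerMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time,
            interval_start_time, dataSourceInfo, metricProfile, maxDateQuery);
} else if (kruizeObject.isNamespaceExperiment()) {
    maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.namespaceMaxDate.name());
    fetchNamespaceMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time,
            interval_start_time, dataSourceInfo, metricProfile, maxDateQuery);
}
```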
+Example Response Body + +```json +[ + { + "cluster_name": "test-multiple-import", + "experiment_type": "namespace", + "kubernetes_objects": [ + { + "namespace": "default", + "containers": [], + "namespaces": { + "namespace_name": "default", + "recommendations": { + "version": "1.0", + "notifications": { + "111000": { + "type": "info", + "message": "Recommendations Are Available", + "code": 111000 + } + }, + "data": { + "2024-09-25T09:46:20.000Z": { + "notifications": { + "111101": { + "type": "info", + "message": "Short Term Recommendations Available", + "code": 111101 + } + }, + "monitoring_end_time": "2024-09-25T09:46:20.000Z", + "current": {}, + "recommendation_terms": { + "short_term": { + "duration_in_hours": 24.0, + "notifications": { + "112101": { + "type": "info", + "message": "Cost Recommendations Available", + "code": 112101 + }, + "112102": { + "type": "info", + "message": "Performance Recommendations Available", + "code": 112102 + } + }, + "monitoring_start_time": "2024-09-24T09:46:20.000Z", + "recommendation_engines": { + "cost": { + "pods_count": 2, + "confidence_level": 0.0, + "config": { + "limits": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + }, + "requests": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + } + }, + "variation": { + "limits": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + }, + "requests": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + } + }, + "notifications": {} + }, + "performance": { + "pods_count": 2, + "confidence_level": 0.0, + "config": { + "limits": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + }, + "requests": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + } + }, + "variation": { + "limits": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + }, + "requests": { + "memory": { + "amount": 1.442955264E9, + "format": "bytes" + }, + "cpu": { + "amount": 5.834468490017892, + "format": "cores" + } + } + }, + "notifications": {} + } + } + }, + "medium_term": { + "duration_in_hours": 168.0, + "notifications": { + "120001": { + "type": "info", + "message": "There is not enough data available to generate a recommendation.", + "code": 120001 + } + } + }, + "long_term": { + "duration_in_hours": 360.0, + "notifications": { + "120001": { + "type": "info", + "message": "There is not enough data available to generate a recommendation.", + "code": 120001 + } + } + } + } + } + } + } + } + } + ], + "version": "v2.0", + "experiment_name": "namespace-demo" + } +] +``` + +
**Error Responses** From 20bdb570fc51fa947833746306a8bc44bad6860d Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 16:52:35 +0530 Subject: [PATCH 07/17] updating response Signed-off-by: Shekhar Saxena --- design/KruizeLocalAPI.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/design/KruizeLocalAPI.md b/design/KruizeLocalAPI.md index ebcc011ae..ebc7ceca7 100644 --- a/design/KruizeLocalAPI.md +++ b/design/KruizeLocalAPI.md @@ -2933,6 +2933,7 @@ The response will contain a array of JSON object with the recommendations for th [ { "cluster_name": "default", + "experiment_type": "namespace", "kubernetes_objects": [ { "type": "deployment", @@ -3175,6 +3176,7 @@ When `interval_end_time` is not specified, Kruize will determine the latest time [ { "cluster_name": "default", + "experiment_type": "container", "kubernetes_objects": [ { "type": "deployment", From 768b2d29262b61c4713921148773f9c5f6dc9b48 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 19:54:13 +0530 Subject: [PATCH 08/17] adding typeaware interface and utility class Signed-off-by: Shekhar Saxena --- design/KruizeLocalAPI.md | 2 +- .../analyzer/kruizeObject/KruizeObject.java | 14 +- .../engine/RecommendationEngine.java | 228 ++++++++---------- .../analyzer/serviceObjects/Converters.java | 9 +- .../CreateExperimentAPIObject.java | 25 +- .../ListRecommendationsAPIObject.java | 21 +- .../analyzer/services/CreateExperiment.java | 14 +- .../analyzer/utils/ExperimentTypeAware.java | 28 +++ .../analyzer/utils/ExperimentTypeUtil.java | 30 +++ .../autotune/database/helper/DBHelpers.java | 10 +- 10 files changed, 224 insertions(+), 157 deletions(-) create mode 100644 src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java create mode 100644 src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java diff --git a/design/KruizeLocalAPI.md b/design/KruizeLocalAPI.md index ebc7ceca7..a166da5f7 100644 --- a/design/KruizeLocalAPI.md +++ b/design/KruizeLocalAPI.md @@ -2933,7 +2933,7 @@ The response will contain a array of JSON object with the recommendations for th [ { "cluster_name": "default", - "experiment_type": "namespace", + "experiment_type": "container", "kubernetes_objects": [ { "type": "deployment", diff --git a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java index bab99c4fc..d86d399a0 100644 --- a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java +++ b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java @@ -18,6 +18,8 @@ import com.autotune.analyzer.exceptions.InvalidValueException; import com.autotune.analyzer.recommendations.term.Terms; import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.analyzer.utils.ExperimentTypeAware; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.common.data.ValidationOutputData; import com.autotune.common.k8sObjects.K8sObject; import com.autotune.common.k8sObjects.TrialSettings; @@ -36,7 +38,7 @@ *

* Refer to examples dir for a reference AutotuneObject yaml. */ -public final class KruizeObject { +public final class KruizeObject implements ExperimentTypeAware { @SerializedName("version") private String apiVersion; @@ -299,6 +301,7 @@ public void setDataSource(String datasource) { this.datasource = datasource; } + @Override public String getExperimentType() { return experimentType; } @@ -307,6 +310,15 @@ public void setExperimentType(String experimentType) { this.experimentType = experimentType; } + @Override + public boolean isNamespaceExperiment() { + return ExperimentTypeUtil.isNamespaceExperiment(experimentType); + } + + @Override + public boolean isContainerExperiment() { + return ExperimentTypeUtil.isContainerExperiment(experimentType); + } @Override public String toString() { diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index 19611d78c..7a6f8b374 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -16,6 +16,7 @@ import com.autotune.analyzer.recommendations.utils.RecommendationUtils; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.common.data.ValidationOutputData; import com.autotune.common.data.metrics.AggregationFunctions; import com.autotune.common.data.metrics.Metric; @@ -312,13 +313,12 @@ public KruizeObject prepareRecommendations(int calCount) { public void generateRecommendations(KruizeObject kruizeObject) { for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { - String experimentType = kruizeObject.getExperimentType(); - if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { + if (kruizeObject.isNamespaceExperiment()) { String namespaceName = k8sObject.getNamespace(); NamespaceData namespaceData = k8sObject.getNamespaceData(); LOGGER.info("Generating recommendations for namespace: {}", namespaceName); generateRecommendationsBasedOnNamespace(namespaceData, kruizeObject); - } else if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)){ + } else if (kruizeObject.isContainerExperiment()){ for (String containerName : k8sObject.getContainerDataMap().keySet()) { ContainerData containerData = k8sObject.getContainerDataMap().get(containerName); @@ -1979,12 +1979,11 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T } String maxDateQuery = null; - String experimentType = kruizeObject.getExperimentType(); - if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)) { - maxDateQuery = getMaxDateQueryForContainers(metricProfile); + if (kruizeObject.isContainerExperiment()) { + maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.maxDate.name()); fetchContainerMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); - } else if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { - maxDateQuery = getMaxDateQueryForNamespace(metricProfile); + } else if 
(kruizeObject.isNamespaceExperiment()) { + maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.namespaceMaxDate.name()); fetchNamespaceMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery); } } catch (Exception e) { @@ -1995,7 +1994,6 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T /** * Fetches namespace metrics based on the specified datasource using queries from the metricProfile for the given time interval. - * * @param kruizeObject KruizeObject * @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ * @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ. @@ -2070,105 +2068,105 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz k8sObject.setNamespaceData(namespaceData); } - List metricList = metricProfile.getSloInfo().getFunctionVariables(); + List namespaceMetricList = metricProfile.getSloInfo().getFunctionVariables().stream() + .filter(metricEntry -> metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate")) + .toList(); // Iterate over metrics and aggregation functions - for (Metric metricEntry : metricList) { - if (metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate")) { - HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); - for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) { - String promQL = aggregationFunctionsEntry.getValue().getQuery(); - String format = null; - - // Determine format based on metric type - List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString()); - List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString()); - if (cpuFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.CORES; - } else if (memFunction.contains(metricEntry.getName())) { - format = KruizeConstants.JSONKeys.BYTES; - } - - promQL = promQL - .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) - .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())); - - // If promQL is determined, fetch metrics from the datasource - if (promQL != null) { - LOGGER.info(promQL); - String namespaceMetricsUrl; - try { - namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, - dataSourceInfo.getUrl(), - URLEncoder.encode(promQL, CHARACTER_ENCODING), - interval_start_time_epoc, - interval_end_time_epoc, - measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - client.setBaseURL(namespaceMetricsUrl); - JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); - JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); - JsonArray resultArray = 
jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); - // Process fetched metrics - if (null != resultArray && !resultArray.isEmpty()) { - resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( - KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) - .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants - .DataSourceQueryJSONKeys.VALUES); - sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); - - // Iterate over fetched metrics - Timestamp sTime = new Timestamp(interval_start_time_epoc); - for (JsonElement element : resultArray) { - JsonArray valueArray = element.getAsJsonArray(); - long epochTime = valueArray.get(0).getAsLong(); - double value = valueArray.get(1).getAsDouble(); - String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); - Date date = sdf.parse(timestamp); - Timestamp eTime = new Timestamp(date.getTime()); - - // Prepare interval results - if (namespaceDataResults.containsKey(eTime)) { - namespaceIntervalResults = namespaceDataResults.get(eTime); - namespaceResMap = namespaceIntervalResults.getMetricResultsMap(); - } else { - namespaceIntervalResults = new IntervalResults(); - namespaceResMap = new HashMap<>(); - } - AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); - if (namespaceResMap.containsKey(metricName)) { - namespaceMetricResults = namespaceResMap.get(metricName); - namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult(); - } else { - namespaceMetricResults = new MetricResults(); - namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults(); - } + for (Metric metricEntry : namespaceMetricList) { + HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap(); + for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) { + String promQL = aggregationFunctionsEntry.getValue().getQuery(); + String format = null; + + // Determine format based on metric type + List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString()); + List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString()); + if (cpuFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.CORES; + } else if (memFunction.contains(metricEntry.getName())) { + format = KruizeConstants.JSONKeys.BYTES; + } - Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); - method.invoke(namespaceMetricAggregationInfoResults, value); - namespaceMetricAggregationInfoResults.setFormat(format); - namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults); - namespaceMetricResults.setName(metricEntry.getName()); - namespaceMetricResults.setFormat(format); - namespaceResMap.put(metricName, namespaceMetricResults); - 
namespaceIntervalResults.setMetricResultsMap(namespaceResMap); - namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change - namespaceIntervalResults.setIntervalEndTime(eTime); - namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) - / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE - * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); - namespaceDataResults.put(eTime, namespaceIntervalResults); - sTime = eTime; + promQL = promQL + .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace) + .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue())); + + // If promQL is determined, fetch metrics from the datasource + if (promQL != null) { + LOGGER.info(promQL); + String namespaceMetricsUrl; + try { + namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, + dataSourceInfo.getUrl(), + URLEncoder.encode(promQL, CHARACTER_ENCODING), + interval_start_time_epoc, + interval_end_time_epoc, + measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); + client.setBaseURL(namespaceMetricsUrl); + JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); + JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); + JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); + // Process fetched metrics + if (null != resultArray && !resultArray.isEmpty()) { + resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( + KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) + .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants + .DataSourceQueryJSONKeys.VALUES); + sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); + + // Iterate over fetched metrics + Timestamp sTime = new Timestamp(interval_start_time_epoc); + for (JsonElement element : resultArray) { + JsonArray valueArray = element.getAsJsonArray(); + long epochTime = valueArray.get(0).getAsLong(); + double value = valueArray.get(1).getAsDouble(); + String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); + Date date = sdf.parse(timestamp); + Timestamp eTime = new Timestamp(date.getTime()); + + // Prepare interval results + if (namespaceDataResults.containsKey(eTime)) { + namespaceIntervalResults = namespaceDataResults.get(eTime); + namespaceResMap = namespaceIntervalResults.getMetricResultsMap(); + } else { + namespaceIntervalResults = new IntervalResults(); + namespaceResMap = new HashMap<>(); + } + AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); + if (namespaceResMap.containsKey(metricName)) { + namespaceMetricResults = namespaceResMap.get(metricName); + namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult(); + } else { + namespaceMetricResults = new MetricResults(); + namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults(); } + + Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); + 
method.invoke(namespaceMetricAggregationInfoResults, value); + namespaceMetricAggregationInfoResults.setFormat(format); + namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults); + namespaceMetricResults.setName(metricEntry.getName()); + namespaceMetricResults.setFormat(format); + namespaceResMap.put(metricName, namespaceMetricResults); + namespaceIntervalResults.setMetricResultsMap(namespaceResMap); + namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change + namespaceIntervalResults.setIntervalEndTime(eTime); + namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) + / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE + * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); + namespaceDataResults.put(eTime, namespaceIntervalResults); + sTime = eTime; } - } catch (Exception e) { - throw new RuntimeException(e); } + } catch (Exception e) { + throw new RuntimeException(e); } } - namespaceData.setResults(namespaceDataResults); - if (!namespaceDataResults.isEmpty()) { - setInterval_end_time(Collections.max(namespaceDataResults.keySet())); - } + } + namespaceData.setResults(namespaceDataResults); + if (!namespaceDataResults.isEmpty()) { + setInterval_end_time(Collections.max(namespaceDataResults.keySet())); } } } @@ -2379,36 +2377,18 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz } /** - * Fetches max date query for containers from performance profile - * @param metricProfile performance profile to be used - */ - private String getMaxDateQueryForContainers (PerformanceProfile metricProfile) { - List metrics = metricProfile.getSloInfo().getFunctionVariables(); - String query = null; - for (Metric metric: metrics) { - String name = metric.getName(); - if(name.equals("maxDate")){ - query = metric.getAggregationFunctionsMap().get("max").getQuery(); - break; - } - } - return query; - } - - /** - * Fetches max date query for namespace from performance profile + * Fetches max date query for namespace and containers from performance profile * @param metricProfile performance profile to be used */ - private String getMaxDateQueryForNamespace (PerformanceProfile metricProfile) { + private String getMaxDateQuery(PerformanceProfile metricProfile, String metricName) { List metrics = metricProfile.getSloInfo().getFunctionVariables(); - String query = null; for (Metric metric: metrics) { String name = metric.getName(); - if(name.equals("namespaceMaxDate")){ - query = metric.getAggregationFunctionsMap().get("max").getQuery(); - break; + if(name.equals(metricName)) { + return metric.getAggregationFunctionsMap().get("max").getQuery(); } } - return query; + return null; } } + diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java index d8508d2e4..eab5ec99e 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java @@ -11,6 +11,7 @@ import com.autotune.analyzer.recommendations.objects.MappedRecommendationForTimestamp; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.common.data.ValidationOutputData; import com.autotune.common.data.metrics.AggregationFunctions; import com.autotune.common.data.metrics.Metric; @@ -61,12 +62,11 @@ public static KruizeObject 
convertCreateExperimentAPIObjToKruizeObject(CreateExp List kubernetesAPIObjectsList = createExperimentAPIObject.getKubernetesObjects(); for (KubernetesAPIObject kubernetesAPIObject : kubernetesAPIObjectsList) { K8sObject k8sObject = null; - String experimentType = createExperimentAPIObject.getExperimentType(); // check if exp type is null to support remote monitoring experiments - if (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType)) { + if (createExperimentAPIObject.isContainerExperiment()) { // container recommendations experiment type k8sObject = createContainerExperiment(kubernetesAPIObject); - } else if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { + } else if (createExperimentAPIObject.isNamespaceExperiment()) { // namespace recommendations experiment type k8sObject = createNamespaceExperiment(kubernetesAPIObject); } @@ -144,8 +144,7 @@ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendati for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); // namespace recommendations experiment type - String experimentType = kruizeObject.getExperimentType(); - if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { + if (kruizeObject.isNamespaceExperiment()) { NamespaceAPIObject namespaceAPIObject; NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java index ac8b033d7..6985beff2 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java @@ -18,6 +18,8 @@ import com.autotune.analyzer.kruizeObject.RecommendationSettings; import com.autotune.analyzer.kruizeObject.SloInfo; import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.analyzer.utils.ExperimentTypeAware; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.common.data.ValidationOutputData; import com.autotune.common.k8sObjects.TrialSettings; import com.autotune.utils.KruizeConstants; @@ -28,7 +30,7 @@ /** * Simulating the KruizeObject class for the CreateExperiment API */ -public class CreateExperimentAPIObject extends BaseSO { +public class CreateExperimentAPIObject extends BaseSO implements ExperimentTypeAware { @SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME) private String clusterName; @SerializedName(KruizeConstants.JSONKeys.PERFORMANCE_PROFILE) @@ -48,7 +50,7 @@ public class CreateExperimentAPIObject extends BaseSO { @SerializedName(KruizeConstants.JSONKeys.DATASOURCE) //TODO: to be used in future private String datasource; @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future - private String experiment_type; + private String experimentType; private AnalyzerConstants.ExperimentStatus status; private String experiment_id; // this id is UUID and getting set at createExperiment API private ValidationOutputData validationData; // This object indicates if this API object is valid or invalid @@ -149,12 +151,23 @@ public void setDatasource(String datasource) { 
this.datasource = datasource; } + @Override public String getExperimentType() { - return experiment_type; + return experimentType; + } + + public void setExperimentType(String experimentType) { + this.experimentType = experimentType; } - public void setExperimentType(String experiment_type) { - this.experiment_type = experiment_type; + @Override + public boolean isNamespaceExperiment() { + return ExperimentTypeUtil.isNamespaceExperiment(experimentType); + } + + @Override + public boolean isContainerExperiment() { + return ExperimentTypeUtil.isContainerExperiment(experimentType); } @Override @@ -169,7 +182,7 @@ public String toString() { ", targetCluster='" + targetCluster + '\'' + ", kubernetesAPIObjects=" + kubernetesAPIObjects.toString() + ", trialSettings=" + trialSettings + - ", experimentType=" + experiment_type + + ", experimentType=" + experimentType + ", recommendationSettings=" + recommendationSettings + '}'; } diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java index eb859c192..86d57abfd 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java @@ -15,12 +15,14 @@ *******************************************************************************/ package com.autotune.analyzer.serviceObjects; +import com.autotune.analyzer.utils.ExperimentTypeAware; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.utils.KruizeConstants; import com.google.gson.annotations.SerializedName; import java.util.List; -public class ListRecommendationsAPIObject extends BaseSO{ +public class ListRecommendationsAPIObject extends BaseSO implements ExperimentTypeAware { @SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME) private String clusterName; @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) @@ -45,11 +47,24 @@ public void setKubernetesObjects(List kubernetesObjects) { this.kubernetesObjects = kubernetesObjects; } + @Override public String getExperimentType() { return experimentType; } - public void setExperimentType(String experiment_type) { - this.experimentType = experiment_type; + public void setExperimentType(String experimentType) { + this.experimentType = experimentType; } + + @Override + public boolean isNamespaceExperiment() { + return ExperimentTypeUtil.isNamespaceExperiment(experimentType); + } + + @Override + public boolean isContainerExperiment() { + return ExperimentTypeUtil.isContainerExperiment(experimentType); + } + + } diff --git a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java index ba1449008..6e8c7ecab 100644 --- a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java +++ b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java @@ -98,26 +98,16 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) } else { List kruizeExpList = new ArrayList<>(); for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { - String experimentType = createExperimentAPIObject.getExperimentType(); createExperimentAPIObject.setExperiment_id(Utils.generateID(createExperimentAPIObject.toString())); createExperimentAPIObject.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS); - boolean isContainerExperiment = true; - boolean 
isNamespaceExperiment = false; - // updating experiment type to container if not passed - if (null == experimentType || experimentType.isEmpty()) { - createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); - } else if (AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { - isNamespaceExperiment = true; - isContainerExperiment = false; - } // validating the kubernetes objects and experiment type for (KubernetesAPIObject kubernetesAPIObject: createExperimentAPIObject.getKubernetesObjects()) { - if (isContainerExperiment) { + if (createExperimentAPIObject.isContainerExperiment()) { // check if namespace data is also set for container-type experiments if (null != kubernetesAPIObject.getNamespaceAPIObjects()) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP); } - } else if (isNamespaceExperiment) { + } else if (createExperimentAPIObject.isNamespaceExperiment()) { if (null != kubernetesAPIObject.getContainerAPIObjects()) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP); } diff --git a/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java new file mode 100644 index 000000000..c8fd45dec --- /dev/null +++ b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2024 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ +package com.autotune.analyzer.utils; + +/** + * Interface to be implemented by classes with an experiment type. + */ +public interface ExperimentTypeAware { + // Retrieves the experiment type associated with the implementing class. + String getExperimentType(); + // checks if the experiment type is namespace + boolean isNamespaceExperiment(); + // checks if the experiment type is container + boolean isContainerExperiment(); +} diff --git a/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java new file mode 100644 index 000000000..591ea4d14 --- /dev/null +++ b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java @@ -0,0 +1,30 @@ +/******************************************************************************* + * Copyright (c) 2024 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + *******************************************************************************/ + +package com.autotune.analyzer.utils; + +/** + * This class contains utility functions to determine experiment type + */ +public class ExperimentTypeUtil { + public static boolean isContainerExperiment(String experimentType) { + return experimentType == null || experimentType.equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); + } + + public static boolean isNamespaceExperiment(String experimentType) { + return experimentType != null && experimentType.equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT); + } +} diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java index 5e0ba9e68..104ed091b 100644 --- a/src/main/java/com/autotune/database/helper/DBHelpers.java +++ b/src/main/java/com/autotune/database/helper/DBHelpers.java @@ -26,6 +26,7 @@ import com.autotune.analyzer.serviceObjects.*; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.analyzer.utils.AnalyzerErrorConstants; +import com.autotune.analyzer.utils.ExperimentTypeUtil; import com.autotune.analyzer.utils.GsonUTCDateAdapter; import com.autotune.common.data.dataSourceMetadata.*; import com.autotune.common.data.result.ContainerData; @@ -376,17 +377,16 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K if (kruizeObject.getKubernetes_objects().isEmpty()) return null; List kubernetesAPIObjectList = new ArrayList<>(); - String experimentType = kruizeObject.getExperimentType(); for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { if (null == k8sObject) continue; - if (null == k8sObject.getContainerDataMap() && (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType))) + if (null == k8sObject.getContainerDataMap() && kruizeObject.isContainerExperiment()) continue; - if (k8sObject.getContainerDataMap().isEmpty() && (null == experimentType || AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT.equalsIgnoreCase(experimentType))) + if (k8sObject.getContainerDataMap().isEmpty() && kruizeObject.isContainerExperiment()) continue; KubernetesAPIObject kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace()); boolean matchFound = false; - if (null != experimentType && AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(experimentType)) { + if (kruizeObject.isNamespaceExperiment()) { // saving namespace recommendations NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class); if (null == clonedNamespaceData) @@ -490,7 +490,7 @@ public static KruizeRecommendationEntry convertKruizeObjectTORecommendation(Krui Timestamp endInterval = null; // todo : what happens if two k8 objects or Containers with different timestamp for (KubernetesAPIObject k8sObject : listRecommendationsAPIObject.getKubernetesObjects()) { - if (null != listRecommendationsAPIObject.getExperimentType() && 
AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT.equalsIgnoreCase(listRecommendationsAPIObject.getExperimentType())) { + if (listRecommendationsAPIObject.isNamespaceExperiment()) { endInterval = k8sObject.getNamespaceAPIObjects().getnamespaceRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); } else { for (ContainerAPIObject containerAPIObject : k8sObject.getContainerAPIObjects()) { From cc6cb8628ac62571e5f33466e6dce893683ef13c Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Wed, 25 Sep 2024 20:46:37 +0530 Subject: [PATCH 09/17] adding fetchMetricsError exception for generate recommendations Signed-off-by: Shekhar Saxena --- .../engine/RecommendationEngine.java | 194 +----------------- .../services/GenerateRecommendations.java | 5 + .../services/UpdateRecommendations.java | 3 + .../utils/AnalyzerErrorConstants.java | 8 + .../autotune/utils/GenericRestApiClient.java | 5 +- 5 files changed, 29 insertions(+), 186 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index 7a6f8b374..3b73da8e6 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -1,5 +1,6 @@ package com.autotune.analyzer.recommendations.engine; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.kruizeObject.KruizeObject; import com.autotune.analyzer.kruizeObject.RecommendationSettings; import com.autotune.analyzer.performanceProfiles.MetricProfileCollection; @@ -239,7 +240,7 @@ public String validate_local() { //TODO Instead of relying on the 'lo * @param calCount The count of incoming requests. * @return The KruizeObject containing the prepared recommendations. */ - public KruizeObject prepareRecommendations(int calCount) { + public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsError{ Map mainKruizeExperimentMAP = new ConcurrentHashMap<>(); Map terms = new HashMap<>(); ValidationOutputData validationOutputData; @@ -276,6 +277,8 @@ public KruizeObject prepareRecommendations(int calCount) { if (!errorMsg.isEmpty()) { throw new Exception(errorMsg); } + } catch (FetchMetricsError e) { + throw new FetchMetricsError(e.getMessage()); } catch (Exception e) { LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount)); kruizeObject = new KruizeObject(); @@ -301,7 +304,7 @@ public KruizeObject prepareRecommendations(int calCount) { experimentName, interval_start_time, interval_end_time)); kruizeObject.setValidation_data(new ValidationOutputData(false, e.getMessage(), HttpServletResponse.SC_INTERNAL_SERVER_ERROR)); } - } catch (Exception e) { + } catch (Exception | FetchMetricsError e) { LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.RECOMMENDATION_EXCEPTION, experimentName, interval_end_time, e.getMessage())); LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount)); @@ -1748,7 +1751,7 @@ private ValidationOutputData addRecommendationsToDB(Map ma * @throws Exception if an error occurs during the process of fetching and storing results. 
*/ private String getResults(Map mainKruizeExperimentMAP, KruizeObject kruizeObject, - String experimentName, Timestamp intervalStartTime, String dataSource) throws Exception { + String experimentName, Timestamp intervalStartTime, String dataSource) throws Exception, FetchMetricsError { String errorMsg = ""; mainKruizeExperimentMAP.put(experimentName, kruizeObject); @@ -1780,185 +1783,6 @@ private String getResults(Map mainKruizeExperimentMAP, Kru return errorMsg; } - /** - * Fetches metrics based on the specified datasource for the given time interval. - * - * @param kruizeObject The KruizeObject containing the experiment data. - * @param interval_end_time The end time of the interval for fetching metrics. - * @param interval_start_time The start time of the interval for fetching metrics. - * @param dataSourceInfo The datasource object to fetch metrics from. - * @throws Exception if an error occurs during the fetching process. - * TODO: Need to add right abstractions for this - */ -// public void fetchMetricsBasedOnDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception { -// try { -// long interval_end_time_epoc = 0; -// long interval_start_time_epoc = 0; -// SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT); -// -// // Get MetricsProfile name and list of promQL to fetch -// Map promQls = new HashMap<>(); -// getPromQls(promQls); -// List aggregationMethods = Arrays.asList(KruizeConstants.JSONKeys.SUM, KruizeConstants.JSONKeys.AVG, -// KruizeConstants.JSONKeys.MAX, KruizeConstants.JSONKeys.MIN); -// Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble(); -// List kubernetes_objects = kruizeObject.getKubernetes_objects(); -// -// // Iterate over Kubernetes objects -// for (K8sObject k8sObject : kubernetes_objects) { -// String namespace = k8sObject.getNamespace(); -// HashMap containerDataMap = k8sObject.getContainerDataMap(); -// // Iterate over containers -// for (Map.Entry entry : containerDataMap.entrySet()) { -// ContainerData containerData = entry.getValue(); -// String containerName = containerData.getContainer_name(); -// if (null == interval_end_time) { -// LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO); -// String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY, -// dataSourceInfo.getUrl(), -// URLEncoder.encode(String.format(PromQLDataSourceQueries.MAX_DATE, containerName, namespace), CHARACTER_ENCODING) -// ); -// LOGGER.info(dateMetricsUrl); -// JSONObject genericJsonObject = new GenericRestApiClient(dateMetricsUrl).fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); -// JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); -// JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); -// // Process fetched metrics -// if (null != resultArray && !resultArray.isEmpty()) { -// resultArray = resultArray.get(0) -// .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE); -// long epochTime = resultArray.get(0).getAsLong(); -// String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); -// Date date = sdf.parse(timestamp); -// Timestamp dateTS = new Timestamp(date.getTime()); -// 
interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC -// - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); -// int maxDay = Terms.getMaxDays(kruizeObject.getTerms()); -// LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay); -// Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay)); -// interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC -// - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); -// } -// } else { -// // Convert timestamps to epoch time -// interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC -// - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); -// interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC -// - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); -// } -// HashMap containerDataResults = new HashMap<>(); -// IntervalResults intervalResults; -// HashMap resMap; -// MetricResults metricResults; -// MetricAggregationInfoResults metricAggregationInfoResults; -// // Iterate over metrics and aggregation methods -// for (Map.Entry metricEntry : promQls.entrySet()) { -// for (String methodName : aggregationMethods) { -// String promQL = null; -// String format = null; -// // Determine promQL and format based on metric type -// if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuUsage) { -// String secondMethodName = methodName; -// if (secondMethodName.equals(KruizeConstants.JSONKeys.SUM)) -// secondMethodName = KruizeConstants.JSONKeys.AVG; -// promQL = String.format(metricEntry.getValue(), methodName, secondMethodName, namespace, containerName, measurementDurationMinutesInDouble.intValue()); -// format = KruizeConstants.JSONKeys.CORES; -// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuThrottle) { -// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName, measurementDurationMinutesInDouble.intValue()); -// format = KruizeConstants.JSONKeys.CORES; -// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuLimit || metricEntry.getKey() == AnalyzerConstants.MetricName.cpuRequest) { -// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName); -// format = KruizeConstants.JSONKeys.CORES; -// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.memoryUsage || metricEntry.getKey() == AnalyzerConstants.MetricName.memoryRSS) { -// String secondMethodName = methodName; -// if (secondMethodName.equals(KruizeConstants.JSONKeys.SUM)) -// secondMethodName = KruizeConstants.JSONKeys.AVG; -// promQL = String.format(metricEntry.getValue(), methodName, secondMethodName, namespace, containerName, measurementDurationMinutesInDouble.intValue()); -// format = KruizeConstants.JSONKeys.BYTES; -// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.memoryLimit || metricEntry.getKey() == AnalyzerConstants.MetricName.memoryRequest) { -// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName); -// format = KruizeConstants.JSONKeys.BYTES; -// } -// // If promQL is determined, fetch metrics from the datasource -// if (promQL != null) { -// LOGGER.info(promQL); -// String podMetricsUrl; -// try { -// podMetricsUrl = 
String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY, -// dataSourceInfo.getUrl(), -// URLEncoder.encode(promQL, CHARACTER_ENCODING), -// interval_start_time_epoc, -// interval_end_time_epoc, -// measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); -// LOGGER.info(podMetricsUrl); -// JSONObject genericJsonObject = new GenericRestApiClient(podMetricsUrl).fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); -// JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); -// JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); -// // Process fetched metrics -// if (null != resultArray && !resultArray.isEmpty()) { -// resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray( -// KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0) -// .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants -// .DataSourceQueryJSONKeys.VALUES); -// sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC)); -// -// // Iterate over fetched metrics -// Timestamp sTime = new Timestamp(interval_start_time_epoc); -// for (JsonElement element : resultArray) { -// JsonArray valueArray = element.getAsJsonArray(); -// long epochTime = valueArray.get(0).getAsLong(); -// double value = valueArray.get(1).getAsDouble(); -// String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)); -// Date date = sdf.parse(timestamp); -// Timestamp eTime = new Timestamp(date.getTime()); -// -// // Prepare interval results -// if (containerDataResults.containsKey(eTime)) { -// intervalResults = containerDataResults.get(eTime); -// resMap = intervalResults.getMetricResultsMap(); -// } else { -// intervalResults = new IntervalResults(); -// resMap = new HashMap<>(); -// } -// if (resMap.containsKey(metricEntry.getKey())) { -// metricResults = resMap.get(metricEntry.getKey()); -// metricAggregationInfoResults = metricResults.getAggregationInfoResult(); -// } else { -// metricResults = new MetricResults(); -// metricAggregationInfoResults = new MetricAggregationInfoResults(); -// } -// Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + methodName.substring(0, 1).toUpperCase() + methodName.substring(1), Double.class); -// method.invoke(metricAggregationInfoResults, value); -// metricAggregationInfoResults.setFormat(format); -// metricResults.setAggregationInfoResult(metricAggregationInfoResults); -// metricResults.setName(String.valueOf(metricEntry.getKey())); -// metricResults.setFormat(format); -// resMap.put(metricEntry.getKey(), metricResults); -// intervalResults.setMetricResultsMap(resMap); -// intervalResults.setIntervalStartTime(sTime); //Todo this will change -// intervalResults.setIntervalEndTime(eTime); -// intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) -// / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE -// * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); -// containerDataResults.put(eTime, intervalResults); -// sTime = eTime; -// } -// } -// } catch (Exception e) { -// throw new RuntimeException(e); -// } -// } -// } -// } -// containerData.setResults(containerDataResults); -// if (!containerDataResults.isEmpty()) -// setInterval_end_time(Collections.max(containerDataResults.keySet())); 
//TODO Temp fix invalid date is set if experiment having two container with different last seen date -// } -// } -// } catch (Exception e) { -// e.printStackTrace(); -// throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage()); -// } -// } /** * Fetches metrics based on the specified datasource using queries from the metricProfile for the given time interval. @@ -1969,7 +1793,7 @@ private String getResults(Map mainKruizeExperimentMAP, Kru * @param dataSourceInfo DataSource object * @throws Exception */ - public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception { + public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception, FetchMetricsError { try { String metricProfileName = kruizeObject.getPerformanceProfile(); PerformanceProfile metricProfile = MetricProfileCollection.getInstance().getMetricProfileCollection().get(metricProfileName); @@ -2002,7 +1826,7 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T * @param maxDateQuery max date query for namespace * @throws Exception */ - private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception{ + private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception, FetchMetricsError { try { long interval_end_time_epoc = 0; long interval_start_time_epoc = 0; @@ -2188,7 +2012,7 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz * @param maxDateQuery max date query for containers * @throws Exception */ - private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception{ + private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception, FetchMetricsError { try { long interval_end_time_epoc = 0; long interval_start_time_epoc = 0; diff --git a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java index 43669db12..64d05fe9c 100644 --- a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java +++ b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java @@ -15,12 +15,14 @@ *******************************************************************************/ package com.autotune.analyzer.services; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.kruizeObject.KruizeObject; import com.autotune.analyzer.recommendations.engine.RecommendationEngine; import com.autotune.analyzer.serviceObjects.ContainerAPIObject; import com.autotune.analyzer.serviceObjects.Converters; import 
com.autotune.analyzer.serviceObjects.ListRecommendationsAPIObject; import com.autotune.analyzer.utils.AnalyzerConstants; +import com.autotune.analyzer.utils.AnalyzerErrorConstants; import com.autotune.analyzer.utils.GsonUTCDateAdapter; import com.autotune.common.data.dataSourceQueries.PromQLDataSourceQueries; import com.autotune.common.data.metrics.MetricAggregationInfoResults; @@ -118,6 +120,9 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) LOGGER.error("Validation failed: {}", validationMessage); sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, validationMessage); } + } catch (FetchMetricsError e) { + LOGGER.error(AnalyzerErrorConstants.APIErrors.generateRecommendationsAPI.ERROR_FETCHING_METRICS); + sendErrorResponse(response, new Exception(e), HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); } catch (Exception e) { LOGGER.error("Exception occurred while processing request: " + e.getMessage()); sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage()); diff --git a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java index 6715635cd..903378655 100644 --- a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java +++ b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java @@ -15,6 +15,7 @@ *******************************************************************************/ package com.autotune.analyzer.services; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.kruizeObject.KruizeObject; import com.autotune.analyzer.recommendations.engine.RecommendationEngine; import com.autotune.analyzer.serviceObjects.ContainerAPIObject; @@ -115,6 +116,8 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, validationMessage, experiment_name, intervalEndTimeStr); } + } catch (FetchMetricsError e) { + sendErrorResponse(response, new Exception(e), HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage(), experiment_name, intervalEndTimeStr); } catch (Exception e) { LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount)); e.printStackTrace(); diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java index 07c42f313..a279ea77a 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java @@ -146,6 +146,14 @@ public static final class updateResultsAPI { } + public static final class generateRecommendationsAPI { + public static final String ERROR_FETCHING_METRICS = "Error while fetching metrics."; + + private generateRecommendationsAPI() { + + } + } + public static final class ListRecommendationsAPI { public static final String RECOMMENDATION_DOES_NOT_EXIST_EXCPTN = "Recommendation does not exist"; public static final String RECOMMENDATION_DOES_NOT_EXIST_MSG = "Recommendation for timestamp - \" %s \" does not exist"; diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index 8a6e6ea8a..1e6809683 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ 
b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -15,6 +15,7 @@ *******************************************************************************/ package com.autotune.utils; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.common.auth.AuthenticationStrategy; import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.DataSourceInfo; @@ -75,7 +76,7 @@ public GenericRestApiClient(DataSourceInfo dataSourceInfo) { * @return Json object which contains API response. * @throws IOException */ - public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { System.setProperty("https.protocols", "TLSv1.2"); String jsonOutputInString = ""; SSLContext sslContext = SSLContexts.custom().loadTrustMaterial((chain, authType) -> true).build(); // Trust all certificates @@ -96,6 +97,8 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws LOGGER.info("Executing request: {}", httpRequestBase.getRequestLine()); jsonOutputInString = httpclient.execute(httpRequestBase, new StringResponseHandler()); + } catch (Exception e) { + throw new FetchMetricsError(e.getMessage()); } return new JSONObject(jsonOutputInString); } From f28aae60639ffd9342e66f65d568ea18609696cf Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 00:18:55 +0530 Subject: [PATCH 10/17] adding common function for preapreIntervalResults Signed-off-by: Shekhar Saxena --- .../exceptions/FetchMetricsError.java | 26 ++++ .../engine/RecommendationEngine.java | 131 ++++++++---------- .../datasource/DataSourceOperatorImpl.java | 3 +- .../PrometheusDataOperatorImpl.java | 3 + 4 files changed, 90 insertions(+), 73 deletions(-) create mode 100644 src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java diff --git a/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java b/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java new file mode 100644 index 000000000..bd2c1e30b --- /dev/null +++ b/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2024 Red Hat, IBM Corporation and others. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *******************************************************************************/ +package com.autotune.analyzer.exceptions; + +public class FetchMetricsError extends Throwable +{ + public FetchMetricsError() { + } + + public FetchMetricsError(String message) { + super(message); + } +} diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index 3b73da8e6..aaac83423 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -1880,11 +1880,11 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz } HashMap namespaceDataResults = new HashMap<>(); - IntervalResults namespaceIntervalResults; - HashMap namespaceResMap; - HashMap namespaceResultMap; - MetricResults namespaceMetricResults; - MetricAggregationInfoResults namespaceMetricAggregationInfoResults; + IntervalResults namespaceIntervalResults = null; + HashMap namespaceResMap = null; + HashMap namespaceResultMap = null; + MetricResults namespaceMetricResults = null; + MetricAggregationInfoResults namespaceMetricAggregationInfoResults = null; if (null == namespaceData) { namespaceData = new NamespaceData(); @@ -1950,37 +1950,8 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz Timestamp eTime = new Timestamp(date.getTime()); // Prepare interval results - if (namespaceDataResults.containsKey(eTime)) { - namespaceIntervalResults = namespaceDataResults.get(eTime); - namespaceResMap = namespaceIntervalResults.getMetricResultsMap(); - } else { - namespaceIntervalResults = new IntervalResults(); - namespaceResMap = new HashMap<>(); - } - AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); - if (namespaceResMap.containsKey(metricName)) { - namespaceMetricResults = namespaceResMap.get(metricName); - namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult(); - } else { - namespaceMetricResults = new MetricResults(); - namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults(); - } - - Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); - method.invoke(namespaceMetricAggregationInfoResults, value); - namespaceMetricAggregationInfoResults.setFormat(format); - namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults); - namespaceMetricResults.setName(metricEntry.getName()); - namespaceMetricResults.setFormat(format); - namespaceResMap.put(metricName, namespaceMetricResults); - namespaceIntervalResults.setMetricResultsMap(namespaceResMap); - namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change - namespaceIntervalResults.setIntervalEndTime(eTime); - namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) - / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE - * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); - namespaceDataResults.put(eTime, namespaceIntervalResults); - sTime = eTime; + prepareIntervalResults(namespaceDataResults, namespaceIntervalResults, namespaceResMap, namespaceMetricResults, + namespaceMetricAggregationInfoResults, sTime, eTime, 
metricEntry, aggregationFunctionsEntry, value, format); } } } catch (Exception e) { @@ -2080,11 +2051,11 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); } HashMap containerDataResults = new HashMap<>(); - IntervalResults intervalResults; - HashMap resMap; - HashMap resultMap; - MetricResults metricResults; - MetricAggregationInfoResults metricAggregationInfoResults; + IntervalResults intervalResults = null; + HashMap resMap = null; + HashMap resultMap = null; + MetricResults metricResults = null; + MetricAggregationInfoResults metricAggregationInfoResults = null; List metricList = metricProfile.getSloInfo().getFunctionVariables(); @@ -2148,37 +2119,8 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz Timestamp eTime = new Timestamp(date.getTime()); // Prepare interval results - if (containerDataResults.containsKey(eTime)) { - intervalResults = containerDataResults.get(eTime); - resMap = intervalResults.getMetricResultsMap(); - } else { - intervalResults = new IntervalResults(); - resMap = new HashMap<>(); - } - AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); - if (resMap.containsKey(metricName)) { - metricResults = resMap.get(metricName); - metricAggregationInfoResults = metricResults.getAggregationInfoResult(); - } else { - metricResults = new MetricResults(); - metricAggregationInfoResults = new MetricAggregationInfoResults(); - } - - Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); - method.invoke(metricAggregationInfoResults, value); - metricAggregationInfoResults.setFormat(format); - metricResults.setAggregationInfoResult(metricAggregationInfoResults); - metricResults.setName(metricEntry.getName()); - metricResults.setFormat(format); - resMap.put(metricName, metricResults); - intervalResults.setMetricResultsMap(resMap); - intervalResults.setIntervalStartTime(sTime); //Todo this will change - intervalResults.setIntervalEndTime(eTime); - intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) - / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE - * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); - containerDataResults.put(eTime, intervalResults); - sTime = eTime; + prepareIntervalResults(containerDataResults, intervalResults, resMap, metricResults, + metricAggregationInfoResults, sTime, eTime, metricEntry, aggregationFunctionsEntry, value, format); } } } catch (Exception e) { @@ -2214,5 +2156,50 @@ private String getMaxDateQuery(PerformanceProfile metricProfile, String metricNa } return null; } + + /** + * prepares interval results for namespace and container experiments + */ + private void prepareIntervalResults(Map dataResultsMap, IntervalResults intervalResults, + HashMap resMap, MetricResults metricResults, + MetricAggregationInfoResults metricAggregationInfoResults, Timestamp sTime, Timestamp eTime, Metric metricEntry, + Map.Entry aggregationFunctionsEntry, double value, String format) throws Exception { + try { + if (dataResultsMap.containsKey(eTime)) { + intervalResults = dataResultsMap.get(eTime); + resMap = intervalResults.getMetricResultsMap(); + } else { + intervalResults = new IntervalResults(); + resMap = new HashMap<>(); + } + 
AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName()); + if (resMap.containsKey(metricName)) { + metricResults = resMap.get(metricName); + metricAggregationInfoResults = metricResults.getAggregationInfoResult(); + } else { + metricResults = new MetricResults(); + metricAggregationInfoResults = new MetricAggregationInfoResults(); + } + + Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class); + method.invoke(metricAggregationInfoResults, value); + metricAggregationInfoResults.setFormat(format); + metricResults.setAggregationInfoResult(metricAggregationInfoResults); + metricResults.setName(metricEntry.getName()); + metricResults.setFormat(format); + resMap.put(metricName, metricResults); + intervalResults.setMetricResultsMap(resMap); + intervalResults.setIntervalStartTime(sTime); //Todo this will change + intervalResults.setIntervalEndTime(eTime); + intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime()) + / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE + * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC))); + dataResultsMap.put(eTime, intervalResults); + sTime = eTime; + } catch (Exception e) { + e.printStackTrace(); + throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage()); + } + } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java index 5b10e1184..5404a9d4b 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java @@ -1,5 +1,6 @@ package com.autotune.common.datasource; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.exceptions.MonitoringAgentNotFoundException; import com.autotune.analyzer.exceptions.TooManyRecursiveCallsException; import com.autotune.analyzer.utils.AnalyzerConstants; @@ -176,7 +177,7 @@ public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query } catch (TooManyRecursiveCallsException e) { e.printStackTrace(); } - } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) { + } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException | FetchMetricsError e) { LOGGER.error("Unable to proceed due to invalid connection to URL: "+ queryURL); } return valuesList; diff --git a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java index 82865cc19..418add724 100644 --- a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java @@ -15,6 +15,7 @@ *******************************************************************************/ package com.autotune.common.datasource.prometheus; +import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.common.auth.AuthenticationStrategy; import com.autotune.common.auth.AuthenticationStrategyFactory; @@ -181,6 +182,8 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, 
String query) e.printStackTrace(); } catch (KeyManagementException e) { e.printStackTrace(); + } catch (FetchMetricsError e) { + e.printStackTrace(); } return null; } From 46ae4915f41f2fe2e0709b787a281a608188f83c Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 00:33:24 +0530 Subject: [PATCH 11/17] updating docs Signed-off-by: Shekhar Saxena --- design/KruizeLocalAPI.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/design/KruizeLocalAPI.md b/design/KruizeLocalAPI.md index a166da5f7..32f31eff3 100644 --- a/design/KruizeLocalAPI.md +++ b/design/KruizeLocalAPI.md @@ -2162,7 +2162,7 @@ indicate whether the experiment is of type `namespace` or `container`. If no experiment type is specified, it will default to `container`.

- Example Request with `experiment_type` - `namespace`
+ Example Request with experiment_type - `namespace`
The `experiment_type` field in the JSON is optional and can be used to
indicate whether the experiment is of type `namespace` or `container`.
If no experiment type is specified, it will default to `container`.
@@ -2196,7 +2196,7 @@ If no experiment type is specified, it will default to `container`.
- Example Request with `experiment_type` - `container` + Example Request with experiment_type - `container` ### EXAMPLE REQUEST ```json From 141957461aeb69ddef41897e8277882cd5c079b2 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 13:00:33 +0530 Subject: [PATCH 12/17] adding transient and dynamic query Signed-off-by: Shekhar Saxena --- migrations/kruize_experiments_ddl.sql | 2 +- migrations/kruize_local_ddl.sql | 3 +- .../database/dao/ExperimentDAOImpl.java | 96 +++++++++++++++++++ .../autotune/database/helper/DBConstants.java | 4 + .../database/table/KruizeExperimentEntry.java | 1 + .../table/KruizeRecommendationEntry.java | 1 + 6 files changed, 105 insertions(+), 2 deletions(-) diff --git a/migrations/kruize_experiments_ddl.sql b/migrations/kruize_experiments_ddl.sql index 88ebec4f2..6858aeb36 100644 --- a/migrations/kruize_experiments_ddl.sql +++ b/migrations/kruize_experiments_ddl.sql @@ -1,4 +1,4 @@ -create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), experiment_type varchar(255) null default null, version varchar(255), primary key (experiment_id)); +create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), version varchar(255), primary key (experiment_id)); create table IF NOT EXISTS kruize_performance_profiles (name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, experiment_type varchar(255) null default null, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) , duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time, interval_start_time)) PARTITION BY RANGE (interval_end_time); diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql index cd6453371..4a7aba2d8 100644 --- a/migrations/kruize_local_ddl.sql +++ b/migrations/kruize_local_ddl.sql @@ -1,4 +1,5 @@ create table IF NOT EXISTS kruize_datasources (version varchar(255), name varchar(255), provider varchar(255), serviceName varchar(255), namespace varchar(255), url varchar(255), authentication jsonb, primary key (name)); create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), datasource_name varchar(255), cluster_name varchar(255), namespace varchar(255), workload_type varchar(255), workload_name varchar(255), container_name varchar(255), container_image_name varchar(255), primary key (id)); -alter table kruize_experiments add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); +alter table kruize_experiments add column experiment_type varchar(255), add
column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); +alter table kruize_recommendations add column experiment_type varchar(255); \ No newline at end of file diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index 46f163cd2..f5ecba2ff 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -51,6 +51,8 @@ public ValidationOutputData addExperimentToDB(KruizeExperimentEntry kruizeExperi tx = session.beginTransaction(); session.persist(kruizeExperimentEntry); tx.commit(); + // TODO: remove native sql query and transient + updateExperimentTypeInKruizeExperimentEntry(kruizeExperimentEntry); validationOutputData.setSuccess(true); statusValue = "success"; } catch (HibernateException e) { @@ -322,6 +324,7 @@ public ValidationOutputData addRecommendationToDB(KruizeRecommendationEntry reco tx = session.beginTransaction(); session.persist(recommendationEntry); tx.commit(); + updateExperimentTypeInKruizeRecommendationEntry(recommendationEntry); validationOutputData.setSuccess(true); statusValue = "success"; } else { @@ -616,6 +619,8 @@ public List loadAllExperiments() throws Exception { Timer.Sample timerLoadAllExp = Timer.start(MetricsConfig.meterRegistry()); try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_EXPERIMENTS, KruizeExperimentEntry.class).list(); + // TODO: remove native sql query and transient + getExperimentTypeInKruizeExperimentEntry(entries); statusValue = "success"; } catch (Exception e) { LOGGER.error("Not able to load experiment due to {}", e.getMessage()); @@ -720,6 +725,8 @@ public List loadExperimentByName(String experimentName) t try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_EXPERIMENTS_BY_EXP_NAME, KruizeExperimentEntry.class) .setParameter("experimentName", experimentName).list(); + // TODO: remove native sql query and transient + getExperimentTypeInKruizeExperimentEntry(entries); statusValue = "success"; } catch (Exception e) { LOGGER.error("Not able to load experiment {} due to {}", experimentName, e.getMessage()); @@ -820,6 +827,7 @@ public List loadRecommendationsByExperimentName(Strin try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { recommendationEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME, KruizeRecommendationEntry.class) .setParameter("experimentName", experimentName).list(); + getExperimentTypeInKruizeRecommendationsEntry(recommendationEntries); statusValue = "success"; } catch (Exception e) { LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); @@ -851,6 +859,7 @@ public KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(Stri if (cluster_name != null) kruizeRecommendationEntryQuery.setParameter(CLUSTER_NAME, cluster_name); recommendationEntries = kruizeRecommendationEntryQuery.getSingleResult(); + getExperimentTypeInSingleKruizeRecommendationsEntry(recommendationEntries); statusValue = "success"; } catch 
(NoResultException e) { LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time); @@ -1054,4 +1063,91 @@ public List loadAllDataSources() throws Exception { } return entries; } + + private void getExperimentTypeInKruizeExperimentEntry(List entries) throws Exception { + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + for (KruizeExperimentEntry entry: entries) { + if (isTargetCluserLocal(entry.getTarget_cluster())) { + String sql = DBConstants.SQLQUERY.SELECT_EXPERIMENT_EXP_TYPE; + Query query = session.createNativeQuery(sql); + query.setParameter("id", entry.getExperiment_id()); + List experimentType = query.getResultList(); + if (null != experimentType && !experimentType.isEmpty()) { + entry.setExperimentType(experimentType.get(0)); + } + } + } + } catch (Exception e) { + LOGGER.error("Not able to get experiment type from experiment entry due to {}", e.getMessage()); + throw new Exception("Error while loading experiment type from database due to : " + e.getMessage()); + } + } + + private void updateExperimentTypeInKruizeExperimentEntry(KruizeExperimentEntry kruizeExperimentEntry) throws Exception { + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + if (isTargetCluserLocal(kruizeExperimentEntry.getTarget_cluster())) { + String sql = DBConstants.SQLQUERY.UPDATE_EXPERIMENT_EXP_TYPE; + Query query = session.createNativeQuery(sql); + query.setParameter("experiment_type", kruizeExperimentEntry.getExperimentType()); + query.setParameter("experiment_name", kruizeExperimentEntry.getExperiment_name()); + query.executeUpdate(); + } + } catch (Exception e) { + LOGGER.error("Not able to update experiment type in experiment entry due to {}", e.getMessage()); + throw new Exception("Error while updating experiment type to database due to : " + e.getMessage()); + } + } + + private void getExperimentTypeInKruizeRecommendationsEntry(List entries) throws Exception { + for (KruizeRecommendationEntry recomEntry: entries) { + getExperimentTypeInSingleKruizeRecommendationsEntry(recomEntry); + } + } + + private void getExperimentTypeInSingleKruizeRecommendationsEntry(KruizeRecommendationEntry recomEntry) throws Exception { + List expEntries = loadExperimentByName(recomEntry.getExperiment_name()); + if (null != expEntries && !expEntries.isEmpty()) { + if (isTargetCluserLocal(expEntries.get(0).getTarget_cluster())) { + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + String sql = DBConstants.SQLQUERY.SELECT_RECOMMENDATIONS_EXP_TYPE; + Query query = session.createNativeQuery(sql); + query.setParameter("experiment_type", recomEntry.getExperimentType()); + query.setParameter("experiment_name", recomEntry.getExperiment_name()); + List exType = query.getResultList(); + if (null != exType && !exType.isEmpty()) { + recomEntry.setExperimentType(exType.get(0)); + } + } catch (Exception e) { + LOGGER.error("Not able to get experiment type in recommendation entry due to {}", e.getMessage()); + throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage()); + } + } + } + } + + private void updateExperimentTypeInKruizeRecommendationEntry(KruizeRecommendationEntry recommendationEntry) throws Exception { + List entries = loadExperimentByName(recommendationEntry.getExperiment_name()); + if (null != entries && !entries.isEmpty()) { + if (isTargetCluserLocal(entries.get(0).getTarget_cluster())) { + try (Session session = 
KruizeHibernateUtil.getSessionFactory().openSession()) { + String sql = DBConstants.SQLQUERY.UPDATE_RECOMMENDATIONS_EXP_TYPE; + Query query = session.createNativeQuery(sql); + query.setParameter("experiment_type", recommendationEntry.getExperimentType()); + query.setParameter("experiment_name", recommendationEntry.getExperiment_name()); + query.setParameter("interval_end_time", recommendationEntry.getInterval_end_time()); + query.executeUpdate(); + } catch (Exception e) { + LOGGER.error("Not able to update experiment type in recommendation entry due to {}", e.getMessage()); + throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage()); + } + } + } + } + + private boolean isTargetCluserLocal(String targetCluster) { + if (AnalyzerConstants.LOCAL.equalsIgnoreCase(targetCluster)) { + return true; + } + return false; + } } diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java b/src/main/java/com/autotune/database/helper/DBConstants.java index 8771dcf51..e713db9b5 100644 --- a/src/main/java/com/autotune/database/helper/DBConstants.java +++ b/src/main/java/com/autotune/database/helper/DBConstants.java @@ -76,6 +76,10 @@ public static final class SQLQUERY { " WHERE container->>'container_name' = :container_name" + " AND container->>'container_image_name' = :container_image_name" + " ))"; + public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiment SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name"; + public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time =: interval_end_time"; + public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiment WHERE experiment_id = :experiment_id"; + public static final String SELECT_RECOMMENDATIONS_EXP_TYPE = "SELECT experiment_type from kruize_recommendations WHERE experiment_name = :experiment_name"; } diff --git a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java index 49fbe37cb..01908cdcd 100644 --- a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java @@ -57,6 +57,7 @@ public class KruizeExperimentEntry { private String mode; private String target_cluster; private String performance_profile; + @Transient private String experiment_type; @Enumerated(EnumType.STRING) private AnalyzerConstants.ExperimentStatus status; diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java index 513f4b216..d3d490f0c 100644 --- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java @@ -27,6 +27,7 @@ public class KruizeRecommendationEntry { private String cluster_name; @JdbcTypeCode(SqlTypes.JSON) private JsonNode extended_data; + @Transient private String experiment_type; public String getExperiment_name() { From eca530106bbf5ed0145479c7c5cfae37d0c2aca7 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 14:27:41 +0530 Subject: [PATCH 13/17] correcting sql file Signed-off-by: Shekhar Saxena --- migrations/kruize_experiments_ddl.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/migrations/kruize_experiments_ddl.sql b/migrations/kruize_experiments_ddl.sql index 6858aeb36..8d9002b71 100644 --- a/migrations/kruize_experiments_ddl.sql +++ b/migrations/kruize_experiments_ddl.sql @@ -1,6 +1,6 @@ create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), version varchar(255), primary key (experiment_id)); create table IF NOT EXISTS kruize_performance_profiles (name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); -create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, experiment_type varchar(255) null default null, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); +create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) , duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time, interval_start_time)) PARTITION BY RANGE (interval_end_time); alter table if exists kruize_experiments add constraint UK_experiment_name unique (experiment_name); create index IF NOT EXISTS idx_recommendation_experiment_name on kruize_recommendations (experiment_name); From 9adeaf71545ceb39c55c98d382a0f65eef1af5ca Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 14:29:02 +0530 Subject: [PATCH 14/17] adding EOF Signed-off-by: Shekhar Saxena --- migrations/kruize_local_ddl.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql index 4a7aba2d8..48781f500 100644 --- a/migrations/kruize_local_ddl.sql +++ b/migrations/kruize_local_ddl.sql @@ -2,4 +2,4 @@ create table IF NOT EXISTS kruize_datasources (version varchar(255), name varcha create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), datasource_name varchar(255), cluster_name varchar(255), namespace varchar(255), workload_type varchar(255), workload_name varchar(255), container_name varchar(255), container_image_name varchar(255), primary key (id)); alter table kruize_experiments add column experiment_type varchar(255), add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255); create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); -alter table kruize_recommendations add column experiment_type varchar(255); \ No newline at end of file +alter table kruize_recommendations add column experiment_type varchar(255); From d107bbbd04f7c6a7583f716f0917b417e9e603da Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 
Sep 2024 15:28:20 +0530 Subject: [PATCH 15/17] fixing create exp api Signed-off-by: Shekhar Saxena --- .../java/com/autotune/database/dao/ExperimentDAOImpl.java | 6 +++++- src/main/java/com/autotune/database/helper/DBConstants.java | 4 ++-- src/main/java/com/autotune/database/helper/DBHelpers.java | 1 - 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index f5ecba2ff..00b733d8c 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -1070,7 +1070,7 @@ private void getExperimentTypeInKruizeExperimentEntry(List experimentType = query.getResultList(); if (null != experimentType && !experimentType.isEmpty()) { entry.setExperimentType(experimentType.get(0)); @@ -1086,11 +1086,13 @@ private void getExperimentTypeInKruizeExperimentEntry(List>'container_name' = :container_name" + " AND container->>'container_image_name' = :container_image_name" + " ))"; - public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiment SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name"; + public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiments SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name"; public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time =: interval_end_time"; - public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiment WHERE experiment_id = :experiment_id"; + public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiments WHERE experiment_id = :experiment_id"; public static final String SELECT_RECOMMENDATIONS_EXP_TYPE = "SELECT experiment_type from kruize_recommendations WHERE experiment_name = :experiment_name"; } diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java index 104ed091b..23069d348 100644 --- a/src/main/java/com/autotune/database/helper/DBHelpers.java +++ b/src/main/java/com/autotune/database/helper/DBHelpers.java @@ -616,7 +616,6 @@ private static List convertK8sObjectListToKubernetesAPIObje public static List convertRecommendationEntryToRecommendationAPIObject( List kruizeRecommendationEntryList) throws InvalidConversionOfRecommendationEntryException { - LOGGER.info("Hello 3: convertRecommendationEntryToRecommendationAPIObject" + kruizeRecommendationEntryList.size()); if (null == kruizeRecommendationEntryList) return null; if (kruizeRecommendationEntryList.size() == 0) From e85c281043385d07289c8f672e370cd49166d285 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 16:18:46 +0530 Subject: [PATCH 16/17] fixing generate recom error Signed-off-by: Shekhar Saxena --- .../java/com/autotune/analyzer/services/CreateExperiment.java | 1 + src/main/java/com/autotune/database/helper/DBConstants.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java index 6e8c7ecab..b66259f7d 100644 --- a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java +++ 
b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java @@ -103,6 +103,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // validating the kubernetes objects and experiment type for (KubernetesAPIObject kubernetesAPIObject: createExperimentAPIObject.getKubernetesObjects()) { if (createExperimentAPIObject.isContainerExperiment()) { + createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT); // check if namespace data is also set for container-type experiments if (null != kubernetesAPIObject.getNamespaceAPIObjects()) { throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP); diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java b/src/main/java/com/autotune/database/helper/DBConstants.java index a58f9d1c2..62fa006ac 100644 --- a/src/main/java/com/autotune/database/helper/DBConstants.java +++ b/src/main/java/com/autotune/database/helper/DBConstants.java @@ -77,7 +77,7 @@ public static final class SQLQUERY { " AND container->>'container_image_name' = :container_image_name" + " ))"; public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiments SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name"; - public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time =: interval_end_time"; + public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time = :interval_end_time"; public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiments WHERE experiment_id = :experiment_id"; public static final String SELECT_RECOMMENDATIONS_EXP_TYPE = "SELECT experiment_type from kruize_recommendations WHERE experiment_name = :experiment_name"; From d7a03062867a18f61e9b6e081b8e8d560fe84a11 Mon Sep 17 00:00:00 2001 From: Shekhar Saxena Date: Thu, 26 Sep 2024 16:46:58 +0530 Subject: [PATCH 17/17] updating javadoc Signed-off-by: Shekhar Saxena --- .../recommendations/engine/RecommendationEngine.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index aaac83423..bb9a202be 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -313,9 +313,14 @@ public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsErro return kruizeObject; } + /** + * Generates recommendations for the specified KruizeObject + * @param kruizeObject The KruizeObject containing experiment data + */ public void generateRecommendations(KruizeObject kruizeObject) { for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) { + // verify if the experiment type is namespace or container if (kruizeObject.isNamespaceExperiment()) { String namespaceName = k8sObject.getNamespace(); NamespaceData namespaceData = k8sObject.getNamespaceData();
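The `isNamespaceExperiment()` / `isContainerExperiment()` helpers referenced in these last two patches are not shown in this series. The sketch below is one plausible shape for such checks, assuming they wrap the `experiment_type` field and the `AnalyzerConstants.ExperimentTypes` constants, and that a missing type falls back to a container experiment as documented for createExperiment; it is illustrative only, not the patch's actual implementation.

```java
// Illustrative sketch only (not part of the patch series): possible type-check helpers,
// assumed to live on KruizeObject alongside getExperimentType().
// A null/absent experiment_type is treated as a container experiment, matching the
// documented default for the createExperiment API.
public boolean isNamespaceExperiment() {
    return null != getExperimentType()
            && getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT);
}

public boolean isContainerExperiment() {
    return null == getExperimentType()
            || getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT);
}
```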