/generateRecommendations?experiment_name=temp_1'`
+
+Success status code: 201
+
+**Response for `namespace` Experiment**
+
+The response will contain an array of JSON objects with the recommendations for the specified experiment.
+
+When `interval_end_time` is not specified, Kruize will determine the latest timestamp from the specified datasource
+(E.g. Prometheus) by checking the latest active container CPU usage.
+
+
+Example Response Body
+
+```json
+[
+ {
+ "cluster_name": "test-multiple-import",
+ "experiment_type": "namespace",
+ "kubernetes_objects": [
+ {
+ "namespace": "default",
+ "containers": [],
+ "namespaces": {
+ "namespace_name": "default",
+ "recommendations": {
+ "version": "1.0",
+ "notifications": {
+ "111000": {
+ "type": "info",
+ "message": "Recommendations Are Available",
+ "code": 111000
+ }
+ },
+ "data": {
+ "2024-09-25T09:46:20.000Z": {
+ "notifications": {
+ "111101": {
+ "type": "info",
+ "message": "Short Term Recommendations Available",
+ "code": 111101
+ }
+ },
+ "monitoring_end_time": "2024-09-25T09:46:20.000Z",
+ "current": {},
+ "recommendation_terms": {
+ "short_term": {
+ "duration_in_hours": 24.0,
+ "notifications": {
+ "112101": {
+ "type": "info",
+ "message": "Cost Recommendations Available",
+ "code": 112101
+ },
+ "112102": {
+ "type": "info",
+ "message": "Performance Recommendations Available",
+ "code": 112102
+ }
+ },
+ "monitoring_start_time": "2024-09-24T09:46:20.000Z",
+ "recommendation_engines": {
+ "cost": {
+ "pods_count": 2,
+ "confidence_level": 0.0,
+ "config": {
+ "limits": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ },
+ "requests": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ }
+ },
+ "variation": {
+ "limits": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ },
+ "requests": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ }
+ },
+ "notifications": {}
+ },
+ "performance": {
+ "pods_count": 2,
+ "confidence_level": 0.0,
+ "config": {
+ "limits": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ },
+ "requests": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ }
+ },
+ "variation": {
+ "limits": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ },
+ "requests": {
+ "memory": {
+ "amount": 1.442955264E9,
+ "format": "bytes"
+ },
+ "cpu": {
+ "amount": 5.834468490017892,
+ "format": "cores"
+ }
+ }
+ },
+ "notifications": {}
+ }
+ }
+ },
+ "medium_term": {
+ "duration_in_hours": 168.0,
+ "notifications": {
+ "120001": {
+ "type": "info",
+ "message": "There is not enough data available to generate a recommendation.",
+ "code": 120001
+ }
+ }
+ },
+ "long_term": {
+ "duration_in_hours": 360.0,
+ "notifications": {
+ "120001": {
+ "type": "info",
+ "message": "There is not enough data available to generate a recommendation.",
+ "code": 120001
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ ],
+ "version": "v2.0",
+ "experiment_name": "namespace-demo"
+ }
+]
+```
+
+
**Error Responses**
diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql
index cd6453371..48781f500 100644
--- a/migrations/kruize_local_ddl.sql
+++ b/migrations/kruize_local_ddl.sql
@@ -1,4 +1,5 @@
create table IF NOT EXISTS kruize_datasources (version varchar(255), name varchar(255), provider varchar(255), serviceName varchar(255), namespace varchar(255), url varchar(255), authentication jsonb, primary key (name));
create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), datasource_name varchar(255), cluster_name varchar(255), namespace varchar(255), workload_type varchar(255), workload_name varchar(255), container_name varchar(255), container_image_name varchar(255), primary key (id));
-alter table kruize_experiments add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255);
+alter table kruize_experiments add column experiment_type varchar(255), add column metadata_id bigint references kruize_dsmetadata(id), alter column datasource type varchar(255);
create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name));
+alter table kruize_recommendations add column experiment_type varchar(255);
diff --git a/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java b/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java
new file mode 100644
index 000000000..bd2c1e30b
--- /dev/null
+++ b/src/main/java/com/autotune/analyzer/exceptions/FetchMetricsError.java
@@ -0,0 +1,26 @@
+/*******************************************************************************
+ * Copyright (c) 2024 Red Hat, IBM Corporation and others.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *******************************************************************************/
+package com.autotune.analyzer.exceptions;
+
+public class FetchMetricsError extends Throwable
+{
+ public FetchMetricsError() {
+ }
+
+ public FetchMetricsError(String message) {
+ super(message);
+ }
+}
diff --git a/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java b/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java
new file mode 100644
index 000000000..9de8c23bb
--- /dev/null
+++ b/src/main/java/com/autotune/analyzer/exceptions/InvalidExperimentType.java
@@ -0,0 +1,26 @@
+/*******************************************************************************
+ * Copyright (c) 2024 Red Hat, IBM Corporation and others.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *******************************************************************************/
+package com.autotune.analyzer.exceptions;
+
+public class InvalidExperimentType extends Throwable
+{
+ public InvalidExperimentType() {
+ }
+
+ public InvalidExperimentType(String message) {
+ super(message);
+ }
+}
diff --git a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java
index 41670ac8f..d86d399a0 100644
--- a/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java
+++ b/src/main/java/com/autotune/analyzer/kruizeObject/KruizeObject.java
@@ -18,6 +18,8 @@
import com.autotune.analyzer.exceptions.InvalidValueException;
import com.autotune.analyzer.recommendations.term.Terms;
import com.autotune.analyzer.utils.AnalyzerConstants;
+import com.autotune.analyzer.utils.ExperimentTypeAware;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.k8sObjects.K8sObject;
import com.autotune.common.k8sObjects.TrialSettings;
@@ -36,7 +38,7 @@
*
* Refer to examples dir for a reference AutotuneObject yaml.
*/
-public final class KruizeObject {
+public final class KruizeObject implements ExperimentTypeAware {
@SerializedName("version")
private String apiVersion;
@@ -47,6 +49,8 @@ public final class KruizeObject {
private String clusterName;
@SerializedName("datasource")
private String datasource;
+ @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future
+ private String experimentType;
private String namespace; // TODO: Currently adding it at this level with an assumption that there is only one entry in k8s object needs to be changed
private String mode; //Todo convert into Enum
@SerializedName("target_cluster")
@@ -297,6 +301,25 @@ public void setDataSource(String datasource) {
this.datasource = datasource;
}
+ @Override
+ public String getExperimentType() {
+ return experimentType;
+ }
+
+ public void setExperimentType(String experimentType) {
+ this.experimentType = experimentType;
+ }
+
+ @Override
+ public boolean isNamespaceExperiment() {
+ return ExperimentTypeUtil.isNamespaceExperiment(experimentType);
+ }
+
+ @Override
+ public boolean isContainerExperiment() {
+ return ExperimentTypeUtil.isContainerExperiment(experimentType);
+ }
+
@Override
public String toString() {
// Creating a temporary cluster name as we allow null for cluster name now
@@ -309,6 +332,7 @@ public String toString() {
", experimentName='" + experimentName + '\'' +
", clusterName=" + tmpClusterName + '\'' +
", datasource=" + datasource + '\'' +
+ ", experimentType=" + experimentType + '\'' +
", mode='" + mode + '\'' +
", targetCluster='" + targetCluster + '\'' +
", hpoAlgoImpl=" + hpoAlgoImpl +
diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
index 9abb232c5..bb9a202be 100644
--- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
+++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
@@ -1,5 +1,6 @@
package com.autotune.analyzer.recommendations.engine;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.kruizeObject.RecommendationSettings;
import com.autotune.analyzer.performanceProfiles.MetricProfileCollection;
@@ -16,6 +17,7 @@
import com.autotune.analyzer.recommendations.utils.RecommendationUtils;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.metrics.AggregationFunctions;
import com.autotune.common.data.metrics.Metric;
@@ -238,7 +240,7 @@ public String validate_local() { //TODO Instead of relying on the 'lo
* @param calCount The count of incoming requests.
* @return The KruizeObject containing the prepared recommendations.
*/
- public KruizeObject prepareRecommendations(int calCount) {
+ public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsError{
Map mainKruizeExperimentMAP = new ConcurrentHashMap<>();
Map terms = new HashMap<>();
ValidationOutputData validationOutputData;
@@ -275,6 +277,8 @@ public KruizeObject prepareRecommendations(int calCount) {
if (!errorMsg.isEmpty()) {
throw new Exception(errorMsg);
}
+ } catch (FetchMetricsError e) {
+ throw new FetchMetricsError(e.getMessage());
} catch (Exception e) {
LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount));
kruizeObject = new KruizeObject();
@@ -300,7 +304,7 @@ public KruizeObject prepareRecommendations(int calCount) {
experimentName, interval_start_time, interval_end_time));
kruizeObject.setValidation_data(new ValidationOutputData(false, e.getMessage(), HttpServletResponse.SC_INTERNAL_SERVER_ERROR));
}
- } catch (Exception e) {
+ } catch (Exception | FetchMetricsError e) {
LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.RECOMMENDATION_EXCEPTION,
experimentName, interval_end_time, e.getMessage()));
LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount));
@@ -309,15 +313,20 @@ public KruizeObject prepareRecommendations(int calCount) {
return kruizeObject;
}
+ /**
+ * Generates recommendations for the specified KruizeObject
+ * @param kruizeObject The KruizeObject containing experiment data
+ */
public void generateRecommendations(KruizeObject kruizeObject) {
for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
- if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) {
+ // verify if the experiment type is namespace or container
+ if (kruizeObject.isNamespaceExperiment()) {
String namespaceName = k8sObject.getNamespace();
NamespaceData namespaceData = k8sObject.getNamespaceData();
LOGGER.info("Generating recommendations for namespace: {}", namespaceName);
generateRecommendationsBasedOnNamespace(namespaceData, kruizeObject);
- } else {
+ } else if (kruizeObject.isContainerExperiment()){
for (String containerName : k8sObject.getContainerDataMap().keySet()) {
ContainerData containerData = k8sObject.getContainerDataMap().get(containerName);
@@ -1747,7 +1756,7 @@ private ValidationOutputData addRecommendationsToDB(Map ma
* @throws Exception if an error occurs during the process of fetching and storing results.
*/
private String getResults(Map mainKruizeExperimentMAP, KruizeObject kruizeObject,
- String experimentName, Timestamp intervalStartTime, String dataSource) throws Exception {
+ String experimentName, Timestamp intervalStartTime, String dataSource) throws Exception, FetchMetricsError {
String errorMsg = "";
mainKruizeExperimentMAP.put(experimentName, kruizeObject);
@@ -1779,185 +1788,6 @@ private String getResults(Map mainKruizeExperimentMAP, Kru
return errorMsg;
}
- /**
- * Fetches metrics based on the specified datasource for the given time interval.
- *
- * @param kruizeObject The KruizeObject containing the experiment data.
- * @param interval_end_time The end time of the interval for fetching metrics.
- * @param interval_start_time The start time of the interval for fetching metrics.
- * @param dataSourceInfo The datasource object to fetch metrics from.
- * @throws Exception if an error occurs during the fetching process.
- * TODO: Need to add right abstractions for this
- */
-// public void fetchMetricsBasedOnDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception {
-// try {
-// long interval_end_time_epoc = 0;
-// long interval_start_time_epoc = 0;
-// SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);
-//
-// // Get MetricsProfile name and list of promQL to fetch
-// Map promQls = new HashMap<>();
-// getPromQls(promQls);
-// List aggregationMethods = Arrays.asList(KruizeConstants.JSONKeys.SUM, KruizeConstants.JSONKeys.AVG,
-// KruizeConstants.JSONKeys.MAX, KruizeConstants.JSONKeys.MIN);
-// Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble();
-// List kubernetes_objects = kruizeObject.getKubernetes_objects();
-//
-// // Iterate over Kubernetes objects
-// for (K8sObject k8sObject : kubernetes_objects) {
-// String namespace = k8sObject.getNamespace();
-// HashMap containerDataMap = k8sObject.getContainerDataMap();
-// // Iterate over containers
-// for (Map.Entry entry : containerDataMap.entrySet()) {
-// ContainerData containerData = entry.getValue();
-// String containerName = containerData.getContainer_name();
-// if (null == interval_end_time) {
-// LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO);
-// String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
-// dataSourceInfo.getUrl(),
-// URLEncoder.encode(String.format(PromQLDataSourceQueries.MAX_DATE, containerName, namespace), CHARACTER_ENCODING)
-// );
-// LOGGER.info(dateMetricsUrl);
-// JSONObject genericJsonObject = new GenericRestApiClient(dateMetricsUrl).fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
-// JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
-// JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
-// // Process fetched metrics
-// if (null != resultArray && !resultArray.isEmpty()) {
-// resultArray = resultArray.get(0)
-// .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE);
-// long epochTime = resultArray.get(0).getAsLong();
-// String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
-// Date date = sdf.parse(timestamp);
-// Timestamp dateTS = new Timestamp(date.getTime());
-// interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
-// - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
-// int maxDay = Terms.getMaxDays(kruizeObject.getTerms());
-// LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay);
-// Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay));
-// interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
-// - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
-// }
-// } else {
-// // Convert timestamps to epoch time
-// interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
-// - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
-// interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
-// - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
-// }
-// HashMap containerDataResults = new HashMap<>();
-// IntervalResults intervalResults;
-// HashMap resMap;
-// MetricResults metricResults;
-// MetricAggregationInfoResults metricAggregationInfoResults;
-// // Iterate over metrics and aggregation methods
-// for (Map.Entry metricEntry : promQls.entrySet()) {
-// for (String methodName : aggregationMethods) {
-// String promQL = null;
-// String format = null;
-// // Determine promQL and format based on metric type
-// if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuUsage) {
-// String secondMethodName = methodName;
-// if (secondMethodName.equals(KruizeConstants.JSONKeys.SUM))
-// secondMethodName = KruizeConstants.JSONKeys.AVG;
-// promQL = String.format(metricEntry.getValue(), methodName, secondMethodName, namespace, containerName, measurementDurationMinutesInDouble.intValue());
-// format = KruizeConstants.JSONKeys.CORES;
-// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuThrottle) {
-// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName, measurementDurationMinutesInDouble.intValue());
-// format = KruizeConstants.JSONKeys.CORES;
-// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.cpuLimit || metricEntry.getKey() == AnalyzerConstants.MetricName.cpuRequest) {
-// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName);
-// format = KruizeConstants.JSONKeys.CORES;
-// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.memoryUsage || metricEntry.getKey() == AnalyzerConstants.MetricName.memoryRSS) {
-// String secondMethodName = methodName;
-// if (secondMethodName.equals(KruizeConstants.JSONKeys.SUM))
-// secondMethodName = KruizeConstants.JSONKeys.AVG;
-// promQL = String.format(metricEntry.getValue(), methodName, secondMethodName, namespace, containerName, measurementDurationMinutesInDouble.intValue());
-// format = KruizeConstants.JSONKeys.BYTES;
-// } else if (metricEntry.getKey() == AnalyzerConstants.MetricName.memoryLimit || metricEntry.getKey() == AnalyzerConstants.MetricName.memoryRequest) {
-// promQL = String.format(metricEntry.getValue(), methodName, namespace, containerName);
-// format = KruizeConstants.JSONKeys.BYTES;
-// }
-// // If promQL is determined, fetch metrics from the datasource
-// if (promQL != null) {
-// LOGGER.info(promQL);
-// String podMetricsUrl;
-// try {
-// podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
-// dataSourceInfo.getUrl(),
-// URLEncoder.encode(promQL, CHARACTER_ENCODING),
-// interval_start_time_epoc,
-// interval_end_time_epoc,
-// measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
-// LOGGER.info(podMetricsUrl);
-// JSONObject genericJsonObject = new GenericRestApiClient(podMetricsUrl).fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
-// JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
-// JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
-// // Process fetched metrics
-// if (null != resultArray && !resultArray.isEmpty()) {
-// resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(
-// KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
-// .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
-// .DataSourceQueryJSONKeys.VALUES);
-// sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));
-//
-// // Iterate over fetched metrics
-// Timestamp sTime = new Timestamp(interval_start_time_epoc);
-// for (JsonElement element : resultArray) {
-// JsonArray valueArray = element.getAsJsonArray();
-// long epochTime = valueArray.get(0).getAsLong();
-// double value = valueArray.get(1).getAsDouble();
-// String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
-// Date date = sdf.parse(timestamp);
-// Timestamp eTime = new Timestamp(date.getTime());
-//
-// // Prepare interval results
-// if (containerDataResults.containsKey(eTime)) {
-// intervalResults = containerDataResults.get(eTime);
-// resMap = intervalResults.getMetricResultsMap();
-// } else {
-// intervalResults = new IntervalResults();
-// resMap = new HashMap<>();
-// }
-// if (resMap.containsKey(metricEntry.getKey())) {
-// metricResults = resMap.get(metricEntry.getKey());
-// metricAggregationInfoResults = metricResults.getAggregationInfoResult();
-// } else {
-// metricResults = new MetricResults();
-// metricAggregationInfoResults = new MetricAggregationInfoResults();
-// }
-// Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + methodName.substring(0, 1).toUpperCase() + methodName.substring(1), Double.class);
-// method.invoke(metricAggregationInfoResults, value);
-// metricAggregationInfoResults.setFormat(format);
-// metricResults.setAggregationInfoResult(metricAggregationInfoResults);
-// metricResults.setName(String.valueOf(metricEntry.getKey()));
-// metricResults.setFormat(format);
-// resMap.put(metricEntry.getKey(), metricResults);
-// intervalResults.setMetricResultsMap(resMap);
-// intervalResults.setIntervalStartTime(sTime); //Todo this will change
-// intervalResults.setIntervalEndTime(eTime);
-// intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime())
-// / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE
-// * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)));
-// containerDataResults.put(eTime, intervalResults);
-// sTime = eTime;
-// }
-// }
-// } catch (Exception e) {
-// throw new RuntimeException(e);
-// }
-// }
-// }
-// }
-// containerData.setResults(containerDataResults);
-// if (!containerDataResults.isEmpty())
-// setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date
-// }
-// }
-// } catch (Exception e) {
-// e.printStackTrace();
-// throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage());
-// }
-// }
/**
* Fetches metrics based on the specified datasource using queries from the metricProfile for the given time interval.
@@ -1968,14 +1798,8 @@ private String getResults(Map mainKruizeExperimentMAP, Kru
* @param dataSourceInfo DataSource object
* @throws Exception
*/
- public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception {
+ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception, FetchMetricsError {
try {
- long interval_end_time_epoc = 0;
- long interval_start_time_epoc = 0;
- SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);
- // Create the client
- GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
-
String metricProfileName = kruizeObject.getPerformanceProfile();
PerformanceProfile metricProfile = MetricProfileCollection.getInstance().getMetricProfileCollection().get(metricProfileName);
if (null == metricProfile) {
@@ -1984,213 +1808,224 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T
}
String maxDateQuery = null;
- Listmetrics = metricProfile.getSloInfo().getFunctionVariables();
- for (Metric metric: metrics) {
- String name = metric.getName();
- if(name.equals("maxDate")){
- String query = metric.getAggregationFunctionsMap().get("max").getQuery();
- maxDateQuery = query;
- break;
- }
+ if (kruizeObject.isContainerExperiment()) {
+ maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.maxDate.name());
+ fetchContainerMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery);
+ } else if (kruizeObject.isNamespaceExperiment()) {
+ maxDateQuery = getMaxDateQuery(metricProfile, AnalyzerConstants.MetricName.namespaceMaxDate.name());
+ fetchNamespaceMetricsBasedOnDataSourceAndProfile(kruizeObject, interval_end_time, interval_start_time, dataSourceInfo, metricProfile, maxDateQuery);
}
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage());
+ }
+ }
+
+ /**
+ * Fetches namespace metrics based on the specified datasource using queries from the metricProfile for the given time interval.
+ * @param kruizeObject KruizeObject
+ * @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:ssZ
+ * @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:ssZ.
+ * @param dataSourceInfo DataSource object
+ * @param metricProfile performance profile to be used
+ * @param maxDateQuery max date query for namespace
+ * @throws Exception
+ */
+ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception, FetchMetricsError {
+ try {
+ long interval_end_time_epoc = 0;
+ long interval_start_time_epoc = 0;
+ SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);
+ // Create the client
+ GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble();
List kubernetes_objects = kruizeObject.getKubernetes_objects();
- // Iterate over Kubernetes objects
for (K8sObject k8sObject : kubernetes_objects) {
String namespace = k8sObject.getNamespace();
- String workload = k8sObject.getName();
- String workload_type = k8sObject.getType();
- HashMap containerDataMap = k8sObject.getContainerDataMap();
- // check if containerDataMap is not empty
- if (!containerDataMap.isEmpty()) {
- // Iterate over containers
- for (Map.Entry entry : containerDataMap.entrySet()) {
- ContainerData containerData = entry.getValue();
- String containerName = containerData.getContainer_name();
- if (null == interval_end_time) {
- LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO);
- String queryToEncode = null;
- if (null == maxDateQuery || maxDateQuery.isEmpty()) {
- throw new NullPointerException("maxDate query cannot be empty or null");
- }
-
+ // fetch namespace related metrics if containerDataMap is empty
+ NamespaceData namespaceData = k8sObject.getNamespaceData();
+ // determine the max date query for namespace
+ String namespaceMaxDateQuery = maxDateQuery.replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace);
+
+ if (null == interval_end_time) {
+ LOGGER.info(KruizeConstants.APIMessages.NAMESPACE_USAGE_INFO);
+ String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
+ dataSourceInfo.getUrl(),
+ URLEncoder.encode(namespaceMaxDateQuery, CHARACTER_ENCODING)
+ );
+ LOGGER.info(dateMetricsUrl);
+ client.setBaseURL(dateMetricsUrl);
+ JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
+ JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
+ JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
+ // Process fetched metrics
+ if (null != resultArray && !resultArray.isEmpty()) {
+ resultArray = resultArray.get(0)
+ .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE);
+ long epochTime = resultArray.get(0).getAsLong();
+ String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
+ Date date = sdf.parse(timestamp);
+ Timestamp dateTS = new Timestamp(date.getTime());
+ interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
+ - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
+ int maxDay = Terms.getMaxDays(kruizeObject.getTerms());
+ LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay);
+ Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay));
+ interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
+ - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
+ }
+ } else {
+ // Convert timestamps to epoch time
+ interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
+ - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
+ interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
+ - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
+ }
- LOGGER.debug("maxDateQuery: {}", maxDateQuery);
- queryToEncode = maxDateQuery
- .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
- .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName)
- .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload)
- .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type);
+ HashMap namespaceDataResults = new HashMap<>();
+ IntervalResults namespaceIntervalResults = null;
+ HashMap namespaceResMap = null;
+ HashMap namespaceResultMap = null;
+ MetricResults namespaceMetricResults = null;
+ MetricAggregationInfoResults namespaceMetricAggregationInfoResults = null;
+
+ if (null == namespaceData) {
+ namespaceData = new NamespaceData();
+ namespaceData.setNamespace_name(namespace);
+ k8sObject.setNamespaceData(namespaceData);
+ }
- String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
- dataSourceInfo.getUrl(),
- URLEncoder.encode(queryToEncode, CHARACTER_ENCODING)
- );
- LOGGER.info(dateMetricsUrl);
- client.setBaseURL(dateMetricsUrl);
- JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
- JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
- JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
- // Process fetched metrics
- if (null != resultArray && !resultArray.isEmpty()) {
- resultArray = resultArray.get(0)
- .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.VALUE);
- long epochTime = resultArray.get(0).getAsLong();
- String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
- Date date = sdf.parse(timestamp);
- Timestamp dateTS = new Timestamp(date.getTime());
- interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- - ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
- int maxDay = Terms.getMaxDays(kruizeObject.getTerms());
- LOGGER.info(KruizeConstants.APIMessages.MAX_DAY, maxDay);
- Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay));
- interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- - ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
- }
- } else {
- // Convert timestamps to epoch time
- interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- - ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
- interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- - ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
+ List namespaceMetricList = metricProfile.getSloInfo().getFunctionVariables().stream()
+ .filter(metricEntry -> metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate"))
+ .toList();
+
+ // Iterate over metrics and aggregation functions
+ for (Metric metricEntry : namespaceMetricList) {
+ HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap();
+ for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) {
+ String promQL = aggregationFunctionsEntry.getValue().getQuery();
+ String format = null;
+
+ // Determine format based on metric type
+ List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString());
+ List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString());
+ if (cpuFunction.contains(metricEntry.getName())) {
+ format = KruizeConstants.JSONKeys.CORES;
+ } else if (memFunction.contains(metricEntry.getName())) {
+ format = KruizeConstants.JSONKeys.BYTES;
}
- HashMap containerDataResults = new HashMap<>();
- IntervalResults intervalResults;
- HashMap resMap;
- HashMap resultMap;
- MetricResults metricResults;
- MetricAggregationInfoResults metricAggregationInfoResults;
-
- List metricList = metricProfile.getSloInfo().getFunctionVariables();
-
- // Iterate over metrics and aggregation functions
- for (Metric metricEntry : metricList) {
- HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap();
- for (Map.Entry aggregationFunctionsEntry: aggregationFunctions.entrySet()) {
- // Determine promQL query on metric type
- String promQL = aggregationFunctionsEntry.getValue().getQuery();
- String format = null;
-
-
- // Determine format based on metric type - Todo move this metric profile
- List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.cpuUsage.toString(), AnalyzerConstants.MetricName.cpuThrottle.toString(), AnalyzerConstants.MetricName.cpuLimit.toString(), AnalyzerConstants.MetricName.cpuRequest.toString());
- List memFunction = Arrays.asList(AnalyzerConstants.MetricName.memoryLimit.toString(), AnalyzerConstants.MetricName.memoryRequest.toString(), AnalyzerConstants.MetricName.memoryRSS.toString(), AnalyzerConstants.MetricName.memoryUsage.toString());
- if (cpuFunction.contains(metricEntry.getName())) {
- format = KruizeConstants.JSONKeys.CORES;
- } else if (memFunction.contains(metricEntry.getName())) {
- format = KruizeConstants.JSONKeys.BYTES;
- }
- promQL = promQL
- .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
- .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName)
- .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue()))
- .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload)
- .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type);
-
- // If promQL is determined, fetch metrics from the datasource
- if (promQL != null) {
- LOGGER.info(promQL);
- String podMetricsUrl;
- try {
- podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
- dataSourceInfo.getUrl(),
- URLEncoder.encode(promQL, CHARACTER_ENCODING),
- interval_start_time_epoc,
- interval_end_time_epoc,
- measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
- LOGGER.info(podMetricsUrl);
- client.setBaseURL(podMetricsUrl);
- JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
- JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
- JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
- // Process fetched metrics
- if (null != resultArray && !resultArray.isEmpty()) {
- resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(
- KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
- .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
- .DataSourceQueryJSONKeys.VALUES);
- sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));
-
- // Iterate over fetched metrics
- Timestamp sTime = new Timestamp(interval_start_time_epoc);
- for (JsonElement element : resultArray) {
- JsonArray valueArray = element.getAsJsonArray();
- long epochTime = valueArray.get(0).getAsLong();
- double value = valueArray.get(1).getAsDouble();
- String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
- Date date = sdf.parse(timestamp);
- Timestamp eTime = new Timestamp(date.getTime());
-
- // Prepare interval results
- if (containerDataResults.containsKey(eTime)) {
- intervalResults = containerDataResults.get(eTime);
- resMap = intervalResults.getMetricResultsMap();
- } else {
- intervalResults = new IntervalResults();
- resMap = new HashMap<>();
- }
- AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName());
- if (resMap.containsKey(metricName)) {
- metricResults = resMap.get(metricName);
- metricAggregationInfoResults = metricResults.getAggregationInfoResult();
- } else {
- metricResults = new MetricResults();
- metricAggregationInfoResults = new MetricAggregationInfoResults();
- }
-
- Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class);
- method.invoke(metricAggregationInfoResults, value);
- metricAggregationInfoResults.setFormat(format);
- metricResults.setAggregationInfoResult(metricAggregationInfoResults);
- metricResults.setName(metricEntry.getName());
- metricResults.setFormat(format);
- resMap.put(metricName, metricResults);
- intervalResults.setMetricResultsMap(resMap);
- intervalResults.setIntervalStartTime(sTime); //Todo this will change
- intervalResults.setIntervalEndTime(eTime);
- intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime())
- / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE
- * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)));
- containerDataResults.put(eTime, intervalResults);
- sTime = eTime;
- }
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
+ promQL = promQL
+ .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
+ .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue()));
+
+ // If promQL is determined, fetch metrics from the datasource
+ if (promQL != null) {
+ LOGGER.info(promQL);
+ String namespaceMetricsUrl;
+ try {
+ namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
+ dataSourceInfo.getUrl(),
+ URLEncoder.encode(promQL, CHARACTER_ENCODING),
+ interval_start_time_epoc,
+ interval_end_time_epoc,
+ measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
+ client.setBaseURL(namespaceMetricsUrl);
+ JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
+ JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
+ JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
+ // Process fetched metrics
+ if (null != resultArray && !resultArray.isEmpty()) {
+ resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(
+ KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
+ .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
+ .DataSourceQueryJSONKeys.VALUES);
+ sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));
+
+ // Iterate over fetched metrics
+ Timestamp sTime = new Timestamp(interval_start_time_epoc);
+ for (JsonElement element : resultArray) {
+ JsonArray valueArray = element.getAsJsonArray();
+ long epochTime = valueArray.get(0).getAsLong();
+ double value = valueArray.get(1).getAsDouble();
+ String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
+ Date date = sdf.parse(timestamp);
+ Timestamp eTime = new Timestamp(date.getTime());
+
+ // Prepare interval results
+ prepareIntervalResults(namespaceDataResults, namespaceIntervalResults, namespaceResMap, namespaceMetricResults,
+ namespaceMetricAggregationInfoResults, sTime, eTime, metricEntry, aggregationFunctionsEntry, value, format);
}
}
+ } catch (Exception e) {
+ throw new RuntimeException(e);
}
}
-
- containerData.setResults(containerDataResults);
- if (!containerDataResults.isEmpty())
- setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date
-
}
- } else {
- // fetch namespace related metrics if containerDataMap is empty
- NamespaceData namespaceData = k8sObject.getNamespaceData();
-
- // determine the max date query for namespace
- String namespaceMaxDateQuery = null;
- for (Metric metric: metrics) {
- String name = metric.getName();
- if(name.equals("namespaceMaxDate")){
- namespaceMaxDateQuery = metric.getAggregationFunctionsMap().get("max").getQuery();
- break;
- }
+ namespaceData.setResults(namespaceDataResults);
+ if (!namespaceDataResults.isEmpty()) {
+ setInterval_end_time(Collections.max(namespaceDataResults.keySet()));
}
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage());
+ }
+ }
- namespaceMaxDateQuery = namespaceMaxDateQuery.replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace);
+ /**
+ * Fetches container metrics based on the specified datasource using queries from the metricProfile for the given time interval.
+ *
+ * @param kruizeObject KruizeObject
+ * @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:ssZ
+ * @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:ssZ.
+ * @param dataSourceInfo DataSource object
+ * @param metricProfile performance profile to be used
+ * @param maxDateQuery max date query for containers
+ * @throws Exception
+ */
+ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception, FetchMetricsError {
+ try {
+ long interval_end_time_epoc = 0;
+ long interval_start_time_epoc = 0;
+ SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);
+ // Create the client
+ GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
+
+ Double measurementDurationMinutesInDouble = kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble();
+ List kubernetes_objects = kruizeObject.getKubernetes_objects();
+
+ for (K8sObject k8sObject : kubernetes_objects) {
+ String namespace = k8sObject.getNamespace();
+ String workload = k8sObject.getName();
+ String workload_type = k8sObject.getType();
+ HashMap containerDataMap = k8sObject.getContainerDataMap();
+
+ for (Map.Entry entry : containerDataMap.entrySet()) {
+ ContainerData containerData = entry.getValue();
+ String containerName = containerData.getContainer_name();
if (null == interval_end_time) {
- LOGGER.info(KruizeConstants.APIMessages.NAMESPACE_USAGE_INFO);
+ LOGGER.info(KruizeConstants.APIMessages.CONTAINER_USAGE_INFO);
+ String queryToEncode = null;
+ if (null == maxDateQuery || maxDateQuery.isEmpty()) {
+ throw new NullPointerException("maxDate query cannot be empty or null");
+ }
+
+
+ LOGGER.debug("maxDateQuery: {}", maxDateQuery);
+ queryToEncode = maxDateQuery
+ .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
+ .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName)
+ .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload)
+ .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type);
+
String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
dataSourceInfo.getUrl(),
- URLEncoder.encode(namespaceMaxDateQuery, CHARACTER_ENCODING)
+ URLEncoder.encode(queryToEncode, CHARACTER_ENCODING)
);
LOGGER.info(dateMetricsUrl);
client.setBaseURL(dateMetricsUrl);
@@ -2220,121 +2055,90 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T
interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
}
-
- HashMap namespaceDataResults = new HashMap<>();
- IntervalResults namespaceIntervalResults;
- HashMap namespaceResMap;
- HashMap namespaceResultMap;
- MetricResults namespaceMetricResults;
- MetricAggregationInfoResults namespaceMetricAggregationInfoResults;
-
- if (null == namespaceData) {
- namespaceData = new NamespaceData();
- namespaceData.setNamespace_name(namespace);
- k8sObject.setNamespaceData(namespaceData);
- }
+ HashMap containerDataResults = new HashMap<>();
+ IntervalResults intervalResults = null;
+ HashMap resMap = null;
+ HashMap resultMap = null;
+ MetricResults metricResults = null;
+ MetricAggregationInfoResults metricAggregationInfoResults = null;
List metricList = metricProfile.getSloInfo().getFunctionVariables();
// Iterate over metrics and aggregation functions
for (Metric metricEntry : metricList) {
- if (metricEntry.getName().startsWith(AnalyzerConstants.NAMESPACE) && !metricEntry.getName().equals("namespaceMaxDate")) {
- HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap();
- for (Map.Entry aggregationFunctionsEntry : aggregationFunctions.entrySet()) {
- String promQL = aggregationFunctionsEntry.getValue().getQuery();
- String format = null;
-
- // Determine format based on metric type
- List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceCpuRequest.toString(), AnalyzerConstants.MetricName.namespaceCpuLimit.toString(), AnalyzerConstants.MetricName.namespaceCpuUsage.toString(), AnalyzerConstants.MetricName.namespaceCpuThrottle.toString());
- List memFunction = Arrays.asList(AnalyzerConstants.MetricName.namespaceMemoryRequest.toString(), AnalyzerConstants.MetricName.namespaceMemoryLimit.toString(), AnalyzerConstants.MetricName.namespaceMemoryUsage.toString(), AnalyzerConstants.MetricName.namespaceMemoryRSS.toString());
- if (cpuFunction.contains(metricEntry.getName())) {
- format = KruizeConstants.JSONKeys.CORES;
- } else if (memFunction.contains(metricEntry.getName())) {
- format = KruizeConstants.JSONKeys.BYTES;
- }
+ HashMap aggregationFunctions = metricEntry.getAggregationFunctionsMap();
+ for (Map.Entry aggregationFunctionsEntry: aggregationFunctions.entrySet()) {
+ // Determine promQL query on metric type
+ String promQL = aggregationFunctionsEntry.getValue().getQuery();
+ String format = null;
+
+
+ // Determine format based on metric type - Todo move this metric profile
+ List cpuFunction = Arrays.asList(AnalyzerConstants.MetricName.cpuUsage.toString(), AnalyzerConstants.MetricName.cpuThrottle.toString(), AnalyzerConstants.MetricName.cpuLimit.toString(), AnalyzerConstants.MetricName.cpuRequest.toString());
+ List memFunction = Arrays.asList(AnalyzerConstants.MetricName.memoryLimit.toString(), AnalyzerConstants.MetricName.memoryRequest.toString(), AnalyzerConstants.MetricName.memoryRSS.toString(), AnalyzerConstants.MetricName.memoryUsage.toString());
+ if (cpuFunction.contains(metricEntry.getName())) {
+ format = KruizeConstants.JSONKeys.CORES;
+ } else if (memFunction.contains(metricEntry.getName())) {
+ format = KruizeConstants.JSONKeys.BYTES;
+ }
+
+ promQL = promQL
+ .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
+ .replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName)
+ .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue()))
+ .replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload)
+ .replace(AnalyzerConstants.WORKLOAD_TYPE_VARIABLE, workload_type);
- promQL = promQL
- .replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
- .replace(AnalyzerConstants.MEASUREMENT_DURATION_IN_MIN_VARAIBLE, Integer.toString(measurementDurationMinutesInDouble.intValue()));
-
- // If promQL is determined, fetch metrics from the datasource
- if (promQL != null) {
- LOGGER.info(promQL);
- String namespaceMetricsUrl;
- try {
- namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
- dataSourceInfo.getUrl(),
- URLEncoder.encode(promQL, CHARACTER_ENCODING),
- interval_start_time_epoc,
- interval_end_time_epoc,
- measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
- client.setBaseURL(namespaceMetricsUrl);
- JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
- JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
- JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
- // Process fetched metrics
- if (null != resultArray && !resultArray.isEmpty()) {
- resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(
- KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
- .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
- .DataSourceQueryJSONKeys.VALUES);
- sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));
-
- // Iterate over fetched metrics
- Timestamp sTime = new Timestamp(interval_start_time_epoc);
- for (JsonElement element : resultArray) {
- JsonArray valueArray = element.getAsJsonArray();
- long epochTime = valueArray.get(0).getAsLong();
- double value = valueArray.get(1).getAsDouble();
- String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
- Date date = sdf.parse(timestamp);
- Timestamp eTime = new Timestamp(date.getTime());
-
- // Prepare interval results
- if (namespaceDataResults.containsKey(eTime)) {
- namespaceIntervalResults = namespaceDataResults.get(eTime);
- namespaceResMap = namespaceIntervalResults.getMetricResultsMap();
- } else {
- namespaceIntervalResults = new IntervalResults();
- namespaceResMap = new HashMap<>();
- }
- AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName());
- if (namespaceResMap.containsKey(metricName)) {
- namespaceMetricResults = namespaceResMap.get(metricName);
- namespaceMetricAggregationInfoResults = namespaceMetricResults.getAggregationInfoResult();
- } else {
- namespaceMetricResults = new MetricResults();
- namespaceMetricAggregationInfoResults = new MetricAggregationInfoResults();
- }
-
- Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class);
- method.invoke(namespaceMetricAggregationInfoResults, value);
- namespaceMetricAggregationInfoResults.setFormat(format);
- namespaceMetricResults.setAggregationInfoResult(namespaceMetricAggregationInfoResults);
- namespaceMetricResults.setName(metricEntry.getName());
- namespaceMetricResults.setFormat(format);
- namespaceResMap.put(metricName, namespaceMetricResults);
- namespaceIntervalResults.setMetricResultsMap(namespaceResMap);
- namespaceIntervalResults.setIntervalStartTime(sTime); //Todo this will change
- namespaceIntervalResults.setIntervalEndTime(eTime);
- namespaceIntervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime())
- / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE
- * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)));
- namespaceDataResults.put(eTime, namespaceIntervalResults);
- sTime = eTime;
- }
+ // If promQL is determined, fetch metrics from the datasource
+ if (promQL != null) {
+ LOGGER.info(promQL);
+ String podMetricsUrl;
+ try {
+ podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
+ dataSourceInfo.getUrl(),
+ URLEncoder.encode(promQL, CHARACTER_ENCODING),
+ interval_start_time_epoc,
+ interval_end_time_epoc,
+ measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
+ LOGGER.info(podMetricsUrl);
+ client.setBaseURL(podMetricsUrl);
+ JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
+ JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
+ JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
+ // Process fetched metrics
+ if (null != resultArray && !resultArray.isEmpty()) {
+ resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(
+ KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
+ .getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
+ .DataSourceQueryJSONKeys.VALUES);
+ sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));
+
+ // Iterate over fetched metrics
+ Timestamp sTime = new Timestamp(interval_start_time_epoc);
+ for (JsonElement element : resultArray) {
+ JsonArray valueArray = element.getAsJsonArray();
+ long epochTime = valueArray.get(0).getAsLong();
+ double value = valueArray.get(1).getAsDouble();
+ String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
+ Date date = sdf.parse(timestamp);
+ Timestamp eTime = new Timestamp(date.getTime());
+
+ // Prepare interval results
+ prepareIntervalResults(containerDataResults, intervalResults, resMap, metricResults,
+ metricAggregationInfoResults, sTime, eTime, metricEntry, aggregationFunctionsEntry, value, format);
}
- } catch (Exception e) {
- throw new RuntimeException(e);
}
+ } catch (Exception e) {
+ throw new RuntimeException(e);
}
}
- namespaceData.setResults(namespaceDataResults);
- if (!namespaceDataResults.isEmpty()) {
- setInterval_end_time(Collections.max(namespaceDataResults.keySet()));
- }
}
}
+
+ containerData.setResults(containerDataResults);
+ if (!containerDataResults.isEmpty())
+ setInterval_end_time(Collections.max(containerDataResults.keySet())); //TODO Temp fix invalid date is set if experiment having two container with different last seen date
+
}
}
} catch (Exception e) {
@@ -2342,4 +2146,65 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T
throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage());
}
}
+
+ /**
+ * Fetches max date query for namespace and containers from performance profile
+ * @param metricProfile performance profile to be used
+ */
+ private String getMaxDateQuery(PerformanceProfile metricProfile, String metricName) {
+ List metrics = metricProfile.getSloInfo().getFunctionVariables();
+ for (Metric metric: metrics) {
+ String name = metric.getName();
+ if(name.equals(metricName)) {
+ return metric.getAggregationFunctionsMap().get("max").getQuery();
+ }
+ }
+ return null;
+ }
+
+ /**
+ * prepares interval results for namespace and container experiments
+ */
+ private void prepareIntervalResults(Map dataResultsMap, IntervalResults intervalResults,
+ HashMap resMap, MetricResults metricResults,
+ MetricAggregationInfoResults metricAggregationInfoResults, Timestamp sTime, Timestamp eTime, Metric metricEntry,
+ Map.Entry aggregationFunctionsEntry, double value, String format) throws Exception {
+ try {
+ if (dataResultsMap.containsKey(eTime)) {
+ intervalResults = dataResultsMap.get(eTime);
+ resMap = intervalResults.getMetricResultsMap();
+ } else {
+ intervalResults = new IntervalResults();
+ resMap = new HashMap<>();
+ }
+ AnalyzerConstants.MetricName metricName = AnalyzerConstants.MetricName.valueOf(metricEntry.getName());
+ if (resMap.containsKey(metricName)) {
+ metricResults = resMap.get(metricName);
+ metricAggregationInfoResults = metricResults.getAggregationInfoResult();
+ } else {
+ metricResults = new MetricResults();
+ metricAggregationInfoResults = new MetricAggregationInfoResults();
+ }
+
+ Method method = MetricAggregationInfoResults.class.getDeclaredMethod(KruizeConstants.APIMessages.SET + aggregationFunctionsEntry.getKey().substring(0, 1).toUpperCase() + aggregationFunctionsEntry.getKey().substring(1), Double.class);
+ method.invoke(metricAggregationInfoResults, value);
+ metricAggregationInfoResults.setFormat(format);
+ metricResults.setAggregationInfoResult(metricAggregationInfoResults);
+ metricResults.setName(metricEntry.getName());
+ metricResults.setFormat(format);
+ resMap.put(metricName, metricResults);
+ intervalResults.setMetricResultsMap(resMap);
+ intervalResults.setIntervalStartTime(sTime); //Todo this will change
+ intervalResults.setIntervalEndTime(eTime);
+ intervalResults.setDurationInMinutes((double) ((eTime.getTime() - sTime.getTime())
+ / ((long) KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE
+ * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC)));
+ dataResultsMap.put(eTime, intervalResults);
+ sTime = eTime;
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new Exception(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.METRIC_EXCEPTION + e.getMessage());
+ }
+ }
}
+
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
index b06ccad7c..eab5ec99e 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
@@ -11,6 +11,7 @@
import com.autotune.analyzer.recommendations.objects.MappedRecommendationForTimestamp;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.data.metrics.AggregationFunctions;
import com.autotune.common.data.metrics.Metric;
@@ -61,42 +62,15 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp
List kubernetesAPIObjectsList = createExperimentAPIObject.getKubernetesObjects();
for (KubernetesAPIObject kubernetesAPIObject : kubernetesAPIObjectsList) {
K8sObject k8sObject = null;
- // Verify the experiment type.
- // If the experiment type is null, default is container type experiment.
- // TODO: Update to make this field mandatory and validate if it is a container type.
- if (null == kubernetesAPIObject.getExperimentType() || kubernetesAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT)) {
+ // check if exp type is null to support remote monitoring experiments
+ if (createExperimentAPIObject.isContainerExperiment()) {
// container recommendations experiment type
- k8sObject = new K8sObject(kubernetesAPIObject.getName(), kubernetesAPIObject.getType(), kubernetesAPIObject.getNamespace());
- k8sObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT);
- // check if namespace data is also set for container-type experiments
- if (null != kubernetesAPIObject.getNamespaceAPIObjects()) {
- throw new Exception(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP);
- } else {
- k8sObject.setNamespaceData(new NamespaceData());
- }
- List containerAPIObjects = kubernetesAPIObject.getContainerAPIObjects();
- HashMap containerDataHashMap = new HashMap<>();
- for (ContainerAPIObject containerAPIObject : containerAPIObjects) {
- ContainerData containerData = new ContainerData(containerAPIObject.getContainer_name(),
- containerAPIObject.getContainer_image_name(), new ContainerRecommendations(), null);
- containerDataHashMap.put(containerData.getContainer_name(), containerData);
- }
- k8sObject.setContainerDataMap(containerDataHashMap);
- } else if (kubernetesAPIObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) {
+ k8sObject = createContainerExperiment(kubernetesAPIObject);
+ } else if (createExperimentAPIObject.isNamespaceExperiment()) {
// namespace recommendations experiment type
- k8sObject = new K8sObject();
- k8sObject.setNamespace(kubernetesAPIObject.getNamespaceAPIObjects().getnamespace_name());
- k8sObject.setExperimentType(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT);
- if (null != kubernetesAPIObject.getContainerAPIObjects()) {
- throw new Exception(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP);
- } else {
- HashMap containerDataHashMap = new HashMap<>();
- k8sObject.setContainerDataMap(containerDataHashMap);
- }
- NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects();
- k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(), new NamespaceRecommendations(), null));
+ k8sObject = createNamespaceExperiment(kubernetesAPIObject);
}
- LOGGER.info("Experiment Type: " + k8sObject.getExperimentType());
+ LOGGER.debug("Experiment Type: " + createExperimentAPIObject.getExperimentType());
k8sObjectList.add(k8sObject);
}
kruizeObject.setKubernetes_objects(k8sObjectList);
@@ -107,6 +81,7 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp
kruizeObject.setMode(createExperimentAPIObject.getMode());
kruizeObject.setPerformanceProfile(createExperimentAPIObject.getPerformanceProfile());
kruizeObject.setDataSource(createExperimentAPIObject.getDatasource());
+ kruizeObject.setExperimentType(createExperimentAPIObject.getExperimentType());
kruizeObject.setSloInfo(createExperimentAPIObject.getSloInfo());
kruizeObject.setTrial_settings(createExperimentAPIObject.getTrialSettings());
kruizeObject.setRecommendation_settings(createExperimentAPIObject.getRecommendationSettings());
@@ -125,6 +100,33 @@ public static KruizeObject convertCreateExperimentAPIObjToKruizeObject(CreateExp
return kruizeObject;
}
+ // Generates K8sObject for container type experiments from KubernetesAPIObject
+ public static K8sObject createContainerExperiment(KubernetesAPIObject kubernetesAPIObject) {
+ K8sObject k8sObject = new K8sObject(kubernetesAPIObject.getName(), kubernetesAPIObject.getType(), kubernetesAPIObject.getNamespace());
+ k8sObject.setNamespaceData(new NamespaceData());
+ List containerAPIObjects = kubernetesAPIObject.getContainerAPIObjects();
+ HashMap containerDataHashMap = new HashMap<>();
+ for (ContainerAPIObject containerAPIObject : containerAPIObjects) {
+ ContainerData containerData = new ContainerData(containerAPIObject.getContainer_name(),
+ containerAPIObject.getContainer_image_name(), new ContainerRecommendations(), null);
+ containerDataHashMap.put(containerData.getContainer_name(), containerData);
+ }
+ k8sObject.setContainerDataMap(containerDataHashMap);
+ return k8sObject;
+ }
+
+ // Generates K8sObject for namespace type experiments from KubernetesAPIObject
+ public static K8sObject createNamespaceExperiment(KubernetesAPIObject kubernetesAPIObject) {
+ K8sObject k8sObject = new K8sObject();
+ k8sObject.setNamespace(kubernetesAPIObject.getNamespaceAPIObjects().getnamespace_name());
+ HashMap containerDataHashMap = new HashMap<>();
+ k8sObject.setContainerDataMap(containerDataHashMap);
+ NamespaceAPIObject namespaceAPIObject = kubernetesAPIObject.getNamespaceAPIObjects();
+ k8sObject.setNamespaceData(new NamespaceData(namespaceAPIObject.getnamespace_name(), new NamespaceRecommendations(), null));
+ return k8sObject;
+ }
+
+
public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendationSO(
KruizeObject kruizeObject,
boolean getLatest,
@@ -135,13 +137,14 @@ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendati
listRecommendationsAPIObject.setApiVersion(AnalyzerConstants.VersionConstants.APIVersionConstants.CURRENT_LIST_RECOMMENDATIONS_VERSION);
listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName());
listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName());
+ listRecommendationsAPIObject.setExperimentType(kruizeObject.getExperimentType());
List kubernetesAPIObjects = new ArrayList<>();
KubernetesAPIObject kubernetesAPIObject;
for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace());
// namespace recommendations experiment type
- if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) {
+ if (kruizeObject.isNamespaceExperiment()) {
NamespaceAPIObject namespaceAPIObject;
NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class);
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java
index 6bd68e576..6985beff2 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/CreateExperimentAPIObject.java
@@ -18,6 +18,8 @@
import com.autotune.analyzer.kruizeObject.RecommendationSettings;
import com.autotune.analyzer.kruizeObject.SloInfo;
import com.autotune.analyzer.utils.AnalyzerConstants;
+import com.autotune.analyzer.utils.ExperimentTypeAware;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.common.data.ValidationOutputData;
import com.autotune.common.k8sObjects.TrialSettings;
import com.autotune.utils.KruizeConstants;
@@ -28,7 +30,7 @@
/**
* Simulating the KruizeObject class for the CreateExperiment API
*/
-public class CreateExperimentAPIObject extends BaseSO {
+public class CreateExperimentAPIObject extends BaseSO implements ExperimentTypeAware {
@SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME)
private String clusterName;
@SerializedName(KruizeConstants.JSONKeys.PERFORMANCE_PROFILE)
@@ -47,6 +49,8 @@ public class CreateExperimentAPIObject extends BaseSO {
private RecommendationSettings recommendationSettings;
@SerializedName(KruizeConstants.JSONKeys.DATASOURCE) //TODO: to be used in future
private String datasource;
+ @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE) //TODO: to be used in future
+ private String experimentType;
private AnalyzerConstants.ExperimentStatus status;
private String experiment_id; // this id is UUID and getting set at createExperiment API
private ValidationOutputData validationData; // This object indicates if this API object is valid or invalid
@@ -147,6 +151,25 @@ public void setDatasource(String datasource) {
this.datasource = datasource;
}
+ @Override
+ public String getExperimentType() {
+ return experimentType;
+ }
+
+ public void setExperimentType(String experimentType) {
+ this.experimentType = experimentType;
+ }
+
+ @Override
+ public boolean isNamespaceExperiment() {
+ return ExperimentTypeUtil.isNamespaceExperiment(experimentType);
+ }
+
+ @Override
+ public boolean isContainerExperiment() {
+ return ExperimentTypeUtil.isContainerExperiment(experimentType);
+ }
+
@Override
public String toString() {
return "CreateExperimentAPIObject{" +
@@ -159,6 +182,7 @@ public String toString() {
", targetCluster='" + targetCluster + '\'' +
", kubernetesAPIObjects=" + kubernetesAPIObjects.toString() +
", trialSettings=" + trialSettings +
+ ", experimentType=" + experimentType +
", recommendationSettings=" + recommendationSettings +
'}';
}
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java
index e3ab41b62..0a6d52ecf 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/KubernetesAPIObject.java
@@ -28,9 +28,6 @@ public class KubernetesAPIObject {
private String type;
private String name;
private String namespace;
- // Optional field to determine if the experiment type is 'container' or 'namespace'.
- // TODO: Update to make this field mandatory in the future.
- private String experiment_type;
@SerializedName(KruizeConstants.JSONKeys.CONTAINERS)
private List containerAPIObjects;
@SerializedName(KruizeConstants.JSONKeys.NAMESPACES)
@@ -60,10 +57,6 @@ public String getNamespace() {
return namespace;
}
- public String getExperimentType() {
- return experiment_type;
- }
-
@JsonProperty(KruizeConstants.JSONKeys.CONTAINERS)
public List getContainerAPIObjects() {
return containerAPIObjects;
@@ -82,10 +75,6 @@ public void setNamespaceAPIObject(NamespaceAPIObject namespaceAPIObject) {
this.namespaceAPIObject = namespaceAPIObject;
}
- public void setExperimentType(String experimentType) {
- this.experiment_type = experimentType;
- }
-
@Override
public String toString() {
return "KubernetesObject{" +
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java
index b5b796a95..86d57abfd 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/ListRecommendationsAPIObject.java
@@ -15,14 +15,18 @@
*******************************************************************************/
package com.autotune.analyzer.serviceObjects;
+import com.autotune.analyzer.utils.ExperimentTypeAware;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.utils.KruizeConstants;
import com.google.gson.annotations.SerializedName;
import java.util.List;
-public class ListRecommendationsAPIObject extends BaseSO{
+public class ListRecommendationsAPIObject extends BaseSO implements ExperimentTypeAware {
@SerializedName(KruizeConstants.JSONKeys.CLUSTER_NAME)
private String clusterName;
+ @SerializedName(KruizeConstants.JSONKeys.EXPERIMENT_TYPE)
+ private String experimentType;
@SerializedName(KruizeConstants.JSONKeys.KUBERNETES_OBJECTS)
private List kubernetesObjects;
@@ -42,4 +46,25 @@ public List getKubernetesObjects() {
public void setKubernetesObjects(List kubernetesObjects) {
this.kubernetesObjects = kubernetesObjects;
}
+
+ @Override
+ public String getExperimentType() {
+ return experimentType;
+ }
+
+ public void setExperimentType(String experimentType) {
+ this.experimentType = experimentType;
+ }
+
+ @Override
+ public boolean isNamespaceExperiment() {
+ return ExperimentTypeUtil.isNamespaceExperiment(experimentType);
+ }
+
+ @Override
+ public boolean isContainerExperiment() {
+ return ExperimentTypeUtil.isContainerExperiment(experimentType);
+ }
+
+
}
diff --git a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java
index e938b9d06..b66259f7d 100644
--- a/src/main/java/com/autotune/analyzer/services/CreateExperiment.java
+++ b/src/main/java/com/autotune/analyzer/services/CreateExperiment.java
@@ -16,14 +16,19 @@
package com.autotune.analyzer.services;
+import com.autotune.analyzer.exceptions.InvalidExperimentType;
import com.autotune.analyzer.exceptions.KruizeResponse;
import com.autotune.analyzer.experiment.ExperimentInitiator;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.serviceObjects.Converters;
import com.autotune.analyzer.serviceObjects.CreateExperimentAPIObject;
+import com.autotune.analyzer.serviceObjects.KubernetesAPIObject;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
import com.autotune.common.data.ValidationOutputData;
+import com.autotune.common.data.result.ContainerData;
+import com.autotune.common.data.result.NamespaceData;
+import com.autotune.common.k8sObjects.K8sObject;
import com.autotune.database.dao.ExperimentDAO;
import com.autotune.database.dao.ExperimentDAOImpl;
import com.autotune.database.service.ExperimentDBService;
@@ -42,10 +47,7 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -98,6 +100,23 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) {
createExperimentAPIObject.setExperiment_id(Utils.generateID(createExperimentAPIObject.toString()));
createExperimentAPIObject.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS);
+ // validating the kubernetes objects and experiment type
+ for (KubernetesAPIObject kubernetesAPIObject: createExperimentAPIObject.getKubernetesObjects()) {
+ if (createExperimentAPIObject.isContainerExperiment()) {
+ createExperimentAPIObject.setExperimentType(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT);
+ // check if namespace data is also set for container-type experiments
+ if (null != kubernetesAPIObject.getNamespaceAPIObjects()) {
+ throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP);
+ }
+ } else if (createExperimentAPIObject.isNamespaceExperiment()) {
+ if (null != kubernetesAPIObject.getContainerAPIObjects()) {
+ throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP);
+ }
+ if (AnalyzerConstants.REMOTE.equalsIgnoreCase(createExperimentAPIObject.getTargetCluster())) {
+ throw new InvalidExperimentType(AnalyzerErrorConstants.APIErrors.CreateExperimentAPI.NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE);
+ }
+ }
+ }
KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject);
if (null != kruizeObject)
kruizeExpList.add(kruizeObject);
@@ -130,6 +149,8 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
e.printStackTrace();
LOGGER.error("Unknown exception caught: " + e.getMessage());
sendErrorResponse(inputData, response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal Server Error: " + e.getMessage());
+ } catch (InvalidExperimentType e) {
+ sendErrorResponse(inputData, response, null, HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} finally {
if (null != timerCreateExp) {
MetricsConfig.timerCreateExp = MetricsConfig.timerBCreateExp.tag("status", statusValue).register(MetricsConfig.meterRegistry());
diff --git a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java
index 43669db12..64d05fe9c 100644
--- a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java
+++ b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java
@@ -15,12 +15,14 @@
*******************************************************************************/
package com.autotune.analyzer.services;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.recommendations.engine.RecommendationEngine;
import com.autotune.analyzer.serviceObjects.ContainerAPIObject;
import com.autotune.analyzer.serviceObjects.Converters;
import com.autotune.analyzer.serviceObjects.ListRecommendationsAPIObject;
import com.autotune.analyzer.utils.AnalyzerConstants;
+import com.autotune.analyzer.utils.AnalyzerErrorConstants;
import com.autotune.analyzer.utils.GsonUTCDateAdapter;
import com.autotune.common.data.dataSourceQueries.PromQLDataSourceQueries;
import com.autotune.common.data.metrics.MetricAggregationInfoResults;
@@ -118,6 +120,9 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
LOGGER.error("Validation failed: {}", validationMessage);
sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, validationMessage);
}
+ } catch (FetchMetricsError e) {
+ LOGGER.error(AnalyzerErrorConstants.APIErrors.generateRecommendationsAPI.ERROR_FETCHING_METRICS);
+ sendErrorResponse(response, new Exception(e), HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
} catch (Exception e) {
LOGGER.error("Exception occurred while processing request: " + e.getMessage());
sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
diff --git a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
index 6715635cd..903378655 100644
--- a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
+++ b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
@@ -15,6 +15,7 @@
*******************************************************************************/
package com.autotune.analyzer.services;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.recommendations.engine.RecommendationEngine;
import com.autotune.analyzer.serviceObjects.ContainerAPIObject;
@@ -115,6 +116,8 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, validationMessage, experiment_name, intervalEndTimeStr);
}
+ } catch (FetchMetricsError e) {
+ sendErrorResponse(response, new Exception(e), HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage(), experiment_name, intervalEndTimeStr);
} catch (Exception e) {
LOGGER.error(String.format(AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.UPDATE_RECOMMENDATIONS_FAILED_COUNT, calCount));
e.printStackTrace();
diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
index 3bfaec3ba..a279ea77a 100644
--- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
+++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
@@ -146,6 +146,14 @@ public static final class updateResultsAPI {
}
+ public static final class generateRecommendationsAPI {
+ public static final String ERROR_FETCHING_METRICS = "Error while fetching metrics.";
+
+ private generateRecommendationsAPI() {
+
+ }
+ }
+
public static final class ListRecommendationsAPI {
public static final String RECOMMENDATION_DOES_NOT_EXIST_EXCPTN = "Recommendation does not exist";
public static final String RECOMMENDATION_DOES_NOT_EXIST_MSG = "Recommendation for timestamp - \" %s \" does not exist";
@@ -165,6 +173,7 @@ public static final class CreateExperimentAPI {
public static final String NAMESPACE_AND_CONTAINER_NOT_NULL = "Only one of Namespace or Container information can be specified.";
public static final String CONTAINER_DATA_NOT_NULL_FOR_NAMESPACE_EXP = "Can not specify container data for namespace experiment";
public static final String NAMESPACE_DATA_NOT_NULL_FOR_CONTAINER_EXP = "Can not specify namespace data for container experiment";
+ public static final String NAMESPACE_EXP_NOT_SUPPORTED_FOR_REMOTE = "Namespace experiment type is not supported for remote monitoring use case.";
private CreateExperimentAPI() {
diff --git a/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java
new file mode 100644
index 000000000..c8fd45dec
--- /dev/null
+++ b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeAware.java
@@ -0,0 +1,28 @@
+/*******************************************************************************
+ * Copyright (c) 2024 Red Hat, IBM Corporation and others.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *******************************************************************************/
+package com.autotune.analyzer.utils;
+
+/**
+ * Interface to be implemented by classes with an experiment type.
+ */
+public interface ExperimentTypeAware {
+ // Retrieves the experiment type associated with the implementing class.
+ String getExperimentType();
+ // checks if the experiment type is namespace
+ boolean isNamespaceExperiment();
+ // checks if the experiment type is container
+ boolean isContainerExperiment();
+}
diff --git a/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java
new file mode 100644
index 000000000..591ea4d14
--- /dev/null
+++ b/src/main/java/com/autotune/analyzer/utils/ExperimentTypeUtil.java
@@ -0,0 +1,30 @@
+/*******************************************************************************
+ * Copyright (c) 2024 Red Hat, IBM Corporation and others.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *******************************************************************************/
+
+package com.autotune.analyzer.utils;
+
+/**
+ * This class contains utility functions to determine experiment type
+ */
+public class ExperimentTypeUtil {
+ public static boolean isContainerExperiment(String experimentType) {
+ return experimentType == null || experimentType.equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT);
+ }
+
+ public static boolean isNamespaceExperiment(String experimentType) {
+ return experimentType != null && experimentType.equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT);
+ }
+}
diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java
index 5b10e1184..5404a9d4b 100644
--- a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java
+++ b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java
@@ -1,5 +1,6 @@
package com.autotune.common.datasource;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.analyzer.exceptions.MonitoringAgentNotFoundException;
import com.autotune.analyzer.exceptions.TooManyRecursiveCallsException;
import com.autotune.analyzer.utils.AnalyzerConstants;
@@ -176,7 +177,7 @@ public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query
} catch (TooManyRecursiveCallsException e) {
e.printStackTrace();
}
- } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
+ } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException | FetchMetricsError e) {
LOGGER.error("Unable to proceed due to invalid connection to URL: "+ queryURL);
}
return valuesList;
diff --git a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java
index 82865cc19..418add724 100644
--- a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java
+++ b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java
@@ -15,6 +15,7 @@
*******************************************************************************/
package com.autotune.common.datasource.prometheus;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.common.auth.AuthenticationStrategy;
import com.autotune.common.auth.AuthenticationStrategyFactory;
@@ -181,6 +182,8 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query)
e.printStackTrace();
} catch (KeyManagementException e) {
e.printStackTrace();
+ } catch (FetchMetricsError e) {
+ e.printStackTrace();
}
return null;
}
diff --git a/src/main/java/com/autotune/common/k8sObjects/K8sObject.java b/src/main/java/com/autotune/common/k8sObjects/K8sObject.java
index 5771f80c0..27db1b190 100644
--- a/src/main/java/com/autotune/common/k8sObjects/K8sObject.java
+++ b/src/main/java/com/autotune/common/k8sObjects/K8sObject.java
@@ -13,7 +13,6 @@ public class K8sObject {
private String type; // TODO: Change to ENUM
private String name;
private String namespace;
- private String experiment_type;
@SerializedName(KruizeConstants.JSONKeys.CONTAINERS)
private HashMap containerDataMap;
@SerializedName(KruizeConstants.JSONKeys.NAMESPACES)
@@ -31,10 +30,6 @@ public String getType() {
return type;
}
- public String getExperimentType() {
- return experiment_type;
- }
-
public void setType(String type) {
this.type = type;
}
@@ -47,10 +42,6 @@ public void setName(String name) {
this.name = name;
}
- public void setExperimentType(String experiment_type) {
- this.experiment_type = experiment_type;
- }
-
public String getNamespace() {
return namespace;
}
diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
index 46f163cd2..00b733d8c 100644
--- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
+++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
@@ -51,6 +51,8 @@ public ValidationOutputData addExperimentToDB(KruizeExperimentEntry kruizeExperi
tx = session.beginTransaction();
session.persist(kruizeExperimentEntry);
tx.commit();
+ // TODO: remove native sql query and transient
+ updateExperimentTypeInKruizeExperimentEntry(kruizeExperimentEntry);
validationOutputData.setSuccess(true);
statusValue = "success";
} catch (HibernateException e) {
@@ -322,6 +324,7 @@ public ValidationOutputData addRecommendationToDB(KruizeRecommendationEntry reco
tx = session.beginTransaction();
session.persist(recommendationEntry);
tx.commit();
+ updateExperimentTypeInKruizeRecommendationEntry(recommendationEntry);
validationOutputData.setSuccess(true);
statusValue = "success";
} else {
@@ -616,6 +619,8 @@ public List loadAllExperiments() throws Exception {
Timer.Sample timerLoadAllExp = Timer.start(MetricsConfig.meterRegistry());
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_EXPERIMENTS, KruizeExperimentEntry.class).list();
+ // TODO: remove native sql query and transient
+ getExperimentTypeInKruizeExperimentEntry(entries);
statusValue = "success";
} catch (Exception e) {
LOGGER.error("Not able to load experiment due to {}", e.getMessage());
@@ -720,6 +725,8 @@ public List loadExperimentByName(String experimentName) t
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_EXPERIMENTS_BY_EXP_NAME, KruizeExperimentEntry.class)
.setParameter("experimentName", experimentName).list();
+ // TODO: remove native sql query and transient
+ getExperimentTypeInKruizeExperimentEntry(entries);
statusValue = "success";
} catch (Exception e) {
LOGGER.error("Not able to load experiment {} due to {}", experimentName, e.getMessage());
@@ -820,6 +827,7 @@ public List loadRecommendationsByExperimentName(Strin
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
recommendationEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME, KruizeRecommendationEntry.class)
.setParameter("experimentName", experimentName).list();
+ getExperimentTypeInKruizeRecommendationsEntry(recommendationEntries);
statusValue = "success";
} catch (Exception e) {
LOGGER.error("Not able to load recommendations due to {}", e.getMessage());
@@ -851,6 +859,7 @@ public KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(Stri
if (cluster_name != null)
kruizeRecommendationEntryQuery.setParameter(CLUSTER_NAME, cluster_name);
recommendationEntries = kruizeRecommendationEntryQuery.getSingleResult();
+ getExperimentTypeInSingleKruizeRecommendationsEntry(recommendationEntries);
statusValue = "success";
} catch (NoResultException e) {
LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time);
@@ -1054,4 +1063,95 @@ public List loadAllDataSources() throws Exception {
}
return entries;
}
+
+ private void getExperimentTypeInKruizeExperimentEntry(List entries) throws Exception {
+ try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
+ for (KruizeExperimentEntry entry: entries) {
+ if (isTargetClusterLocal(entry.getTarget_cluster())) {
+ String sql = DBConstants.SQLQUERY.SELECT_EXPERIMENT_EXP_TYPE;
+ Query query = session.createNativeQuery(sql);
+ query.setParameter("experiment_id", entry.getExperiment_id());
+ List experimentType = query.getResultList();
+ if (null != experimentType && !experimentType.isEmpty()) {
+ entry.setExperimentType(experimentType.get(0));
+ }
+ }
+ }
+ } catch (Exception e) {
+ LOGGER.error("Not able to get experiment type from experiment entry due to {}", e.getMessage());
+ throw new Exception("Error while loading experiment type from database due to : " + e.getMessage());
+ }
+ }
+
+ private void updateExperimentTypeInKruizeExperimentEntry(KruizeExperimentEntry kruizeExperimentEntry) throws Exception {
+ try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
+ if (isTargetClusterLocal(kruizeExperimentEntry.getTarget_cluster())) {
+ Transaction tx = session.beginTransaction();
+ String sql = DBConstants.SQLQUERY.UPDATE_EXPERIMENT_EXP_TYPE;
+ Query query = session.createNativeQuery(sql);
+ query.setParameter("experiment_type", kruizeExperimentEntry.getExperimentType());
+ query.setParameter("experiment_name", kruizeExperimentEntry.getExperiment_name());
+ query.executeUpdate();
+ tx.commit();
+ }
+ } catch (Exception e) {
+ LOGGER.error("Not able to update experiment type in experiment entry due to {}", e.getMessage());
+ throw new Exception("Error while updating experiment type to database due to : " + e.getMessage());
+ }
+ }
+
+ private void getExperimentTypeInKruizeRecommendationsEntry(List entries) throws Exception {
+ for (KruizeRecommendationEntry recomEntry: entries) {
+ getExperimentTypeInSingleKruizeRecommendationsEntry(recomEntry);
+ }
+ }
+
+ private void getExperimentTypeInSingleKruizeRecommendationsEntry(KruizeRecommendationEntry recomEntry) throws Exception {
+ List expEntries = loadExperimentByName(recomEntry.getExperiment_name());
+ if (null != expEntries && !expEntries.isEmpty()) {
+ if (isTargetClusterLocal(expEntries.get(0).getTarget_cluster())) {
+ try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
+ String sql = DBConstants.SQLQUERY.SELECT_RECOMMENDATIONS_EXP_TYPE;
+ Query query = session.createNativeQuery(sql);
+ // NOTE: SELECT_RECOMMENDATIONS_EXP_TYPE binds only :experiment_name; binding :experiment_type here would throw an unknown-parameter error
+ query.setParameter("experiment_name", recomEntry.getExperiment_name());
+ List exType = query.getResultList();
+ if (null != exType && !exType.isEmpty()) {
+ recomEntry.setExperimentType(exType.get(0));
+ }
+ } catch (Exception e) {
+ LOGGER.error("Not able to get experiment type in recommendation entry due to {}", e.getMessage());
+ throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ private void updateExperimentTypeInKruizeRecommendationEntry(KruizeRecommendationEntry recommendationEntry) throws Exception {
+ List entries = loadExperimentByName(recommendationEntry.getExperiment_name());
+ if (null != entries && !entries.isEmpty()) {
+ if (isTargetClusterLocal(entries.get(0).getTarget_cluster())) {
+ try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
+ Transaction tx = session.beginTransaction();
+ String sql = DBConstants.SQLQUERY.UPDATE_RECOMMENDATIONS_EXP_TYPE;
+ Query query = session.createNativeQuery(sql);
+ query.setParameter("experiment_type", recommendationEntry.getExperimentType());
+ query.setParameter("experiment_name", recommendationEntry.getExperiment_name());
+ query.setParameter("interval_end_time", recommendationEntry.getInterval_end_time());
+ query.executeUpdate();
+ tx.commit();
+ } catch (Exception e) {
+ LOGGER.error("Not able to update experiment type in recommendation entry due to {}", e.getMessage());
+ throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ private boolean isTargetClusterLocal(String targetCluster) {
+ if (AnalyzerConstants.LOCAL.equalsIgnoreCase(targetCluster)) {
+ return true;
+ }
+ return false;
+ }
}
diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java b/src/main/java/com/autotune/database/helper/DBConstants.java
index 8771dcf51..62fa006ac 100644
--- a/src/main/java/com/autotune/database/helper/DBConstants.java
+++ b/src/main/java/com/autotune/database/helper/DBConstants.java
@@ -76,6 +76,10 @@ public static final class SQLQUERY {
" WHERE container->>'container_name' = :container_name" +
" AND container->>'container_image_name' = :container_image_name" +
" ))";
+ public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiments SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name";
+ public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time = :interval_end_time";
+ public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiments WHERE experiment_id = :experiment_id";
+ public static final String SELECT_RECOMMENDATIONS_EXP_TYPE = "SELECT experiment_type from kruize_recommendations WHERE experiment_name = :experiment_name";
}
diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java
index 4a8b96322..23069d348 100644
--- a/src/main/java/com/autotune/database/helper/DBHelpers.java
+++ b/src/main/java/com/autotune/database/helper/DBHelpers.java
@@ -26,6 +26,7 @@
import com.autotune.analyzer.serviceObjects.*;
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.analyzer.utils.AnalyzerErrorConstants;
+import com.autotune.analyzer.utils.ExperimentTypeUtil;
import com.autotune.analyzer.utils.GsonUTCDateAdapter;
import com.autotune.common.data.dataSourceMetadata.*;
import com.autotune.common.data.result.ContainerData;
@@ -300,6 +301,8 @@ public static KruizeExperimentEntry convertCreateAPIObjToExperimentDBObj(CreateE
kruizeExperimentEntry.setStatus(AnalyzerConstants.ExperimentStatus.IN_PROGRESS);
kruizeExperimentEntry.setMeta_data(null);
kruizeExperimentEntry.setDatasource(null);
+ kruizeExperimentEntry.setExperimentType(apiObject.getExperimentType());
+
ObjectMapper objectMapper = new ObjectMapper();
try {
kruizeExperimentEntry.setExtended_data(
@@ -377,14 +380,13 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K
for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
if (null == k8sObject)
continue;
- if (null == k8sObject.getContainerDataMap() && k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))
+ if (kruizeObject.isContainerExperiment() && null == k8sObject.getContainerDataMap())
continue;
- if (k8sObject.getContainerDataMap().isEmpty() && k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT))
+ if (kruizeObject.isContainerExperiment() && k8sObject.getContainerDataMap().isEmpty())
continue;
KubernetesAPIObject kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace());
boolean matchFound = false;
- kubernetesAPIObject.setExperimentType(k8sObject.getExperimentType());
- if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) {
+ if (kruizeObject.isNamespaceExperiment()) {
// saving namespace recommendations
NamespaceData clonedNamespaceData = Utils.getClone(k8sObject.getNamespaceData(), NamespaceData.class);
if (null == clonedNamespaceData)
@@ -456,6 +458,7 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K
listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName());
listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName());
listRecommendationsAPIObject.setKubernetesObjects(kubernetesAPIObjectList);
+ listRecommendationsAPIObject.setExperimentType(kruizeObject.getExperimentType());
}
return listRecommendationsAPIObject;
}
@@ -482,10 +485,12 @@ public static KruizeRecommendationEntry convertKruizeObjectTORecommendation(Krui
kruizeRecommendationEntry.setVersion(KruizeConstants.KRUIZE_RECOMMENDATION_API_VERSION.LATEST.getVersionNumber());
kruizeRecommendationEntry.setExperiment_name(listRecommendationsAPIObject.getExperimentName());
kruizeRecommendationEntry.setCluster_name(listRecommendationsAPIObject.getClusterName());
+ kruizeRecommendationEntry.setExperimentType(listRecommendationsAPIObject.getExperimentType());
+
Timestamp endInterval = null;
// todo : what happens if two k8 objects or Containers with different timestamp
for (KubernetesAPIObject k8sObject : listRecommendationsAPIObject.getKubernetesObjects()) {
- if (k8sObject.getExperimentType().equalsIgnoreCase(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT)) {
+ if (listRecommendationsAPIObject.isNamespaceExperiment()) {
endInterval = k8sObject.getNamespaceAPIObjects().getnamespaceRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get();
} else {
for (ContainerAPIObject containerAPIObject : k8sObject.getContainerAPIObjects()) {
@@ -528,6 +533,7 @@ public static List convertExperimentEntryToCreateExpe
CreateExperimentAPIObject apiObj = new Gson().fromJson(extended_data_rawJson, CreateExperimentAPIObject.class);
apiObj.setExperiment_id(entry.getExperiment_id());
apiObj.setStatus(entry.getStatus());
+ apiObj.setExperimentType(entry.getExperimentType());
createExperimentAPIObjects.add(apiObj);
} catch (Exception e) {
LOGGER.error("Error in converting to apiObj from db object due to : {}", e.getMessage());
@@ -610,7 +616,6 @@ private static List convertK8sObjectListToKubernetesAPIObje
public static List convertRecommendationEntryToRecommendationAPIObject(
List kruizeRecommendationEntryList) throws InvalidConversionOfRecommendationEntryException {
- LOGGER.info("Hello 3: convertRecommendationEntryToRecommendationAPIObject" + kruizeRecommendationEntryList.size());
if (null == kruizeRecommendationEntryList)
return null;
if (kruizeRecommendationEntryList.size() == 0)
diff --git a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java
index 935254a34..01908cdcd 100644
--- a/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java
+++ b/src/main/java/com/autotune/database/table/KruizeExperimentEntry.java
@@ -17,7 +17,9 @@
import com.autotune.analyzer.utils.AnalyzerConstants;
import com.autotune.database.helper.GenerateExperimentID;
+import com.autotune.utils.KruizeConstants;
import com.fasterxml.jackson.databind.JsonNode;
+import com.google.gson.annotations.SerializedName;
import jakarta.persistence.*;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
@@ -55,6 +57,8 @@ public class KruizeExperimentEntry {
private String mode;
private String target_cluster;
private String performance_profile;
+ @Transient
+ private String experiment_type;
@Enumerated(EnumType.STRING)
private AnalyzerConstants.ExperimentStatus status;
@JdbcTypeCode(SqlTypes.JSON)
@@ -154,4 +158,13 @@ public JsonNode getDatasource() {
public void setDatasource(JsonNode datasource) {
this.datasource = datasource;
}
+
+ public String getExperimentType() {
+ return experiment_type;
+ }
+
+ public void setExperimentType(String experimentType) {
+ this.experiment_type = experimentType;
+ }
+
}
diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
index 9dfd076b1..d3d490f0c 100644
--- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
+++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
@@ -27,6 +27,8 @@ public class KruizeRecommendationEntry {
private String cluster_name;
@JdbcTypeCode(SqlTypes.JSON)
private JsonNode extended_data;
+ @Transient
+ private String experiment_type;
public String getExperiment_name() {
return experiment_name;
@@ -67,4 +69,12 @@ public String getVersion() {
public void setVersion(String version) {
this.version = version;
}
+
+ public String getExperimentType() {
+ return experiment_type;
+ }
+
+ public void setExperimentType(String experimentType) {
+ this.experiment_type = experimentType;
+ }
}
diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java
index 8a6e6ea8a..1e6809683 100644
--- a/src/main/java/com/autotune/utils/GenericRestApiClient.java
+++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java
@@ -15,6 +15,7 @@
*******************************************************************************/
package com.autotune.utils;
+import com.autotune.analyzer.exceptions.FetchMetricsError;
import com.autotune.common.auth.AuthenticationStrategy;
import com.autotune.common.auth.AuthenticationStrategyFactory;
import com.autotune.common.datasource.DataSourceInfo;
@@ -75,7 +76,7 @@ public GenericRestApiClient(DataSourceInfo dataSourceInfo) {
* @return Json object which contains API response.
* @throws IOException
*/
- public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException {
+ public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError {
System.setProperty("https.protocols", "TLSv1.2");
String jsonOutputInString = "";
SSLContext sslContext = SSLContexts.custom().loadTrustMaterial((chain, authType) -> true).build(); // Trust all certificates
@@ -96,6 +97,8 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws
LOGGER.info("Executing request: {}", httpRequestBase.getRequestLine());
jsonOutputInString = httpclient.execute(httpRequestBase, new StringResponseHandler());
+ } catch (Exception e) {
+ throw new FetchMetricsError(e.getMessage());
}
return new JSONObject(jsonOutputInString);
}
diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java
index 4180fe902..15779cdae 100644
--- a/src/main/java/com/autotune/utils/KruizeConstants.java
+++ b/src/main/java/com/autotune/utils/KruizeConstants.java
@@ -159,6 +159,7 @@ public static final class JSONKeys {
// Metadata Section
public static final String EXPERIMENT_ID = "experiment_id";
public static final String EXPERIMENT_NAME = "experiment_name";
+ public static final String EXPERIMENT_TYPE = "experiment_type";
// Deployments Section
public static final String DEPLOYMENTS = "deployments";
public static final String NAMESPACE = "namespace";