
Merge pull request #1171 from msvinaykumar/getmaxdate
get Max Date from promQL
dinogun authored Apr 23, 2024
2 parents 79a824d + 4dc7573 commit ce8d1b1
Showing 5 changed files with 76 additions and 16 deletions.
@@ -200,6 +200,27 @@ public String validate() {
return validationFailureMsg;
}

public String validate_local() { //TODO Instead of relying on the 'local=true' check everywhere, aim to avoid this complexity by introducing a higher-level abstraction in the code.

String validationFailureMsg = "";
// Check if experiment_name is provided
if (experimentName == null || experimentName.isEmpty()) {
validationFailureMsg += AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.EXPERIMENT_NAME_MANDATORY + ", ";
}

// Check if interval_end_time is provided
if (intervalEndTimeStr != null) {
if (!Utils.DateUtils.isAValidDate(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, intervalEndTimeStr)) {
validationFailureMsg += String.format(AnalyzerErrorConstants.APIErrors.ListRecommendationsAPI.INVALID_TIMESTAMP_MSG, intervalEndTimeStr);
}
}

// Check if interval_start_time is provided
// TODO: to be considered in future

return validationFailureMsg;
}
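
A minimal usage sketch of the new validate_local() (not part of this commit; the constructor signature is taken from the servlet change further below, everything else is illustrative): with interval_end_time omitted, validation now passes and the end time is resolved later from Prometheus.

    // Hypothetical sketch: the experiment name is still mandatory, timestamps are not.
    RecommendationEngine engine = new RecommendationEngine("my-experiment", null, null);
    String failureMsg = engine.validate_local();
    assert failureMsg.isEmpty(); // a null interval_end_time no longer fails validation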

/**
* Prepares recommendations based on the input params received in the previous step.
*
@@ -210,14 +231,18 @@ public KruizeObject prepareRecommendations(int calCount) {
Map<String, KruizeObject> mainKruizeExperimentMAP = new ConcurrentHashMap<>();
Map<String, Terms> terms = new HashMap<>();
ValidationOutputData validationOutputData;
interval_end_time = Utils.DateUtils.getTimeStampFrom(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT,
intervalEndTimeStr);
setInterval_end_time(interval_end_time);
Timestamp interval_start_time = null;
if (intervalEndTimeStr != null) { // TODO: remove this check; avoid repeating the same if across this flow
interval_end_time = Utils.DateUtils.getTimeStampFrom(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT,
intervalEndTimeStr);
setInterval_end_time(interval_end_time);
}
KruizeObject kruizeObject = createKruizeObject();
if (!kruizeObject.getValidation_data().isSuccess())
return kruizeObject;
// continue to generate recommendation when kruizeObject is successfully created
setKruizeObject(kruizeObject);
mainKruizeExperimentMAP.put(kruizeObject.getExperimentName(), kruizeObject);
// continue to generate recommendation when kruizeObject is successfully created
try {
// set the default terms if the terms aren't provided by the user
if (kruizeObject.getTerms() == null)
@@ -230,8 +255,9 @@ public KruizeObject prepareRecommendations(int calCount) {
LOGGER.debug("Experiment: {}, Datasource: {}", kruizeObject.getExperimentName(), dataSource);

int maxDay = Terms.getMaxDays(terms);
Timestamp interval_start_time = Timestamp.valueOf(Objects.requireNonNull(getInterval_end_time()).toLocalDateTime().minusDays(maxDay));

if (intervalEndTimeStr != null) { // TODO: remove this check; avoid repeating the same if across this flow
interval_start_time = Timestamp.valueOf(Objects.requireNonNull(getInterval_end_time()).toLocalDateTime().minusDays(maxDay));
}
// update the KruizeObject to have the results data from the available datasource
try {
String errorMsg = getResults(mainKruizeExperimentMAP, kruizeObject, experimentName, interval_start_time, dataSource);
@@ -1374,11 +1400,9 @@ private String getResults(Map<String, KruizeObject> mainKruizeExperimentMAP, Kru
*/
public void fetchMetricsBasedOnDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception {
try {
// Convert timestamps to epoch time
long interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
long interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC);
long interval_end_time_epoc = 0;
long interval_start_time_epoc = 0;
SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);

// Get MetricsProfile name and list of promQL to fetch
Map<AnalyzerConstants.MetricName, String> promQls = new HashMap<>();
@@ -1396,6 +1420,39 @@ public void fetchMetricsBasedOnDatasource(KruizeObject kruizeObject, Timestamp i
for (Map.Entry<String, ContainerData> entry : containerDataMap.entrySet()) {
ContainerData containerData = entry.getValue();
String containerName = containerData.getContainer_name();
if (null == interval_end_time) {
LOGGER.info("Determine the date of the last activity for the container based on its usage. ");
String dateMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
dataSourceInfo.getUrl(),
URLEncoder.encode(String.format(PromQLDataSourceQueries.MAX_DATE, containerName, namespace), CHARACTER_ENCODING)
);
LOGGER.info(dateMetricsUrl);
JSONObject genericJsonObject = new GenericRestApiClient(dateMetricsUrl).fetchMetricsJson("get", "");
JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
JsonArray resultArray = jsonObject.getAsJsonObject(KruizeConstants.JSONKeys.DATA).getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
// Process fetched metrics
if (null != resultArray && !resultArray.isEmpty()) {
resultArray = resultArray.get(0)
.getAsJsonObject().getAsJsonArray("value");
long epochTime = resultArray.get(0).getAsLong();
String timestamp = sdf.format(new Date(epochTime * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC));
Date date = sdf.parse(timestamp);
Timestamp dateTS = new Timestamp(date.getTime());
interval_end_time_epoc = dateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) dateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
int maxDay = Terms.getMaxDays(kruizeObject.getTerms());
LOGGER.info("maxDay : {}", maxDay);
Timestamp startDateTS = Timestamp.valueOf(Objects.requireNonNull(dateTS).toLocalDateTime().minusDays(maxDay));
interval_start_time_epoc = startDateTS.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) startDateTS.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); // getTimezoneOffset() returns minutes
}
} else {
// Convert timestamps to epoch time
interval_end_time_epoc = interval_end_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) interval_end_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
interval_start_time_epoc = interval_start_time.getTime() / KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC
- ((long) interval_start_time.getTimezoneOffset() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); // getTimezoneOffset() returns minutes
}
HashMap<Timestamp, IntervalResults> containerDataResults = new HashMap<>();
IntervalResults intervalResults;
HashMap<AnalyzerConstants.MetricName, MetricResults> resMap;
@@ -1450,11 +1507,11 @@ public void fetchMetricsBasedOnDatasource(KruizeObject kruizeObject, Timestamp i
KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).get(0)
.getAsJsonObject().getAsJsonArray(KruizeConstants.DataSourceConstants
.DataSourceQueryJSONKeys.VALUES);
SimpleDateFormat sdf = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, Locale.ROOT);
sdf.setTimeZone(TimeZone.getTimeZone(KruizeConstants.TimeUnitsExt.TimeZones.UTC));

// Iterate over fetched metrics
Timestamp sTime = interval_start_time;
Timestamp sTime = new Timestamp(interval_start_time_epoc * KruizeConstants.TimeConv.NO_OF_MSECS_IN_SEC); // epoch seconds -> ms
for (JsonElement element : resultArray) {
JsonArray valueArray = element.getAsJsonArray();
long epochTime = valueArray.get(0).getAsLong();
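
To make the new branch above easier to follow in one place, here is a hedged, condensed sketch of the last-activity lookup: format MAX_DATE, issue an instant query, read the epoch-seconds timestamp from the first result's value pair, and derive the recommendation window. GenericRestApiClient, the constants, and the JSON shape are as used in this diff; the Prometheus URL, the container/namespace literals, and the maxDay value are illustrative.

    // Assumed imports: java.net.URLEncoder, java.nio.charset.StandardCharsets,
    // java.sql.Timestamp, com.google.gson.*, org.json.JSONObject.
    String promQL = String.format(PromQLDataSourceQueries.MAX_DATE, "my-container", "default");
    String url = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
            "http://prometheus:9090", URLEncoder.encode(promQL, StandardCharsets.UTF_8));
    JSONObject raw = new GenericRestApiClient(url).fetchMetricsJson("get", "");
    JsonArray result = new Gson().fromJson(raw.toString(), JsonObject.class)
            .getAsJsonObject("data").getAsJsonArray("result");
    if (null != result && !result.isEmpty()) {
        // Instant-query result shape: "value": [ <epoch seconds>, <sample value> ]
        long epochSeconds = result.get(0).getAsJsonObject()
                .getAsJsonArray("value").get(0).getAsLong();
        Timestamp endTS = new Timestamp(epochSeconds * 1000L); // seconds -> ms
        int maxDay = 15;                                       // illustrative term length
        Timestamp startTS = Timestamp.valueOf(endTS.toLocalDateTime().minusDays(maxDay));
        // [startTS, endTS] then bounds the usage-metric range queries.
    }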
@@ -86,7 +86,7 @@ public void init(ServletConfig config) throws ServletException {
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
int calCount = ++requestCount;
LOGGER.debug("UpdateRecommendations API request count: {}", calCount);
LOGGER.debug("GenerateRecommendations API request count: {}", calCount);
String statusValue = "failure";
Timer.Sample timerBUpdateRecommendations = Timer.start(MetricsConfig.meterRegistry());
try {
@@ -101,7 +101,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
// create recommendation engine object
RecommendationEngine recommendationEngine = new RecommendationEngine(experiment_name, intervalEndTimeStr, intervalStartTimeStr);
// validate and create KruizeObject if successful
String validationMessage = recommendationEngine.validate();
String validationMessage = recommendationEngine.validate_local();
if (validationMessage.isEmpty()) {
KruizeObject kruizeObject = recommendationEngine.prepareRecommendations(calCount);
if (kruizeObject.getValidation_data().isSuccess()) {
@@ -17,7 +17,8 @@ public enum PromQLQuery {
MEMORY_USAGE("%s by(container, namespace) (%s_over_time(container_memory_working_set_bytes{container!='', container!='POD', pod!='', namespace!='', namespace!~'kube-.*|openshift|openshift-.*',namespace=\"%s\",container=\"%s\" }[%sm]))"),
MEMORY_RSS("%s by(container, namespace) (%s_over_time(container_memory_rss{container!='', container!='POD', pod!='', namespace!='', namespace!~'kube-.*|openshift|openshift-.*',namespace=\"%s\",container=\"%s\"}[%sm]))"),
MEMORY_LIMIT("%s by(container,namespace) (kube_pod_container_resource_limits{container!='', container!='POD', pod!='', namespace!='', namespace!~'kube-.*|openshift|openshift-.*', resource='memory', unit='byte', namespace=\"%s\",container=\"%s\" } * on(pod, namespace) group_left max by (container, pod, namespace) (kube_pod_status_phase{phase='Running'}))"),
MEMORY_REQUEST("%s by(container,namespace) (kube_pod_container_resource_requests{container!='', container!='POD', pod!='', namespace!='', namespace!~'kube-.*|openshift|openshift-.*', resource='memory', unit='byte',namespace=\"%s\",container=\"%s\"} * on(pod, namespace) group_left max by (container, pod, namespace) (kube_pod_status_phase{phase='Running'}))");
MEMORY_REQUEST("%s by(container,namespace) (kube_pod_container_resource_requests{container!='', container!='POD', pod!='', namespace!='', namespace!~'kube-.*|openshift|openshift-.*', resource='memory', unit='byte',namespace=\"%s\",container=\"%s\"} * on(pod, namespace) group_left max by (container, pod, namespace) (kube_pod_status_phase{phase='Running'}))"),
MAX_DATE("max(container_cpu_usage_seconds_total{container=\"%s\",namespace=\"%s\"} > 0)");
private final String query;

PromQLQuery(String query) {
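
As a quick illustration (container and namespace values are made up), formatting MAX_DATE yields the instant query whose result timestamp the engine reads back:

    String q = String.format(DataSourceQueries.PromQLQuery.MAX_DATE.getQuery(), "sysbench", "default");
    // q -> max(container_cpu_usage_seconds_total{container="sysbench",namespace="default"} > 0)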
@@ -16,4 +16,5 @@ public class PromQLDataSourceQueries {
public static final String MEMORY_RSS = DataSourceQueries.PromQLQuery.MEMORY_RSS.getQuery();
public static final String MEMORY_LIMIT = DataSourceQueries.PromQLQuery.MEMORY_LIMIT.getQuery();
public static final String MEMORY_REQUEST = DataSourceQueries.PromQLQuery.MEMORY_REQUEST.getQuery();
public static final String MAX_DATE = DataSourceQueries.PromQLQuery.MAX_DATE.getQuery();
}
1 change: 1 addition & 0 deletions src/main/java/com/autotune/utils/KruizeConstants.java
@@ -357,6 +357,7 @@ public static class DataSourceConstants {
public static final String PROMETHEUS_DEFAULT_SERVICE_PORT = "9090";
public static final String PROMETHEUS_REACHABILITY_QUERY = "up";
public static final String DATASOURCE_ENDPOINT_WITH_QUERY = "%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s";
public static final String DATE_ENDPOINT_WITH_QUERY = "%s/api/v1/query?query=%s";

public static class DataSourceDetailsInfoConstants {
private DataSourceDetailsInfoConstants() {
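
Unlike DATASOURCE_ENDPOINT_WITH_QUERY above, which drives range queries with start/end/step parameters, the new constant issues a single instant query. A small sketch (URL and query are illustrative):

    String dateUrl = String.format(KruizeConstants.DataSourceConstants.DATE_ENDPOINT_WITH_QUERY,
            "http://prometheus:9090",
            URLEncoder.encode("max(container_cpu_usage_seconds_total{container=\"c\",namespace=\"ns\"} > 0)",
                    StandardCharsets.UTF_8));
    // -> http://prometheus:9090/api/v1/query?query=max%28container_cpu_usage_seconds_total%7B...%7D+%3E+0%29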
