Skip to content

Commit

Permalink
merged from RM to mvp_demo
Browse files Browse the repository at this point in the history
Signed-off-by: msvinaykumar <[email protected]>
  • Loading branch information
msvinaykumar committed Oct 4, 2023
1 parent 0d9091b commit 0dbb0c0
Show file tree
Hide file tree
Showing 6 changed files with 4 additions and 184 deletions.
11 changes: 0 additions & 11 deletions migrations/kruize_experiments_ddl.sql
Original file line number Diff line number Diff line change
@@ -1,20 +1,9 @@
-- Kruize schema DDL: experiments, performance profiles, recommendations and results.
-- All statements are idempotent (IF NOT EXISTS / if exists) so the migration can be re-run safely.
-- NOTE(review): unresolved merge-conflict markers (<<<<<<< HEAD ... >>>>>>>) removed; kept the
-- upstream/remote_monitoring variant (cluster_name nullable and NOT part of the primary keys),
-- matching this commit's recorded resolution (0 additions, 11 deletions).
create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), version varchar(255), primary key (experiment_id));
create table IF NOT EXISTS kruize_performance_profiles (name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name));
-- Recommendations and results are range-partitioned on interval_end_time so old partitions can be dropped cheaply.
create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time);
create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time, interval_start_time)) PARTITION BY RANGE (interval_end_time);
-- Experiment names must be unique even though the surrogate experiment_id is the primary key.
alter table if exists kruize_experiments add constraint UK_experiment_name unique (experiment_name);
-- Covering indexes for the common lookup paths (by experiment and by time window).
create index IF NOT EXISTS idx_recommendation_experiment_name on kruize_recommendations (experiment_name);
create index IF NOT EXISTS idx_recommendation_interval_end_time on kruize_recommendations (interval_end_time);
create index IF NOT EXISTS idx_result_experiment_name on kruize_results (experiment_name);
create index IF NOT EXISTS idx_result_interval_end_time on kruize_results (interval_end_time);
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
import static com.autotune.analyzer.recommendations.RecommendationConstants.RecommendationValueConstants.DEFAULT_MEMORY_THRESHOLD;
import static com.autotune.analyzer.utils.AnalyzerConstants.PercentileConstants.*;

public class PerformanceRecommendationEngine implements KruizeRecommendationEngine{
public class PerformanceRecommendationEngine implements KruizeRecommendationEngine {
private static final Logger LOGGER = LoggerFactory.getLogger(PerformanceRecommendationEngine.class);
private String name;
private String key;
Expand All @@ -57,38 +57,6 @@ public PerformanceRecommendationEngine(String name) {
this.name = name;
}

/**
 * Finds the start of the monitoring window for the given term: walking backwards
 * from endTime, accumulates interval durations until the sub category's lower
 * bound is met, then returns that interval's start time.
 *
 * @param resultsHashMap results keyed by interval end time (unordered)
 * @param durationBasedRecommendationSubCategory term whose duration bounds apply
 * @param endTime right edge of the window being considered
 * @return the matching interval's start time, or null when there is not enough
 *         data inside the term's threshold window
 */
private static Timestamp getMonitoringStartTime(HashMap<Timestamp, IntervalResults> resultsHashMap,
                                                DurationBasedRecommendationSubCategory durationBasedRecommendationSubCategory,
                                                Timestamp endTime) {

    // Convert the HashMap to a TreeMap to maintain sorted order based on IntervalEndTime (newest first)
    TreeMap<Timestamp, IntervalResults> sortedResultsHashMap = new TreeMap<>(Collections.reverseOrder());
    sortedResultsHashMap.putAll(resultsHashMap);

    double sum = 0.0;
    Timestamp thresholdTime = calculateThresholdTimeBasedOnTerm(durationBasedRecommendationSubCategory, endTime);
    for (Timestamp timestamp : sortedResultsHashMap.keySet()) {
        if (timestamp.after(endTime)) {
            // Outside the window on the right; skip.
            continue;
        }
        if (timestamp.before(thresholdTime)) {
            // Crossed the term threshold before accumulating enough duration,
            // so the data is insufficient for this term.
            return null;
        }
        sum = sum + sortedResultsHashMap.get(timestamp).getDurationInMinutes();
        if (sum >= durationBasedRecommendationSubCategory.getGetDurationLowerBound()) {
            // Enough data accumulated: the window starts where this interval starts.
            // (Original code broke out and relied on a caught NullPointerException to
            // signal the not-found case; returning directly is equivalent and explicit.)
            return sortedResultsHashMap.get(timestamp).getIntervalStartTime();
        }
    }
    // Loop exhausted without meeting the lower bound: not enough data.
    return null;
}

/**
* Calculate the number of pods being used as per the latest results
Expand Down Expand Up @@ -522,7 +490,7 @@ private boolean populateRecommendation(String recommendationTerm,
RecommendationNotification recommendationNotification = new RecommendationNotification(RecommendationConstants.RecommendationNotification.ERROR_NUM_PODS_CANNOT_BE_ZERO);
notifications.add(recommendationNotification);
LOGGER.debug("Number of pods cannot be zero");
isSuccess = false;
isSuccess = false;
} else if (numPods < 0) {
RecommendationNotification recommendationNotification = new RecommendationNotification(RecommendationConstants.RecommendationNotification.ERROR_NUM_PODS_CANNOT_BE_NEGATIVE);
notifications.add(recommendationNotification);
Expand Down Expand Up @@ -1063,7 +1031,7 @@ public MappedRecommendationForEngine generateRecommendation(Timestamp monitoring
cpuThreshold,
memoryThreshold
);
} else {
} else {
RecommendationNotification notification = new RecommendationNotification(
RecommendationConstants.RecommendationNotification.INFO_NOT_ENOUGH_DATA);

Expand All @@ -1075,70 +1043,4 @@ public MappedRecommendationForEngine generateRecommendation(Timestamp monitoring
/**
 * Validates generated recommendations.
 * NOTE(review): currently a no-op stub — no validation is performed; presumably
 * required by the engine interface. Confirm whether validation is planned.
 */
public void validateRecommendations() {

}

/**
 * Reports whether the container has accumulated at least the minimum amount of
 * monitoring data required by the shortest duration sub category of this term.
 *
 * @param containerData container whose interval results are inspected
 * @return true when the summed duration meets the smallest lower bound, else false
 */
@Override
public boolean checkIfMinDataAvailable(ContainerData containerData) {
    // No container, no results map, or an empty map means no data at all.
    if (null == containerData || null == containerData.getResults() || containerData.getResults().isEmpty()) {
        return false;
    }
    // Find the sub category with the shortest duration; its lower bound is the
    // minimum requirement we need to satisfy.
    DurationBasedRecommendationSubCategory shortest =
            (DurationBasedRecommendationSubCategory) this.category.getRecommendationSubCategories()[0];
    for (RecommendationSubCategory subCategory : this.category.getRecommendationSubCategories()) {
        DurationBasedRecommendationSubCategory candidate = (DurationBasedRecommendationSubCategory) subCategory;
        if (candidate.getDuration() < shortest.getDuration()) {
            shortest = candidate;
        }
    }
    double requiredMinutes = shortest.getGetDurationLowerBound();
    double accumulatedMinutes = 0.0;
    // Sum observed durations until the requirement is met. The upper bound is
    // deliberately ignored: overshooting must not be reported as insufficient data.
    for (IntervalResults intervalResults : containerData.getResults().values()) {
        accumulatedMinutes = accumulatedMinutes + intervalResults.getDurationInMinutes();
        if (accumulatedMinutes >= requiredMinutes) {
            return true;
        }
    }
    return false;
}

/**
 * Computes the earliest acceptable timestamp for a term's monitoring window:
 * the term duration (normalised to hours, padded with the term-specific
 * threshold) subtracted from the given end time.
 *
 * @param durationBasedRecommendationSubCategory term providing duration, units and sub category name
 * @param endTime right edge of the window
 * @return the threshold timestamp, or null when either argument is null
 */
private static Timestamp calculateThresholdTimeBasedOnTerm(DurationBasedRecommendationSubCategory durationBasedRecommendationSubCategory, Timestamp endTime) {
    // Guard against missing inputs.
    if (null == durationBasedRecommendationSubCategory || null == endTime)
        return null;

    int count = durationBasedRecommendationSubCategory.getDuration();
    TimeUnit units = durationBasedRecommendationSubCategory.getRecommendationDurationUnits();

    // Normalise the duration to hours. DAYS is the only non-hour unit handled today.
    // TODO: Add checks for other timeunits like minutes, weeks & months if needed later
    int totalDurationInHrs = (units == TimeUnit.DAYS)
            ? count * KruizeConstants.TimeConv.NO_OF_HOURS_PER_DAY
            : count;

    // Pad the window with the grace threshold configured for this term.
    String subCategoryName = durationBasedRecommendationSubCategory.getSubCategory();
    if (subCategoryName.equalsIgnoreCase(KruizeConstants.JSONKeys.SHORT_TERM))
        totalDurationInHrs = totalDurationInHrs + THRESHOLD_HRS_SHORT_TERM;
    else if (subCategoryName.equalsIgnoreCase(KruizeConstants.JSONKeys.MEDIUM_TERM))
        totalDurationInHrs = totalDurationInHrs + THRESHOLD_HRS_MEDIUM_TERM;
    else if (subCategoryName.equalsIgnoreCase(KruizeConstants.JSONKeys.LONG_TERM))
        totalDurationInHrs = totalDurationInHrs + THRESHOLD_HRS_LONG_TERM;

    // The threshold is the padded duration subtracted from the end time.
    long startTimeMillis = endTime.getTime() - TimeUnit.HOURS.toMillis(totalDurationInHrs);
    return new Timestamp(startTimeMillis);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -162,15 +162,5 @@ public static RecommendationNotification getNotificationForTermAvailability(Reco
}


/**
 * Small shared helpers for recommendation calculations.
 */
public class RecommendationUtils {
    /**
     * Applies a failover percentage to a value and rounds to the nearest int.
     *
     * @param value              base value to adjust
     * @param failoverPercentage percentage of the value to add or subtract
     * @param direction          true adjusts upward (value + pct), false downward (value - pct)
     * @return the adjusted value, rounded to the nearest integer
     */
    public static int getThreshold(int value, int failoverPercentage, boolean direction) {
        float fraction = failoverPercentage / 100.0f;
        // Same float arithmetic as before rounding, so results match exactly.
        float adjusted = direction ? value + value * fraction : value - value * fraction;
        return Math.round(adjusted);
    }
}

}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -56,14 +56,10 @@ public class UpdateResults extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = LoggerFactory.getLogger(UpdateResults.class);
public static ConcurrentHashMap<String, PerformanceProfile> performanceProfilesMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, KruizeObject> mainKruizeExperimentMAP;

@Override
public void init(ServletConfig config) throws ServletException {
super.init(config);
// Pull the shared experiment map previously stored in the servlet context, if any.
// NOTE(review): unchecked cast — assumes only a ConcurrentHashMap<String, KruizeObject>
// is ever stored under EXPERIMENT_MAP; confirm at the attribute's write sites.
mainKruizeExperimentMAP = (ConcurrentHashMap<String, KruizeObject>) config.getServletContext().getAttribute(AnalyzerConstants.EXPERIMENT_MAP);
// First initialisation: fall back to an empty map rather than leaving the field null.
if (mainKruizeExperimentMAP == null)
mainKruizeExperimentMAP = new ConcurrentHashMap<>();
}

@Override
Expand All @@ -83,8 +79,6 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
ExperimentInitiator experimentInitiator = new ExperimentInitiator();
experimentInitiator.validateAndAddExperimentResults(updateResultsAPIObjects);
List<UpdateResultsAPIObject> failureAPIObjs = experimentInitiator.getFailedUpdateResultsAPIObjects();
if (initialSize != mainKruizeExperimentMAP.size())
request.getServletContext().setAttribute(AnalyzerConstants.EXPERIMENT_MAP, mainKruizeExperimentMAP);
List<FailedUpdateResultsAPIObject> jsonObjectList = new ArrayList<>();
if (failureAPIObjs.size() > 0) {
failureAPIObjs.forEach(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,4 @@
@Target(ElementType.FIELD)
// Marker annotation applicable to fields only. NOTE(review): its consumer is not
// visible in this chunk — presumably fields tagged @Exclude are skipped by some
// processor (e.g. serialization); confirm at the use site before relying on this.
public @interface Exclude {
}

0 comments on commit 0dbb0c0

Please sign in to comment.