From 79c2196b8de242f33385eef680eb261ab31c49af Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Fri, 13 Dec 2024 22:59:47 +0530 Subject: [PATCH 1/5] list Recommendations to support both rm and lm Signed-off-by: msvinaykumar --- migrations/kruize_local_ddl.sql | 2 +- .../engine/RecommendationEngine.java | 8 +- .../services/GenerateRecommendations.java | 2 +- .../services/ListRecommendations.java | 21 +- .../analyzer/utils/AnalyzerConstants.java | 1 + .../analyzer/workerimpl/BulkJobManager.java | 3 +- .../autotune/database/dao/ExperimentDAO.java | 11 + .../database/dao/ExperimentDAOImpl.java | 189 +++++++++--- .../autotune/database/helper/DBConstants.java | 13 +- .../autotune/database/helper/DBHelpers.java | 282 +++++++++++++----- .../database/init/KruizeHibernateUtil.java | 2 + .../database/service/ExperimentDBService.java | 97 +++++- .../table/KruizeRecommendationEntry.java | 13 + .../table/lm/KruizeLMRecommendationEntry.java | 82 +++++ 14 files changed, 583 insertions(+), 143 deletions(-) create mode 100644 src/main/java/com/autotune/database/table/lm/KruizeLMRecommendationEntry.java diff --git a/migrations/kruize_local_ddl.sql b/migrations/kruize_local_ddl.sql index fc5474b26..645dfbab1 100644 --- a/migrations/kruize_local_ddl.sql +++ b/migrations/kruize_local_ddl.sql @@ -5,4 +5,4 @@ create table IF NOT EXISTS kruize_dsmetadata (id serial, version varchar(255), d alter table kruize_lm_experiments add column metadata_id bigint references kruize_dsmetadata(id); alter table if exists kruize_lm_experiments add constraint UK_lm_experiment_name unique (experiment_name); create table IF NOT EXISTS kruize_metric_profiles (api_version varchar(255), kind varchar(255), metadata jsonb, name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name)); -alter table kruize_recommendations add column experiment_type varchar(255); +create table IF NOT EXISTS kruize_lm_recommendations (interval_end_time timestamp(6) not null, 
experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, version varchar(255),experiment_type varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time); diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index cf1bb91b2..8f4331a1d 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -197,13 +197,13 @@ private KruizeObject createKruizeObject(String target_cluster) { KruizeObject kruizeObject = new KruizeObject(); try { - if (KruizeDeploymentInfo.is_ros_enabled){ - if(null == target_cluster || target_cluster.equalsIgnoreCase(AnalyzerConstants.REMOTE)){ + if (KruizeDeploymentInfo.is_ros_enabled) { + if (null == target_cluster || target_cluster.equalsIgnoreCase(AnalyzerConstants.REMOTE)) { new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, experimentName); - }else{ + } else { new ExperimentDBService().loadLMExperimentFromDBByName(mainKruizeExperimentMAP, experimentName); } - }else{ + } else { new ExperimentDBService().loadLMExperimentFromDBByName(mainKruizeExperimentMAP, experimentName); } diff --git a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java index 28b681af6..de9c8a3c2 100644 --- a/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java +++ b/src/main/java/com/autotune/analyzer/services/GenerateRecommendations.java @@ -102,7 +102,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // validate and create KruizeObject if successful String validationMessage = recommendationEngine.validate_local(); if (validationMessage.isEmpty()) { - KruizeObject 
kruizeObject = recommendationEngine.prepareRecommendations(calCount, null); + KruizeObject kruizeObject = recommendationEngine.prepareRecommendations(calCount, AnalyzerConstants.LOCAL); // todo target cluster is set to LOCAL always if (kruizeObject.getValidation_data().isSuccess()) { LOGGER.debug("UpdateRecommendations API request count: {} success", calCount); interval_end_time = Utils.DateUtils.getTimeStampFrom(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, diff --git a/src/main/java/com/autotune/analyzer/services/ListRecommendations.java b/src/main/java/com/autotune/analyzer/services/ListRecommendations.java index ee533905f..73cfb272f 100644 --- a/src/main/java/com/autotune/analyzer/services/ListRecommendations.java +++ b/src/main/java/com/autotune/analyzer/services/ListRecommendations.java @@ -84,19 +84,26 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t String experimentName = request.getParameter(AnalyzerConstants.ServiceConstants.EXPERIMENT_NAME); String latestRecommendation = request.getParameter(AnalyzerConstants.ServiceConstants.LATEST); String monitoringEndTime = request.getParameter(KruizeConstants.JSONKeys.MONITORING_END_TIME); + String rm = request.getParameter(AnalyzerConstants.ServiceConstants.RM); Timestamp monitoringEndTimestamp = null; Map mKruizeExperimentMap = new ConcurrentHashMap(); - ; boolean getLatest = true; boolean checkForTimestamp = false; boolean error = false; + boolean rmTable = false; if (null != latestRecommendation && !latestRecommendation.isEmpty() && latestRecommendation.equalsIgnoreCase(AnalyzerConstants.BooleanString.FALSE) ) { getLatest = false; } + if (null != rm + && !rm.isEmpty() + && rm.equalsIgnoreCase(AnalyzerConstants.BooleanString.TRUE) + ) { + rmTable = true; + } List kruizeObjectList = new ArrayList<>(); try { // Check if experiment name is passed @@ -104,7 +111,11 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t // trim the experiment 
name to remove whitespaces experimentName = experimentName.trim(); try { - new ExperimentDBService().loadExperimentAndRecommendationsFromDBByName(mKruizeExperimentMap, experimentName); + if (rmTable) { + new ExperimentDBService().loadExperimentAndRecommendationsFromDBByName(mKruizeExperimentMap, experimentName); + } else { + new ExperimentDBService().loadLMExperimentAndRecommendationsFromDBByName(mKruizeExperimentMap, experimentName); + } } catch (Exception e) { LOGGER.error("Loading saved experiment {} failed: {} ", experimentName, e.getMessage()); } @@ -151,7 +162,11 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t } } else { try { - new ExperimentDBService().loadAllExperimentsAndRecommendations(mKruizeExperimentMap); + if (rmTable) { + new ExperimentDBService().loadAllExperimentsAndRecommendations(mKruizeExperimentMap); + } else { + new ExperimentDBService().loadAllLMExperimentsAndRecommendations(mKruizeExperimentMap); + } } catch (Exception e) { LOGGER.error("Loading saved experiment {} failed: {} ", experimentName, e.getMessage()); } diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java index ac40d8791..db256caaa 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java @@ -438,6 +438,7 @@ public static final class ServiceConstants { public static final String CLUSTER_NAME = "cluster_name"; public static final String VERBOSE = "verbose"; public static final String FALSE = "false"; + public static final String RM = "rm"; private ServiceConstants() { } diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index d032e2b50..893180218 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ 
b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -57,6 +57,7 @@ import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.*; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.NotificationConstants.*; + /** * The `run` method processes bulk input to create experiments and generates resource optimization recommendations. * It handles the creation of experiment names based on various data source components, makes HTTP POST requests @@ -121,7 +122,7 @@ private static Map parseLabelString(String labelString) { public void run() { String statusValue = "failure"; MetricsConfig.activeJobs.incrementAndGet(); - Timer.Sample timerRunJob = Timer.start(MetricsConfig.meterRegistry()); + io.micrometer.core.instrument.Timer.Sample timerRunJob = Timer.start(MetricsConfig.meterRegistry()); DataSourceMetadataInfo metadataInfo = null; DataSourceManager dataSourceManager = new DataSourceManager(); DataSourceInfo datasource = null; diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAO.java b/src/main/java/com/autotune/database/dao/ExperimentDAO.java index 07770bc64..a918c71de 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAO.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAO.java @@ -6,6 +6,7 @@ import com.autotune.common.data.ValidationOutputData; import com.autotune.database.table.*; import com.autotune.database.table.lm.KruizeLMExperimentEntry; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import java.sql.Timestamp; import java.util.List; @@ -25,6 +26,10 @@ public interface ExperimentDAO { // Add recommendation to DB public ValidationOutputData addRecommendationToDB(KruizeRecommendationEntry recommendationEntry); + // Add recommendation to DB + public ValidationOutputData addRecommendationToDB(KruizeLMRecommendationEntry recommendationEntry); + + // Add Performance Profile to DB public ValidationOutputData addPerformanceProfileToDB(KruizePerformanceProfileEntry 
kruizePerformanceProfileEntry); @@ -52,6 +57,8 @@ public interface ExperimentDAO { // If Kruize restarts load all recommendations List loadAllRecommendations() throws Exception; + List loadAllLMRecommendations() throws Exception; + // If Kruize restarts load all performance profiles List loadAllPerformanceProfiles() throws Exception; @@ -75,6 +82,8 @@ public interface ExperimentDAO { // Load all recommendations of a particular experiment List loadRecommendationsByExperimentName(String experimentName) throws Exception; + // Load all recommendations of a particular experiment + List loadLMRecommendationsByExperimentName(String experimentName) throws Exception; // Load a single Performance Profile based on name List loadPerformanceProfileByName(String performanceProfileName) throws Exception; @@ -88,6 +97,8 @@ public interface ExperimentDAO { // Load all recommendations of a particular experiment and interval end Time KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(String experimentName, String cluster_name, Timestamp interval_end_time) throws Exception; + KruizeLMRecommendationEntry loadLMRecommendationsByExperimentNameAndDate(String experimentName, String cluster_name, Timestamp interval_end_time) throws Exception; + // Get KruizeResult Record List getKruizeResultsEntry(String experiment_name, String cluster_name, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception; diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index 35e0269f2..597c51f60 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -25,6 +25,7 @@ import com.autotune.database.init.KruizeHibernateUtil; import com.autotune.database.table.*; import com.autotune.database.table.lm.KruizeLMExperimentEntry; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import 
com.autotune.utils.KruizeConstants; import com.autotune.utils.MetricsConfig; import io.micrometer.core.instrument.Timer; @@ -384,9 +385,48 @@ public ValidationOutputData addRecommendationToDB(KruizeRecommendationEntry reco tx = session.beginTransaction(); session.persist(recommendationEntry); tx.commit(); - if (null == recommendationEntry.getExperimentType() || recommendationEntry.getExperimentType().isEmpty()) { - updateExperimentTypeInKruizeRecommendationEntry(recommendationEntry); - } + validationOutputData.setSuccess(true); + statusValue = "success"; + } else { + tx = session.beginTransaction(); + existingRecommendationEntry.setExtended_data(recommendationEntry.getExtended_data()); + session.merge(existingRecommendationEntry); + tx.commit(); + validationOutputData.setSuccess(true); + statusValue = "success"; + } + } catch (Exception e) { + LOGGER.error("Not able to save recommendation due to {}", e.getMessage()); + if (tx != null) tx.rollback(); + e.printStackTrace(); + validationOutputData.setSuccess(false); + validationOutputData.setMessage(e.getMessage()); + //todo save error to API_ERROR_LOG + } + } catch (Exception e) { + LOGGER.error("Not able to save recommendation due to {}", e.getMessage()); + } finally { + if (null != timerAddRecDB) { + MetricsConfig.timerAddRecDB = MetricsConfig.timerBAddRecDB.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerAddRecDB.stop(MetricsConfig.timerAddRecDB); + } + } + return validationOutputData; + } + + @Override + public ValidationOutputData addRecommendationToDB(KruizeLMRecommendationEntry recommendationEntry) { + ValidationOutputData validationOutputData = new ValidationOutputData(false, null, null); + Transaction tx = null; + String statusValue = "failure"; + Timer.Sample timerAddRecDB = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + try { + KruizeLMRecommendationEntry existingRecommendationEntry = 
loadLMRecommendationsByExperimentNameAndDate(recommendationEntry.getExperiment_name(), recommendationEntry.getCluster_name(), recommendationEntry.getInterval_end_time()); + if (null == existingRecommendationEntry) { + tx = session.beginTransaction(); + session.persist(recommendationEntry); + tx.commit(); validationOutputData.setSuccess(true); statusValue = "success"; } else { @@ -717,6 +757,27 @@ public List loadAllExperiments() throws Exception { Timer.Sample timerLoadAllExp = Timer.start(MetricsConfig.meterRegistry()); try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_EXPERIMENTS, KruizeExperimentEntry.class).list(); + statusValue = "success"; + } catch (Exception e) { + LOGGER.error("Not able to load experiment due to {}", e.getMessage()); + throw new Exception("Error while loading exsisting experiments from database due to : " + e.getMessage()); + } finally { + if (null != timerLoadAllExp) { + MetricsConfig.timerLoadAllExp = MetricsConfig.timerBLoadAllExp.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerLoadAllExp.stop(MetricsConfig.timerLoadAllExp); + } + } + return entries; + } + + @Override + public List loadAllLMExperiments() throws Exception { + //todo load only experimentStatus=inprogress , playback may not require completed experiments + List entries = null; + String statusValue = "failure"; + Timer.Sample timerLoadAllExp = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + entries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_LM_EXPERIMENTS, KruizeLMExperimentEntry.class).list(); // TODO: remove native sql query and transient //getExperimentTypeInKruizeExperimentEntry(entries); statusValue = "success"; @@ -799,6 +860,28 @@ public List loadAllRecommendations() throws Exception return recommendationEntries; } + @Override + public List 
loadAllLMRecommendations() throws Exception { + List recommendationEntries = null; + String statusValue = "failure"; + Timer.Sample timerLoadAllRec = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + recommendationEntries = session.createQuery( + DBConstants.SQLQUERY.SELECT_FROM_LM_RECOMMENDATIONS, + KruizeLMRecommendationEntry.class).list(); + statusValue = "success"; + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + throw new Exception("Error while loading existing recommendations from database due to : " + e.getMessage()); + } finally { + if (null != timerLoadAllRec) { + MetricsConfig.timerLoadAllRec = MetricsConfig.timerBLoadAllRec.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerLoadAllRec.stop(MetricsConfig.timerLoadAllRec); + } + } + return recommendationEntries; + } + @Override public List loadAllPerformanceProfiles() throws Exception { String statusValue = "failure"; @@ -973,7 +1056,27 @@ public List loadRecommendationsByExperimentName(Strin try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { recommendationEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME, KruizeRecommendationEntry.class) .setParameter("experimentName", experimentName).list(); - getExperimentTypeInKruizeRecommendationsEntry(recommendationEntries); + statusValue = "success"; + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + throw new Exception("Error while loading existing recommendations from database due to : " + e.getMessage()); + } finally { + if (null != timerLoadRecExpName) { + MetricsConfig.timerLoadRecExpName = MetricsConfig.timerBLoadRecExpName.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerLoadRecExpName.stop(MetricsConfig.timerLoadRecExpName); + } + } + return 
recommendationEntries; + } + + @Override + public List loadLMRecommendationsByExperimentName(String experimentName) throws Exception { + List recommendationEntries = null; + String statusValue = "failure"; + Timer.Sample timerLoadRecExpName = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + recommendationEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_LM_RECOMMENDATIONS_BY_EXP_NAME, KruizeLMRecommendationEntry.class) + .setParameter("experimentName", experimentName).list(); statusValue = "success"; } catch (Exception e) { LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); @@ -1005,7 +1108,40 @@ public KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(Stri if (cluster_name != null) kruizeRecommendationEntryQuery.setParameter(CLUSTER_NAME, cluster_name); recommendationEntries = kruizeRecommendationEntryQuery.getSingleResult(); - getExperimentTypeInSingleKruizeRecommendationsEntry(recommendationEntries); + statusValue = "success"; + } catch (NoResultException e) { + LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time); + } catch (Exception e) { + LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); + recommendationEntries = null; + throw new Exception("Error while loading existing recommendations from database due to : " + e.getMessage()); + } finally { + if (null != timerLoadRecExpNameDate) { + MetricsConfig.timerLoadRecExpNameDate = MetricsConfig.timerBLoadRecExpNameDate.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerLoadRecExpNameDate.stop(MetricsConfig.timerLoadRecExpNameDate); + } + } + return recommendationEntries; + } + + @Override + public KruizeLMRecommendationEntry loadLMRecommendationsByExperimentNameAndDate(String experimentName, String cluster_name, Timestamp interval_end_time) throws Exception { 
+ KruizeLMRecommendationEntry recommendationEntries = null; + String statusValue = "failure"; + String clusterCondtionSql = ""; + if (cluster_name != null) + clusterCondtionSql = String.format(" and k.%s = :%s ", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.CLUSTER_NAME); + else + clusterCondtionSql = String.format(" and k.%s is null ", KruizeConstants.JSONKeys.CLUSTER_NAME); + + Timer.Sample timerLoadRecExpNameDate = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + Query kruizeRecommendationEntryQuery = session.createQuery(SELECT_FROM_LM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME + clusterCondtionSql, KruizeLMRecommendationEntry.class) + .setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experimentName) + .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time); + if (cluster_name != null) + kruizeRecommendationEntryQuery.setParameter(CLUSTER_NAME, cluster_name); + recommendationEntries = kruizeRecommendationEntryQuery.getSingleResult(); statusValue = "success"; } catch (NoResultException e) { LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time); @@ -1266,54 +1402,13 @@ private void updateExperimentTypeInKruizeExperimentEntry(KruizeExperimentEntry k } }*/ - private void getExperimentTypeInKruizeRecommendationsEntry(List entries) throws Exception { - for (KruizeRecommendationEntry recomEntry : entries) { - getExperimentTypeInSingleKruizeRecommendationsEntry(recomEntry); - } - } private void getExperimentTypeInSingleKruizeRecommendationsEntry(KruizeRecommendationEntry recomEntry) throws Exception { List expEntries = loadExperimentByName(recomEntry.getExperiment_name()); - if (null != expEntries && !expEntries.isEmpty()) { - if (isTargetCluserLocal(expEntries.get(0).getTarget_cluster())) { - try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { - 
String sql = DBConstants.SQLQUERY.SELECT_RECOMMENDATIONS_EXP_TYPE; - Query query = session.createNativeQuery(sql); - // set experiment_type parameter in sql query - query.setParameter("experiment_name", recomEntry.getExperiment_name()); - List exType = query.getResultList(); - if (null != exType && !exType.isEmpty()) { - recomEntry.setExperimentType(exType.get(0)); - } - } catch (Exception e) { - LOGGER.error("Not able to get experiment type in recommendation entry due to {}", e.getMessage()); - throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage()); - } - } - } - } - private void updateExperimentTypeInKruizeRecommendationEntry(KruizeRecommendationEntry recommendationEntry) throws Exception { - List entries = loadExperimentByName(recommendationEntry.getExperiment_name()); - if (null != entries && !entries.isEmpty()) { - if (isTargetCluserLocal(entries.get(0).getTarget_cluster())) { - try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { - Transaction tx = session.beginTransaction(); - String sql = DBConstants.SQLQUERY.UPDATE_RECOMMENDATIONS_EXP_TYPE; - Query query = session.createNativeQuery(sql); - query.setParameter("experiment_type", recommendationEntry.getExperimentType()); - query.setParameter("experiment_name", recommendationEntry.getExperiment_name()); - query.setParameter("interval_end_time", recommendationEntry.getInterval_end_time()); - query.executeUpdate(); - tx.commit(); - } catch (Exception e) { - LOGGER.error("Not able to update experiment type in recommendation entry due to {}", e.getMessage()); - throw new Exception("Error while updating experiment type to recommendation due to : " + e.getMessage()); - } - } - } } + private boolean isTargetCluserLocal(String targetCluster) { if (AnalyzerConstants.LOCAL.equalsIgnoreCase(targetCluster)) { return true; diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java 
b/src/main/java/com/autotune/database/helper/DBConstants.java index 6a3bd6dc5..2e9b55f93 100644 --- a/src/main/java/com/autotune/database/helper/DBConstants.java +++ b/src/main/java/com/autotune/database/helper/DBConstants.java @@ -51,12 +51,19 @@ public static final class SQLQUERY { "k.interval_end_time = (SELECT MAX(e.interval_end_time) FROM KruizeResultsEntry e where e.experiment_name = :%s ) ", KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME); public static final String SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME = String.format("from KruizeRecommendationEntry k WHERE k.experiment_name = :experimentName"); + public static final String SELECT_FROM_LM_RECOMMENDATIONS_BY_EXP_NAME = String.format("from KruizeLMRecommendationEntry k WHERE k.experiment_name = :experimentName"); public static final String SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME = String.format( "from KruizeRecommendationEntry k WHERE " + "k.experiment_name = :%s and " + "k.interval_end_time= :%s ", KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME); + public static final String SELECT_FROM_LM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME = String.format( + "from KruizeLMRecommendationEntry k WHERE " + + "k.experiment_name = :%s and " + + "k.interval_end_time= :%s ", + KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME); public static final String SELECT_FROM_RECOMMENDATIONS = "from KruizeRecommendationEntry"; + public static final String SELECT_FROM_LM_RECOMMENDATIONS = "from KruizeLMRecommendationEntry"; public static final String SELECT_FROM_PERFORMANCE_PROFILE = "from KruizePerformanceProfileEntry"; public static final String SELECT_FROM_PERFORMANCE_PROFILE_BY_NAME = "from KruizePerformanceProfileEntry k WHERE k.name = :name"; public static final String SELECT_FROM_METRIC_PROFILE = "from KruizeMetricProfileEntry"; @@ -78,17 +85,13 @@ public static final class SQLQUERY { " WHERE 
container->>'container_name' = :container_name" + " AND container->>'container_image_name' = :container_image_name" + " ))"; - public static final String UPDATE_EXPERIMENT_EXP_TYPE = "UPDATE kruize_experiments SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name"; - public static final String UPDATE_RECOMMENDATIONS_EXP_TYPE = "UPDATE kruize_recommendations SET experiment_type = :experiment_type WHERE experiment_name = :experiment_name and interval_end_time = :interval_end_time"; - public static final String SELECT_EXPERIMENT_EXP_TYPE = "SELECT experiment_type from kruize_experiments WHERE experiment_id = :experiment_id"; - public static final String SELECT_RECOMMENDATIONS_EXP_TYPE = "SELECT experiment_type from kruize_recommendations WHERE experiment_name = :experiment_name"; - } public static final class TABLE_NAMES { public static final String KRUIZE_EXPERIMENTS = "kruize_experiments"; public static final String KRUIZE_RESULTS = "kruize_results"; public static final String KRUIZE_RECOMMENDATIONS = "kruize_recommendations"; + public static final String KRUIZE_LM_RECOMMENDATIONS = "kruize_lm_recommendations"; public static final String KRUIZE_PERFORMANCE_PROFILES = "kruize_performance_profiles"; } diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java index e5cbc53cd..cc92b9d27 100644 --- a/src/main/java/com/autotune/database/helper/DBHelpers.java +++ b/src/main/java/com/autotune/database/helper/DBHelpers.java @@ -41,6 +41,7 @@ import com.autotune.common.k8sObjects.K8sObject; import com.autotune.database.table.*; import com.autotune.database.table.lm.KruizeLMExperimentEntry; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import com.autotune.utils.KruizeConstants; import com.autotune.utils.Utils; import com.fasterxml.jackson.core.JsonProcessingException; @@ -209,8 +210,6 @@ public static void setRecommendationsToKruizeObject(List 
namespaceRecommendations = clonedNamespaceData.getNamespaceRecommendations().getData(); + if (null != monitoringEndTime && namespaceRecommendations.containsKey(monitoringEndTime)) { matchFound = true; NamespaceAPIObject namespaceAPIObject = null; @@ -559,6 +692,14 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K continue; HashMap recommendations = clonedContainerData.getContainerRecommendations().getData(); + if (null != monitoringEndTime && !recommendations.containsKey(monitoringEndTime)) { + try { + Timestamp endInterval = containerData.getContainerRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); + monitoringEndTime = endInterval; + } catch (Exception e) { + LOGGER.error("Error while converting ContainerData to Timestamp due to and not able to save date into recommendation table: " + e.getMessage()); + } + } if (null != monitoringEndTime && recommendations.containsKey(monitoringEndTime)) { matchFound = true; ContainerAPIObject containerAPIObject = null; @@ -594,71 +735,6 @@ public static ListRecommendationsAPIObject getListRecommendationAPIObjectForDB(K return listRecommendationsAPIObject; } - public static KruizeRecommendationEntry convertKruizeObjectTORecommendation(KruizeObject kruizeObject, Timestamp monitoringEndTime) { - KruizeRecommendationEntry kruizeRecommendationEntry = null; - Boolean checkForTimestamp = false; - Boolean getLatest = true; - Gson gson = new GsonBuilder() - .disableHtmlEscaping() - .setPrettyPrinting() - .enableComplexMapKeySerialization() - .setDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT) - .registerTypeAdapter(Date.class, new GsonUTCDateAdapter()) - .registerTypeAdapter(AnalyzerConstants.RecommendationItem.class, new RecommendationItemAdapter()) - .registerTypeAdapter(DeviceDetails.class, new DeviceDetailsAdapter()) - .create(); - try { - ListRecommendationsAPIObject listRecommendationsAPIObject = getListRecommendationAPIObjectForDB( - kruizeObject, 
monitoringEndTime); - if (null == listRecommendationsAPIObject) { - return null; - } - LOGGER.debug(new GsonBuilder() - .setPrettyPrinting() - .registerTypeAdapter(AnalyzerConstants.RecommendationItem.class, new RecommendationItemAdapter()) - .registerTypeAdapter(DeviceDetails.class, new DeviceDetailsAdapter()) - .create() - .toJson(listRecommendationsAPIObject)); - kruizeRecommendationEntry = new KruizeRecommendationEntry(); - kruizeRecommendationEntry.setVersion(KruizeConstants.KRUIZE_RECOMMENDATION_API_VERSION.LATEST.getVersionNumber()); - kruizeRecommendationEntry.setExperiment_name(listRecommendationsAPIObject.getExperimentName()); - kruizeRecommendationEntry.setCluster_name(listRecommendationsAPIObject.getClusterName()); - //kruizeRecommendationEntry.setExperimentType(listRecommendationsAPIObject.getExperimentType()); - - Timestamp endInterval = null; - // todo : what happens if two k8 objects or Containers with different timestamp - for (KubernetesAPIObject k8sObject : listRecommendationsAPIObject.getKubernetesObjects()) { - if (listRecommendationsAPIObject.isNamespaceExperiment()) { - endInterval = k8sObject.getNamespaceAPIObjects().getnamespaceRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); - } else { - for (ContainerAPIObject containerAPIObject : k8sObject.getContainerAPIObjects()) { - endInterval = containerAPIObject.getContainerRecommendations().getData().keySet().stream().max(Timestamp::compareTo).get(); - break; - } - } - } - kruizeRecommendationEntry.setInterval_end_time(endInterval); - Map k8sObjectsMap = Map.of(KruizeConstants.JSONKeys.KUBERNETES_OBJECTS, listRecommendationsAPIObject.getKubernetesObjects()); - String k8sObjectString = gson.toJson(k8sObjectsMap); - ObjectMapper objectMapper = new ObjectMapper(); - DateFormat df = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT); - objectMapper.setDateFormat(df); - try { - kruizeRecommendationEntry.setExtended_data( - objectMapper.readTree( - 
k8sObjectString - ) - ); - } catch (JsonProcessingException e) { - throw new Exception("Error while creating Extended data due to : " + e.getMessage()); - } - } catch (Exception e) { - kruizeRecommendationEntry = null; - LOGGER.error("Error while converting KruizeObject to KruizeRecommendationEntry due to {}", e.getMessage()); - e.printStackTrace(); - } - return kruizeRecommendationEntry; - } public static List convertLMExperimentEntryToCreateExperimentAPIObject(List entries) throws Exception { List createExperimentAPIObjects = new ArrayList<>(); @@ -671,6 +747,9 @@ public static List convertLMExperimentEntryToCreateEx CreateExperimentAPIObject apiObj = new Gson().fromJson(extended_data_rawJson, CreateExperimentAPIObject.class); apiObj.setExperiment_id(entry.getExperiment_id()); apiObj.setStatus(entry.getStatus()); + apiObj.setTargetCluster(entry.getTarget_cluster()); + apiObj.setMode(entry.getMode()); + apiObj.setExperimentType(entry.getExperiment_type()); createExperimentAPIObjects.add(apiObj); } catch (Exception e) { LOGGER.error("Error in converting to apiObj from db object due to : {}", e.getMessage()); @@ -708,6 +787,7 @@ public static List convertExperimentEntryToCreateExpe return createExperimentAPIObjects; } + public static List convertResultEntryToUpdateResultsAPIObject(List kruizeResultsEntries) { ObjectMapper mapper = new ObjectMapper(); DateFormat df = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT); @@ -845,6 +925,74 @@ public static List convertRecommendationEntryToRec return listRecommendationsAPIObjectList; } + public static List convertLMRecommendationEntryToRecommendationAPIObject( + List kruizeRecommendationEntryList) throws InvalidConversionOfRecommendationEntryException { + if (null == kruizeRecommendationEntryList) + return null; + if (kruizeRecommendationEntryList.size() == 0) + return null; + Gson gson = new GsonBuilder() + .disableHtmlEscaping() + .setPrettyPrinting() + .enableComplexMapKeySerialization() + 
.setDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT) + .registerTypeAdapter(Date.class, new GsonUTCDateAdapter()) + .registerTypeAdapter(AnalyzerConstants.RecommendationItem.class, new RecommendationItemAdapter()) + .registerTypeAdapter(DeviceDetails.class, new DeviceDetailsAdapter()) + .create(); + List listRecommendationsAPIObjectList = new ArrayList<>(); + for (KruizeLMRecommendationEntry kruizeRecommendationEntry : kruizeRecommendationEntryList) { + // Check if instance of KruizeRecommendationEntry is null + if (null == kruizeRecommendationEntry) { + // Throw an exception stating it cannot be null + throw new InvalidConversionOfRecommendationEntryException( + String.format( + AnalyzerErrorConstants.ConversionErrors.KruizeRecommendationError.NOT_NULL, + KruizeRecommendationEntry.class.getSimpleName() + ) + ); + } + // Create an Object Mapper to extract value from JSON Node + ObjectMapper objectMapper = new ObjectMapper(); + DateFormat df = new SimpleDateFormat(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT); + objectMapper.setDateFormat(df); + // Create a holder for recommendation object to save the result from object mapper + ListRecommendationsAPIObject listRecommendationsAPIObject = null; + JsonNode extendedData = kruizeRecommendationEntry.getExtended_data().get(KruizeConstants.JSONKeys.KUBERNETES_OBJECTS); + if (null == extendedData) + continue; + try { + // If successful, the object mapper returns the list recommendation API Object + List kubernetesAPIObjectList = new ArrayList<>(); + if (extendedData.isArray()) { + for (JsonNode node : extendedData) { + KubernetesAPIObject kubernetesAPIObject = gson.fromJson(objectMapper.writeValueAsString(node), KubernetesAPIObject.class); + if (null != kubernetesAPIObject) { + kubernetesAPIObjectList.add(kubernetesAPIObject); + } else { + LOGGER.debug("GSON failed to convert the DB Json object in convertRecommendationEntryToRecommendationAPIObject"); + } + } + } + if (null != 
kubernetesAPIObjectList) { + listRecommendationsAPIObject = new ListRecommendationsAPIObject(); + listRecommendationsAPIObject.setApiVersion(kruizeRecommendationEntry.getVersion()); + listRecommendationsAPIObject.setKubernetesObjects(kubernetesAPIObjectList); + listRecommendationsAPIObject.setExperimentName(kruizeRecommendationEntry.getExperiment_name()); + listRecommendationsAPIObject.setClusterName(kruizeRecommendationEntry.getCluster_name()); + } + } catch (JsonProcessingException e) { + e.printStackTrace(); + LOGGER.debug(e.getMessage()); + } + if (null != listRecommendationsAPIObject) + listRecommendationsAPIObjectList.add(listRecommendationsAPIObject); + } + if (listRecommendationsAPIObjectList.isEmpty()) + return null; + return listRecommendationsAPIObjectList; + } + public static KruizePerformanceProfileEntry convertPerfProfileObjToPerfProfileDBObj(PerformanceProfile performanceProfile) { KruizePerformanceProfileEntry kruizePerformanceProfileEntry = null; try { diff --git a/src/main/java/com/autotune/database/init/KruizeHibernateUtil.java b/src/main/java/com/autotune/database/init/KruizeHibernateUtil.java index 7d41041f1..841ac99a9 100644 --- a/src/main/java/com/autotune/database/init/KruizeHibernateUtil.java +++ b/src/main/java/com/autotune/database/init/KruizeHibernateUtil.java @@ -18,6 +18,7 @@ import com.autotune.database.table.*; import com.autotune.database.table.lm.KruizeLMExperimentEntry; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import com.autotune.operator.KruizeDeploymentInfo; import org.hibernate.Session; import org.hibernate.SessionFactory; @@ -59,6 +60,7 @@ public static void buildSessionFactory() { configuration.addAnnotatedClass(KruizePerformanceProfileEntry.class); if (KruizeDeploymentInfo.local) { configuration.addAnnotatedClass(KruizeLMExperimentEntry.class); + configuration.addAnnotatedClass(KruizeLMRecommendationEntry.class); configuration.addAnnotatedClass(KruizeDataSourceEntry.class); 
configuration.addAnnotatedClass(KruizeDSMetadataEntry.class); configuration.addAnnotatedClass(KruizeMetricProfileEntry.class); diff --git a/src/main/java/com/autotune/database/service/ExperimentDBService.java b/src/main/java/com/autotune/database/service/ExperimentDBService.java index 029e10f7a..370cf632c 100644 --- a/src/main/java/com/autotune/database/service/ExperimentDBService.java +++ b/src/main/java/com/autotune/database/service/ExperimentDBService.java @@ -34,6 +34,7 @@ import com.autotune.database.helper.DBHelpers; import com.autotune.database.table.*; import com.autotune.database.table.lm.KruizeLMExperimentEntry; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import com.autotune.operator.KruizeDeploymentInfo; import com.autotune.operator.KruizeOperator; import org.slf4j.Logger; @@ -42,6 +43,8 @@ import java.sql.Timestamp; import java.util.*; +import static com.autotune.operator.KruizeDeploymentInfo.is_ros_enabled; + public class ExperimentDBService { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(ExperimentDBService.class); @@ -129,6 +132,26 @@ public void loadAllResults(Map mainKruizeExperimentMap) th } } + public void loadAllLMRecommendations(Map mainKruizeExperimentMap) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + // Load Recommendations from DB and save to local + List recommendationEntries = experimentDAO.loadAllLMRecommendations(); + if (null != recommendationEntries && !recommendationEntries.isEmpty()) { + List recommendationsAPIObjects = null; + try { + recommendationsAPIObjects = DBHelpers.Converters.KruizeObjectConverters + .convertLMRecommendationEntryToRecommendationAPIObject(recommendationEntries); + } catch (InvalidConversionOfRecommendationEntryException e) { + e.printStackTrace(); + } + if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { + 
experimentInterface.addRecommendationsToLocalStorage(mainKruizeExperimentMap, + recommendationsAPIObjects, + true); + } + } + } + public void loadAllRecommendations(Map mainKruizeExperimentMap) throws Exception { ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); @@ -144,7 +167,6 @@ public void loadAllRecommendations(Map mainKruizeExperimen e.printStackTrace(); } if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { - experimentInterface.addRecommendationsToLocalStorage(mainKruizeExperimentMap, recommendationsAPIObjects, true); @@ -228,12 +250,32 @@ public void loadRecommendationsFromDBByName(Map mainKruize } } + public void loadLMRecommendationsFromDBByName(Map mainKruizeExperimentMap, String experimentName) throws Exception { + ExperimentInterface experimentInterface = new ExperimentInterfaceImpl(); + // Load Recommendations from DB and save to local + List recommendationEntries = experimentDAO.loadLMRecommendationsByExperimentName(experimentName); + if (null != recommendationEntries && !recommendationEntries.isEmpty()) { + List recommendationsAPIObjects + = null; + try { + recommendationsAPIObjects = DBHelpers.Converters.KruizeObjectConverters + .convertLMRecommendationEntryToRecommendationAPIObject(recommendationEntries); + } catch (InvalidConversionOfRecommendationEntryException e) { + e.printStackTrace(); + } + if (null != recommendationsAPIObjects && !recommendationsAPIObjects.isEmpty()) { + experimentInterface.addRecommendationsToLocalStorage(mainKruizeExperimentMap, + recommendationsAPIObjects, + true); + } + } + } + public ValidationOutputData addExperimentToDB(CreateExperimentAPIObject createExperimentAPIObject) { ValidationOutputData validationOutputData = new ValidationOutputData(false, null, null); try { KruizeLMExperimentEntry kruizeLMExperimentEntry = DBHelpers.Converters.KruizeObjectConverters.convertCreateAPIObjToExperimentDBObj(createExperimentAPIObject); - LOGGER.debug("is_ros_enabled:{} , 
targetCluster:{} ", KruizeDeploymentInfo.is_ros_enabled, createExperimentAPIObject.getTargetCluster()); - if (KruizeDeploymentInfo.is_ros_enabled && createExperimentAPIObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.REMOTE)) { + if (is_ros_enabled && createExperimentAPIObject.getTargetCluster().equalsIgnoreCase(AnalyzerConstants.REMOTE)) { KruizeExperimentEntry oldKruizeExperimentEntry = new KruizeExperimentEntry(kruizeLMExperimentEntry); validationOutputData = this.experimentDAO.addExperimentToDB(oldKruizeExperimentEntry); } else { @@ -277,10 +319,22 @@ public ValidationOutputData addRecommendationToDB(Map expe LOGGER.error("Trying to locate Recommendation for non existent experiment: " + kruizeObject.getExperimentName()); return validationOutputData; // todo: need to set the correct message } - KruizeRecommendationEntry kr = DBHelpers.Converters.KruizeObjectConverters. - convertKruizeObjectTORecommendation(kruizeObject, interval_end_time); - if (null != kr) { - if (KruizeDeploymentInfo.local == true) { //todo this code will be removed + + if (KruizeDeploymentInfo.is_ros_enabled && kruizeObject.getTarget_cluster().equalsIgnoreCase(AnalyzerConstants.REMOTE)) { + KruizeRecommendationEntry kr = DBHelpers.Converters.KruizeObjectConverters. + convertKruizeObjectTORecommendation(kruizeObject, interval_end_time); + if (null != kr) { + ValidationOutputData tempValObj = new ExperimentDAOImpl().addRecommendationToDB(kr); + if (!tempValObj.isSuccess()) { + validationOutputData.setSuccess(false); + String errMsg = String.format("Experiment name : %s , Interval end time : %s | ", kruizeObject.getExperimentName(), interval_end_time); + validationOutputData.setMessage(validationOutputData.getMessage() + errMsg); + } + } + } else { + KruizeLMRecommendationEntry kr = DBHelpers.Converters.KruizeObjectConverters. 
+ convertKruizeObjectTOLMRecommendation(kruizeObject, interval_end_time); + if (null != kr) { // Create a Calendar object and set the time with the timestamp Calendar localDateTime = Calendar.getInstance(TimeZone.getTimeZone("UTC")); localDateTime.setTime(kr.getInterval_end_time()); @@ -288,19 +342,20 @@ public ValidationOutputData addRecommendationToDB(Map expe int dayOfTheMonth = localDateTime.get(Calendar.DAY_OF_MONTH); try { synchronized (new Object()) { - dao.addPartitions(DBConstants.TABLE_NAMES.KRUIZE_RECOMMENDATIONS, String.format("%02d", localDateTime.get(Calendar.MONTH) + 1), String.valueOf(localDateTime.get(Calendar.YEAR)), dayOfTheMonth, DBConstants.PARTITION_TYPES.BY_DAY); + dao.addPartitions(DBConstants.TABLE_NAMES.KRUIZE_LM_RECOMMENDATIONS, String.format("%02d", localDateTime.get(Calendar.MONTH) + 1), String.valueOf(localDateTime.get(Calendar.YEAR)), dayOfTheMonth, DBConstants.PARTITION_TYPES.BY_DAY); } } catch (Exception e) { LOGGER.warn(e.getMessage()); } - } - ValidationOutputData tempValObj = new ExperimentDAOImpl().addRecommendationToDB(kr); - if (!tempValObj.isSuccess()) { - validationOutputData.setSuccess(false); - String errMsg = String.format("Experiment name : %s , Interval end time : %s | ", kruizeObject.getExperimentName(), interval_end_time); - validationOutputData.setMessage(validationOutputData.getMessage() + errMsg); + ValidationOutputData tempValObj = new ExperimentDAOImpl().addRecommendationToDB(kr); + if (!tempValObj.isSuccess()) { + validationOutputData.setSuccess(false); + String errMsg = String.format("Experiment name : %s , Interval end time : %s | ", kruizeObject.getExperimentName(), interval_end_time); + validationOutputData.setMessage(validationOutputData.getMessage() + errMsg); + } } } + if (validationOutputData.getMessage().equals("")) validationOutputData.setSuccess(true); return validationOutputData; @@ -435,6 +490,13 @@ public void loadExperimentAndRecommendationsFromDBByName(Map mainKruizeExperimentMap, String 
experimentName) throws Exception { + + loadLMExperimentFromDBByName(mainKruizeExperimentMap, experimentName); + + loadLMRecommendationsFromDBByName(mainKruizeExperimentMap, experimentName); + } + public void loadPerformanceProfileFromDBByName(Map performanceProfileMap, String performanceProfileName) throws Exception { List entries = experimentDAO.loadPerformanceProfileByName(performanceProfileName); if (null != entries && !entries.isEmpty()) { @@ -479,6 +541,13 @@ public void loadAllExperimentsAndRecommendations(Map mainK loadAllRecommendations(mainKruizeExperimentMap); } + public void loadAllLMExperimentsAndRecommendations(Map mainKruizeExperimentMap) throws Exception { + + loadAllLMExperiments(mainKruizeExperimentMap); + + loadAllLMRecommendations(mainKruizeExperimentMap); + } + public boolean updateExperimentStatus(KruizeObject kruizeObject, AnalyzerConstants.ExperimentStatus status) { kruizeObject.setStatus(status); // TODO update into database diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java index d3d490f0c..355590361 100644 --- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java +++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java @@ -1,5 +1,6 @@ package com.autotune.database.table; +import com.autotune.database.table.lm.KruizeLMRecommendationEntry; import com.fasterxml.jackson.databind.JsonNode; import jakarta.persistence.*; import org.hibernate.annotations.JdbcTypeCode; @@ -30,6 +31,18 @@ public class KruizeRecommendationEntry { @Transient private String experiment_type; + public KruizeRecommendationEntry(KruizeLMRecommendationEntry recommendationEntry) { + this.experiment_name = recommendationEntry.getExperiment_name(); + this.interval_end_time = recommendationEntry.getInterval_end_time(); + this.cluster_name = recommendationEntry.getCluster_name(); + this.extended_data = 
recommendationEntry.getExtended_data(); + this.version = recommendationEntry.getVersion(); + } + + public KruizeRecommendationEntry() { + + } + public String getExperiment_name() { return experiment_name; } diff --git a/src/main/java/com/autotune/database/table/lm/KruizeLMRecommendationEntry.java b/src/main/java/com/autotune/database/table/lm/KruizeLMRecommendationEntry.java new file mode 100644 index 000000000..061a7b74d --- /dev/null +++ b/src/main/java/com/autotune/database/table/lm/KruizeLMRecommendationEntry.java @@ -0,0 +1,82 @@ +package com.autotune.database.table.lm; + +import com.fasterxml.jackson.databind.JsonNode; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Index; +import jakarta.persistence.Table; +import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.type.SqlTypes; + +import java.sql.Timestamp; + +@Entity +@Table(name = "kruize_lm_recommendations", indexes = { + @Index( + name = "idx_recommendation_experiment_name", + columnList = "experiment_name", + unique = false), + @Index( + name = "idx_recommendation_interval_end_time", + columnList = "interval_end_time", + unique = false) +}) +public class KruizeLMRecommendationEntry { + private String version; + @Id + private String experiment_name; + @Id + private Timestamp interval_end_time; + private String cluster_name; + @JdbcTypeCode(SqlTypes.JSON) + private JsonNode extended_data; + private String experiment_type; + + public String getExperiment_name() { + return experiment_name; + } + + public void setExperiment_name(String experiment_name) { + this.experiment_name = experiment_name; + } + + public Timestamp getInterval_end_time() { + return interval_end_time; + } + + public void setInterval_end_time(Timestamp interval_end_time) { + this.interval_end_time = interval_end_time; + } + + public String getCluster_name() { + return cluster_name; + } + + public void setCluster_name(String cluster_name) { + this.cluster_name = cluster_name; + } 
+ + public JsonNode getExtended_data() { + return extended_data; + } + + public void setExtended_data(JsonNode extended_data) { + this.extended_data = extended_data; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getExperimentType() { + return experiment_type; + } + + public void setExperimentType(String experimentType) { + this.experiment_type = experimentType; + } +} From e542a9d24038c25386bfe9e39f9e9461b8d78a7d Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Sun, 15 Dec 2024 14:14:48 +0530 Subject: [PATCH 2/5] github checks fix Signed-off-by: msvinaykumar --- tests/scripts/helpers/kruize.py | 14 ++++++++++---- .../rest_apis/test_e2e_workflow.py | 13 ++++++++----- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/tests/scripts/helpers/kruize.py b/tests/scripts/helpers/kruize.py index 0e8035073..50985cc6a 100644 --- a/tests/scripts/helpers/kruize.py +++ b/tests/scripts/helpers/kruize.py @@ -150,10 +150,12 @@ def update_recommendations(experiment_name, startTime, endTime): # Description: This function obtains the recommendations from Kruize Autotune using listRecommendations API # Input Parameters: experiment name, flag indicating latest result and monitoring end time -def list_recommendations(experiment_name=None, latest=None, monitoring_end_time=None): +def list_recommendations(experiment_name=None, latest=None, monitoring_end_time=None, rm=False): PARAMS = "" print("\nListing the recommendations...") url = URL + "/listRecommendations" + if rm: + url += "?rm=true" print("URL = ", url) if experiment_name == None: @@ -391,6 +393,7 @@ def create_metric_profile(metric_profile_json_file): print(response.text) return response + # Description: This function deletes the metric profile # Input Parameters: metric profile input json def delete_metric_profile(input_json_file, invalid_header=False): @@ -447,6 +450,7 @@ def list_metric_profiles(name=None, 
verbose=None, logging=True): print("\n************************************************************") return response + # Description: This function generates recommendation for the given experiment_name def generate_recommendations(experiment_name): print("\n************************************************************") @@ -464,6 +468,7 @@ def generate_recommendations(experiment_name): print("\n************************************************************") return response + def post_bulk_api(input_json_file): print("\n************************************************************") print("Sending POST request to URL: ", f"{URL}/bulk") @@ -477,18 +482,19 @@ def post_bulk_api(input_json_file): print("Response JSON: ", response.json()) return response -def get_bulk_job_status(job_id,verbose=False): + +def get_bulk_job_status(job_id, verbose=False): print("\n************************************************************") url_basic = f"{URL}/bulk?job_id={job_id}" url_verbose = f"{URL}/bulk?job_id={job_id}&verbose={verbose}" getJobIDURL = url_basic if verbose: getJobIDURL = url_verbose - print("Sending GET request to URL ( verbose=",verbose," ): ", getJobIDURL) + print("Sending GET request to URL ( verbose=", verbose, " ): ", getJobIDURL) curl_command_verbose = f"curl -X GET '{getJobIDURL}'" print("Equivalent cURL command : ", curl_command_verbose) response = requests.get(url_verbose) print("Verbose GET Response Status Code: ", response.status_code) print("Verbose GET Response JSON: ", response.json()) - return response \ No newline at end of file + return response diff --git a/tests/scripts/remote_monitoring_tests/rest_apis/test_e2e_workflow.py b/tests/scripts/remote_monitoring_tests/rest_apis/test_e2e_workflow.py index a956ac732..27d22f82f 100644 --- a/tests/scripts/remote_monitoring_tests/rest_apis/test_e2e_workflow.py +++ b/tests/scripts/remote_monitoring_tests/rest_apis/test_e2e_workflow.py @@ -18,6 +18,7 @@ import pytest import sys + sys.path.append("../../") from 
helpers.fixtures import * @@ -120,10 +121,11 @@ def test_list_recommendations_multiple_exps_from_diff_json_files(cluster_type): data = response.json() assert response.status_code == SUCCESS_STATUS_CODE assert data[0]['experiment_name'] == experiment_name - assert data[0]['kubernetes_objects'][0]['containers'][0]['recommendations']['notifications'][NOTIFICATION_CODE_FOR_RECOMMENDATIONS_AVAILABLE][ + assert data[0]['kubernetes_objects'][0]['containers'][0]['recommendations']['notifications'][ + NOTIFICATION_CODE_FOR_RECOMMENDATIONS_AVAILABLE][ 'message'] == RECOMMENDATIONS_AVAILABLE - response = list_recommendations(experiment_name) + response = list_recommendations(experiment_name, rm=True) if response.status_code == SUCCESS_200_STATUS_CODE: recommendation_json = response.json() recommendation_section = recommendation_json[0]["kubernetes_objects"][0]["containers"][0][ @@ -133,13 +135,14 @@ def test_list_recommendations_multiple_exps_from_diff_json_files(cluster_type): assert INFO_RECOMMENDATIONS_AVAILABLE_CODE in high_level_notifications data_section = recommendation_section["data"] short_term_recommendation = \ - data_section[end_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")[:-4] + "Z"]["recommendation_terms"]["short_term"] + data_section[end_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")[:-4] + "Z"]["recommendation_terms"][ + "short_term"] short_term_notifications = short_term_recommendation["notifications"] for notification in short_term_notifications.values(): assert notification["type"] != "error" # Invoke list recommendations for the specified experiment - response = list_recommendations(experiment_name) + response = list_recommendations(experiment_name, rm=True) assert response.status_code == SUCCESS_200_STATUS_CODE list_reco_json = response.json() @@ -157,7 +160,7 @@ def test_list_recommendations_multiple_exps_from_diff_json_files(cluster_type): # Invoke list recommendations for a non-existing experiment experiment_name = "Non-existing-exp" - response = 
list_recommendations(experiment_name) + response = list_recommendations(experiment_name, rm=True) assert response.status_code == ERROR_STATUS_CODE data = response.json() From ad2936cbaa13fee6b39832b9069f684f263f11d0 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Mon, 16 Dec 2024 18:44:51 +0530 Subject: [PATCH 3/5] resolving conflict Signed-off-by: msvinaykumar --- .../database/dao/ExperimentDAOImpl.java | 24 +------------------ 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index 597c51f60..b772a62ea 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -793,29 +793,7 @@ public List loadAllLMExperiments() throws Exception { return entries; } - @Override - public List loadAllLMExperiments() throws Exception { - //todo load only experimentStatus=inprogress , playback may not require completed experiments - List entries = null; - String statusValue = "failure"; - Timer.Sample timerLoadAllExp = Timer.start(MetricsConfig.meterRegistry()); - try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { - entries = session.createQuery(SELECT_FROM_LM_EXPERIMENTS, KruizeLMExperimentEntry.class).list(); - // TODO: remove native sql query and transient - //getExperimentTypeInKruizeExperimentEntry(entries); - statusValue = "success"; - } catch (Exception e) { - LOGGER.error("Not able to load experiment due to {}", e.getMessage()); - throw new Exception("Error while loading exsisting experiments from database due to : " + e.getMessage()); - } finally { - if (null != timerLoadAllExp) { - MetricsConfig.timerLoadAllExp = MetricsConfig.timerBLoadAllExp.tag("status", statusValue).register(MetricsConfig.meterRegistry()); - timerLoadAllExp.stop(MetricsConfig.timerLoadAllExp); - } - } - return entries; - } - + @Override public List 
loadAllResults() throws Exception { // TODO: load only experimentStatus=inProgress , playback may not require completed experiments From 514de698cdbd2379acfb2a7db4a7c22a40c947fc Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Fri, 13 Dec 2024 22:59:47 +0530 Subject: [PATCH 4/5] list Recommendations to support both rm and lm Signed-off-by: msvinaykumar --- src/main/java/com/autotune/database/dao/ExperimentDAO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAO.java b/src/main/java/com/autotune/database/dao/ExperimentDAO.java index a918c71de..f4beb40bd 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAO.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAO.java @@ -48,7 +48,7 @@ public interface ExperimentDAO { // If Kruize object restarts load all experiment which are in inprogress public List loadAllExperiments() throws Exception; - // If Kruize object restarts load all local monitoring experiments which are in inprogress + public List loadAllLMExperiments() throws Exception; // If Kruize object restarts load all results from the experiments which are in inprogress From f2f1d48874c2d2a3aa32d8391503b89c4b1693a1 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Sun, 15 Dec 2024 15:57:26 +0530 Subject: [PATCH 5/5] listExperiments fix for concurrent rm and lm Signed-off-by: msvinaykumar --- .../analyzer/services/ListExperiments.java | 52 ++++++-- .../autotune/database/dao/ExperimentDAO.java | 2 + .../database/dao/ExperimentDAOImpl.java | 32 +++++ .../autotune/database/helper/DBConstants.java | 9 ++ .../database/service/ExperimentDBService.java | 20 +++ .../autotune/utils/KruizeSupportedTypes.java | 119 ++++++++---------- 6 files changed, 153 insertions(+), 81 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/services/ListExperiments.java b/src/main/java/com/autotune/analyzer/services/ListExperiments.java index b8ca71447..cd8631aee 100644 --- 
a/src/main/java/com/autotune/analyzer/services/ListExperiments.java +++ b/src/main/java/com/autotune/analyzer/services/ListExperiments.java @@ -107,12 +107,14 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t String latest = request.getParameter(LATEST); String recommendations = request.getParameter(KruizeConstants.JSONKeys.RECOMMENDATIONS); String experimentName = request.getParameter(EXPERIMENT_NAME); + String rm = request.getParameter(AnalyzerConstants.ServiceConstants.RM); String requestBody = request.getReader().lines().collect(Collectors.joining(System.lineSeparator())); StringBuilder clusterName = new StringBuilder(); List kubernetesAPIObjectList = new ArrayList<>(); boolean isJSONValid = true; Map mKruizeExperimentMap = new ConcurrentHashMap<>(); boolean error = false; + boolean rmTable = false; // validate Query params Set invalidParams = new HashSet<>(); for (String param : request.getParameterMap().keySet()) { @@ -120,6 +122,12 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t invalidParams.add(param); } } + if (null != rm + && !rm.isEmpty() + && rm.equalsIgnoreCase(AnalyzerConstants.BooleanString.TRUE) + ) { + rmTable = true; + } try { if (invalidParams.isEmpty()) { // Set default values if absent @@ -142,13 +150,21 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t // parse the requestBody JSON into corresponding classes parseInputJSON(requestBody, clusterName, kubernetesAPIObjectList); try { - new ExperimentDBService().loadExperimentFromDBByInputJSON(mKruizeExperimentMap, clusterName, kubernetesAPIObjectList); + if (rmTable) + new ExperimentDBService().loadExperimentFromDBByInputJSON(mKruizeExperimentMap, clusterName, kubernetesAPIObjectList); + else { + new ExperimentDBService().loadLMExperimentFromDBByInputJSON(mKruizeExperimentMap, clusterName, kubernetesAPIObjectList); + } } catch (Exception e) { LOGGER.error("Failed to load saved experiment data: 
{} ", e.getMessage()); } } else { // Fetch experiments data from the DB and check if the requested experiment exists - loadExperimentsFromDatabase(mKruizeExperimentMap, experimentName); + if (rmTable) { + loadExperimentsFromDatabase(mKruizeExperimentMap, experimentName); + } else { + loadLMExperimentsFromDatabase(mKruizeExperimentMap, experimentName); + } } // Check if experiment exists if (experimentName != null && !mKruizeExperimentMap.containsKey(experimentName)) { @@ -161,18 +177,18 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t ); } if (!error) { - // create Gson Object - Gson gsonObj = createGsonObject(); + // create Gson Object + Gson gsonObj = createGsonObject(); - // Modify the JSON response here based on query params. - gsonStr = buildResponseBasedOnQuery(mKruizeExperimentMap, gsonObj, results, recommendations, latest, experimentName); - if (gsonStr.isEmpty()) { - gsonStr = generateDefaultResponse(); - } - response.getWriter().println(gsonStr); - response.getWriter().close(); - statusValue = "success"; + // Modify the JSON response here based on query params. 
+ gsonStr = buildResponseBasedOnQuery(mKruizeExperimentMap, gsonObj, results, recommendations, latest, experimentName); + if (gsonStr.isEmpty()) { + gsonStr = generateDefaultResponse(); } + response.getWriter().println(gsonStr); + response.getWriter().close(); + statusValue = "success"; + } } catch (Exception e) { LOGGER.error("Exception: " + e.getMessage()); e.printStackTrace(); @@ -278,6 +294,18 @@ private void loadExperimentsFromDatabase(Map mKruizeExperi } } + private void loadLMExperimentsFromDatabase(Map mKruizeExperimentMap, String experimentName) { + try { + if (experimentName == null || experimentName.isEmpty()) + new ExperimentDBService().loadAllLMExperiments(mKruizeExperimentMap); + else + new ExperimentDBService().loadLMExperimentFromDBByName(mKruizeExperimentMap, experimentName); + + } catch (Exception e) { + LOGGER.error("Failed to load saved experiment data: {} ", e.getMessage()); + } + } + private Gson createGsonObject() { return new GsonBuilder() .disableHtmlEscaping() diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAO.java b/src/main/java/com/autotune/database/dao/ExperimentDAO.java index f4beb40bd..b8cc9c897 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAO.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAO.java @@ -108,6 +108,8 @@ public interface ExperimentDAO { List loadExperimentFromDBByInputJSON(StringBuilder clusterName, KubernetesAPIObject kubernetesAPIObject) throws Exception; + List loadLMExperimentFromDBByInputJSON(StringBuilder clusterName, KubernetesAPIObject kubernetesAPIObject) throws Exception; + // Load all the datasources List loadAllDataSources() throws Exception; diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index b772a62ea..47c5d5b31 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -986,6 
+986,38 @@ public List loadExperimentFromDBByInputJSON(StringBuilder return entries; } + @Override + public List loadLMExperimentFromDBByInputJSON(StringBuilder clusterName, KubernetesAPIObject kubernetesAPIObject) throws Exception { + //todo load only experimentStatus=inprogress , playback may not require completed experiments + List entries; + String statusValue = "failure"; + Timer.Sample timerLoadExpName = Timer.start(MetricsConfig.meterRegistry()); + try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) { + // assuming there will be only one container + ContainerAPIObject containerAPIObject = kubernetesAPIObject.getContainerAPIObjects().get(0); + // Set parameters for KubernetesObject and Container + Query query = session.createNativeQuery(SELECT_FROM_LM_EXPERIMENTS_BY_INPUT_JSON, KruizeLMExperimentEntry.class); + query.setParameter(CLUSTER_NAME, clusterName.toString()); + query.setParameter(KruizeConstants.JSONKeys.NAME, kubernetesAPIObject.getName()); + query.setParameter(KruizeConstants.JSONKeys.NAMESPACE, kubernetesAPIObject.getNamespace()); + query.setParameter(KruizeConstants.JSONKeys.TYPE, kubernetesAPIObject.getType()); + query.setParameter(KruizeConstants.JSONKeys.CONTAINER_NAME, containerAPIObject.getContainer_name()); + query.setParameter(KruizeConstants.JSONKeys.CONTAINER_IMAGE_NAME, containerAPIObject.getContainer_image_name()); + + entries = query.getResultList(); + statusValue = "success"; + } catch (Exception e) { + LOGGER.error("Error fetching experiment data: {}", e.getMessage()); + throw new Exception("Error while fetching experiment data from database: " + e.getMessage()); + } finally { + if (null != timerLoadExpName) { + MetricsConfig.timerLoadExpName = MetricsConfig.timerBLoadExpName.tag("status", statusValue).register(MetricsConfig.meterRegistry()); + timerLoadExpName.stop(MetricsConfig.timerLoadExpName); + } + } + return entries; + } + @Override public List loadResultsByExperimentName(String experimentName, 
        /**
         * Native PostgreSQL query over the kruize_lm_experiments table (LM = local monitoring).
         * Selects experiments in the given :cluster_name whose extended_data JSONB contains a
         * kubernetes_object matching :name/:namespace/:type AND a container within it matching
         * :container_name and :container_image_name. Mirrors the remote-monitoring query
         * SELECT_FROM_EXPERIMENTS_BY_INPUT_JSON defined just above, differing only in the table.
         */
        public static final String SELECT_FROM_LM_EXPERIMENTS_BY_INPUT_JSON = "SELECT * FROM kruize_lm_experiments WHERE cluster_name = :cluster_name " +
                "AND EXISTS (SELECT 1 FROM jsonb_array_elements(extended_data->'kubernetes_objects') AS kubernetes_object" +
                " WHERE kubernetes_object->>'name' = :name " +
                " AND kubernetes_object->>'namespace' = :namespace " +
                " AND kubernetes_object->>'type' = :type " +
                " AND EXISTS (SELECT 1 FROM jsonb_array_elements(kubernetes_object->'containers') AS container" +
                " WHERE container->>'container_name' = :container_name" +
                " AND container->>'container_image_name' = :container_image_name" +
                " ))";
KubernetesAPIObject kubernetesAPIObject = kubernetesAPIObjectList.get(0); + List entries = experimentDAO.loadLMExperimentFromDBByInputJSON(clusterName, kubernetesAPIObject); + if (null != entries && !entries.isEmpty()) { + List createExperimentAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertLMExperimentEntryToCreateExperimentAPIObject(entries); + if (!createExperimentAPIObjects.isEmpty()) { + List kruizeExpList = new ArrayList<>(); + for (CreateExperimentAPIObject createExperimentAPIObject : createExperimentAPIObjects) { + KruizeObject kruizeObject = Converters.KruizeObjectConverters.convertCreateExperimentAPIObjToKruizeObject(createExperimentAPIObject); + if (null != kruizeObject) { + kruizeExpList.add(kruizeObject); + } + } + experimentInterface.addExperimentToLocalStorage(mKruizeExperimentMap, kruizeExpList); + } + } + } + public void loadExperimentAndResultsFromDBByName(Map mainKruizeExperimentMap, String experimentName) throws Exception { loadExperimentFromDBByName(mainKruizeExperimentMap, experimentName); diff --git a/src/main/java/com/autotune/utils/KruizeSupportedTypes.java b/src/main/java/com/autotune/utils/KruizeSupportedTypes.java index d297efd2f..638fd2e5b 100644 --- a/src/main/java/com/autotune/utils/KruizeSupportedTypes.java +++ b/src/main/java/com/autotune/utils/KruizeSupportedTypes.java @@ -22,73 +22,54 @@ /** * Supported types to both Autotune and KruizeLayer objects */ -public class KruizeSupportedTypes -{ - private KruizeSupportedTypes() { } - - public static final Set DIRECTIONS_SUPPORTED = - new HashSet<>(Arrays.asList("minimize", "maximize")); - - public static final Set MONITORING_AGENTS_SUPPORTED = - new HashSet<>(Arrays.asList("prometheus")); - - public static final Set MODES_SUPPORTED = - new HashSet<>(Arrays.asList("experiment", "monitor")); - - public static final Set TARGET_CLUSTERS_SUPPORTED = - new HashSet<>(Arrays.asList("local", "remote")); - - public static final Set PRESENCE_SUPPORTED = - new 
/**
 * Supported types common to both Autotune and KruizeLayer objects.
 *
 * <p>Noninstantiable constants holder: declared {@code final} with a private
 * constructor so it cannot be subclassed or instantiated. Several sets
 * intentionally contain {@code null} (meaning "attribute absent"), so they are
 * built with {@code HashSet}/{@code Arrays.asList} rather than the null-hostile
 * {@code Set.of}.
 *
 * <p>NOTE(review): the fields expose mutable {@code HashSet}s; callers could
 * mutate them. Wrapping in {@code Collections.unmodifiableSet} would be safer
 * but changes runtime behavior for any existing mutator — left as-is.
 */
public final class KruizeSupportedTypes {
    /** Optimization directions accepted in an SLO definition. */
    public static final Set<String> DIRECTIONS_SUPPORTED =
            new HashSet<>(Arrays.asList("minimize", "maximize"));
    /** Monitoring agents Kruize can query. */
    public static final Set<String> MONITORING_AGENTS_SUPPORTED =
            new HashSet<>(Arrays.asList("prometheus"));
    /** Operating modes for an experiment. */
    public static final Set<String> MODES_SUPPORTED =
            new HashSet<>(Arrays.asList("experiment", "monitor"));
    /** Target cluster styles: local monitoring vs remote monitoring. */
    public static final Set<String> TARGET_CLUSTERS_SUPPORTED =
            new HashSet<>(Arrays.asList("local", "remote"));
    /** Layer "presence" values; null/empty mean the field was omitted. */
    public static final Set<String> PRESENCE_SUPPORTED =
            new HashSet<>(Arrays.asList("always", "", null));
    /** SLO classes recognized by the analyzer. */
    public static final Set<String> SLO_CLASSES_SUPPORTED =
            new HashSet<>(Arrays.asList("throughput", "response_time", "resource_usage"));
    /** Tunable layers Kruize knows how to optimize. */
    public static final Set<String> LAYERS_SUPPORTED =
            new HashSet<>(Arrays.asList("container", "hotspot", "quarkus"));
    /** Value types allowed for tunables. */
    public static final Set<String> VALUE_TYPES_SUPPORTED =
            new HashSet<>(Arrays.asList("double", "int", "string", "categorical"));
    /** Cluster types supported; currently Kubernetes only. */
    public static final Set<String> CLUSTER_TYPES_SUPPORTED =
            new HashSet<>(Arrays.asList("kubernetes"));
    /** Kubernetes distributions; null means unspecified. */
    public static final Set<String> K8S_TYPES_SUPPORTED =
            new HashSet<>(Arrays.asList("minikube", "openshift", "icp", null));
    /** Authentication mechanisms; empty/null mean no auth configured. */
    public static final Set<String> AUTH_TYPES_SUPPORTED =
            new HashSet<>(Arrays.asList("saml", "oidc", "", null));
    /** Logging levels accepted in configuration. */
    public static final Set<String> LOGGING_TYPES_SUPPORTED =
            new HashSet<>(Arrays.asList("all", "debug", "error", "info", "off", "warn"));
    /** HPO algorithms supported; null means use the default. */
    public static final Set<String> HPO_ALGOS_SUPPORTED =
            new HashSet<>(Arrays.asList("optuna_tpe", "optuna_tpe_multivariate", "optuna_skopt", null));
    /** Operators permitted inside objective-function expressions. */
    public static final Set<String> MATH_OPERATORS_SUPPORTED =
            new HashSet<>(Arrays.asList("+", "-", "*", "/", "^", "%", "sin", "cos", "tan", "log"));
    /** Whitelisted objective-function expressions. */
    public static final Set<String> OBJECTIVE_FUNCTION_LIST =
            new HashSet<>(Arrays.asList("(( throughput / transaction_response_time) / max_response_time) * 100",
                    "request_sum/request_count",
                    "(1.25 * request_count) - (1.5 * (request_sum / request_count)) - (0.25 * request_max)",
                    "((request_count / (request_sum / request_count)) / request_max) * 100"));
    /** Kubernetes object kinds an experiment may target. */
    public static final Set<String> KUBERNETES_OBJECTS_SUPPORTED =
            new HashSet<>(Arrays.asList("deployment", "pod", "container", "namespace"));
    /** Query parameters accepted by the datasource-metadata endpoint. */
    public static final Set<String> DSMETADATA_QUERY_PARAMS_SUPPORTED = new HashSet<>(Arrays.asList(
            "datasource", "cluster_name", "namespace", "verbose"
    ));
    /** Resource-quantity suffixes accepted for CPU/memory formats. */
    public static final Set<String> SUPPORTED_FORMATS =
            new HashSet<>(Arrays.asList("cores", "m", "Bytes", "bytes", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "kB", "KB", "MB", "GB", "TB", "PB", "EB", "K", "k", "M", "G", "T", "P", "E"));
    /** Query parameters accepted by listRecommendations; "rm" selects remote-monitoring data. */
    public static final Set<String> QUERY_PARAMS_SUPPORTED = new HashSet<>(Arrays.asList(
            "experiment_name", "results", "recommendations", "latest", "rm"
    ));
    /** Query parameters accepted by listMetricProfiles. */
    public static final Set<String> LIST_METRIC_PROFILES_QUERY_PARAMS_SUPPORTED = new HashSet<>(Arrays.asList(
            "name", "verbose"
    ));

    /** Prevents instantiation of this constants-only class. */
    private KruizeSupportedTypes() {
    }
}