From bf6422b3d866ff5728b2006604ab18615a103e4d Mon Sep 17 00:00:00 2001
From: Chandrakala Subramanyam
Date: Tue, 5 Nov 2024 12:36:14 +0530
Subject: [PATCH 01/15] Removed the autotune job from the PR check workflow

Signed-off-by: Chandrakala Subramanyam
---
 .github/workflows/test-on-pr.yaml | 62 +------------------------------
 1 file changed, 1 insertion(+), 61 deletions(-)

diff --git a/.github/workflows/test-on-pr.yaml b/.github/workflows/test-on-pr.yaml
index 0453557e7..2b1d04445 100644
--- a/.github/workflows/test-on-pr.yaml
+++ b/.github/workflows/test-on-pr.yaml
@@ -13,68 +13,8 @@ on:
   workflow_dispatch:
 
 jobs:
-  # This workflow contains two jobs called "build autotune" and "build crc"
-  build_autotune:
-    # The type of runner that the job will run on
-    runs-on: ubuntu-20.04
-
-    steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v3
-      - name: Setup Minikube
-        uses: manusa/actions-setup-minikube@v2.3.0
-        with:
-          minikube version: 'v1.16.0'
-          kubernetes version: 'v1.19.2'
-      - name: Build autotune
-        run: |
-          echo Build autotune
-          pr_number=${{ github.event.pull_request.number }}
-          echo "pr_number=${pr_number}" >> "$GITHUB_ENV"
-          ./build.sh -i autotune_operator:pr_${pr_number}
-          docker images | grep autotune
-      - name: Check cluster info on minikube
-        run: |
-          kubectl cluster-info
-          kubectl get pods -n kube-system
-      - name: Install Prometheus on minikube
-        run: |
-          echo Install Prometheus on minikube
-          cd scripts
-          ./prometheus_on_minikube.sh -as
-      - name: Deploy kruize in experiment mode
-        run: |
-          echo Deploy kruize in experiment mode
-          cp ./manifests/autotune/autotune-operator-deployment.yaml_template ./manifests/autotune/autotune-operator-deployment.yaml_template.old
-          sed -e "s/imagePullPolicy: Always/imagePullPolicy: IfNotPresent/g" ./manifests/autotune/autotune-operator-deployment.yaml_template.old > ./manifests/autotune/autotune-operator-deployment.yaml_template
-          echo "***************************************************************"
-          cat ./manifests/autotune/autotune-operator-deployment.yaml_template
-          echo "***************************************************************"
-
-          echo "PR_NUMBER = ${{ env.pr_number }}"
-          ./deploy.sh -c minikube -i autotune_operator:pr_${{ env.pr_number }}
-
-          sleep 20
-      - name: Capture ffdc logs
-        if: always()
-        run: |
-          echo "Capturing ffdc logs"
-          ./scripts/ffdc.sh -d ${GITHUB_WORKSPACE}
-      - name: Archive results
-        if: always()
-        run: |
-          cd ${GITHUB_WORKSPACE}
-          tar cvf autotune_results.tar kruize_*log.txt
-
-      - name: Upload results
-        if: always()
-        uses: actions/upload-artifact@v3
-        with:
-          name: autotune-results
-          path: ./autotune_results.tar
-          retention-days: 2
-
+  # This workflow builds the kruize image and runs an end-to-end test to validate the remote monitoring workflow
   build_crc:
     # The type of runner that the job will run on
     runs-on: ubuntu-20.04

From 9aac9851b8b333f8771321bd0a249025ba47ec01 Mon Sep 17 00:00:00 2001
From: Chandrakala Subramanyam
Date: Mon, 11 Nov 2024 12:09:53 +0530
Subject: [PATCH 02/15] Addressing a CVE by upgrading jetty version

Signed-off-by: Chandrakala Subramanyam
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 76c84e658..0fc4ce587 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
         4.13.2
         20240303
-        9.4.55.v20240627
+        9.4.56.v20240826
         2.17.1
         17
         0.14.1

From 55d725e9da744a3a6ef32599337bfa49de65d748 Mon Sep 17 00:00:00 2001
From: Chandrakala Subramanyam
Date: Mon, 11 Nov 2024 23:06:48 +0530
Subject: [PATCH 03/15] Updated jetty version to 10.0.24

Signed-off-by: Chandrakala Subramanyam
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 0fc4ce587..908096302 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
         4.13.2
         20240303
-        9.4.56.v20240826
+        10.0.24
         2.17.1
         17
         0.14.1

From 5d32ced2c1d5dc8b7e3e26286c2c2861b25878dc Mon Sep 17 00:00:00 2001
From: msvinaykumar
Date: Tue, 12 Nov 2024 15:10:50 +0530
Subject: [PATCH 04/15] Datasource Exception Handler

Signed-off-by: msvinaykumar
---
 src/main/java/com/autotune/Autotune.java      |  18 +-
 .../engine/RecommendationEngine.java          |   4 +-
 .../datasource/DataSourceCollection.java      | 231 ++++++++-------
 .../common/datasource/DataSourceManager.java  |  84 +++---
 .../DataSourceMetadataOperator.java           |  68 ++---
 .../common/datasource/DataSourceOperator.java |  26 +-
 .../datasource/DataSourceOperatorImpl.java    | 272 +++++++++---------
 .../PrometheusDataOperatorImpl.java           | 114 +++-----
 .../handler/MetricCollectionHandler.java      |  34 ++-
 .../autotune/utils/GenericRestApiClient.java  |  17 +-
 10 files changed, 452 insertions(+), 416 deletions(-)

diff --git a/src/main/java/com/autotune/Autotune.java b/src/main/java/com/autotune/Autotune.java
index ad6ad9834..543edd94c 100644
--- a/src/main/java/com/autotune/Autotune.java
+++ b/src/main/java/com/autotune/Autotune.java
@@ -24,6 +24,9 @@
 import com.autotune.analyzer.utils.AnalyzerConstants;
 import com.autotune.common.datasource.DataSourceCollection;
 import com.autotune.common.datasource.DataSourceInfo;
+import com.autotune.common.exceptions.datasource.DataSourceAlreadyExist;
+import com.autotune.common.exceptions.datasource.DataSourceNotServiceable;
+import com.autotune.common.exceptions.datasource.UnsupportedDataSourceProvider;
 import com.autotune.database.helper.DBConstants;
 import com.autotune.database.init.KruizeHibernateUtil;
 import com.autotune.experimentManager.core.ExperimentManager;
@@ -31,7 +34,10 @@
 import com.autotune.operator.KruizeDeploymentInfo;
 import com.autotune.service.HealthService;
 import com.autotune.service.InitiateListener;
-import com.autotune.utils.*;
+import com.autotune.utils.CloudWatchAppender;
+import com.autotune.utils.KruizeConstants;
+import com.autotune.utils.MetricsConfig;
+import com.autotune.utils.ServerContext;
 import com.autotune.utils.filter.KruizeCORSFilter;
 import io.prometheus.client.exporter.MetricsServlet;
 import io.prometheus.client.hotspot.DefaultExports;
@@ -50,8 +56,12 @@
 import javax.servlet.DispatcherType;
 import java.io.File;
+import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Scanner;
@@ -124,7 +134,7 @@ public static void main(String[] args) {
             //Regenerate a Hibernate session following the creation of new tables
             KruizeHibernateUtil.buildSessionFactory();
         } catch (Exception | K8sTypeNotSupportedException | MonitoringAgentNotSupportedException |
-                MonitoringAgentNotFoundException e) {
+                 MonitoringAgentNotFoundException e) {
             e.printStackTrace();
             System.exit(1);
         }
@@ -170,7 +180,7 @@ public static void main(String[] args) {
     /**
      * Set up the data sources available at installation time from config file
      */
-    private static void setUpDataSources() {
+    private static void setUpDataSources() throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException {
         DataSourceCollection dataSourceCollection = DataSourceCollection.getInstance();
         dataSourceCollection.addDataSourcesFromConfigFile(KruizeConstants.CONFIG_FILE);
     }
@@ -190,7 +200,7 @@ private static void checkAvailableDataSources() {
         DataSourceCollection dataSourceCollection = DataSourceCollection.getInstance();
         LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.CHECKING_AVAILABLE_DATASOURCE);
         HashMap dataSources = dataSourceCollection.getDataSourcesCollection();
-        for (String name: dataSources.keySet()) {
+        for (String name : dataSources.keySet()) {
             DataSourceInfo dataSource = dataSources.get(name);
             String dataSourceName = dataSource.getName();
             String url = dataSource.getUrl().toString();

diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
index f157550a4..ebb74fe7c 100644
--- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
+++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java
@@ -2062,7 +2062,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
                         dataSourceInfo.getUrl(),
                         URLEncoder.encode(queryToEncode, CHARACTER_ENCODING)
                 );
-                LOGGER.info(dateMetricsUrl);
+                LOGGER.debug(dateMetricsUrl);
                 client.setBaseURL(dateMetricsUrl);
                 JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
                 JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);
@@ -2163,7 +2163,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
                         interval_start_time_epoc,
                         interval_end_time_epoc,
                         measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE);
-                LOGGER.info(podMetricsUrl);
+                LOGGER.debug(podMetricsUrl);
                 client.setBaseURL(podMetricsUrl);
                 JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, "");
                 JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class);

diff --git a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java
index 6734adc1c..d497f1751 100644
--- a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java
+++ b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java
@@ -16,8 +16,8 @@
 package com.autotune.common.datasource;
 
 import com.autotune.common.auth.AuthenticationConfig;
-import com.autotune.common.exceptions.datasource.*;
 import com.autotune.common.data.ValidationOutputData;
+import com.autotune.common.exceptions.datasource.*;
 import com.autotune.common.utils.CommonUtils;
 import com.autotune.database.service.ExperimentDBService;
 import com.autotune.utils.KruizeConstants;
@@ -31,6 +31,9 @@
 import java.io.InputStream;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
 import java.util.HashMap;
 import java.util.List;
@@ -43,13 +46,22 @@ private DataSourceCollection() {
         this.dataSourceCollection = new HashMap<>();
     }
 
+    /**
+     * Returns the instance of dataSourceCollection class
+     *
+     * @return DataSourceCollection instance
+     */
+    public static DataSourceCollection getInstance() {
+        return dataSourceCollectionInstance;
+    }
+
public void loadDataSourcesFromDB() { try { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.CHECKING_AVAILABLE_DATASOURCE_FROM_DB); List availableDataSources = new ExperimentDBService().loadAllDataSources(); if (null == availableDataSources) { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.NO_DATASOURCE_FOUND_IN_DB); - }else { + } else { for (DataSourceInfo dataSourceInfo : availableDataSources) { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_FOUND + dataSourceInfo.getName()); dataSourceCollection.put(dataSourceInfo.getName(), dataSourceInfo); @@ -61,16 +73,10 @@ public void loadDataSourcesFromDB() { } } - /** - * Returns the instance of dataSourceCollection class - * @return DataSourceCollection instance - */ - public static DataSourceCollection getInstance() { - return dataSourceCollectionInstance; - } /** * Returns the hashmap of data sources + * * @return HashMap containing dataSourceInfo objects */ public HashMap getDataSourcesCollection() { @@ -79,116 +85,111 @@ public HashMap getDataSourcesCollection() { /** * Adds datasource to collection + * * @param datasource DataSourceInfo object containing details of datasource */ - public void addDataSource(DataSourceInfo datasource) { + public void addDataSource(DataSourceInfo datasource) throws DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, DataSourceNotServiceable, UnsupportedDataSourceProvider { final String name = datasource.getName(); final String provider = datasource.getProvider(); ValidationOutputData addedToDB = null; LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.ADDING_DATASOURCE + name); - try { - if (dataSourceCollection.containsKey(name)) { - throw new DataSourceAlreadyExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_ALREADY_EXIST); - } - if (provider.equalsIgnoreCase(KruizeConstants.SupportedDatasources.PROMETHEUS)) { - LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.VERIFYING_DATASOURCE_REACHABILITY + name); - DataSourceOperatorImpl op = DataSourceOperatorImpl.getInstance().getOperator(KruizeConstants.SupportedDatasources.PROMETHEUS); - if (op.isServiceable(datasource) == CommonUtils.DatasourceReachabilityStatus.REACHABLE) { - LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_SERVICEABLE); - // add the data source to DB - addedToDB = new ExperimentDBService().addDataSourceToDB(datasource); - if (addedToDB.isSuccess()) { - LOGGER.info("Datasource added to the DB successfully."); - } else { - LOGGER.error("Failed to add datasource to DB: {}", addedToDB.getMessage()); - } - dataSourceCollection.put(name, datasource); - LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED); + if (dataSourceCollection.containsKey(name)) { + throw new DataSourceAlreadyExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_ALREADY_EXIST); + } + + if (provider.equalsIgnoreCase(KruizeConstants.SupportedDatasources.PROMETHEUS)) { + LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.VERIFYING_DATASOURCE_REACHABILITY + name); + DataSourceOperatorImpl op = DataSourceOperatorImpl.getInstance().getOperator(KruizeConstants.SupportedDatasources.PROMETHEUS); + if (op.isServiceable(datasource) == CommonUtils.DatasourceReachabilityStatus.REACHABLE) { + LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_SERVICEABLE); + // add the data source to 
DB + addedToDB = new ExperimentDBService().addDataSourceToDB(datasource); + if (addedToDB.isSuccess()) { + LOGGER.info("Datasource added to the DB successfully."); } else { - throw new DataSourceNotServiceable(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_SERVICEABLE); + LOGGER.error("Failed to add datasource to DB: {}", addedToDB.getMessage()); } + dataSourceCollection.put(name, datasource); + LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED); } else { - throw new UnsupportedDataSourceProvider(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.UNSUPPORTED_DATASOURCE_PROVIDER); + throw new DataSourceNotServiceable(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_SERVICEABLE); } - } catch (UnsupportedDataSourceProvider e) { - LOGGER.error(e.getMessage()); - } catch (DataSourceNotServiceable e) { - LOGGER.error(e.getMessage()); - } catch (DataSourceAlreadyExist e) { - LOGGER.error(e.getMessage()); + } else { + throw new UnsupportedDataSourceProvider(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.UNSUPPORTED_DATASOURCE_PROVIDER); } + } /** * Loads the data sources available at installation time + * * @param configFileName name of the config file mounted */ - public void addDataSourcesFromConfigFile(String configFileName) { - try { - String configFile = System.getenv(configFileName); - JSONObject configObject = null; - - InputStream is = new FileInputStream(configFile); - String jsonTxt = new String(is.readAllBytes(), StandardCharsets.UTF_8); - configObject = new JSONObject(jsonTxt); - JSONArray dataSourceArr = configObject.getJSONArray(KruizeConstants.DataSourceConstants.KRUIZE_DATASOURCE); - - for (Object dataSourceObj: dataSourceArr) { - JSONObject dataSourceObject = (JSONObject) dataSourceObj; - String name = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_NAME); - // check the DB if the datasource already exists - try { - DataSourceInfo dataSourceInfo = new ExperimentDBService().loadDataSourceFromDBByName(name); - if (null != dataSourceInfo) { - LOGGER.error("Datasource: {} already exists!", name); - continue; - } - } catch (Exception e) { - LOGGER.error("Loading saved datasource {} failed: {} ", name, e.getMessage()); - } - String provider = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_PROVIDER); - String serviceName = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAME); - String namespace = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAMESPACE); - String dataSourceURL = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_URL); - AuthenticationConfig authConfig; - try { - JSONObject authenticationObj = dataSourceObject.optJSONObject(KruizeConstants.AuthenticationConstants.AUTHENTICATION); - // create the corresponding authentication object - authConfig = AuthenticationConfig.createAuthenticationConfigObject(authenticationObj); - } catch (Exception e) { - LOGGER.warn("Auth details are missing for datasource: {}", name); - authConfig = AuthenticationConfig.noAuth(); - } - - DataSourceInfo datasource; - // Validate input - if (!validateInput(name, provider, serviceName, dataSourceURL, namespace)) { + public void addDataSourcesFromConfigFile(String configFileName) throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + + String 
configFile = System.getenv(configFileName); + JSONObject configObject = null; + + InputStream is = new FileInputStream(configFile); + String jsonTxt = new String(is.readAllBytes(), StandardCharsets.UTF_8); + configObject = new JSONObject(jsonTxt); + JSONArray dataSourceArr = configObject.getJSONArray(KruizeConstants.DataSourceConstants.KRUIZE_DATASOURCE); + + for (Object dataSourceObj : dataSourceArr) { + JSONObject dataSourceObject = (JSONObject) dataSourceObj; + String name = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_NAME); + // check the DB if the datasource already exists + try { + DataSourceInfo dataSourceInfo = new ExperimentDBService().loadDataSourceFromDBByName(name); + if (null != dataSourceInfo) { + LOGGER.error("Datasource: {} already exists!", name); continue; } - if (dataSourceURL.isEmpty()) { - datasource = new DataSourceInfo(name, provider, serviceName, namespace, null); - } else { - datasource = new DataSourceInfo(name, provider, serviceName, namespace, new URL(dataSourceURL)); - } - // set the authentication config - datasource.setAuthenticationConfig(authConfig); - addDataSource(datasource); + } catch (Exception e) { + LOGGER.error("Loading saved datasource {} failed: {} ", name, e.getMessage()); } - } catch (IOException e) { - LOGGER.error(e.getMessage()); + String provider = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_PROVIDER); + String serviceName = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAME); + String namespace = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAMESPACE); + String dataSourceURL = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_URL); + AuthenticationConfig authConfig; + try { + JSONObject authenticationObj = dataSourceObject.optJSONObject(KruizeConstants.AuthenticationConstants.AUTHENTICATION); + // create the corresponding authentication object + authConfig = AuthenticationConfig.createAuthenticationConfigObject(authenticationObj); + } catch (Exception e) { + LOGGER.warn("Auth details are missing for datasource: {}", name); + authConfig = AuthenticationConfig.noAuth(); + } + + DataSourceInfo datasource; + // Validate input + if (!validateInput(name, provider, serviceName, dataSourceURL, namespace)) { + continue; + } + if (dataSourceURL.isEmpty()) { + datasource = new DataSourceInfo(name, provider, serviceName, namespace, null); + } else { + datasource = new DataSourceInfo(name, provider, serviceName, namespace, new URL(dataSourceURL)); + } + // set the authentication config + datasource.setAuthenticationConfig(authConfig); + addDataSource(datasource); } + } /** * validates the input parameters before creating dataSourceInfo objects - * @param name String containing name of the datasource - * @param provider String containing provider of the datasource + * + * @param name String containing name of the datasource + * @param provider String containing provider of the datasource * @param servicename String containing service name for the datasource - * @param url String containing URL of the data source - * @param namespace String containing namespace for the datasource service + * @param url String containing URL of the data source + * @param namespace String containing namespace for the datasource service * @return boolean returns true if validation is successful otherwise return false */ public boolean validateInput(String name, String provider, String servicename, String url, String 
namespace) { @@ -214,42 +215,38 @@ public boolean validateInput(String name, String provider, String servicename, S /** * deletes the datasource from the Hashmap + * * @param name String containing the name of the datasource to be deleted - * TODO: add db related operations + * TODO: add db related operations */ - public void deleteDataSource(String name) { - try { - if (name == null) { - throw new DataSourceMissingRequiredFiled(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); - } - if (dataSourceCollection.containsKey(name)) { - dataSourceCollection.remove(name); - } else { - throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); - } - } catch (DataSourceMissingRequiredFiled e) { - LOGGER.error(e.getMessage()); - } catch (DataSourceDoesNotExist e) { - LOGGER.error(e.getMessage()); + public void deleteDataSource(String name) throws DataSourceMissingRequiredFiled, DataSourceDoesNotExist { + + if (name == null) { + throw new DataSourceMissingRequiredFiled(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); + } + if (dataSourceCollection.containsKey(name)) { + dataSourceCollection.remove(name); + } else { + throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); } + } /** * updates the existing datasource in the Hashmap - * @param name String containing the name of the datasource to be updated + * + * @param name String containing the name of the datasource to be updated * @param newDataSource DataSourceInfo object with updated values - * TODO: add db related operations + * TODO: add db related operations */ - public void updateDataSource(String name, DataSourceInfo newDataSource) { - try { - if (dataSourceCollection.containsKey(name)) { - dataSourceCollection.remove(name); - addDataSource(newDataSource); - } else { - throw new DataSourceDoesNotExist(name + ": " + KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); - } - } catch (DataSourceDoesNotExist e) { - LOGGER.error(e.getMessage()); + public void updateDataSource(String name, DataSourceInfo newDataSource) throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, DataSourceDoesNotExist { + + if (dataSourceCollection.containsKey(name)) { + dataSourceCollection.remove(name); + addDataSource(newDataSource); + } else { + throw new DataSourceDoesNotExist(name + ": " + KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); } + } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceManager.java b/src/main/java/com/autotune/common/datasource/DataSourceManager.java index cf28c73b4..1a1fc9e37 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceManager.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceManager.java @@ -17,25 +17,31 @@ import com.autotune.analyzer.utils.AnalyzerErrorConstants; import com.autotune.common.data.ValidationOutputData; +import com.autotune.common.data.dataSourceMetadata.DataSource; +import com.autotune.common.data.dataSourceMetadata.DataSourceCluster; +import com.autotune.common.data.dataSourceMetadata.DataSourceMetadataInfo; import com.autotune.common.exceptions.datasource.DataSourceDoesNotExist; -import com.autotune.common.data.dataSourceMetadata.*; import com.autotune.database.dao.ExperimentDAOImpl; import 
com.autotune.database.service.ExperimentDBService; import com.autotune.utils.KruizeConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; /** * DataSourceManager is an interface to manage (create and update) metadata * of data sources - * - * + *

+ *

* Currently Supported Implementations: - * - importMetadataFromDataSource - * - getMetadataFromDataSource + * - importMetadataFromDataSource + * - getMetadataFromDataSource * TODO - DB integration for update and delete functionalities */ public class DataSourceManager { @@ -47,29 +53,24 @@ public DataSourceManager() { /** * Imports Metadata for a specific data source using associated DataSourceInfo. + * * @param dataSourceInfo - * @param uniqueKey this is used as labels in query example container="xyz" namespace="abc" - * @param startTime Get metadata from starttime to endtime - * @param endTime Get metadata from starttime to endtime - * @param steps the interval between data points in a range query + * @param uniqueKey this is used as labels in query example container="xyz" namespace="abc" + * @param startTime Get metadata from starttime to endtime + * @param endTime Get metadata from starttime to endtime + * @param steps the interval between data points in a range query * @return */ - public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo,String uniqueKey,long startTime,long endTime,int steps) throws Exception { - try { - if (null == dataSourceInfo) { - throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); - } - DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo,uniqueKey, startTime, endTime, steps); - if (null == dataSourceMetadataInfo) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); - return null; - } - return dataSourceMetadataInfo; - } catch (Exception e) { - LOGGER.error(e.getMessage()); - throw e; + public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws DataSourceDoesNotExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + if (null == dataSourceInfo) { + throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); } - + DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo, uniqueKey, startTime, endTime, steps); + if (null == dataSourceMetadataInfo) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); + return null; + } + return dataSourceMetadataInfo; } /** @@ -93,7 +94,7 @@ public DataSourceMetadataInfo getMetadataFromDataSource(DataSourceInfo dataSourc return dataSourceMetadataInfo; } catch (DataSourceDoesNotExist e) { LOGGER.error(e.getMessage()); - }catch (Exception e) { + } catch (Exception e) { LOGGER.error("Loading saved datasource metadata failed: {} ", e.getMessage()); } return null; @@ -101,9 +102,10 @@ public DataSourceMetadataInfo getMetadataFromDataSource(DataSourceInfo dataSourc /** * Updates metadata of the specified data source and metadata object - * @param dataSource The information about the data source to be updated. + * + * @param dataSource The information about the data source to be updated. * @param dataSourceMetadataInfo The existing DataSourceMetadataInfo object containing the current - * metadata information of the data source. + * metadata information of the data source. 
*/ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMetadataInfo dataSourceMetadataInfo) { try { @@ -113,7 +115,7 @@ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMe if (null == dataSourceMetadataInfo) { throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE); } - dataSourceMetadataOperator.updateDataSourceMetadata(dataSource,"",0,0,0); + dataSourceMetadataOperator.updateDataSourceMetadata(dataSource, "", 0, 0, 0); } catch (Exception e) { LOGGER.error(e.getMessage()); } @@ -121,6 +123,7 @@ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMe /** * Deletes metadata of the specified data source + * * @param dataSource The metadata associated with the specified data source to be deleted. */ public void deleteMetadataFromDataSource(DataSourceInfo dataSource) { @@ -137,8 +140,9 @@ public void deleteMetadataFromDataSource(DataSourceInfo dataSource) { /** * Adds Metadata object to DB + * * @param dataSourceMetadataInfo DataSourceMetadataInfo object - * Note - It's assumed that metadata will be added to database after validating dataSourceMetadataInfo object + * Note - It's assumed that metadata will be added to database after validating dataSourceMetadataInfo object */ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { ValidationOutputData addedToDB = null; @@ -159,7 +163,7 @@ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { private boolean checkIfDataSourceMetadataExists(String dataSourceName) { boolean isPresent = false; try { - DataSourceMetadataInfo dataSourceMetadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName,"false"); + DataSourceMetadataInfo dataSourceMetadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false"); if (null != dataSourceMetadataInfo) { LOGGER.error("Metadata already exists for datasource: {}!", dataSourceName); isPresent = true; @@ -172,6 +176,7 @@ private boolean checkIfDataSourceMetadataExists(String dataSourceName) { /** * Fetches and deletes DataSourceMetadata of the specified datasource from Database + * * @param dataSourceInfo DataSourceInfo object */ public void deleteMetadataFromDBByDataSource(DataSourceInfo dataSourceInfo) { @@ -195,6 +200,7 @@ public void deleteMetadataFromDBByDataSource(DataSourceInfo dataSourceInfo) { /** * Deletes DataSourceMetadata entry from Database + * * @param dataSourceName datasource name */ public void deleteMetadataFromDB(String dataSourceName) { @@ -215,12 +221,13 @@ public void deleteMetadataFromDB(String dataSourceName) { /** * Fetches Datasource details from Database by name + * * @param dataSourceName Name of the datasource to be fetched * @return DataSourceInfo object of the specified datasource name */ public DataSourceInfo fetchDataSourceFromDBByName(String dataSourceName) { try { - if(null == dataSourceName || dataSourceName.isEmpty()) { + if (null == dataSourceName || dataSourceName.isEmpty()) { throw new Exception(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); } DataSourceInfo datasource = new ExperimentDBService().loadDataSourceFromDBByName(dataSourceName); @@ -233,13 +240,14 @@ public DataSourceInfo fetchDataSourceFromDBByName(String dataSourceName) { /** * Fetches Datasource metadata details from Database by name - * @param dataSourceName Name of the datasource to be fetched - * @param verbose Flag indicating 
granularity of metadata to be fetched + * + * @param dataSourceName Name of the datasource to be fetched + * @param verbose Flag indicating granularity of metadata to be fetched * @return DataSourceMetadataInfo object of the specified datasource name */ public DataSourceMetadataInfo fetchDataSourceMetadataFromDBByName(String dataSourceName, String verbose) { try { - if(null == dataSourceName || dataSourceName.isEmpty()) { + if (null == dataSourceName || dataSourceName.isEmpty()) { throw new Exception(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); } DataSourceMetadataInfo metadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, verbose); @@ -255,13 +263,13 @@ public DataSourceMetadataInfo fetchDataSourceMetadataFromDBByName(String dataSou * This method processes the provided metadata includes only the datasource * names and their associated cluster names, pruning all other details. * - * @param dataSourceName Datasource name - * @param dataSourceMetadataInfo DataSourceMetadataInfo object containing granular metadata + * @param dataSourceName Datasource name + * @param dataSourceMetadataInfo DataSourceMetadataInfo object containing granular metadata * @return A new DataSourceMetadataInfo object containing only the cluster details. - * + *

* Note - It's assumed that Cluster view will be requested after validating dataSourceMetadataInfo object */ - public DataSourceMetadataInfo DataSourceMetadataClusterView(String dataSourceName, DataSourceMetadataInfo dataSourceMetadataInfo){ + public DataSourceMetadataInfo DataSourceMetadataClusterView(String dataSourceName, DataSourceMetadataInfo dataSourceMetadataInfo) { try { HashMap filteredDataSourceHashMap = new HashMap<>(); diff --git a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java index 99c663402..f2f3a4b63 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java @@ -22,6 +22,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.HashMap; /** @@ -55,9 +59,9 @@ public static DataSourceMetadataOperator getInstance() { * @param startTime Get metadata from starttime to endtime * @param endTime Get metadata from starttime to endtime * @param steps the interval between data points in a range query - * TODO - support multiple data sources + * TODO - support multiple data sources */ - public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { + public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); } @@ -97,8 +101,8 @@ public DataSourceMetadataInfo getDataSourceMetadataInfo(DataSourceInfo dataSourc * @param dataSourceInfo The DataSourceInfo object containing information about the * data source to be updated. *

- * TODO - Currently Create and Update functions have identical functionalities, based on UI workflow and requirements - * need to further enhance updateDataSourceMetadata() to support namespace, workload level granular updates + * TODO - Currently Create and Update functions have identical functionalities, based on UI workflow and requirements + * need to further enhance updateDataSourceMetadata() to support namespace, workload level granular updates */ public DataSourceMetadataInfo updateDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); @@ -141,7 +145,7 @@ public void deleteDataSourceMetadata(DataSourceInfo dataSourceInfo) { * @return DataSourceMetadataInfo object with populated metadata fields * todo rename processQueriesAndFetchClusterMetadataInfo */ - public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { + public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { DataSourceMetadataHelper dataSourceDetailsHelper = new DataSourceMetadataHelper(); /** * Get DataSourceOperatorImpl instance on runtime based on dataSource provider @@ -152,37 +156,34 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_OPERATOR_RETRIEVAL_FAILURE, dataSourceInfo.getProvider()); return null; } - /** * For the "prometheus" data source, fetches and processes data related to namespaces, workloads, and containers, * creating a comprehensive DataSourceMetadataInfo object that is then added to a list. 
* TODO - Process cluster metadata using a custom query */ - try { - String dataSourceName = dataSourceInfo.getName(); - String namespaceQuery = PromQLDataSourceQueries.NAMESPACE_QUERY; - String workloadQuery = PromQLDataSourceQueries.WORKLOAD_QUERY; - String containerQuery = PromQLDataSourceQueries.CONTAINER_QUERY; - if (null != uniqueKey && !uniqueKey.isEmpty()) { - LOGGER.debug("uniquekey: {}", uniqueKey); - namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - containerQuery = containerQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - } else { - namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", ""); - workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", ""); - containerQuery = containerQuery.replace("ADDITIONAL_LABEL", ""); - } - LOGGER.info("namespaceQuery: {}", namespaceQuery); - LOGGER.info("workloadQuery: {}", workloadQuery); - LOGGER.info("containerQuery: {}", containerQuery); - - JsonArray namespacesDataResultArray = op.getResultArrayForQuery(dataSourceInfo, namespaceQuery); - if (!op.validateResultArray(namespacesDataResultArray)) { - dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, null); - throw new Exception(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.NAMESPACE_QUERY_VALIDATION_FAILED); - } + String dataSourceName = dataSourceInfo.getName(); + String namespaceQuery = PromQLDataSourceQueries.NAMESPACE_QUERY; + String workloadQuery = PromQLDataSourceQueries.WORKLOAD_QUERY; + String containerQuery = PromQLDataSourceQueries.CONTAINER_QUERY; + if (null != uniqueKey && !uniqueKey.isEmpty()) { + LOGGER.debug("uniquekey: {}", uniqueKey); + namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + containerQuery = containerQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + } else { + namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", ""); + workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", ""); + containerQuery = containerQuery.replace("ADDITIONAL_LABEL", ""); + } + LOGGER.info("namespaceQuery: {}", namespaceQuery); + LOGGER.info("workloadQuery: {}", workloadQuery); + LOGGER.info("containerQuery: {}", containerQuery); + + JsonArray namespacesDataResultArray = op.getResultArrayForQuery(dataSourceInfo, namespaceQuery); + if (!op.validateResultArray(namespacesDataResultArray)) { + dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, null); + } else { /** * Key: Name of namespace * Value: DataSourceNamespace object corresponding to a namespace @@ -228,11 +229,10 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da } dataSourceDetailsHelper.updateContainerDataSourceMetadataInfoObject(dataSourceName, dataSourceMetadataInfo, datasourceWorkloads, datasourceContainers); - return getDataSourceMetadataInfo(dataSourceInfo); - } catch (Exception e) { - LOGGER.error(e.getMessage()); - throw e; } + + return null; + } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceOperator.java index f4fa77f86..ec7dcafff 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceOperator.java @@ -19,26 +19,33 @@ import com.google.gson.JsonArray; import 
org.json.JSONObject; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; + /** * DataSourceOperator is an abstraction which has a generic and implementation, * and it can also be implemented by each data source provider type. - * + *

* Currently Supported Implementations: - * - Prometheus - * - * The Implementation should have helper functions to perform operations related - * to datasource + * - Prometheus + *

+ * The Implementation should have helper functions to perform operations related + * to datasource */ public interface DataSourceOperator { /** * Returns the default service port for provider + * * @return String containing the port number */ String getDefaultServicePortForProvider(); /** * Returns the instance of specific operator class based on provider type + * * @param provider String containing the name of provider * @return instance of specific operator */ @@ -51,7 +58,7 @@ public interface DataSourceOperator { * @param dataSource DatasourceInfo object containing the datasource details * @return DatasourceReachabilityStatus */ - CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource); + CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the result value @@ -60,7 +67,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return Object containing the result value for the specified query */ - Object getValueForQuery(DataSourceInfo dataSource, String query); + Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the JSON Object @@ -69,7 +76,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return JSONObject for the specified query */ - JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query); + JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the result array @@ -78,7 +85,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return JsonArray containing the result array for the specified query */ - public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query); + public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * Validates a JSON array to ensure it is not null, not a JSON null, and has at least one element. 
@@ -90,6 +97,7 @@ public interface DataSourceOperator { /** * returns query endpoint for datasource + * * @return String containing query endpoint */ String getQueryEndpoint(); diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java index 5404a9d4b..ab3f5d535 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java @@ -1,12 +1,8 @@ package com.autotune.common.datasource; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.exceptions.MonitoringAgentNotFoundException; import com.autotune.analyzer.exceptions.TooManyRecursiveCallsException; import com.autotune.analyzer.utils.AnalyzerConstants; -import com.autotune.common.auth.AuthenticationConfig; -import com.autotune.common.auth.AuthenticationStrategy; -import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.prometheus.PrometheusDataOperatorImpl; import com.autotune.common.exceptions.datasource.ServiceNotFound; import com.autotune.common.target.kubernetes.service.KubernetesServices; @@ -20,6 +16,7 @@ import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.LoggerFactory; + import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -35,11 +32,13 @@ public class DataSourceOperatorImpl implements DataSourceOperator { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DataSourceOperatorImpl.class); private static DataSourceOperatorImpl dataSourceOperator = null; + protected DataSourceOperatorImpl() { } /** * Returns the instance of DataSourceOperatorImpl class + * * @return DataSourceOperatorImpl instance */ public static DataSourceOperatorImpl getInstance() { @@ -49,8 +48,119 @@ public static DataSourceOperatorImpl getInstance() { return dataSourceOperator; } + /** + * TODO: monitoring agent will be replaced by default datasource later + * returns DataSourceInfo objects for default datasource which is currently monitoring agent + * + * @return DataSourceInfo objects + */ + public static DataSourceInfo getMonitoringAgent(String dataSource) throws MonitoringAgentNotFoundException, MalformedURLException { + String monitoringAgentEndpoint; + DataSourceInfo monitoringAgent = null; + + if (dataSource.toLowerCase().equals(KruizeDeploymentInfo.monitoring_agent)) { + monitoringAgentEndpoint = KruizeDeploymentInfo.monitoring_agent_endpoint; + // Monitoring agent endpoint not set in the configmap + if (monitoringAgentEndpoint == null || monitoringAgentEndpoint.isEmpty()) { + monitoringAgentEndpoint = getServiceEndpoint(KruizeDeploymentInfo.monitoring_service); + } + if (dataSource.equals(AnalyzerConstants.PROMETHEUS_DATA_SOURCE)) { + monitoringAgent = new DataSourceInfo(KruizeDeploymentInfo.monitoring_agent, AnalyzerConstants.PROMETHEUS_DATA_SOURCE, null, null, new URL(monitoringAgentEndpoint)); + } + } + + if (monitoringAgent == null) { + LOGGER.error("Datasource " + dataSource + " not supported"); + } + + return monitoringAgent; + } + + /** + * TODO: To find a suitable place for this function later + * Gets the service endpoint for the datasource service through the cluster IP + * of the service. + * + * @return Endpoint of the service. 
+ * @throws ServiceNotFound + */ + private static String getServiceEndpoint(String serviceName) { + //No endpoint was provided in the configmap, find the endpoint from the service. + KubernetesServices kubernetesServices = new KubernetesServicesImpl(); + List serviceList = kubernetesServices.getServicelist(null); + kubernetesServices.shutdownClient(); + String serviceEndpoint = null; + + try { + if (serviceName == null) { + throw new ServiceNotFound(); + } + + for (Service service : serviceList) { + String name = service.getMetadata().getName(); + if (name.toLowerCase().equals(serviceName)) { + String clusterIP = service.getSpec().getClusterIP(); + int port = service.getSpec().getPorts().get(0).getPort(); + LOGGER.debug(KruizeDeploymentInfo.cluster_type); + if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.MINIKUBE)) { + serviceEndpoint = AnalyzerConstants.HTTP_PROTOCOL + "://" + clusterIP + ":" + port; + } + if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.OPENSHIFT)) { + serviceEndpoint = AnalyzerConstants.HTTPS_PROTOCOL + "://" + clusterIP + ":" + port; + } + } + } + } catch (ServiceNotFound e) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.SERVICE_NOT_FOUND); + } + + if (serviceEndpoint == null) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.ENDPOINT_NOT_FOUND); + } + + return serviceEndpoint; + } + + /** + * TODO: To find a suitable place for this function later + * + * @param jsonObj The JSON that needs to be parsed + * @param key The key to search in the JSON + * @param values ArrayList to hold the key values in the JSON + * @param level Level of recursion + */ + static void parseJsonForKey(JSONObject jsonObj, String key, ArrayList values, int level) throws TooManyRecursiveCallsException { + level += 1; + + if (level > 30) + throw new TooManyRecursiveCallsException(); + + for (String keyStr : jsonObj.keySet()) { + Object keyvalue = jsonObj.get(keyStr); + + if (keyStr.equals(key)) + values.add(keyvalue.toString()); + + //for nested objects + if (keyvalue instanceof JSONObject) + parseJsonForKey((JSONObject) keyvalue, key, values, level); + + //for json array, iterate and recursively get values + if (keyvalue instanceof JSONArray) { + JSONArray jsonArray = (JSONArray) keyvalue; + for (int index = 0; index < jsonArray.length(); index++) { + Object jsonObject = jsonArray.get(index); + if (jsonObject instanceof JSONObject) { + parseJsonForKey((JSONObject) jsonObject, key, values, level); + } + } + } + } + } + /** * Returns the instance of specific operator class based on provider type + * * @param provider String containg the name of provider * @return instance of specific operator */ @@ -64,6 +174,7 @@ public DataSourceOperatorImpl getOperator(String provider) { /** * Returns the default service port for prometheus + * * @return String containing the port number */ @Override @@ -79,7 +190,7 @@ public String getDefaultServicePortForProvider() { * @return DatasourceReachabilityStatus */ @Override - public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) { + public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -91,18 +202,20 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat * @return Object containing the result value for the specified query */ @Override - public Object 
getValueForQuery(DataSourceInfo dataSource, String query) { + public Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } /** * returns query endpoint for datasource + * * @return String containing query endpoint */ @Override public String getQueryEndpoint() { return null; } + /** * executes specified query on datasource and returns the JSON Object * @@ -111,7 +224,7 @@ public String getQueryEndpoint() { * @return JSONObject for the specified query */ @Override - public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) { + public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -123,7 +236,7 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) * @return JsonArray containing the result array for the specified query */ @Override - public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) { + public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -134,15 +247,18 @@ public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) * @return True if the JSON array is valid (not null, not a JSON null, and has at least one element), otherwise false. */ @Override - public boolean validateResultArray(JsonArray resultArray) { return false;} + public boolean validateResultArray(JsonArray resultArray) { + return false; + } /** * TODO: To find a suitable place for this function later * returns authentication token for datasource + * * @return String containing token */ public String getToken() throws IOException { - String fileName = KruizeConstants.AUTH_MOUNT_PATH+"token"; + String fileName = KruizeConstants.AUTH_MOUNT_PATH + "token"; String authToken = new String(Files.readAllBytes(Paths.get(fileName))); return authToken; } @@ -150,13 +266,14 @@ public String getToken() throws IOException { /** * TODO: To find a suitable place for this function later * Run the getAppsForLayer and return the list of applications matching the layer. 
+ * * @param dataSource - * @param query getAppsForLayer query for the layer - * @param key The key to search for in the response + * @param query getAppsForLayer query for the layer + * @param key The key to search for in the response * @return ArrayList of all applications from the query * @throws MalformedURLException */ - public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query, String key) throws MalformedURLException { + public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query, String key) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String dataSourceURL = dataSource.getUrl().toString(); String provider = dataSource.getProvider(); DataSourceOperator op = this.getOperator(provider); @@ -165,128 +282,19 @@ public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query ArrayList valuesList = new ArrayList<>(); String queryURL = dataSourceURL + queryEndpoint + query; LOGGER.debug("Query URL is: {}", queryURL); - try { - // Create the client - GenericRestApiClient genericRestApiClient = new GenericRestApiClient(dataSource); - genericRestApiClient.setBaseURL(dataSourceURL + queryEndpoint); - JSONObject responseJson = genericRestApiClient.fetchMetricsJson("GET", query); - int level = 0; - try { - parseJsonForKey(responseJson, key, valuesList, level); - LOGGER.debug("Applications for the query: {}", valuesList.toString()); - } catch (TooManyRecursiveCallsException e) { - e.printStackTrace(); - } - } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException | FetchMetricsError e) { - LOGGER.error("Unable to proceed due to invalid connection to URL: "+ queryURL); - } - return valuesList; - } - - /** - * TODO: monitoring agent will be replaced by default datasource later - * returns DataSourceInfo objects for default datasource which is currently monitoring agent - * @return DataSourceInfo objects - */ - public static DataSourceInfo getMonitoringAgent(String dataSource) throws MonitoringAgentNotFoundException, MalformedURLException { - String monitoringAgentEndpoint; - DataSourceInfo monitoringAgent = null; - - if (dataSource.toLowerCase().equals(KruizeDeploymentInfo.monitoring_agent)) { - monitoringAgentEndpoint = KruizeDeploymentInfo.monitoring_agent_endpoint; - // Monitoring agent endpoint not set in the configmap - if (monitoringAgentEndpoint == null || monitoringAgentEndpoint.isEmpty()) { - monitoringAgentEndpoint = getServiceEndpoint(KruizeDeploymentInfo.monitoring_service); - } - if (dataSource.equals(AnalyzerConstants.PROMETHEUS_DATA_SOURCE)) { - monitoringAgent = new DataSourceInfo(KruizeDeploymentInfo.monitoring_agent, AnalyzerConstants.PROMETHEUS_DATA_SOURCE, null, null, new URL(monitoringAgentEndpoint)); - } - } - - if (monitoringAgent == null) { - LOGGER.error("Datasource " + dataSource + " not supported"); - } - - return monitoringAgent; - } - - /** - * TODO: To find a suitable place for this function later - * Gets the service endpoint for the datasource service through the cluster IP - * of the service. - * @return Endpoint of the service. - * @throws ServiceNotFound - */ - private static String getServiceEndpoint(String serviceName) { - //No endpoint was provided in the configmap, find the endpoint from the service. 
- KubernetesServices kubernetesServices = new KubernetesServicesImpl(); - List serviceList = kubernetesServices.getServicelist(null); - kubernetesServices.shutdownClient(); - String serviceEndpoint = null; + // Create the client + GenericRestApiClient genericRestApiClient = new GenericRestApiClient(dataSource); + genericRestApiClient.setBaseURL(dataSourceURL + queryEndpoint); + JSONObject responseJson = genericRestApiClient.fetchMetricsJson("GET", query); + int level = 0; try { - if (serviceName == null) { - throw new ServiceNotFound(); - } - - for (Service service : serviceList) { - String name = service.getMetadata().getName(); - if (name.toLowerCase().equals(serviceName)) { - String clusterIP = service.getSpec().getClusterIP(); - int port = service.getSpec().getPorts().get(0).getPort(); - LOGGER.debug(KruizeDeploymentInfo.cluster_type); - if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.MINIKUBE)) { - serviceEndpoint = AnalyzerConstants.HTTP_PROTOCOL + "://" + clusterIP + ":" + port; - } - if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.OPENSHIFT)) { - serviceEndpoint = AnalyzerConstants.HTTPS_PROTOCOL + "://" + clusterIP + ":" + port; - } - } - } - } catch (ServiceNotFound e) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.SERVICE_NOT_FOUND); + parseJsonForKey(responseJson, key, valuesList, level); + LOGGER.debug("Applications for the query: {}", valuesList.toString()); + } catch (TooManyRecursiveCallsException e) { + e.printStackTrace(); } - if (serviceEndpoint == null) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.ENDPOINT_NOT_FOUND); - } - - return serviceEndpoint; - } - - /** - * TODO: To find a suitable place for this function later - * @param jsonObj The JSON that needs to be parsed - * @param key The key to search in the JSON - * @param values ArrayList to hold the key values in the JSON - * @param level Level of recursion - */ - static void parseJsonForKey(JSONObject jsonObj, String key, ArrayList values, int level) throws TooManyRecursiveCallsException { - level += 1; - - if (level > 30) - throw new TooManyRecursiveCallsException(); - - for (String keyStr : jsonObj.keySet()) { - Object keyvalue = jsonObj.get(keyStr); - - if (keyStr.equals(key)) - values.add(keyvalue.toString()); - - //for nested objects - if (keyvalue instanceof JSONObject) - parseJsonForKey((JSONObject) keyvalue, key, values, level); - - //for json array, iterate and recursively get values - if (keyvalue instanceof JSONArray) { - JSONArray jsonArray = (JSONArray) keyvalue; - for (int index = 0; index < jsonArray.length(); index++) { - Object jsonObject = jsonArray.get(index); - if (jsonObject instanceof JSONObject) { - parseJsonForKey((JSONObject) jsonObject, key, values, level); - } - } - } - } + return valuesList; } } diff --git a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java index 56614d24a..9e86a4d50 100644 --- a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java @@ -15,20 +15,16 @@ *******************************************************************************/ package com.autotune.common.datasource.prometheus; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.utils.AnalyzerConstants; -import 
com.autotune.common.auth.AuthenticationStrategy; -import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.DataSourceInfo; import com.autotune.common.datasource.DataSourceOperatorImpl; import com.autotune.common.utils.CommonUtils; import com.autotune.operator.KruizeDeploymentInfo; -import com.autotune.utils.KruizeConstants; import com.autotune.utils.GenericRestApiClient; -import com.google.gson.*; -import org.apache.http.conn.HttpHostConnectException; -import org.json.JSONArray; -import org.json.JSONException; +import com.autotune.utils.KruizeConstants; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; import org.json.JSONObject; import org.slf4j.LoggerFactory; @@ -38,19 +34,22 @@ import java.security.NoSuchAlgorithmException; /** - * PrometheusDataOperatorImpl extends DataSourceOperatorImpl class - * This class provides Prometheus specific implementation for DataSourceOperator functions + * PrometheusDataOperatorImpl extends DataSourceOperatorImpl class + * This class provides Prometheus specific implementation for DataSourceOperator functions */ public class PrometheusDataOperatorImpl extends DataSourceOperatorImpl { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(PrometheusDataOperatorImpl.class); - private static PrometheusDataOperatorImpl prometheusDataOperator = null;; + private static PrometheusDataOperatorImpl prometheusDataOperator = null; + ; + private PrometheusDataOperatorImpl() { super(); } /** * Returns the instance of PrometheusDataOperatorImpl class + * * @return PrometheusDataOperatorImpl instance */ public static PrometheusDataOperatorImpl getInstance() { @@ -62,6 +61,7 @@ public static PrometheusDataOperatorImpl getInstance() { /** * Returns the default service port for prometheus + * * @return String containing the port number */ @Override @@ -80,7 +80,7 @@ public String getDefaultServicePortForProvider() { * @return DatasourceReachabilityStatus */ @Override - public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) { + public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String dataSourceStatus; Object queryResult; @@ -89,13 +89,13 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat queryResult = this.getValueForQuery(dataSource, query); - if (queryResult != null){ + if (queryResult != null) { dataSourceStatus = queryResult.toString(); } else { dataSourceStatus = "0"; } - if (dataSourceStatus.equalsIgnoreCase("1")){ + if (dataSourceStatus.equalsIgnoreCase("1")) { reachabilityStatus = CommonUtils.DatasourceReachabilityStatus.REACHABLE; } else { reachabilityStatus = CommonUtils.DatasourceReachabilityStatus.NOT_REACHABLE; @@ -111,23 +111,17 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat * @return Object containing the result value for the specified query */ @Override - public Object getValueForQuery(DataSourceInfo dataSource, String query) { - try { - JSONObject jsonObject = getJsonObjectForQuery(dataSource, query); - - if (null == jsonObject) { - return null; - } else { - return "1"; //if it returns HTTP STATUS_OK 200 - } + public Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + JSONObject jsonObject = 
getJsonObjectForQuery(dataSource, query); - } catch (JSONException e) { - LOGGER.error(e.getMessage()); - } catch (NullPointerException e) { - LOGGER.error(e.getMessage()); + if (null == jsonObject) { + return null; + } else { + return "1"; //if it returns HTTP STATUS_OK 200 } - return null; + + } /** @@ -138,7 +132,7 @@ public Object getValueForQuery(DataSourceInfo dataSource, String query) { * @return JSONObject for the specified query */ @Override - public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) { + public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { // Create the client GenericRestApiClient apiClient = new GenericRestApiClient(dataSource); apiClient.setBaseURL(CommonUtils.getBaseDataSourceUrl( @@ -150,10 +144,10 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) return null; } - try { - JSONObject jsonObject = apiClient.fetchMetricsJson( - KruizeConstants.HttpConstants.MethodType.GET, - query); + + JSONObject jsonObject = apiClient.fetchMetricsJson( + KruizeConstants.HttpConstants.MethodType.GET, + query); /* TODO need to separate it out this logic form here if (!jsonObject.has(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.STATUS)) return null; @@ -168,26 +162,14 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) */ - return jsonObject; - - } catch (HttpHostConnectException e) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_CONNECTION_FAILED); - } catch (IOException e) { - e.printStackTrace(); - } catch (NoSuchAlgorithmException e) { - e.printStackTrace(); - } catch (KeyStoreException e) { - e.printStackTrace(); - } catch (KeyManagementException e) { - e.printStackTrace(); - } catch (FetchMetricsError e) { - e.printStackTrace(); - } - return null; + return jsonObject; + + } /** * returns query endpoint for prometheus datasource + * * @return String containing query endpoint */ @Override @@ -201,45 +183,37 @@ public String getQueryEndpoint() { * @param dataSource DatasourceInfo object containing the datasource details * @param query String containing the query to be executed * @return JsonArray containing the result array for the specified query - * + *
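The simplified getValueForQuery above composes with isServiceable: any non-null JSON response is mapped to "1", which isServiceable then translates into REACHABLE. A hedged probe sketch, with the datasource object and the probe query ("up" here) assumed for illustration:

    // Sketch: a reachability probe built on the "non-null response => 1"
    // convention used by getValueForQuery.
    PrometheusDataOperatorImpl op = PrometheusDataOperatorImpl.getInstance();
    try {
        Object result = op.getValueForQuery(dataSource, "up");
        boolean reachable = result != null && "1".equalsIgnoreCase(result.toString());
        LOGGER.info("Datasource reachable: {}", reachable);
    } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
        LOGGER.error("Reachability probe failed: {}", e.getMessage());
    }
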

     * Example output JsonArray
-     * [
-     * {
-     * "metric": {
-     * "__name__": "exampleMetric"
-     * },
-     * "value": [1642612628.987, "1"]
-     * }
+     * [
+     * {
+     * "metric": {
+     * "__name__": "exampleMetric"
+     * },
+     * "value": [1642612628.987, "1"]
+     * }
      * ]
      */
     @Override
-    public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) {
-        try {
-            JSONObject jsonObject = getJsonObjectForQuery(dataSource, query);
+    public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException {

-            if (null == jsonObject) {
-                return null;
-            }
+        JSONObject jsonObject = getJsonObjectForQuery(dataSource, query);
+        if (null == jsonObject) {
+            return null;
+        } else {
             String jsonString = jsonObject.toString();
             JsonObject parsedJsonObject = JsonParser.parseString(jsonString).getAsJsonObject();
             JsonObject dataObject = parsedJsonObject.get(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.DATA).getAsJsonObject();
-
             if (dataObject.has(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT) && dataObject.get(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).isJsonArray()) {
                 JsonArray resultArray = dataObject.getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT);
-
                 if (null != resultArray) {
                     return resultArray;
                 }
             }
-        } catch (JsonParseException e) {
-            LOGGER.error(e.getMessage());
-            throw e;
-        } catch (NullPointerException e) {
-            LOGGER.error(e.getMessage());
-            throw e;
         }
+
         return null;
     }

diff --git a/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java b/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java
index 0aa8c41d7..ad3a4e70d 100644
--- a/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java
+++ b/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java
@@ -42,6 +42,10 @@
 import org.slf4j.LoggerFactory;

 import javax.servlet.ServletContext;
+import java.io.IOException;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
 import java.sql.Timestamp;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
@@ -121,8 +125,19 @@ public void execute(ExperimentTrial experimentTrial, TrialDetails trialDetails,
         if (null == ado) {
             // TODO: Return an error saying unsupported datasource
         }
-        String queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap()
-                .get(podMetric.getDatasource()), updatedPodQuery);
+        String queryResult = null;
+        try {
+            queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap()
+                    .get(podMetric.getDatasource()), updatedPodQuery);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException(e);
+        } catch (KeyStoreException e) {
+            throw new RuntimeException(e);
+        } catch (KeyManagementException e) {
+            throw new RuntimeException(e);
+        }
         if (null != queryResult && !queryResult.isEmpty() && !queryResult.isBlank()) {
             try {
                 queryResult = queryResult.trim();
@@ -159,8 +174,19 @@ public void execute(ExperimentTrial experimentTrial, TrialDetails trialDetails,
         }
         if (null != updatedContainerQuery) {
             LOGGER.debug("Updated Query - " + updatedContainerQuery);
-            String queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap()
-                .get(containerMetric.getDatasource()), updatedContainerQuery);
+            String 
queryResult = null; + try { + queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap() + .get(containerMetric.getDatasource()), updatedContainerQuery); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } catch (KeyStoreException e) { + throw new RuntimeException(e); + } catch (KeyManagementException e) { + throw new RuntimeException(e); + } if (null != queryResult && !queryResult.isEmpty() && !queryResult.isBlank()) { try { queryResult = queryResult.trim(); diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index 99a3e8e13..45ad05598 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -40,6 +40,7 @@ import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import javax.net.ssl.SSLContext; import java.io.IOException; import java.net.URLEncoder; @@ -64,6 +65,7 @@ public class GenericRestApiClient { /** * constructor to set the authentication based on the datasourceInfo object + * * @param dataSourceInfo object containing the datasource details */ public GenericRestApiClient(DataSourceInfo dataSourceInfo) { @@ -74,12 +76,13 @@ public GenericRestApiClient(DataSourceInfo dataSourceInfo) { /** * This method appends queryString with baseURL and returns response in JSON using specified authentication. - * @param methodType Http methods like GET,POST,PATCH etc + * + * @param methodType Http methods like GET,POST,PATCH etc * @param queryString * @return Json object which contains API response. * @throws IOException */ - public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { + public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String jsonResponse; try (CloseableHttpClient httpclient = setupHttpClient()) { @@ -104,6 +107,7 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws /** * Common method to setup SSL context for trust-all certificates. + * * @return CloseableHttpClient */ private CloseableHttpClient setupHttpClient() throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException { @@ -115,6 +119,7 @@ private CloseableHttpClient setupHttpClient() throws NoSuchAlgorithmException, K /** * Common method to apply authentication to the HTTP request. + * * @param httpRequestBase the HTTP request (GET, POST, etc.) */ private void applyAuthentication(HttpRequestBase httpRequestBase) { @@ -126,6 +131,7 @@ private void applyAuthentication(HttpRequestBase httpRequestBase) { /** * Method to call the Experiment API (e.g., to create an experiment) using POST request. 
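With FetchMetricsError gone from the throws clause, fetchMetricsJson now reports failures solely through the four checked exception types. A short usage sketch; the base URL and query string are illustrative values, not taken from this patch:

    // Sketch: fetching a metrics payload through the updated client.
    GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
    client.setBaseURL("http://prometheus.example:9090/api/v1/query?query=");
    try {
        JSONObject response = client.fetchMetricsJson("GET", "up");
        LOGGER.debug("Result: {}", response);
    } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
        LOGGER.error("Metrics fetch failed: {}", e.getMessage());
    }
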
+ * * @param payload JSON payload containing the experiment details * @return API response code * @throws IOException @@ -158,6 +164,9 @@ public int callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmExce } } + public void setBaseURL(String baseURL) { + this.baseURL = baseURL; + } private static class StringResponseHandler implements ResponseHandler { @Override @@ -173,8 +182,4 @@ public String handleResponse(HttpResponse response) throws IOException { } - - public void setBaseURL(String baseURL) { - this.baseURL = baseURL; - } } From d8d705c19c176061ec22c3202fa8b5ac67ada52f Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Tue, 12 Nov 2024 15:44:58 +0530 Subject: [PATCH 05/15] Datasource check should bypass and report error in log Signed-off-by: msvinaykumar --- src/main/java/com/autotune/Autotune.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/autotune/Autotune.java b/src/main/java/com/autotune/Autotune.java index 543edd94c..ce580aca2 100644 --- a/src/main/java/com/autotune/Autotune.java +++ b/src/main/java/com/autotune/Autotune.java @@ -122,7 +122,11 @@ public static void main(String[] args) { // load available datasources from db loadDataSourcesFromDB(); // setting up DataSources - setUpDataSources(); + try { + setUpDataSources(); + } catch (Exception e) { + LOGGER.error("***Failed to setup datasource connection due to : {}***", e.getMessage()); + } // checking available DataSources checkAvailableDataSources(); // load available metric profiles from db From 8f57c2e18b526f31449ca7b99ff1f0b7022bf372 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Wed, 13 Nov 2024 16:44:41 +0530 Subject: [PATCH 06/15] incorporated review comments Signed-off-by: msvinaykumar --- src/main/java/com/autotune/Autotune.java | 3 ++- .../analyzer/utils/AnalyzerConstants.java | 19 ++++++++------- .../datasource/DataSourceCollection.java | 23 +++++++++++-------- .../common/datasource/DataSourceManager.java | 11 +++++---- .../com/autotune/utils/KruizeConstants.java | 17 ++++++++++++++ 5 files changed, 48 insertions(+), 25 deletions(-) diff --git a/src/main/java/com/autotune/Autotune.java b/src/main/java/com/autotune/Autotune.java index ce580aca2..cd5725b81 100644 --- a/src/main/java/com/autotune/Autotune.java +++ b/src/main/java/com/autotune/Autotune.java @@ -66,6 +66,7 @@ import java.util.HashMap; import java.util.Scanner; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_CONNECTION_FAILED; import static com.autotune.utils.ServerContext.*; public class Autotune { @@ -125,7 +126,7 @@ public static void main(String[] args) { try { setUpDataSources(); } catch (Exception e) { - LOGGER.error("***Failed to setup datasource connection due to : {}***", e.getMessage()); + LOGGER.error(DATASOURCE_CONNECTION_FAILED, e.getMessage()); } // checking available DataSources checkAvailableDataSources(); diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java index 740bb859a..f7ae69c9f 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java @@ -92,6 +92,7 @@ public class AnalyzerConstants { public static final String VERSION = "version"; public static final String DATASOURCE_NAME = "dataSourceName"; + private AnalyzerConstants() { } @@ -241,27 +242,23 @@ private AcceleratorConstants() { } public static final class AcceleratorMetricConstants { + 
public static final int TIMESTAMP_RANGE_CHECK_IN_MINUTES = 5; + private AcceleratorMetricConstants() { } - - public static final int TIMESTAMP_RANGE_CHECK_IN_MINUTES = 5; } public static final class SupportedAccelerators { - private SupportedAccelerators() { - - } public static final String A100_80_GB = "A100-80GB"; public static final String A100_40_GB = "A100-40GB"; public static final String H100_80_GB = "H100-80GB"; - } - - public static final class AcceleratorProfiles { - private AcceleratorProfiles () { + private SupportedAccelerators() { } + } + public static final class AcceleratorProfiles { // A100 40GB Profiles public static final String PROFILE_1G_5GB = "1g.5gb"; public static final String PROFILE_1G_10GB = "1g.10gb"; @@ -269,13 +266,15 @@ private AcceleratorProfiles () { public static final String PROFILE_3G_20GB = "3g.20gb"; public static final String PROFILE_4G_20GB = "4g.20gb"; public static final String PROFILE_7G_40GB = "7g.40gb"; - // A100 80GB & H100 80GB Profiles public static final String PROFILE_1G_20GB = "1g.20gb"; public static final String PROFILE_2G_20GB = "2g.20gb"; public static final String PROFILE_3G_40GB = "3g.40gb"; public static final String PROFILE_4G_40GB = "4g.40gb"; public static final String PROFILE_7G_80GB = "7g.80gb"; + private AcceleratorProfiles() { + + } } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java index d497f1751..a67fbcfd1 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java @@ -37,6 +37,9 @@ import java.util.HashMap; import java.util.List; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.*; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED; + public class DataSourceCollection { private static final Logger LOGGER = LoggerFactory.getLogger(DataSourceCollection.class); private static DataSourceCollection dataSourceCollectionInstance = new DataSourceCollection(); @@ -97,7 +100,7 @@ public void addDataSource(DataSourceInfo datasource) throws DataSourceAlreadyExi if (dataSourceCollection.containsKey(name)) { - throw new DataSourceAlreadyExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_ALREADY_EXIST); + throw new DataSourceAlreadyExist(DATASOURCE_ALREADY_EXIST); } if (provider.equalsIgnoreCase(KruizeConstants.SupportedDatasources.PROMETHEUS)) { @@ -108,14 +111,14 @@ public void addDataSource(DataSourceInfo datasource) throws DataSourceAlreadyExi // add the data source to DB addedToDB = new ExperimentDBService().addDataSourceToDB(datasource); if (addedToDB.isSuccess()) { - LOGGER.info("Datasource added to the DB successfully."); + LOGGER.info(DATASOURCE_ADDED); } else { - LOGGER.error("Failed to add datasource to DB: {}", addedToDB.getMessage()); + LOGGER.error("{}: {}", DATASOURCE_NOT_SERVICEABLE, addedToDB.getMessage()); } dataSourceCollection.put(name, datasource); - LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED); + LOGGER.info(DATASOURCE_ADDED); } else { - throw new DataSourceNotServiceable(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_SERVICEABLE); + throw new DataSourceNotServiceable(DATASOURCE_NOT_SERVICEABLE); } } else { throw new 
UnsupportedDataSourceProvider(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.UNSUPPORTED_DATASOURCE_PROVIDER); @@ -145,11 +148,11 @@ public void addDataSourcesFromConfigFile(String configFileName) throws Unsupport try { DataSourceInfo dataSourceInfo = new ExperimentDBService().loadDataSourceFromDBByName(name); if (null != dataSourceInfo) { - LOGGER.error("Datasource: {} already exists!", name); + LOGGER.error("{} : {}", DATASOURCE_ALREADY_EXIST, name); continue; } } catch (Exception e) { - LOGGER.error("Loading saved datasource {} failed: {} ", name, e.getMessage()); + LOGGER.error(DATASOURCE_DB_LOAD_FAILED, name, e.getMessage()); } String provider = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_PROVIDER); String serviceName = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAME); @@ -161,7 +164,7 @@ public void addDataSourcesFromConfigFile(String configFileName) throws Unsupport // create the corresponding authentication object authConfig = AuthenticationConfig.createAuthenticationConfigObject(authenticationObj); } catch (Exception e) { - LOGGER.warn("Auth details are missing for datasource: {}", name); + LOGGER.warn(DATASOURCE_DB_AUTH_LOAD_FAILED, name, e.getMessage()); authConfig = AuthenticationConfig.noAuth(); } @@ -217,7 +220,7 @@ public boolean validateInput(String name, String provider, String servicename, S * deletes the datasource from the Hashmap * * @param name String containing the name of the datasource to be deleted - * TODO: add db related operations + * TODO: add db related operations */ public void deleteDataSource(String name) throws DataSourceMissingRequiredFiled, DataSourceDoesNotExist { @@ -237,7 +240,7 @@ public void deleteDataSource(String name) throws DataSourceMissingRequiredFiled, * * @param name String containing the name of the datasource to be updated * @param newDataSource DataSourceInfo object with updated values - * TODO: add db related operations + * TODO: add db related operations */ public void updateDataSource(String name, DataSourceInfo newDataSource) throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, DataSourceDoesNotExist { diff --git a/src/main/java/com/autotune/common/datasource/DataSourceManager.java b/src/main/java/com/autotune/common/datasource/DataSourceManager.java index 1a1fc9e37..a8401970c 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceManager.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceManager.java @@ -34,6 +34,9 @@ import java.util.HashMap; import java.util.Map; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.*; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceMetadataSuccessMsgs.METADATA_ADDED; + /** * DataSourceManager is an interface to manage (create and update) metadata * of data sources @@ -150,9 +153,9 @@ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { // add the data source to DB addedToDB = new ExperimentDBService().addMetadataToDB(dataSourceMetadataInfo); if (addedToDB.isSuccess()) { - LOGGER.debug("Metadata added to the DB successfully."); + LOGGER.debug(METADATA_ADDED); } else { - LOGGER.error("Failed to add metadata to DB: {}", addedToDB.getMessage()); + LOGGER.error(LOAD_DATASOURCE_METADATA_TO_DB_ERROR, addedToDB.getMessage()); } } catch (Exception e) { LOGGER.error("Exception occurred 
while adding metadata : {} ", e.getMessage()); @@ -165,11 +168,11 @@ private boolean checkIfDataSourceMetadataExists(String dataSourceName) { try { DataSourceMetadataInfo dataSourceMetadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false"); if (null != dataSourceMetadataInfo) { - LOGGER.error("Metadata already exists for datasource: {}!", dataSourceName); + LOGGER.error(METADATA_EXIST, dataSourceName); isPresent = true; } } catch (Exception e) { - LOGGER.error("Failed to load metadata for the datasource: {}: {} ", dataSourceName, e.getMessage()); + LOGGER.error(METADATA_LOAD_FROM_DB, dataSourceName, e.getMessage()); } return isPresent; } diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java index 0eb900aab..999b61e55 100644 --- a/src/main/java/com/autotune/utils/KruizeConstants.java +++ b/src/main/java/com/autotune/utils/KruizeConstants.java @@ -449,6 +449,8 @@ public static class DataSourceErrorMsgs { public static final String UNSUPPORTED_DATASOURCE_PROVIDER = "Datasource provider is invalid."; public static final String DATASOURCE_NOT_SERVICEABLE = "Datasource is not serviceable."; public static final String DATASOURCE_CONNECTION_FAILED = "Datasource connection refused or timed out."; + public static final String DATASOURCE_DB_LOAD_FAILED = "Loading saved datasource {} details from db failed: {}"; + public static final String DATASOURCE_DB_AUTH_LOAD_FAILED = "Loading datasource {} AUTH details failed: {}"; public static final String DATASOURCE_ALREADY_EXIST = "Datasource with the name already exist."; public static final String DATASOURCE_NOT_EXIST = "Datasource with the name does not exist."; public static final String INVALID_DATASOURCE_URL = "Datasource url is not valid."; @@ -502,7 +504,21 @@ private DataSourceMetadataInfoConstants() { } } + public static class DataSourceMetadataSuccessMsgs { + public static final String METADATA_ADDED = "Metadata added to the DB successfully."; + public static final String DATASOURCE_DELETED = "Successfully deleted datasource: "; + public static final String DATASOURCE_FOUND = "Datasource found: "; + public static final String DATASOURCE_SERVICEABLE = "Datasource is serviceable."; + public static final String DATASOURCE_NOT_SERVICEABLE = "Datasource is not serviceable."; + + private DataSourceMetadataSuccessMsgs() { + + } + } + public static class DataSourceMetadataErrorMsgs { + public static final String METADATA_EXIST = "Metadata already exists for datasource: {}!"; + public static final String METADATA_LOAD_FROM_DB = "Failed to load metadata for the datasource: {}: {} "; public static final String MISSING_DATASOURCE_METADATA_DATASOURCE_NAME = "DataSourceMetadata Datasource name cannot be empty"; public static final String MISSING_DATASOURCE_METADATA_WORKLOAD_MAP = "DataSourceMetadata Workload data cannot be empty or null"; public static final String MISSING_DATASOURCE_METADATA_CONTAINER_MAP = "DataSourceMetadata Container data cannot be empty or null"; @@ -530,6 +546,7 @@ public static class DataSourceMetadataErrorMsgs { public static final String SET_CONTAINER_MAP_ERROR = "containerHashMap is null, no containers provided for workload: "; public static final String SET_NAMESPACE_MAP_ERROR = "namespaceHashMap is null, no namespaces provided for cluster: "; public static final String LOAD_DATASOURCE_FROM_DB_ERROR = "Error loading datasource - %s from DB: %s"; + public static final String LOAD_DATASOURCE_METADATA_TO_DB_ERROR = "Failed to add 
metadata to DB: {}"; public static final String LOAD_DATASOURCE_METADATA_FROM_DB_ERROR = "Error loading datasource - %s from DB: %s"; public static final String DATASOURCE_METADATA_VALIDATION_FAILURE_MSG = "Validation of imported metadata failed, mandatory fields missing: %s"; public static final String NAMESPACE_QUERY_VALIDATION_FAILED = "Validation failed for namespace data query."; From 553a1753289c9f6e58ecdf4ce566546c838987c9 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Tue, 12 Nov 2024 16:13:04 +0530 Subject: [PATCH 07/15] BulkAPI New Json and Error Handling Signed-off-by: msvinaykumar --- .../serviceObjects/BulkJobStatus.java | 277 ++++++++---------- .../analyzer/services/BulkService.java | 77 +++-- .../analyzer/workerimpl/BulkJobManager.java | 190 ++++++------ .../autotune/utils/GenericRestApiClient.java | 90 +++++- .../com/autotune/utils/KruizeConstants.java | 67 +++++ 5 files changed, 410 insertions(+), 291 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java index 1a988808e..926bf04b1 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java @@ -15,76 +15,116 @@ *******************************************************************************/ package com.autotune.analyzer.serviceObjects; +import com.autotune.utils.KruizeConstants; import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.annotation.JsonProperty; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.time.Instant; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashMap; +import java.util.Map; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.JOB_ID; +import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status.UNPROCESSED; /** * Bulk API Response payload Object. 
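Serialized through the jobFilter applied to this class, a job status payload would plausibly look like the following. Values are illustrative, the timestamp format follows the class's UTC formatter, and the job_id key assumes the imported JOB_ID constant resolves to "job_id"; the notification message reuses the DATASOURCE_CONNECTION_FAILED text defined above:

    {
      "job_id": "123e4567-e89b-12d3-a456-426614174000",
      "status": "IN_PROGRESS",
      "total_experiments": 23,
      "processed_experiments": 9,
      "job_start_time": "2024-11-12T10:15:30.000Z",
      "notifications": {
        "503": {
          "type": "error",
          "message": "Datasource connection refused or timed out.",
          "code": 503
        }
      }
    }
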
*/ @JsonFilter("jobFilter") public class BulkJobStatus { + private static final Logger LOGGER = LoggerFactory.getLogger(BulkJobStatus.class); @JsonProperty(JOB_ID) private String jobID; private String status; private int total_experiments; private int processed_experiments; - private Data data; @JsonProperty("job_start_time") private String startTime; // Change to String to store formatted time @JsonProperty("job_end_time") private String endTime; // Change to String to store formatted time - private String message; + private Map notifications; + private Map experiments = Collections.synchronizedMap(new HashMap<>()); - public BulkJobStatus(String jobID, String status, Data data, Instant startTime) { + public BulkJobStatus(String jobID, String status, Instant startTime) { this.jobID = jobID; this.status = status; - this.data = data; setStartTime(startTime); } - public String getJobID() { - return jobID; + + // Method to set a notification in the map + public void setNotification(String key, Notification notification) { + if (notifications == null) { + notifications = new HashMap<>(); // Initialize if null + } + notifications.put(key, notification); } - public String getStatus() { - return status; + public String getJobID() { + return jobID; } - public void setStatus(String status) { - this.status = status; + public void setJobID(String jobID) { + this.jobID = jobID; } public String getStartTime() { return startTime; } - public void setStartTime(Instant startTime) { - this.startTime = formatInstantAsUTCString(startTime); - } - public void setStartTime(String startTime) { this.startTime = startTime; } + public void setStartTime(Instant startTime) { + this.startTime = formatInstantAsUTCString(startTime); + } + public String getEndTime() { return endTime; } + public void setEndTime(String endTime) { + this.endTime = endTime; + } + public void setEndTime(Instant endTime) { this.endTime = formatInstantAsUTCString(endTime); } - public void setEndTime(String endTime) { - this.endTime = endTime; + public Map getNotifications() { + return notifications; + } + + public void setNotifications(Map notifications) { + this.notifications = notifications; + } + + public Map getExperiments() { + return experiments; + } + + public void setExperiments(Map experiments) { + this.experiments = experiments; + } + + // Method to add a new experiment with "unprocessed" status and null notification + public synchronized Experiment addExperiment(String experimentName) { + Experiment experiment = new Experiment(experimentName); + experiments.put(experimentName, experiment); + return experiment; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; } public int getTotal_experiments() { @@ -103,14 +143,6 @@ public void setProcessed_experiments(int processed_experiments) { this.processed_experiments = processed_experiments; } - public Data getData() { - return data; - } - - public void setData(Data data) { - this.data = data; - } - // Utility function to format Instant into the required UTC format private String formatInstantAsUTCString(Instant instant) { DateTimeFormatter formatter = DateTimeFormatter @@ -120,183 +152,108 @@ private String formatInstantAsUTCString(Instant instant) { return formatter.format(instant); } - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - // Inner class for the data field - public static class Data { - private Experiments experiments; - private 
Recommendations recommendations; - - public Data(Experiments experiments, Recommendations recommendations) { - this.experiments = experiments; - this.recommendations = recommendations; - } - public Experiments getExperiments() { - return experiments; - } + public static enum NotificationType { + ERROR("error"), + WARNING("warning"), + INFO("info"); - public void setExperiments(Experiments experiments) { - this.experiments = experiments; - } + private final String type; - public Recommendations getRecommendations() { - return recommendations; + NotificationType(String type) { + this.type = type; } - public void setRecommendations(Recommendations recommendations) { - this.recommendations = recommendations; + public String getType() { + return type; } } - // Inner class for experiments - public static class Experiments { - @JsonProperty("new") - private List newExperiments; - @JsonProperty("updated") - private List updatedExperiments; - @JsonProperty("failed") - private List failedExperiments; - - public Experiments(List newExperiments, List updatedExperiments, List failedExperiments) { - this.newExperiments = newExperiments; - this.updatedExperiments = updatedExperiments; - this.failedExperiments = failedExperiments; - } + public static class Experiment { + private String name; + private Notification notification; // Empty by default + private Recommendation recommendation; - public List getNewExperiments() { - return newExperiments; + public Experiment(String name) { + this.name = name; + this.notification = null; // Start with null notification + this.recommendation = new Recommendation(UNPROCESSED); // Start with unprocessed status } - public void setNewExperiments(List newExperiments) { - this.newExperiments = newExperiments; + // Getters and setters + public Recommendation getRecommendation() { + return recommendation; } - public List getUpdatedExperiments() { - return updatedExperiments; - } - - public void setUpdatedExperiments(List updatedExperiments) { - this.updatedExperiments = updatedExperiments; - } - - public List getFailedExperiments() { - return failedExperiments; - } - - public void setFailedExperiments(List failedExperiments) { - this.failedExperiments = failedExperiments; + public void setNotification(Notification notification) { + this.notification = notification; } } - // Inner class for recommendations - public static class Recommendations { - private RecommendationData data; + public static class Recommendation { + private KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status; + private Notification notification; // Notifications can hold multiple entries - public Recommendations(RecommendationData data) { - this.data = data; + public Recommendation(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status) { + this.status = status; } - public RecommendationData getData() { - return data; - } + // Getters and setters - public void setData(RecommendationData data) { - this.data = data; - } - } - - // Inner class for recommendation data - public static class RecommendationData { - private List processed = Collections.synchronizedList(new ArrayList<>()); - private List processing = Collections.synchronizedList(new ArrayList<>()); - private List unprocessed = Collections.synchronizedList(new ArrayList<>()); - private List failed = Collections.synchronizedList(new ArrayList<>()); - - public RecommendationData(List processed, List processing, List unprocessed, List failed) { - this.processed = processed; - this.processing = processing; - 
this.unprocessed = unprocessed; - this.failed = failed; + public KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status getStatus() { + return status; } - public List getProcessed() { - return processed; + public void setStatus(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status) { + this.status = status; } - public synchronized void setProcessed(List processed) { - this.processed = processed; + public Notification getNotification() { + return notification; } - public List getProcessing() { - return processing; + public void setNotification(Notification notification) { + this.notification = notification; } + } - public synchronized void setProcessing(List processing) { - this.processing = processing; - } + public static class Notification { + private NotificationType type; + private String message; + private int code; - public List getUnprocessed() { - return unprocessed; - } + // Constructor, getters, and setters - public synchronized void setUnprocessed(List unprocessed) { - this.unprocessed = unprocessed; + public Notification(NotificationType type, String message, int code) { + this.type = type; + this.message = message; + this.code = code; } - public List getFailed() { - return failed; + public NotificationType getType() { + return type; } - public synchronized void setFailed(List failed) { - this.failed = failed; + public void setType(NotificationType type) { + this.type = type; } - // Move elements from inqueue to progress - public synchronized void moveToProgress(String element) { - if (unprocessed.contains(element)) { - unprocessed.remove(element); - if (!processing.contains(element)) { - processing.add(element); - } - } + public String getMessage() { + return message; } - // Move elements from progress to completed - public synchronized void moveToCompleted(String element) { - if (processing.contains(element)) { - processing.remove(element); - if (!processed.contains(element)) { - processed.add(element); - } - } + public void setMessage(String message) { + this.message = message; } - // Move elements from progress to failed - public synchronized void moveToFailed(String element) { - if (processing.contains(element)) { - processing.remove(element); - if (!failed.contains(element)) { - failed.add(element); - } - } + public int getCode() { + return code; } - // Calculate the percentage of completion - public int completionPercentage() { - int totalTasks = processed.size() + processing.size() + unprocessed.size() + failed.size(); - if (totalTasks == 0) { - return (int) 0.0; - } - return (int) ((processed.size() * 100.0) / totalTasks); + public void setCode(int code) { + this.code = code; } + } - } } diff --git a/src/main/java/com/autotune/analyzer/services/BulkService.java b/src/main/java/com/autotune/analyzer/services/BulkService.java index dd4bc795f..3ac40f6f5 100644 --- a/src/main/java/com/autotune/analyzer/services/BulkService.java +++ b/src/main/java/com/autotune/analyzer/services/BulkService.java @@ -33,7 +33,6 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.time.Instant; -import java.util.ArrayList; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; @@ -66,38 +65,45 @@ public void init(ServletConfig config) throws ServletException { */ @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { - String jobID = req.getParameter(JOB_ID); - String verboseParam = req.getParameter(VERBOSE); - // If the parameter is not 
provided (null), default it to false - boolean verbose = verboseParam != null && Boolean.parseBoolean(verboseParam); - BulkJobStatus jobDetails = jobStatusMap.get(jobID); - resp.setContentType(JSON_CONTENT_TYPE); - resp.setCharacterEncoding(CHARACTER_ENCODING); - SimpleFilterProvider filters = new SimpleFilterProvider(); + try { + String jobID = req.getParameter(JOB_ID); + String verboseParam = req.getParameter(VERBOSE); + // If the parameter is not provided (null), default it to false + boolean verbose = verboseParam != null && Boolean.parseBoolean(verboseParam); + BulkJobStatus jobDetails; + synchronized (jobStatusMap) { + jobDetails = jobStatusMap.get(jobID); + } + resp.setContentType(JSON_CONTENT_TYPE); + resp.setCharacterEncoding(CHARACTER_ENCODING); + SimpleFilterProvider filters = new SimpleFilterProvider(); - if (jobDetails == null) { - sendErrorResponse( - resp, - null, - HttpServletResponse.SC_NOT_FOUND, - JOB_NOT_FOUND_MSG - ); - } else { - try { - resp.setStatus(HttpServletResponse.SC_OK); - // Return the JSON representation of the JobStatus object - ObjectMapper objectMapper = new ObjectMapper(); - if (!verbose) { - filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAllExcept("data")); - } else { - filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAll()); + if (jobDetails == null) { + sendErrorResponse( + resp, + null, + HttpServletResponse.SC_NOT_FOUND, + JOB_NOT_FOUND_MSG + ); + } else { + try { + resp.setStatus(HttpServletResponse.SC_OK); + // Return the JSON representation of the JobStatus object + ObjectMapper objectMapper = new ObjectMapper(); + if (!verbose) { + filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAllExcept("experiments")); + } else { + filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAll()); + } + objectMapper.setFilterProvider(filters); + String jsonResponse = objectMapper.writeValueAsString(jobDetails); + resp.getWriter().write(jsonResponse); + } catch (Exception e) { + e.printStackTrace(); } - objectMapper.setFilterProvider(filters); - String jsonResponse = objectMapper.writeValueAsString(jobDetails); - resp.getWriter().write(jsonResponse); - } catch (Exception e) { - e.printStackTrace(); } + } catch (Exception e) { + e.printStackTrace(); } } @@ -121,16 +127,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) // Generate a unique jobID String jobID = UUID.randomUUID().toString(); - BulkJobStatus.Data data = new BulkJobStatus.Data( - new BulkJobStatus.Experiments(new ArrayList<>(), new ArrayList<>(), new ArrayList<>()), - new BulkJobStatus.Recommendations(new BulkJobStatus.RecommendationData( - new ArrayList<>(), - new ArrayList<>(), - new ArrayList<>(), - new ArrayList<>() - )) - ); - jobStatusMap.put(jobID, new BulkJobStatus(jobID, IN_PROGRESS, data, Instant.now())); + jobStatusMap.put(jobID, new BulkJobStatus(jobID, IN_PROGRESS, Instant.now())); // Submit the job to be processed asynchronously executorService.submit(new BulkJobManager(jobID, jobStatusMap, payload)); diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index c1d237a0c..c7acfd149 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -31,12 +31,14 @@ import com.autotune.utils.Utils; import com.fasterxml.jackson.core.JsonProcessingException; import com.google.gson.Gson; +import 
org.apache.http.conn.ConnectTimeoutException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.HttpURLConnection; +import java.net.SocketTimeoutException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; @@ -52,6 +54,7 @@ import static com.autotune.operator.KruizeDeploymentInfo.bulk_thread_pool_size; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.*; +import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.NotificationConstants.*; /** @@ -117,113 +120,130 @@ private static Map parseLabelString(String labelString) { @Override public void run() { + BulkJobStatus jobData = jobStatusMap.get(jobID); try { - BulkJobStatus jobData = jobStatusMap.get(jobID); String labelString = getLabels(this.bulkInput.getFilter()); if (null == this.bulkInput.getDatasource()) { this.bulkInput.setDatasource(CREATE_EXPERIMENT_CONFIG_BEAN.getDatasourceName()); } DataSourceMetadataInfo metadataInfo = null; DataSourceManager dataSourceManager = new DataSourceManager(); - DataSourceInfo datasource = CommonUtils.getDataSourceInfo(this.bulkInput.getDatasource()); - JSONObject daterange = processDateRange(this.bulkInput.getTime_range()); - if (null != daterange) - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get("start_time"), (Long) daterange.get("end_time"), (Integer) daterange.get("steps")); - else { - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, 0); + DataSourceInfo datasource = null; + try { + datasource = CommonUtils.getDataSourceInfo(this.bulkInput.getDatasource()); + } catch (Exception e) { + LOGGER.error(e.getMessage()); + e.printStackTrace(); + jobData.setStatus(FAILED); + BulkJobStatus.Notification notification = DATASOURCE_NOT_REG_INFO; + notification.setMessage(String.format(notification.getMessage(), e.getMessage())); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_BAD_REQUEST), notification); } - if (null == metadataInfo) { - jobData.setStatus(COMPLETED); - jobData.setMessage(NOTHING); - } else { - Map createExperimentAPIObjectMap = getExperimentMap(labelString, jobData, metadataInfo, datasource); //Todo Store this map in buffer and use it if BulkAPI pods restarts and support experiment_type - jobData.setTotal_experiments(createExperimentAPIObjectMap.size()); - jobData.setProcessed_experiments(0); - if (jobData.getTotal_experiments() > KruizeDeploymentInfo.BULK_API_LIMIT) { - jobStatusMap.get(jobID).setStatus(FAILED); - jobStatusMap.get(jobID).setMessage(String.format(LIMIT_MESSAGE, KruizeDeploymentInfo.BULK_API_LIMIT)); + if (null != datasource) { + JSONObject daterange = processDateRange(this.bulkInput.getTime_range()); + if (null != daterange) + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get("start_time"), (Long) daterange.get("end_time"), (Integer) daterange.get("steps")); + else { + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, 0); + } + if (null == metadataInfo) { + jobData.setStatus(COMPLETED); + jobData.setEndTime(Instant.now()); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_OK), NOTHING_INFO); } else { - ExecutorService createExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); - ExecutorService generateExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); - for 
(CreateExperimentAPIObject apiObject : createExperimentAPIObjectMap.values()) { - createExecutor.submit(() -> { + Map createExperimentAPIObjectMap = getExperimentMap(labelString, jobData, metadataInfo, datasource); //Todo Store this map in buffer and use it if BulkAPI pods restarts and support experiment_type + jobData.setTotal_experiments(createExperimentAPIObjectMap.size()); + jobData.setProcessed_experiments(0); + if (jobData.getTotal_experiments() > KruizeDeploymentInfo.BULK_API_LIMIT) { + jobData.setStatus(FAILED); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_BAD_REQUEST), LIMIT_INFO); + } else { + ExecutorService createExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); + ExecutorService generateExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); + for (CreateExperimentAPIObject apiObject : createExperimentAPIObjectMap.values()) { String experiment_name = apiObject.getExperimentName(); - BulkJobStatus.Experiments newExperiments = jobData.getData().getExperiments(); - BulkJobStatus.RecommendationData recommendationData = jobData.getData().getRecommendations().getData(); - try { - // send request to createExperiment API for experiment creation - GenericRestApiClient apiClient = new GenericRestApiClient(datasource); - apiClient.setBaseURL(KruizeDeploymentInfo.experiments_url); - int responseCode; - boolean expriment_exists = false; + BulkJobStatus.Experiment experiment = jobData.addExperiment(experiment_name); + DataSourceInfo finalDatasource = datasource; + createExecutor.submit(() -> { try { - responseCode = apiClient.callKruizeAPI("[" + new Gson().toJson(apiObject) + "]"); - LOGGER.debug("API Response code: {}", responseCode); - if (responseCode == HttpURLConnection.HTTP_CREATED) { - newExperiments.setNewExperiments( - appendExperiments(newExperiments.getNewExperiments(), experiment_name)); - expriment_exists = true; - } else if (responseCode == HttpURLConnection.HTTP_CONFLICT) { - expriment_exists = true; - } else { - newExperiments.setFailedExperiments( - appendExperiments(newExperiments.getFailedExperiments(), experiment_name)); + // send request to createExperiment API for experiment creation + GenericRestApiClient apiClient = new GenericRestApiClient(finalDatasource); + apiClient.setBaseURL(KruizeDeploymentInfo.experiments_url); + GenericRestApiClient.HttpResponseWrapper responseCode; + boolean expriment_exists = false; + try { + responseCode = apiClient.callKruizeAPI("[" + new Gson().toJson(apiObject) + "]"); + LOGGER.debug("API Response code: {}", responseCode); + if (responseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) { + expriment_exists = true; + } else if (responseCode.getStatusCode() == HttpURLConnection.HTTP_CONFLICT) { + expriment_exists = true; + } else { + jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); + experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, responseCode.getResponseBody().toString(), responseCode.getStatusCode())); + } + } catch (FetchMetricsError e) { + e.printStackTrace(); jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); + experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST)); } - } catch (FetchMetricsError e) { - e.printStackTrace(); - newExperiments.setFailedExperiments( - appendExperiments(newExperiments.getFailedExperiments(), experiment_name)); - 
jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); - } - - if (expriment_exists) { - recommendationData.setUnprocessed( - appendExperiments(recommendationData.getUnprocessed(), experiment_name) - ); - generateExecutor.submit(() -> { - - // send request to generateRecommendations API - GenericRestApiClient recommendationApiClient = new GenericRestApiClient(datasource); - String encodedExperimentName; - encodedExperimentName = URLEncoder.encode(experiment_name, StandardCharsets.UTF_8); - recommendationApiClient.setBaseURL(String.format(KruizeDeploymentInfo.recommendations_url, encodedExperimentName)); - int recommendationResponseCode = 0; - try { - recommendationData.moveToProgress(experiment_name); - recommendationResponseCode = recommendationApiClient.callKruizeAPI(null); - LOGGER.debug("API Response code: {}", recommendationResponseCode); - } catch (Exception | FetchMetricsError e) { - e.printStackTrace(); - } - if (recommendationResponseCode == HttpURLConnection.HTTP_CREATED) { - recommendationData.moveToCompleted(experiment_name); - jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); - if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { - jobData.setStatus(COMPLETED); - jobStatusMap.get(jobID).setEndTime(Instant.now()); + if (expriment_exists) { + generateExecutor.submit(() -> { + // send request to generateRecommendations API + GenericRestApiClient recommendationApiClient = new GenericRestApiClient(finalDatasource); + String encodedExperimentName; + encodedExperimentName = URLEncoder.encode(experiment_name, StandardCharsets.UTF_8); + recommendationApiClient.setBaseURL(String.format(KruizeDeploymentInfo.recommendations_url, encodedExperimentName)); + GenericRestApiClient.HttpResponseWrapper recommendationResponseCode = null; + try { + recommendationResponseCode = recommendationApiClient.callKruizeAPI(null); + LOGGER.debug("API Response code: {}", recommendationResponseCode); + if (recommendationResponseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) { + jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); + if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { + jobData.setStatus(COMPLETED); + jobData.setEndTime(Instant.now()); + } + experiment.getRecommendation().setStatus(NotificationConstants.Status.PROCESSED); + } else { + experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); + experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, recommendationResponseCode.getResponseBody().toString(), recommendationResponseCode.getStatusCode())); + } + } catch (Exception | FetchMetricsError e) { + e.printStackTrace(); + experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); + experiment.getRecommendation().setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST)); } - - } else { - recommendationData.moveToFailed(experiment_name); - } - }); + }); + } + } catch (Exception e) { + e.printStackTrace(); + experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } - - } catch (Exception e) { - e.printStackTrace(); - recommendationData.moveToFailed(experiment_name); - } - }); + }); + } } } } + } catch (IOException e) { + LOGGER.error(e.getMessage()); + jobData.setStatus("FAILED"); + jobData.setEndTime(Instant.now()); + + if 
(e instanceof SocketTimeoutException) { + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_GATEWAY_TIMEOUT), DATASOURCE_GATEWAY_TIMEOUT_INFO); + } else if (e instanceof ConnectTimeoutException) { + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), DATASOURCE_CONNECT_TIMEOUT_INFO); + } else { + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), DATASOURCE_DOWN_INFO); + } } catch (Exception e) { LOGGER.error(e.getMessage()); e.printStackTrace(); - jobStatusMap.get(jobID).setStatus("FAILED"); - jobStatusMap.get(jobID).setMessage(e.getMessage()); + jobData.setStatus("FAILED"); + jobData.setEndTime(Instant.now()); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_INTERNAL_ERROR), new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } } diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index 45ad05598..cdb19ac94 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -22,6 +22,8 @@ import com.autotune.utils.authModels.APIKeysAuthentication; import com.autotune.utils.authModels.BasicAuthentication; import com.autotune.utils.authModels.BearerAccessToken; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; @@ -37,6 +39,7 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; +import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,6 +60,8 @@ public class GenericRestApiClient { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(GenericRestApiClient.class); + private static final int MAX_RETRIES = 5; + private static final long INITIAL_BACKOFF_MS = 1000; // 1 second private String baseURL; private BasicAuthentication basicAuthentication; private BearerAccessToken bearerAccessToken; @@ -96,10 +101,41 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws // Apply authentication applyAuthentication(httpRequestBase); - LOGGER.debug("Executing Prometheus metrics request: {}", httpRequestBase.getRequestLine()); + LOGGER.info("Executing Prometheus metrics request: {}", httpRequestBase.getRequestLine()); - // Execute the request - jsonResponse = httpclient.execute(httpRequestBase, new StringResponseHandler()); + // Execute the request and get the HttpResponse + HttpResponse response = httpclient.execute(httpRequestBase); + + // Get and print the response code + int responseCode = response.getStatusLine().getStatusCode(); + LOGGER.info("Response code: {}", responseCode); + + // Get the response body if needed + jsonResponse = new StringResponseHandler().handleResponse(response); + LOGGER.info("jsonResponse {}", jsonResponse); + + // Parse the JSON response + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode rootNode = objectMapper.readTree(jsonResponse); + JsonNode resultNode = rootNode.path("data").path("result"); + JsonNode warningsNode = rootNode.path("warnings"); + + // Check if the result is empty and if there are specific warnings + if (resultNode.isArray() && resultNode.size() == 
0) { + LOGGER.info("resultNode is empty"); + for (JsonNode warning : warningsNode) { + String warningMessage = warning.asText(); + LOGGER.info("warnings is {}", warningMessage); + if (warningMessage.contains("error reading from server") || warningMessage.contains("Please reduce your request rate")) { + LOGGER.warn("Warning detected: {}", warningMessage); + throw new IOException(warningMessage); + } else { + LOGGER.info("no warnings detected"); + } + } + } else { + LOGGER.info("resultNode is not empty"); + } } return new JSONObject(jsonResponse); } @@ -136,8 +172,8 @@ private void applyAuthentication(HttpRequestBase httpRequestBase) { * @return API response code * @throws IOException */ - public int callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { - + public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { + HttpResponseWrapper httpResponseWrapper = null; // Create an HTTP client try (CloseableHttpClient httpclient = setupHttpClient()) { // Prepare the HTTP POST request @@ -156,14 +192,30 @@ public int callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmExce // Get the status code from the response int responseCode = response.getStatusLine().getStatusCode(); LOGGER.debug("Response code: {}", responseCode); - return responseCode; + if (response.getEntity() != null) { + // Convert response entity to string + String responseBody = EntityUtils.toString(response.getEntity(), "UTF-8"); + try { + // Attempt to parse as JSON + JSONObject json = new JSONObject(responseBody); + httpResponseWrapper = new HttpResponseWrapper(responseCode, json); + } catch (JSONException e) { + // If JSON parsing fails, return as plain string + httpResponseWrapper = new HttpResponseWrapper(responseCode, responseBody); + } + } } catch (Exception e) { LOGGER.error("Error occurred while calling Kruize API: {}", e.getMessage()); throw new FetchMetricsError(e.getMessage()); } + } catch (Exception e) { + LOGGER.error("Error occurred while calling Kruize API: {}", e.getMessage()); + throw new FetchMetricsError(e.getMessage()); } + return httpResponseWrapper; } + public void setBaseURL(String baseURL) { this.baseURL = baseURL; } @@ -182,4 +234,30 @@ public String handleResponse(HttpResponse response) throws IOException { } + + public class HttpResponseWrapper { + private int statusCode; + private Object responseBody; + + public HttpResponseWrapper(int statusCode, Object responseBody) { + this.statusCode = statusCode; + this.responseBody = responseBody; + } + + public int getStatusCode() { + return statusCode; + } + + public Object getResponseBody() { + return responseBody; + } + + @Override + public String toString() { + return "HttpResponseWrapper{" + + "statusCode=" + statusCode + + ", responseBody=" + responseBody + + '}'; + } + } } diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java index 999b61e55..6a6c936be 100644 --- a/src/main/java/com/autotune/utils/KruizeConstants.java +++ b/src/main/java/com/autotune/utils/KruizeConstants.java @@ -18,6 +18,7 @@ package com.autotune.utils; import com.autotune.analyzer.kruizeObject.CreateExperimentConfigBean; +import com.autotune.analyzer.serviceObjects.BulkJobStatus; import com.autotune.analyzer.utils.AnalyzerConstants; import java.text.SimpleDateFormat; @@ -812,5 +813,71 @@ public static final class 
KRUIZE_BULK_API { CREATE_EXPERIMENT_CONFIG_BEAN.setMeasurementDurationStr("15min"); CREATE_EXPERIMENT_CONFIG_BEAN.setMeasurementDuration(15); } + + public static class NotificationConstants { + + public static final BulkJobStatus.Notification JOB_NOT_FOUND_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.WARNING, + JOB_NOT_FOUND_MSG, + 404 + ); + public static final BulkJobStatus.Notification LIMIT_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.INFO, + LIMIT_MESSAGE, + 400 + ); + public static final BulkJobStatus.Notification NOTHING_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.INFO, + NOTHING, + 400 + ); + public static BulkJobStatus.Notification FETCH_METRIC_FAILURE = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.ERROR, + "Not able to fetch metrics", + 400 + ); + public static BulkJobStatus.Notification DATASOURCE_NOT_REG_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.ERROR, + "Datasource not registered with Kruize. (%s)", + 400 + ); + public static BulkJobStatus.Notification DATASOURCE_DOWN_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.ERROR, + "HttpHostConnectException: Unable to connect to the data source. Please try again later.", + 503 + ); + public static BulkJobStatus.Notification DATASOURCE_GATEWAY_TIMEOUT_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.ERROR, + "SocketTimeoutException: request timed out waiting for a data source response", + 504 + ); + public static BulkJobStatus.Notification DATASOURCE_CONNECT_TIMEOUT_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.ERROR, + "ConnectTimeoutException: cannot establish a data source connection in a given time frame due to connectivity issues", + 503 + ); + + + // More notification constants can be added here as needed + + public enum Status { + PROCESSED("PROCESSED"), + UNPROCESSED("UNPROCESSED"), + PROCESSING("PROCESSING"), + FAILED("FAILED"); + + private final String status; + + Status(String status) { + this.status = status; + } + + public String getStatus() { + return status; + } + } + + + } } } From 64586f92bbf9571b60724b1cbb3ca1cd50ed4d3e Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Tue, 12 Nov 2024 19:30:56 +0530 Subject: [PATCH 08/15] BulkAPI Documentation update Signed-off-by: msvinaykumar --- design/BulkAPI.md | 174 ++++++++++++++++++++++------------------------ 1 file changed, 83 insertions(+), 91 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 82c209d27..2119f8f2a 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -114,73 +114,68 @@ When verbose=true, additional detailed information about the job is provided. 
"status": "IN_PROGRESS", "total_experiments": 23, "processed_experiments": 22, - "data": { - "experiments": { - "new": [ - "prometheus-1|default|monitoring|node-exporter(daemonset)|node-exporter", - "prometheus-1|default|cadvisor|cadvisor(daemonset)|cadvisor", - "prometheus-1|default|monitoring|alertmanager-main(statefulset)|config-reloader", - "prometheus-1|default|monitoring|alertmanager-main(statefulset)|alertmanager", - "prometheus-1|default|monitoring|prometheus-operator(deployment)|kube-rbac-proxy", - "prometheus-1|default|kube-system|coredns(deployment)|coredns", - "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|config-reloader", - "prometheus-1|default|monitoring|blackbox-exporter(deployment)|kube-rbac-proxy", - "prometheus-1|default|monitoring|prometheus-operator(deployment)|prometheus-operator", - "prometheus-1|default|monitoring|node-exporter(daemonset)|kube-rbac-proxy", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-self", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-state-metrics", - "prometheus-1|default|monitoring|kruize(deployment)|kruize", - "prometheus-1|default|monitoring|blackbox-exporter(deployment)|module-configmap-reloader", - "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|prometheus", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-main", - "prometheus-1|default|kube-system|kube-proxy(daemonset)|kube-proxy", - "prometheus-1|default|monitoring|prometheus-adapter(deployment)|prometheus-adapter", - "prometheus-1|default|monitoring|grafana(deployment)|grafana", - "prometheus-1|default|kube-system|kindnet(daemonset)|kindnet-cni", - "prometheus-1|default|monitoring|kruize-db-deployment(deployment)|kruize-db", - "prometheus-1|default|monitoring|blackbox-exporter(deployment)|blackbox-exporter" - ], - "updated": [], - "failed": null + "job_id": "5798a2df-6c67-467b-a3c2-befe634a0e3a", + "job_start_time": "2024-10-09T18:09:31.549Z", + "job_end_time": null, + "experiments": [ + { + "name": "prometheus-1|default|kube-system|coredns(deployment)|coredns", + "notification": {}, + "recommendation": { + "status": "unprocessed", + "notification": {} + } + }, + { + "name": "prometheus-1|default|kube-system|kindnet(deployment)|kindnet-cni", + "notification": {}, + "recommendation": { + "status": "processed", + "notification": {} + } }, - "recommendations": { - "data": { - "processed": [ - "prometheus-1|default|monitoring|alertmanager-main(statefulset)|config-reloader", - "prometheus-1|default|monitoring|node-exporter(daemonset)|node-exporter", - "prometheus-1|default|local-path-storage|local-path-provisioner(deployment)|local-path-provisioner", - "prometheus-1|default|monitoring|alertmanager-main(statefulset)|alertmanager", - "prometheus-1|default|monitoring|prometheus-operator(deployment)|kube-rbac-proxy", - "prometheus-1|default|kube-system|coredns(deployment)|coredns", - "prometheus-1|default|monitoring|blackbox-exporter(deployment)|kube-rbac-proxy", - "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|config-reloader", - "prometheus-1|default|monitoring|prometheus-operator(deployment)|prometheus-operator", - "prometheus-1|default|monitoring|node-exporter(daemonset)|kube-rbac-proxy", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-self", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-state-metrics", - "prometheus-1|default|monitoring|kruize(deployment)|kruize", - 
"prometheus-1|default|monitoring|blackbox-exporter(deployment)|module-configmap-reloader", - "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|prometheus", - "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-main", - "prometheus-1|default|kube-system|kube-proxy(daemonset)|kube-proxy", - "prometheus-1|default|monitoring|prometheus-adapter(deployment)|prometheus-adapter", - "prometheus-1|default|monitoring|grafana(deployment)|grafana", - "prometheus-1|default|kube-system|kindnet(daemonset)|kindnet-cni", - "prometheus-1|default|monitoring|kruize-db-deployment(deployment)|kruize-db", - "prometheus-1|default|monitoring|blackbox-exporter(deployment)|blackbox-exporter" - ], - "processing": [ - "prometheus-1|default|cadvisor|cadvisor(daemonset)|cadvisor" - ], - "unprocessed": [ - ], - "failed": [] + { + "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", + "notification": {}, + "recommendation": { + "status": "processing", + "notification": {} + } + }, + { + "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", + "recommendation": { + "status": "failed", + "notifications": { + "400": { + "type": "error", + "message": "Not able to fetch metrics", + "code": 400 + } + } + } + }, + { + "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", + "notifications": { + "400": { + "type": "error", + "message": "Metric Profile not found", + "code": 400 + } + }, + "recommendation": { + "status": "failed", + "notifications": { + "400": { + "type": "error", + "message": "Not able to fetch metrics", + "code": 400 + } + } } } - }, - "job_id": "5798a2df-6c67-467b-a3c2-befe634a0e3a", - "job_start_time": "2024-10-09T18:09:31.549Z", - "job_end_time": null + ] } ``` @@ -205,40 +200,37 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS - **Type**: `Integer` - **Description**: Number of experiments that have been processed so far. -- **data**: - - **Type**: `Object` - - **Description**: Contains detailed information about the experiments and recommendations being processed. +- **experiments**: + - **Type**: `Array ` + - **Description**: Array of experiment objects, each containing details about individual experiments. + + - Each object in the `experiments` array has the following structure: - - **experiments**: - - **new**: - - **Type**: `Array of Strings` - - **Description**: List of new experiments that have been identified but not yet processed. + | Field | Type | Description | + |-------------------------|--------------|--------------------------------------------------------------------------| + | `name` | `string` | Name of the experiment, typically indicating a service name and deployment context. | + | `notification` | `object` | Notifications specific to this experiment (if any). | + | `recommendation` | `object` | Recommendation status and notifications specific to this experiment. | - - **updated**: - - **Type**: `Array of Strings` - - **Description**: List of experiments that were previously processed but have now been updated. + #### Recommendation Object - - **failed**: - - **Type**: `null or Array` - - **Description**: List of experiments that failed during processing. If no failures, the value is `null`. + The `recommendation` field within each experiment provides information about recommendation processing status and + errors (if any). 
- - **recommendations**: - - **data**: - - **processed**: - - **Type**: `Array of Strings` - - **Description**: List of experiments for which recommendations have already been processed. + | Field | Type | Description | + |-------------------------|--------------|--------------------------------------------------------------------------| + | `status` | `string` | Status of the recommendation (e.g., `"unprocessed"`, `"processed"`, `"processing"`, `"failed"`). | + | `notification` | `object` | Notifications related to recommendation processing. | - - **processing**: - - **Type**: `Array of Strings` - - **Description**: List of experiments that are currently being processed for recommendations. + #### Notification Object - - **unprocessed**: - - **Type**: `Array of Strings` - - **Description**: List of experiments that have not yet been processed for recommendations. + Both the `notification` and `recommendation.notification` fields may contain error messages or warnings as follows: - - **failed**: - - **Type**: `Array of Strings` - - **Description**: List of experiments for which the recommendation process failed. + | Field | Type | Description | + |-------------------------|--------------|----------------------------------------------------------------------------| + | `type` | `string` | Type of notification (e.g., `"info"`,`"error"`, `"warning"`). | + | `message` | `string` | Description of the notification message. | + | `code` | `integer` | HTTP-like code indicating the type of error (e.g., `400` for bad request). | - **job_id**: - **Type**: `String` From 3764424b14964cbee1123aa46053a363a14da0d4 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Tue, 12 Nov 2024 20:40:51 +0530 Subject: [PATCH 09/15] BulkAPI error handling for kruizeAPI's Signed-off-by: msvinaykumar --- .../analyzer/services/BulkService.java | 13 +++++----- .../analyzer/workerimpl/BulkJobManager.java | 25 +++++++++---------- .../autotune/utils/GenericRestApiClient.java | 18 +------------ .../com/autotune/utils/KruizeConstants.java | 16 ++++++------ 4 files changed, 28 insertions(+), 44 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/services/BulkService.java b/src/main/java/com/autotune/analyzer/services/BulkService.java index 3ac40f6f5..6813251f5 100644 --- a/src/main/java/com/autotune/analyzer/services/BulkService.java +++ b/src/main/java/com/autotune/analyzer/services/BulkService.java @@ -49,8 +49,8 @@ public class BulkService extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(BulkService.class); + private static Map jobStatusMap = new ConcurrentHashMap<>(); private ExecutorService executorService = Executors.newFixedThreadPool(10); - private Map jobStatusMap = new ConcurrentHashMap<>(); @Override public void init(ServletConfig config) throws ServletException { @@ -71,9 +71,9 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws Se // If the parameter is not provided (null), default it to false boolean verbose = verboseParam != null && Boolean.parseBoolean(verboseParam); BulkJobStatus jobDetails; - synchronized (jobStatusMap) { - jobDetails = jobStatusMap.get(jobID); - } + LOGGER.info("Job ID: " + jobID); + jobDetails = jobStatusMap.get(jobID); + LOGGER.info("Job Status: " + jobDetails.getStatus()); resp.setContentType(JSON_CONTENT_TYPE); resp.setCharacterEncoding(CHARACTER_ENCODING); SimpleFilterProvider filters = new SimpleFilterProvider(); @@ -127,9 +127,10 @@ protected void 
doPost(HttpServletRequest request, HttpServletResponse response) // Generate a unique jobID String jobID = UUID.randomUUID().toString(); - jobStatusMap.put(jobID, new BulkJobStatus(jobID, IN_PROGRESS, Instant.now())); + BulkJobStatus jobStatus = new BulkJobStatus(jobID, IN_PROGRESS, Instant.now()); + jobStatusMap.put(jobID, jobStatus); // Submit the job to be processed asynchronously - executorService.submit(new BulkJobManager(jobID, jobStatusMap, payload)); + executorService.submit(new BulkJobManager(jobID, jobStatus, payload)); // Just sending a simple success response back // Return the jobID to the user diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index c7acfd149..7bf1cb67e 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -16,7 +16,6 @@ package com.autotune.analyzer.workerimpl; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.kruizeObject.RecommendationSettings; import com.autotune.analyzer.serviceObjects.*; import com.autotune.analyzer.utils.AnalyzerConstants; @@ -86,13 +85,12 @@ public class BulkJobManager implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(BulkJobManager.class); private String jobID; - private Map jobStatusMap; private BulkInput bulkInput; + private BulkJobStatus jobData; - - public BulkJobManager(String jobID, Map jobStatusMap, BulkInput payload) { + public BulkJobManager(String jobID, BulkJobStatus jobData, BulkInput payload) { this.jobID = jobID; - this.jobStatusMap = jobStatusMap; + this.jobData = jobData; this.bulkInput = payload; } @@ -120,7 +118,6 @@ private static Map parseLabelString(String labelString) { @Override public void run() { - BulkJobStatus jobData = jobStatusMap.get(jobID); try { String labelString = getLabels(this.bulkInput.getFilter()); if (null == this.bulkInput.getDatasource()) { @@ -182,7 +179,7 @@ public void run() { jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, responseCode.getResponseBody().toString(), responseCode.getStatusCode())); } - } catch (FetchMetricsError e) { + } catch (Exception e) { e.printStackTrace(); jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST)); @@ -210,10 +207,10 @@ public void run() { experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, recommendationResponseCode.getResponseBody().toString(), recommendationResponseCode.getStatusCode())); } - } catch (Exception | FetchMetricsError e) { + } catch (Exception e) { e.printStackTrace(); experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); - experiment.getRecommendation().setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST)); + experiment.getRecommendation().setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } }); } @@ -230,14 +227,16 @@ public void run() { LOGGER.error(e.getMessage()); 
jobData.setStatus("FAILED"); jobData.setEndTime(Instant.now()); - + BulkJobStatus.Notification notification; if (e instanceof SocketTimeoutException) { - jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_GATEWAY_TIMEOUT), DATASOURCE_GATEWAY_TIMEOUT_INFO); + notification = DATASOURCE_GATEWAY_TIMEOUT_INFO; } else if (e instanceof ConnectTimeoutException) { - jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), DATASOURCE_CONNECT_TIMEOUT_INFO); + notification = DATASOURCE_CONNECT_TIMEOUT_INFO; } else { - jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), DATASOURCE_DOWN_INFO); + notification = DATASOURCE_DOWN_INFO; } + notification.setMessage(String.format(notification.getMessage(), e.getMessage())); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), notification); } catch (Exception e) { LOGGER.error(e.getMessage()); e.printStackTrace(); diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index cdb19ac94..6c0ef9836 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -15,7 +15,6 @@ *******************************************************************************/ package com.autotune.utils; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.common.auth.AuthenticationStrategy; import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.DataSourceInfo; @@ -112,7 +111,6 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws // Get the response body if needed jsonResponse = new StringResponseHandler().handleResponse(response); - LOGGER.info("jsonResponse {}", jsonResponse); // Parse the JSON response ObjectMapper objectMapper = new ObjectMapper(); @@ -122,19 +120,13 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws // Check if the result is empty and if there are specific warnings if (resultNode.isArray() && resultNode.size() == 0) { - LOGGER.info("resultNode is empty"); for (JsonNode warning : warningsNode) { String warningMessage = warning.asText(); - LOGGER.info("warnings is {}", warningMessage); if (warningMessage.contains("error reading from server") || warningMessage.contains("Please reduce your request rate")) { LOGGER.warn("Warning detected: {}", warningMessage); throw new IOException(warningMessage); - } else { - LOGGER.info("no warnings detected"); } } - } else { - LOGGER.info("resultNode is not empty"); } } return new JSONObject(jsonResponse); @@ -172,7 +164,7 @@ private void applyAuthentication(HttpRequestBase httpRequestBase) { * @return API response code * @throws IOException */ - public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { + public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { HttpResponseWrapper httpResponseWrapper = null; // Create an HTTP client try (CloseableHttpClient httpclient = setupHttpClient()) { @@ -180,13 +172,11 @@ public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoS HttpPost httpPost = new HttpPost(baseURL); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("Accept", "application/json"); - // If payload is present, set it in 
the request body if (payload != null) { StringEntity entity = new StringEntity(payload, StandardCharsets.UTF_8); httpPost.setEntity(entity); } - // Execute the request and return the response code try (CloseableHttpResponse response = httpclient.execute(httpPost)) { // Get the status code from the response @@ -204,13 +194,7 @@ public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoS httpResponseWrapper = new HttpResponseWrapper(responseCode, responseBody); } } - } catch (Exception e) { - LOGGER.error("Error occurred while calling Kruize API: {}", e.getMessage()); - throw new FetchMetricsError(e.getMessage()); } - } catch (Exception e) { - LOGGER.error("Error occurred while calling Kruize API: {}", e.getMessage()); - throw new FetchMetricsError(e.getMessage()); } return httpResponseWrapper; } diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java index 6a6c936be..5a6a92914 100644 --- a/src/main/java/com/autotune/utils/KruizeConstants.java +++ b/src/main/java/com/autotune/utils/KruizeConstants.java @@ -831,29 +831,29 @@ public static class NotificationConstants { NOTHING, 400 ); - public static BulkJobStatus.Notification FETCH_METRIC_FAILURE = new BulkJobStatus.Notification( + public static final BulkJobStatus.Notification FETCH_METRIC_FAILURE = new BulkJobStatus.Notification( BulkJobStatus.NotificationType.ERROR, "Not able to fetch metrics", 400 ); - public static BulkJobStatus.Notification DATASOURCE_NOT_REG_INFO = new BulkJobStatus.Notification( + public static final BulkJobStatus.Notification DATASOURCE_NOT_REG_INFO = new BulkJobStatus.Notification( BulkJobStatus.NotificationType.ERROR, "Datasource not registered with Kruize. (%s)", 400 ); - public static BulkJobStatus.Notification DATASOURCE_DOWN_INFO = new BulkJobStatus.Notification( + public static final BulkJobStatus.Notification DATASOURCE_DOWN_INFO = new BulkJobStatus.Notification( BulkJobStatus.NotificationType.ERROR, - "HttpHostConnectException: Unable to connect to the data source. Please try again later.", + "HttpHostConnectException: Unable to connect to the data source. Please try again later. (%s)", 503 ); - public static BulkJobStatus.Notification DATASOURCE_GATEWAY_TIMEOUT_INFO = new BulkJobStatus.Notification( + public static final BulkJobStatus.Notification DATASOURCE_GATEWAY_TIMEOUT_INFO = new BulkJobStatus.Notification( BulkJobStatus.NotificationType.ERROR, - "SocketTimeoutException: request timed out waiting for a data source response", + "SocketTimeoutException: request timed out waiting for a data source response. (%s)", 504 ); - public static BulkJobStatus.Notification DATASOURCE_CONNECT_TIMEOUT_INFO = new BulkJobStatus.Notification( + public static final BulkJobStatus.Notification DATASOURCE_CONNECT_TIMEOUT_INFO = new BulkJobStatus.Notification( BulkJobStatus.NotificationType.ERROR, - "ConnectTimeoutException: cannot establish a data source connection in a given time frame due to connectivity issues", + "ConnectTimeoutException: cannot establish a data source connection in a given time frame due to connectivity issues. 
(%s)", 503 ); From ad98bc98c08b5f51e6f91ea5ae9157baf4bbe8e7 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Wed, 13 Nov 2024 11:36:59 +0530 Subject: [PATCH 10/15] incorporated review comments Signed-off-by: msvinaykumar --- design/BulkAPI.md | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 2119f8f2a..33e41dd90 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -120,26 +120,20 @@ When verbose=true, additional detailed information about the job is provided. "experiments": [ { "name": "prometheus-1|default|kube-system|coredns(deployment)|coredns", - "notification": {}, "recommendation": { - "status": "unprocessed", - "notification": {} + "status": "unprocessed" } }, { "name": "prometheus-1|default|kube-system|kindnet(deployment)|kindnet-cni", - "notification": {}, "recommendation": { - "status": "processed", - "notification": {} + "status": "processed" } }, { "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", - "notification": {}, "recommendation": { - "status": "processing", - "notification": {} + "status": "processing" } }, { @@ -207,7 +201,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS - Each object in the `experiments` array has the following structure: | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `name` | `string` | Name of the experiment, typically indicating a service name and deployment context. | | `notification` | `object` | Notifications specific to this experiment (if any). | | `recommendation` | `object` | Recommendation status and notifications specific to this experiment. | @@ -218,7 +212,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS errors (if any). | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `status` | `string` | Status of the recommendation (e.g., `"unprocessed"`, `"processed"`, `"processing"`, `"failed"`). | | `notification` | `object` | Notifications related to recommendation processing. | @@ -227,7 +221,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS Both the `notification` and `recommendation.notification` fields may contain error messages or warnings as follows: | Field | Type | Description | - |-------------------------|--------------|----------------------------------------------------------------------------| + |-------------------------|--------------|----------------------------------------------------------------------------| | `type` | `string` | Type of notification (e.g., `"info"`,`"error"`, `"warning"`). | | `message` | `string` | Description of the notification message. | | `code` | `integer` | HTTP-like code indicating the type of error (e.g., `400` for bad request). 
| From 69e97369786e8d4cbde6f948b1653a69fe2f238c Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Wed, 13 Nov 2024 11:51:23 +0530 Subject: [PATCH 11/15] incorporated review comments Signed-off-by: msvinaykumar --- design/BulkAPI.md | 29 +++++++++++++++++-- .../serviceObjects/BulkJobStatus.java | 16 +++++----- .../analyzer/workerimpl/BulkJobManager.java | 2 +- 3 files changed, 35 insertions(+), 12 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 33e41dd90..dd429ce29 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -109,6 +109,8 @@ GET /bulk?job_id=123e4567-e89b-12d3-a456-426614174000&verbose=true **Body (JSON):** When verbose=true, additional detailed information about the job is provided. +example 1: + ```json { "status": "IN_PROGRESS", @@ -173,6 +175,27 @@ When verbose=true, additional detailed information about the job is provided. } ``` +example 2: + +```json +{ + "status": "FAILED", + "total_experiments": 0, + "processed_experiments": 0, + "notifications": { + "503": { + "type": "ERROR", + "message": "HttpHostConnectException: Unable to connect to the data source. Please try again later. (receive series from Addr: 10.96.192.138:10901 LabelSets: {prometheus=\"monitoring/k8stage\", prometheus_replica=\"prometheus-k8stage-0\"},{prometheus=\"monitoring/k8stage\", prometheus_replica=\"prometheus-k8stage-1\"},{replica=\"thanos-ruler-0\", ruler_cluster=\"\"} MinTime: 1730222825216 MaxTime: 1731412800000: rpc error: code = Unknown desc = receive series from 01JBV2JN5SVN84D3HD5MVSGN3A: load chunks: get range reader: Please reduce your request rate)", + "code": 503 + } + }, + "job_id": "270fa4d9-2701-4ca0-b056-74229cc28498", + "job_start_time": "2024-11-12T15:05:46.362Z", + "job_end_time": "2024-11-12T15:06:05.301Z" +} + +``` + ### Response Parameters ## API Description: Experiment and Recommendation Processing Status @@ -201,7 +224,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS - Each object in the `experiments` array has the following structure: | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `name` | `string` | Name of the experiment, typically indicating a service name and deployment context. | | `notification` | `object` | Notifications specific to this experiment (if any). | | `recommendation` | `object` | Recommendation status and notifications specific to this experiment. | @@ -212,7 +235,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS errors (if any). | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `status` | `string` | Status of the recommendation (e.g., `"unprocessed"`, `"processed"`, `"processing"`, `"failed"`). | | `notification` | `object` | Notifications related to recommendation processing. | @@ -221,7 +244,7 @@ resource optimization in Kubernetes environments. 
Below is a breakdown of the JS Both the `notification` and `recommendation.notification` fields may contain error messages or warnings as follows: | Field | Type | Description | - |-------------------------|--------------|----------------------------------------------------------------------------| + |-------------------------|--------------|----------------------------------------------------------------------------| | `type` | `string` | Type of notification (e.g., `"info"`,`"error"`, `"warning"`). | | `message` | `string` | Description of the notification message. | | `code` | `integer` | HTTP-like code indicating the type of error (e.g., `400` for bad request). | diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java index 926bf04b1..4b6851a10 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java @@ -58,10 +58,10 @@ public BulkJobStatus(String jobID, String status, Instant startTime) { // Method to set a notification in the map public void setNotification(String key, Notification notification) { - if (notifications == null) { - notifications = new HashMap<>(); // Initialize if null + if (this.notifications == null) { + this.notifications = new HashMap<>(); // Initialize if null } - notifications.put(key, notification); + this.notifications.put(key, notification); } public String getJobID() { @@ -192,7 +192,7 @@ public void setNotification(Notification notification) { public static class Recommendation { private KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status; - private Notification notification; // Notifications can hold multiple entries + private Notification notifications; // Notifications can hold multiple entries public Recommendation(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status) { this.status = status; @@ -208,12 +208,12 @@ public void setStatus(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Stat this.status = status; } - public Notification getNotification() { - return notification; + public Notification getNotifications() { + return notifications; } - public void setNotification(Notification notification) { - this.notification = notification; + public void setNotifications(Notification notifications) { + this.notifications = notifications; } } diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index 7bf1cb67e..ae3604b3d 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -210,7 +210,7 @@ public void run() { } catch (Exception e) { e.printStackTrace(); experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); - experiment.getRecommendation().setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); + experiment.getRecommendation().setNotifications(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } }); } From 1c2b235f5928c60fecbfb971ecb0990c3a7131fe Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Wed, 13 Nov 2024 16:56:27 +0530 Subject: [PATCH 12/15] Considering failed experiments too for JOb status Signed-off-by: msvinaykumar --- 
.../analyzer/workerimpl/BulkJobManager.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index ae3604b3d..a23a36ed3 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -183,6 +183,11 @@ public void run() { e.printStackTrace(); jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST)); + } finally { + if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { + jobData.setStatus(COMPLETED); + jobData.setEndTime(Instant.now()); + } } if (expriment_exists) { @@ -198,10 +203,6 @@ public void run() { LOGGER.debug("API Response code: {}", recommendationResponseCode); if (recommendationResponseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) { jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); - if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { - jobData.setStatus(COMPLETED); - jobData.setEndTime(Instant.now()); - } experiment.getRecommendation().setStatus(NotificationConstants.Status.PROCESSED); } else { experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); @@ -211,12 +212,19 @@ public void run() { e.printStackTrace(); experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); experiment.getRecommendation().setNotifications(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); + } finally { + if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { + jobData.setStatus(COMPLETED); + jobData.setEndTime(Instant.now()); + } } }); } } catch (Exception e) { e.printStackTrace(); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); + } finally { + } }); } From ce9b45cdbe76b80f41b841689b9863025fbeb53f Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Thu, 14 Nov 2024 11:55:27 +0530 Subject: [PATCH 13/15] incorporated review comments Signed-off-by: msvinaykumar --- design/BulkAPI.md | 23 +++++++------------ .../serviceObjects/BulkJobStatus.java | 8 +++---- .../analyzer/workerimpl/BulkJobManager.java | 8 +++---- 3 files changed, 16 insertions(+), 23 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index dd429ce29..7fe7daa59 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -122,25 +122,25 @@ example 1: "experiments": [ { "name": "prometheus-1|default|kube-system|coredns(deployment)|coredns", - "recommendation": { + "recommendations": { "status": "unprocessed" } }, { "name": "prometheus-1|default|kube-system|kindnet(deployment)|kindnet-cni", - "recommendation": { + "recommendations": { "status": "processed" } }, { "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", - "recommendation": { + "recommendations": { "status": "processing" } }, { "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", - "recommendation": { + "recommendations": { "status": "failed", "notifications": { "400": { @@ -153,14 +153,7 @@ example 1: }, { "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize", - "notifications": { - 
"400": { - "type": "error", - "message": "Metric Profile not found", - "code": 400 - } - }, - "recommendation": { + "recommendations": { "status": "failed", "notifications": { "400": { @@ -224,7 +217,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS - Each object in the `experiments` array has the following structure: | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `name` | `string` | Name of the experiment, typically indicating a service name and deployment context. | | `notification` | `object` | Notifications specific to this experiment (if any). | | `recommendation` | `object` | Recommendation status and notifications specific to this experiment. | @@ -235,7 +228,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS errors (if any). | Field | Type | Description | - |-------------------------|--------------|--------------------------------------------------------------------------| + |-------------------------|--------------|--------------------------------------------------------------------------| | `status` | `string` | Status of the recommendation (e.g., `"unprocessed"`, `"processed"`, `"processing"`, `"failed"`). | | `notification` | `object` | Notifications related to recommendation processing. | @@ -244,7 +237,7 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS Both the `notification` and `recommendation.notification` fields may contain error messages or warnings as follows: | Field | Type | Description | - |-------------------------|--------------|----------------------------------------------------------------------------| + |-------------------------|--------------|----------------------------------------------------------------------------| | `type` | `string` | Type of notification (e.g., `"info"`,`"error"`, `"warning"`). | | `message` | `string` | Description of the notification message. | | `code` | `integer` | HTTP-like code indicating the type of error (e.g., `400` for bad request). 
| diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java index 4b6851a10..16da934bb 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java @@ -172,17 +172,17 @@ public String getType() { public static class Experiment { private String name; private Notification notification; // Empty by default - private Recommendation recommendation; + private Recommendation recommendations; public Experiment(String name) { this.name = name; this.notification = null; // Start with null notification - this.recommendation = new Recommendation(UNPROCESSED); // Start with unprocessed status + this.recommendations = new Recommendation(UNPROCESSED); // Start with unprocessed status } // Getters and setters - public Recommendation getRecommendation() { - return recommendation; + public Recommendation getRecommendations() { + return recommendations; } public void setNotification(Notification notification) { diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index a23a36ed3..6c8ebffb7 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -203,15 +203,15 @@ public void run() { LOGGER.debug("API Response code: {}", recommendationResponseCode); if (recommendationResponseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) { jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1); - experiment.getRecommendation().setStatus(NotificationConstants.Status.PROCESSED); + experiment.getRecommendations().setStatus(NotificationConstants.Status.PROCESSED); } else { - experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); + experiment.getRecommendations().setStatus(NotificationConstants.Status.FAILED); experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, recommendationResponseCode.getResponseBody().toString(), recommendationResponseCode.getStatusCode())); } } catch (Exception e) { e.printStackTrace(); - experiment.getRecommendation().setStatus(NotificationConstants.Status.FAILED); - experiment.getRecommendation().setNotifications(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); + experiment.getRecommendations().setStatus(NotificationConstants.Status.FAILED); + experiment.getRecommendations().setNotifications(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } finally { if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) { jobData.setStatus(COMPLETED); From 1e12474651ec3c25e2f68c3bd14c577033e7bcf7 Mon Sep 17 00:00:00 2001 From: msvinaykumar Date: Thu, 14 Nov 2024 12:02:11 +0530 Subject: [PATCH 14/15] incorporated review comments Signed-off-by: msvinaykumar --- src/main/java/com/autotune/utils/GenericRestApiClient.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index 6c0ef9836..42e1edf59 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -59,8 
+59,6 @@ public class GenericRestApiClient { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(GenericRestApiClient.class); - private static final int MAX_RETRIES = 5; - private static final long INITIAL_BACKOFF_MS = 1000; // 1 second private String baseURL; private BasicAuthentication basicAuthentication; private BearerAccessToken bearerAccessToken; @@ -100,14 +98,14 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws // Apply authentication applyAuthentication(httpRequestBase); - LOGGER.info("Executing Prometheus metrics request: {}", httpRequestBase.getRequestLine()); + LOGGER.debug("Executing Prometheus metrics request: {}", httpRequestBase.getRequestLine()); // Execute the request and get the HttpResponse HttpResponse response = httpclient.execute(httpRequestBase); // Get and print the response code int responseCode = response.getStatusLine().getStatusCode(); - LOGGER.info("Response code: {}", responseCode); + LOGGER.debug("Response code: {}", responseCode); // Get the response body if needed jsonResponse = new StringResponseHandler().handleResponse(response); From 9fc55035b9990bfbbb281e8153f1be514fd7069d Mon Sep 17 00:00:00 2001 From: Chandrakala Subramanyam Date: Thu, 14 Nov 2024 12:43:53 +0530 Subject: [PATCH 15/15] Updated ubi version to 9.5 Signed-off-by: Chandrakala Subramanyam --- Dockerfile.autotune | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.autotune b/Dockerfile.autotune index 032882440..cabb9c367 100644 --- a/Dockerfile.autotune +++ b/Dockerfile.autotune @@ -16,7 +16,7 @@ ########################################################## # Build Docker Image ########################################################## -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 as mvnbuild-jdk21 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 as mvnbuild-jdk21 ARG USER=autotune ARG AUTOTUNE_HOME=/home/$USER @@ -48,7 +48,7 @@ RUN jlink --strip-debug --compress 2 --no-header-files --no-man-pages --module-p # Runtime Docker Image ########################################################## # Use ubi-minimal as the base image -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 ARG AUTOTUNE_VERSION ARG USER=autotune