diff --git a/.github/workflows/test-on-pr.yaml b/.github/workflows/test-on-pr.yaml index 0453557e7..2b1d04445 100644 --- a/.github/workflows/test-on-pr.yaml +++ b/.github/workflows/test-on-pr.yaml @@ -13,68 +13,8 @@ on: workflow_dispatch: jobs: - # This workflow contains two jobs called "build autotune" and "build crc" - build_autotune: - # The type of runner that the job will run on - runs-on: ubuntu-20.04 - - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 - - name: Setup Minikube - uses: manusa/actions-setup-minikube@v2.3.0 - with: - minikube version: 'v1.16.0' - kubernetes version: 'v1.19.2' - - name: Build autotune - run: | - echo Build autotune - pr_number=${{ github.event.pull_request.number }} - echo "pr_number=${pr_number}" >> "$GITHUB_ENV" - ./build.sh -i autotune_operator:pr_${pr_number} - docker images | grep autotune - - name: Check cluster info on minikube - run: | - kubectl cluster-info - kubectl get pods -n kube-system - - name: Install Prometheus on minikube - run: | - echo Install Prometheus on minikube - cd scripts - ./prometheus_on_minikube.sh -as - - name: Deploy kruize in experiment mode - run: | - echo Deploy kruize in experiment mode - cp ./manifests/autotune/autotune-operator-deployment.yaml_template ./manifests/autotune/autotune-operator-deployment.yaml_template.old - sed -e "s/imagePullPolicy: Always/imagePullPolicy: IfNotPresent/g" ./manifests/autotune/autotune-operator-deployment.yaml_template.old > ./manifests/autotune/autotune-operator-deployment.yaml_template - echo "***************************************************************" - cat ./manifests/autotune/autotune-operator-deployment.yaml_template - echo "***************************************************************" - - echo "PR_NUMBER = ${{ env.pr_number }}" - ./deploy.sh -c minikube -i autotune_operator:pr_${{ env.pr_number }} - - sleep 20 - - name: Capture ffdc logs - if: always() - run: | - echo "Capturing ffdc logs" - ./scripts/ffdc.sh -d ${GITHUB_WORKSPACE} - - name: Archive results - if: always() - run: | - cd ${GITHUB_WORKSPACE} - tar cvf autotune_results.tar kruize_*log.txt - - - name: Upload results - if: always() - uses: actions/upload-artifact@v3 - with: - name: autotune-results - path: ./autotune_results.tar - retention-days: 2 - + # This workflow builds the kruize image and runs an end-to-end test to validate the remote monitoring workflow build_crc: # The type of runner that the job will run on runs-on: ubuntu-20.04 diff --git a/Dockerfile.autotune b/Dockerfile.autotune index 032882440..cabb9c367 100644 --- a/Dockerfile.autotune +++ b/Dockerfile.autotune @@ -16,7 +16,7 @@ ########################################################## # Build Docker Image ########################################################## -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 as mvnbuild-jdk21 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 as mvnbuild-jdk21 ARG USER=autotune ARG AUTOTUNE_HOME=/home/$USER @@ -48,7 +48,7 @@ RUN jlink --strip-debug --compress 2 --no-header-files --no-man-pages --module-p # Runtime Docker Image ########################################################## # Use ubi-minimal as the base image -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 ARG AUTOTUNE_VERSION ARG USER=autotune diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 82c209d27..7fe7daa59 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -109,79 +109,84 @@ GET 
/bulk?job_id=123e4567-e89b-12d3-a456-426614174000&verbose=true

**Body (JSON):**
When `verbose=true`, additional detailed information about the job is provided.

+Example 1:
+
```json
{
  "status": "IN_PROGRESS",
  "total_experiments": 23,
  "processed_experiments": 22,
-  "data": {
-    "experiments": {
-      "new": [
-        "prometheus-1|default|monitoring|node-exporter(daemonset)|node-exporter",
-        "prometheus-1|default|cadvisor|cadvisor(daemonset)|cadvisor",
-        "prometheus-1|default|monitoring|alertmanager-main(statefulset)|config-reloader",
-        "prometheus-1|default|monitoring|alertmanager-main(statefulset)|alertmanager",
-        "prometheus-1|default|monitoring|prometheus-operator(deployment)|kube-rbac-proxy",
-        "prometheus-1|default|kube-system|coredns(deployment)|coredns",
-        "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|config-reloader",
-        "prometheus-1|default|monitoring|blackbox-exporter(deployment)|kube-rbac-proxy",
-        "prometheus-1|default|monitoring|prometheus-operator(deployment)|prometheus-operator",
-        "prometheus-1|default|monitoring|node-exporter(daemonset)|kube-rbac-proxy",
-        "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-self",
-        "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-state-metrics",
-        "prometheus-1|default|monitoring|kruize(deployment)|kruize",
-        "prometheus-1|default|monitoring|blackbox-exporter(deployment)|module-configmap-reloader",
-        "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|prometheus",
-        "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-main",
-        "prometheus-1|default|kube-system|kube-proxy(daemonset)|kube-proxy",
-        "prometheus-1|default|monitoring|prometheus-adapter(deployment)|prometheus-adapter",
-        "prometheus-1|default|monitoring|grafana(deployment)|grafana",
-        "prometheus-1|default|kube-system|kindnet(daemonset)|kindnet-cni",
-        "prometheus-1|default|monitoring|kruize-db-deployment(deployment)|kruize-db",
-        "prometheus-1|default|monitoring|blackbox-exporter(deployment)|blackbox-exporter"
-      ],
-      "updated": [],
-      "failed": null
+  "job_id": "5798a2df-6c67-467b-a3c2-befe634a0e3a",
+  "job_start_time": "2024-10-09T18:09:31.549Z",
+  "job_end_time": null,
+  "experiments": [
+    {
+      "name": "prometheus-1|default|kube-system|coredns(deployment)|coredns",
+      "recommendations": {
+        "status": "unprocessed"
+      }
    },
-    "recommendations": {
-      "data": {
-        "processed": [
-          "prometheus-1|default|monitoring|alertmanager-main(statefulset)|config-reloader",
-          "prometheus-1|default|monitoring|node-exporter(daemonset)|node-exporter",
-          "prometheus-1|default|local-path-storage|local-path-provisioner(deployment)|local-path-provisioner",
-          "prometheus-1|default|monitoring|alertmanager-main(statefulset)|alertmanager",
-          "prometheus-1|default|monitoring|prometheus-operator(deployment)|kube-rbac-proxy",
-          "prometheus-1|default|kube-system|coredns(deployment)|coredns",
-          "prometheus-1|default|monitoring|blackbox-exporter(deployment)|kube-rbac-proxy",
-          "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|config-reloader",
-          "prometheus-1|default|monitoring|prometheus-operator(deployment)|prometheus-operator",
-          "prometheus-1|default|monitoring|node-exporter(daemonset)|kube-rbac-proxy",
-          "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-self",
-          "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-state-metrics",
-          "prometheus-1|default|monitoring|kruize(deployment)|kruize",
-          "prometheus-1|default|monitoring|blackbox-exporter(deployment)|module-configmap-reloader",
-          "prometheus-1|default|monitoring|prometheus-k8s(statefulset)|prometheus",
-          "prometheus-1|default|monitoring|kube-state-metrics(deployment)|kube-rbac-proxy-main",
-          "prometheus-1|default|kube-system|kube-proxy(daemonset)|kube-proxy",
-          "prometheus-1|default|monitoring|prometheus-adapter(deployment)|prometheus-adapter",
-          "prometheus-1|default|monitoring|grafana(deployment)|grafana",
-          "prometheus-1|default|kube-system|kindnet(daemonset)|kindnet-cni",
-          "prometheus-1|default|monitoring|kruize-db-deployment(deployment)|kruize-db",
-          "prometheus-1|default|monitoring|blackbox-exporter(deployment)|blackbox-exporter"
-        ],
-        "processing": [
-          "prometheus-1|default|cadvisor|cadvisor(daemonset)|cadvisor"
-        ],
-        "unprocessed": [
-        ],
-        "failed": []
+    {
+      "name": "prometheus-1|default|kube-system|kindnet(daemonset)|kindnet-cni",
+      "recommendations": {
+        "status": "processed"
+      }
+    },
+    {
+      "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize",
+      "recommendations": {
+        "status": "processing"
+      }
+    },
+    {
+      "name": "prometheus-1|default|monitoring|kruize(deployment)|kruize",
+      "recommendations": {
+        "status": "failed",
+        "notifications": {
+          "400": {
+            "type": "error",
+            "message": "Not able to fetch metrics",
+            "code": 400
+          }
+        }
      }
    }
+  ]
+}
+```
+
+Example 2:
+
+```json
+{
+  "status": "FAILED",
+  "total_experiments": 0,
+  "processed_experiments": 0,
+  "notifications": {
+    "503": {
+      "type": "ERROR",
+      "message": "HttpHostConnectException: Unable to connect to the data source. Please try again later. (receive series from Addr: 10.96.192.138:10901 LabelSets: {prometheus=\"monitoring/k8stage\", prometheus_replica=\"prometheus-k8stage-0\"},{prometheus=\"monitoring/k8stage\", prometheus_replica=\"prometheus-k8stage-1\"},{replica=\"thanos-ruler-0\", ruler_cluster=\"\"} MinTime: 1730222825216 MaxTime: 1731412800000: rpc error: code = Unknown desc = receive series from 01JBV2JN5SVN84D3HD5MVSGN3A: load chunks: get range reader: Please reduce your request rate)",
+      "code": 503
+    }
  },
-  "job_id": "5798a2df-6c67-467b-a3c2-befe634a0e3a",
-  "job_start_time": "2024-10-09T18:09:31.549Z",
-  "job_end_time": null
+  "job_id": "270fa4d9-2701-4ca0-b056-74229cc28498",
+  "job_start_time": "2024-11-12T15:05:46.362Z",
+  "job_end_time": "2024-11-12T15:06:05.301Z"
}
+
```

### Response Parameters

@@ -205,40 +210,37 @@ resource optimization in Kubernetes environments. Below is a breakdown of the JS
  - **Type**: `Integer`
  - **Description**: Number of experiments that have been processed so far.

-- **data**:
-  - **Type**: `Object`
-  - **Description**: Contains detailed information about the experiments and recommendations being processed.
+- **experiments**:
+  - **Type**: `Array`
+  - **Description**: Array of experiment objects, each containing details about individual experiments.
+
+  - Each object in the `experiments` array has the following structure:

-  - **experiments**:
-    - **new**:
-      - **Type**: `Array of Strings`
-      - **Description**: List of new experiments that have been identified but not yet processed.
+    | Field             | Type     | Description                                                                          |
+    |-------------------|----------|--------------------------------------------------------------------------------------|
+    | `name`            | `string` | Name of the experiment, typically indicating a service name and deployment context.  |
+    | `notification`    | `object` | Notifications specific to this experiment (if any).                                  |
+    | `recommendations` | `object` | Recommendation status and notifications specific to this experiment.                 |

-    - **updated**:
-      - **Type**: `Array of Strings`
-      - **Description**: List of experiments that were previously processed but have now been updated.
+  #### Recommendation Object

-    - **failed**:
-      - **Type**: `null or Array`
-      - **Description**: List of experiments that failed during processing. If no failures, the value is `null`.
+  The `recommendations` field within each experiment provides information about recommendation processing status and
+  errors (if any).

-  - **recommendations**:
-    - **data**:
-      - **processed**:
-        - **Type**: `Array of Strings`
-        - **Description**: List of experiments for which recommendations have already been processed.
+    | Field           | Type     | Description                                                                                      |
+    |-----------------|----------|--------------------------------------------------------------------------------------------------|
+    | `status`        | `string` | Status of the recommendation (e.g., `"unprocessed"`, `"processed"`, `"processing"`, `"failed"`). |
+    | `notifications` | `object` | Notifications related to recommendation processing.                                              |

-      - **processing**:
-        - **Type**: `Array of Strings`
-        - **Description**: List of experiments that are currently being processed for recommendations.
+  #### Notification Object

-      - **unprocessed**:
-        - **Type**: `Array of Strings`
-        - **Description**: List of experiments that have not yet been processed for recommendations.
+  Both the experiment-level `notification` and the `recommendations.notifications` fields may contain error messages or warnings as follows:

-      - **failed**:
-        - **Type**: `Array of Strings`
-        - **Description**: List of experiments for which the recommendation process failed.
+    | Field     | Type      | Description                                                                  |
+    |-----------|-----------|------------------------------------------------------------------------------|
+    | `type`    | `string`  | Type of notification (e.g., `"info"`, `"error"`, `"warning"`).               |
+    | `message` | `string`  | Description of the notification message.                                     |
+    | `code`    | `integer` | HTTP-like code indicating the type of error (e.g., `400` for bad request). 
| - **job_id**: - **Type**: `String` diff --git a/pom.xml b/pom.xml index 76c84e658..908096302 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ 4.13.2 20240303 - 9.4.55.v20240627 + 10.0.24 2.17.1 17 0.14.1 diff --git a/src/main/java/com/autotune/Autotune.java b/src/main/java/com/autotune/Autotune.java index ad6ad9834..cd5725b81 100644 --- a/src/main/java/com/autotune/Autotune.java +++ b/src/main/java/com/autotune/Autotune.java @@ -24,6 +24,9 @@ import com.autotune.analyzer.utils.AnalyzerConstants; import com.autotune.common.datasource.DataSourceCollection; import com.autotune.common.datasource.DataSourceInfo; +import com.autotune.common.exceptions.datasource.DataSourceAlreadyExist; +import com.autotune.common.exceptions.datasource.DataSourceNotServiceable; +import com.autotune.common.exceptions.datasource.UnsupportedDataSourceProvider; import com.autotune.database.helper.DBConstants; import com.autotune.database.init.KruizeHibernateUtil; import com.autotune.experimentManager.core.ExperimentManager; @@ -31,7 +34,10 @@ import com.autotune.operator.KruizeDeploymentInfo; import com.autotune.service.HealthService; import com.autotune.service.InitiateListener; -import com.autotune.utils.*; +import com.autotune.utils.CloudWatchAppender; +import com.autotune.utils.KruizeConstants; +import com.autotune.utils.MetricsConfig; +import com.autotune.utils.ServerContext; import com.autotune.utils.filter.KruizeCORSFilter; import io.prometheus.client.exporter.MetricsServlet; import io.prometheus.client.hotspot.DefaultExports; @@ -50,12 +56,17 @@ import javax.servlet.DispatcherType; import java.io.File; +import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.EnumSet; import java.util.HashMap; import java.util.Scanner; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_CONNECTION_FAILED; import static com.autotune.utils.ServerContext.*; public class Autotune { @@ -112,7 +123,11 @@ public static void main(String[] args) { // load available datasources from db loadDataSourcesFromDB(); // setting up DataSources - setUpDataSources(); + try { + setUpDataSources(); + } catch (Exception e) { + LOGGER.error(DATASOURCE_CONNECTION_FAILED, e.getMessage()); + } // checking available DataSources checkAvailableDataSources(); // load available metric profiles from db @@ -124,7 +139,7 @@ public static void main(String[] args) { //Regenerate a Hibernate session following the creation of new tables KruizeHibernateUtil.buildSessionFactory(); } catch (Exception | K8sTypeNotSupportedException | MonitoringAgentNotSupportedException | - MonitoringAgentNotFoundException e) { + MonitoringAgentNotFoundException e) { e.printStackTrace(); System.exit(1); } @@ -170,7 +185,7 @@ public static void main(String[] args) { /** * Set up the data sources available at installation time from config file */ - private static void setUpDataSources() { + private static void setUpDataSources() throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { DataSourceCollection dataSourceCollection = DataSourceCollection.getInstance(); dataSourceCollection.addDataSourcesFromConfigFile(KruizeConstants.CONFIG_FILE); } @@ -190,7 +205,7 @@ private static void checkAvailableDataSources() { DataSourceCollection 
dataSourceCollection = DataSourceCollection.getInstance(); LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.CHECKING_AVAILABLE_DATASOURCE); HashMap dataSources = dataSourceCollection.getDataSourcesCollection(); - for (String name: dataSources.keySet()) { + for (String name : dataSources.keySet()) { DataSourceInfo dataSource = dataSources.get(name); String dataSourceName = dataSource.getName(); String url = dataSource.getUrl().toString(); diff --git a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java index f157550a4..ebb74fe7c 100644 --- a/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java +++ b/src/main/java/com/autotune/analyzer/recommendations/engine/RecommendationEngine.java @@ -2062,7 +2062,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz dataSourceInfo.getUrl(), URLEncoder.encode(queryToEncode, CHARACTER_ENCODING) ); - LOGGER.info(dateMetricsUrl); + LOGGER.debug(dateMetricsUrl); client.setBaseURL(dateMetricsUrl); JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); @@ -2163,7 +2163,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz interval_start_time_epoc, interval_end_time_epoc, measurementDurationMinutesInDouble.intValue() * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE); - LOGGER.info(podMetricsUrl); + LOGGER.debug(podMetricsUrl); client.setBaseURL(podMetricsUrl); JSONObject genericJsonObject = client.fetchMetricsJson(KruizeConstants.APIMessages.GET, ""); JsonObject jsonObject = new Gson().fromJson(genericJsonObject.toString(), JsonObject.class); diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java index 1a988808e..16da934bb 100644 --- a/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java +++ b/src/main/java/com/autotune/analyzer/serviceObjects/BulkJobStatus.java @@ -15,76 +15,116 @@ *******************************************************************************/ package com.autotune.analyzer.serviceObjects; +import com.autotune.utils.KruizeConstants; import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.annotation.JsonProperty; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.time.Instant; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import java.util.HashMap; +import java.util.Map; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.JOB_ID; +import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status.UNPROCESSED; /** * Bulk API Response payload Object. 
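 *
 * A quick illustrative usage sketch (an assumption-based example, not code from this PR;
 * it only uses the constructor and helpers defined in this class, and the experiment
 * name is a made-up placeholder):
 * <pre>
 *   BulkJobStatus job = new BulkJobStatus(UUID.randomUUID().toString(), "IN_PROGRESS", Instant.now());
 *   BulkJobStatus.Experiment exp = job.addExperiment("prometheus-1|default|demo|demo-app(deployment)|demo-app");
 *   exp.getRecommendations().setStatus(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status.PROCESSED);
 *   job.setProcessed_experiments(job.getProcessed_experiments() + 1);
 * </pre>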
*/ @JsonFilter("jobFilter") public class BulkJobStatus { + private static final Logger LOGGER = LoggerFactory.getLogger(BulkJobStatus.class); @JsonProperty(JOB_ID) private String jobID; private String status; private int total_experiments; private int processed_experiments; - private Data data; @JsonProperty("job_start_time") private String startTime; // Change to String to store formatted time @JsonProperty("job_end_time") private String endTime; // Change to String to store formatted time - private String message; + private Map notifications; + private Map experiments = Collections.synchronizedMap(new HashMap<>()); - public BulkJobStatus(String jobID, String status, Data data, Instant startTime) { + public BulkJobStatus(String jobID, String status, Instant startTime) { this.jobID = jobID; this.status = status; - this.data = data; setStartTime(startTime); } - public String getJobID() { - return jobID; + + // Method to set a notification in the map + public void setNotification(String key, Notification notification) { + if (this.notifications == null) { + this.notifications = new HashMap<>(); // Initialize if null + } + this.notifications.put(key, notification); } - public String getStatus() { - return status; + public String getJobID() { + return jobID; } - public void setStatus(String status) { - this.status = status; + public void setJobID(String jobID) { + this.jobID = jobID; } public String getStartTime() { return startTime; } - public void setStartTime(Instant startTime) { - this.startTime = formatInstantAsUTCString(startTime); - } - public void setStartTime(String startTime) { this.startTime = startTime; } + public void setStartTime(Instant startTime) { + this.startTime = formatInstantAsUTCString(startTime); + } + public String getEndTime() { return endTime; } + public void setEndTime(String endTime) { + this.endTime = endTime; + } + public void setEndTime(Instant endTime) { this.endTime = formatInstantAsUTCString(endTime); } - public void setEndTime(String endTime) { - this.endTime = endTime; + public Map getNotifications() { + return notifications; + } + + public void setNotifications(Map notifications) { + this.notifications = notifications; + } + + public Map getExperiments() { + return experiments; + } + + public void setExperiments(Map experiments) { + this.experiments = experiments; + } + + // Method to add a new experiment with "unprocessed" status and null notification + public synchronized Experiment addExperiment(String experimentName) { + Experiment experiment = new Experiment(experimentName); + experiments.put(experimentName, experiment); + return experiment; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; } public int getTotal_experiments() { @@ -103,14 +143,6 @@ public void setProcessed_experiments(int processed_experiments) { this.processed_experiments = processed_experiments; } - public Data getData() { - return data; - } - - public void setData(Data data) { - this.data = data; - } - // Utility function to format Instant into the required UTC format private String formatInstantAsUTCString(Instant instant) { DateTimeFormatter formatter = DateTimeFormatter @@ -120,183 +152,108 @@ private String formatInstantAsUTCString(Instant instant) { return formatter.format(instant); } - public String getMessage() { - return message; - } - public void setMessage(String message) { - this.message = message; - } + public static enum NotificationType { + ERROR("error"), + WARNING("warning"), + INFO("info"); - 
// Inner class for the data field - public static class Data { - private Experiments experiments; - private Recommendations recommendations; + private final String type; - public Data(Experiments experiments, Recommendations recommendations) { - this.experiments = experiments; - this.recommendations = recommendations; - } - - public Experiments getExperiments() { - return experiments; - } - - public void setExperiments(Experiments experiments) { - this.experiments = experiments; - } - - public Recommendations getRecommendations() { - return recommendations; + NotificationType(String type) { + this.type = type; } - public void setRecommendations(Recommendations recommendations) { - this.recommendations = recommendations; + public String getType() { + return type; } } - // Inner class for experiments - public static class Experiments { - @JsonProperty("new") - private List newExperiments; - @JsonProperty("updated") - private List updatedExperiments; - @JsonProperty("failed") - private List failedExperiments; - - public Experiments(List newExperiments, List updatedExperiments, List failedExperiments) { - this.newExperiments = newExperiments; - this.updatedExperiments = updatedExperiments; - this.failedExperiments = failedExperiments; - } - - public List getNewExperiments() { - return newExperiments; - } - - public void setNewExperiments(List newExperiments) { - this.newExperiments = newExperiments; - } + public static class Experiment { + private String name; + private Notification notification; // Empty by default + private Recommendation recommendations; - public List getUpdatedExperiments() { - return updatedExperiments; + public Experiment(String name) { + this.name = name; + this.notification = null; // Start with null notification + this.recommendations = new Recommendation(UNPROCESSED); // Start with unprocessed status } - public void setUpdatedExperiments(List updatedExperiments) { - this.updatedExperiments = updatedExperiments; - } - - public List getFailedExperiments() { - return failedExperiments; + // Getters and setters + public Recommendation getRecommendations() { + return recommendations; } - public void setFailedExperiments(List failedExperiments) { - this.failedExperiments = failedExperiments; + public void setNotification(Notification notification) { + this.notification = notification; } } - // Inner class for recommendations - public static class Recommendations { - private RecommendationData data; + public static class Recommendation { + private KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status; + private Notification notifications; // Notifications can hold multiple entries - public Recommendations(RecommendationData data) { - this.data = data; + public Recommendation(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status) { + this.status = status; } - public RecommendationData getData() { - return data; - } + // Getters and setters - public void setData(RecommendationData data) { - this.data = data; - } - } - - // Inner class for recommendation data - public static class RecommendationData { - private List processed = Collections.synchronizedList(new ArrayList<>()); - private List processing = Collections.synchronizedList(new ArrayList<>()); - private List unprocessed = Collections.synchronizedList(new ArrayList<>()); - private List failed = Collections.synchronizedList(new ArrayList<>()); - - public RecommendationData(List processed, List processing, List unprocessed, List failed) { - this.processed = processed; - this.processing = 
processing; - this.unprocessed = unprocessed; - this.failed = failed; + public KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status getStatus() { + return status; } - public List getProcessed() { - return processed; + public void setStatus(KruizeConstants.KRUIZE_BULK_API.NotificationConstants.Status status) { + this.status = status; } - public synchronized void setProcessed(List processed) { - this.processed = processed; + public Notification getNotifications() { + return notifications; } - public List getProcessing() { - return processing; + public void setNotifications(Notification notifications) { + this.notifications = notifications; } + } - public synchronized void setProcessing(List processing) { - this.processing = processing; - } + public static class Notification { + private NotificationType type; + private String message; + private int code; - public List getUnprocessed() { - return unprocessed; - } + // Constructor, getters, and setters - public synchronized void setUnprocessed(List unprocessed) { - this.unprocessed = unprocessed; + public Notification(NotificationType type, String message, int code) { + this.type = type; + this.message = message; + this.code = code; } - public List getFailed() { - return failed; + public NotificationType getType() { + return type; } - public synchronized void setFailed(List failed) { - this.failed = failed; + public void setType(NotificationType type) { + this.type = type; } - // Move elements from inqueue to progress - public synchronized void moveToProgress(String element) { - if (unprocessed.contains(element)) { - unprocessed.remove(element); - if (!processing.contains(element)) { - processing.add(element); - } - } + public String getMessage() { + return message; } - // Move elements from progress to completed - public synchronized void moveToCompleted(String element) { - if (processing.contains(element)) { - processing.remove(element); - if (!processed.contains(element)) { - processed.add(element); - } - } + public void setMessage(String message) { + this.message = message; } - // Move elements from progress to failed - public synchronized void moveToFailed(String element) { - if (processing.contains(element)) { - processing.remove(element); - if (!failed.contains(element)) { - failed.add(element); - } - } + public int getCode() { + return code; } - // Calculate the percentage of completion - public int completionPercentage() { - int totalTasks = processed.size() + processing.size() + unprocessed.size() + failed.size(); - if (totalTasks == 0) { - return (int) 0.0; - } - return (int) ((processed.size() * 100.0) / totalTasks); + public void setCode(int code) { + this.code = code; } + } - } } diff --git a/src/main/java/com/autotune/analyzer/services/BulkService.java b/src/main/java/com/autotune/analyzer/services/BulkService.java index dd4bc795f..6813251f5 100644 --- a/src/main/java/com/autotune/analyzer/services/BulkService.java +++ b/src/main/java/com/autotune/analyzer/services/BulkService.java @@ -33,7 +33,6 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.time.Instant; -import java.util.ArrayList; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; @@ -50,8 +49,8 @@ public class BulkService extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(BulkService.class); + private static Map jobStatusMap = new ConcurrentHashMap<>(); private ExecutorService executorService = 
Executors.newFixedThreadPool(10);
-    private Map jobStatusMap = new ConcurrentHashMap<>();

     @Override
     public void init(ServletConfig config) throws ServletException {
@@ -66,38 +65,45 @@ public void init(ServletConfig config) throws ServletException {
      */
     @Override
     protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
-        String jobID = req.getParameter(JOB_ID);
-        String verboseParam = req.getParameter(VERBOSE);
-        // If the parameter is not provided (null), default it to false
-        boolean verbose = verboseParam != null && Boolean.parseBoolean(verboseParam);
-        BulkJobStatus jobDetails = jobStatusMap.get(jobID);
-        resp.setContentType(JSON_CONTENT_TYPE);
-        resp.setCharacterEncoding(CHARACTER_ENCODING);
-        SimpleFilterProvider filters = new SimpleFilterProvider();
+        try {
+            String jobID = req.getParameter(JOB_ID);
+            String verboseParam = req.getParameter(VERBOSE);
+            // If the parameter is not provided (null), default it to false
+            boolean verbose = verboseParam != null && Boolean.parseBoolean(verboseParam);
+            LOGGER.info("Job ID: " + jobID);
+            BulkJobStatus jobDetails = jobStatusMap.get(jobID);
+            // Guard the status log: jobDetails is null for unknown job IDs, and that
+            // case is reported via sendErrorResponse below instead.
+            if (null != jobDetails) {
+                LOGGER.info("Job Status: " + jobDetails.getStatus());
+            }
+            resp.setContentType(JSON_CONTENT_TYPE);
+            resp.setCharacterEncoding(CHARACTER_ENCODING);
+            SimpleFilterProvider filters = new SimpleFilterProvider();

-        if (jobDetails == null) {
-            sendErrorResponse(
-                    resp,
-                    null,
-                    HttpServletResponse.SC_NOT_FOUND,
-                    JOB_NOT_FOUND_MSG
-            );
-        } else {
-            try {
-                resp.setStatus(HttpServletResponse.SC_OK);
-                // Return the JSON representation of the JobStatus object
-                ObjectMapper objectMapper = new ObjectMapper();
-                if (!verbose) {
-                    filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAllExcept("data"));
-                } else {
-                    filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAll());
+            if (jobDetails == null) {
+                sendErrorResponse(
+                        resp,
+                        null,
+                        HttpServletResponse.SC_NOT_FOUND,
+                        JOB_NOT_FOUND_MSG
+                );
+            } else {
+                try {
+                    resp.setStatus(HttpServletResponse.SC_OK);
+                    // Return the JSON representation of the JobStatus object
+                    ObjectMapper objectMapper = new ObjectMapper();
+                    if (!verbose) {
+                        filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAllExcept("experiments"));
+                    } else {
+                        filters.addFilter("jobFilter", SimpleBeanPropertyFilter.serializeAll());
+                    }
+                    objectMapper.setFilterProvider(filters);
+                    String jsonResponse = objectMapper.writeValueAsString(jobDetails);
+                    resp.getWriter().write(jsonResponse);
+                } catch (Exception e) {
+                    e.printStackTrace();
                 }
-                objectMapper.setFilterProvider(filters);
-                String jsonResponse = objectMapper.writeValueAsString(jobDetails);
-                resp.getWriter().write(jsonResponse);
-            } catch (Exception e) {
-                e.printStackTrace();
             }
+        } catch (Exception e) {
+            e.printStackTrace();
         }
     }

@@ -121,18 +127,10 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)

         // Generate a unique jobID
         String jobID = UUID.randomUUID().toString();
-        BulkJobStatus.Data data = new BulkJobStatus.Data(
-                new BulkJobStatus.Experiments(new ArrayList<>(), new ArrayList<>(), new ArrayList<>()),
-                new BulkJobStatus.Recommendations(new BulkJobStatus.RecommendationData(
-                        new ArrayList<>(),
-                        new ArrayList<>(),
-                        new ArrayList<>(),
-                        new ArrayList<>()
-                ))
-        );
-        jobStatusMap.put(jobID, new BulkJobStatus(jobID, IN_PROGRESS, data, Instant.now()));
+        BulkJobStatus jobStatus = new BulkJobStatus(jobID, IN_PROGRESS, Instant.now());
+        jobStatusMap.put(jobID, jobStatus);

        // Submit the job to be processed
asynchronously - executorService.submit(new BulkJobManager(jobID, jobStatusMap, payload)); + executorService.submit(new BulkJobManager(jobID, jobStatus, payload)); // Just sending a simple success response back // Return the jobID to the user diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java index 740bb859a..f7ae69c9f 100644 --- a/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java +++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerConstants.java @@ -92,6 +92,7 @@ public class AnalyzerConstants { public static final String VERSION = "version"; public static final String DATASOURCE_NAME = "dataSourceName"; + private AnalyzerConstants() { } @@ -241,27 +242,23 @@ private AcceleratorConstants() { } public static final class AcceleratorMetricConstants { + public static final int TIMESTAMP_RANGE_CHECK_IN_MINUTES = 5; + private AcceleratorMetricConstants() { } - - public static final int TIMESTAMP_RANGE_CHECK_IN_MINUTES = 5; } public static final class SupportedAccelerators { - private SupportedAccelerators() { - - } public static final String A100_80_GB = "A100-80GB"; public static final String A100_40_GB = "A100-40GB"; public static final String H100_80_GB = "H100-80GB"; - } - - public static final class AcceleratorProfiles { - private AcceleratorProfiles () { + private SupportedAccelerators() { } + } + public static final class AcceleratorProfiles { // A100 40GB Profiles public static final String PROFILE_1G_5GB = "1g.5gb"; public static final String PROFILE_1G_10GB = "1g.10gb"; @@ -269,13 +266,15 @@ private AcceleratorProfiles () { public static final String PROFILE_3G_20GB = "3g.20gb"; public static final String PROFILE_4G_20GB = "4g.20gb"; public static final String PROFILE_7G_40GB = "7g.40gb"; - // A100 80GB & H100 80GB Profiles public static final String PROFILE_1G_20GB = "1g.20gb"; public static final String PROFILE_2G_20GB = "2g.20gb"; public static final String PROFILE_3G_40GB = "3g.40gb"; public static final String PROFILE_4G_40GB = "4g.40gb"; public static final String PROFILE_7G_80GB = "7g.80gb"; + private AcceleratorProfiles() { + + } } } diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index c1d237a0c..6c8ebffb7 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -16,7 +16,6 @@ package com.autotune.analyzer.workerimpl; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.kruizeObject.RecommendationSettings; import com.autotune.analyzer.serviceObjects.*; import com.autotune.analyzer.utils.AnalyzerConstants; @@ -31,12 +30,14 @@ import com.autotune.utils.Utils; import com.fasterxml.jackson.core.JsonProcessingException; import com.google.gson.Gson; +import org.apache.http.conn.ConnectTimeoutException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.HttpURLConnection; +import java.net.SocketTimeoutException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; @@ -52,6 +53,7 @@ import static com.autotune.operator.KruizeDeploymentInfo.bulk_thread_pool_size; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.*; +import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.NotificationConstants.*; /** 
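 * BulkJobManager submits one create-experiment task and one generate-recommendations
 * task per discovered workload, updating the shared BulkJobStatus as each completes.
 * An illustrative sketch of the per-experiment bookkeeping introduced by this PR
 * (names come from the hunks below; this is a distillation, not a verbatim excerpt,
 * and experimentName/recommendationApiClient stand in for values computed in the loop):
 * <pre>
 *   BulkJobStatus.Experiment experiment = jobData.addExperiment(experimentName);
 *   GenericRestApiClient.HttpResponseWrapper res = recommendationApiClient.callKruizeAPI(null);
 *   if (res.getStatusCode() == HttpURLConnection.HTTP_CREATED) {
 *       // success: count it and flip the per-experiment recommendation status
 *       jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
 *       experiment.getRecommendations().setStatus(NotificationConstants.Status.PROCESSED);
 *   } else {
 *       // failure: mark it failed and attach an error notification for the verbose API view
 *       experiment.getRecommendations().setStatus(NotificationConstants.Status.FAILED);
 *       experiment.setNotification(new BulkJobStatus.Notification(
 *               BulkJobStatus.NotificationType.ERROR,
 *               res.getResponseBody().toString(), res.getStatusCode()));
 *   }
 * </pre>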
@@ -83,13 +85,12 @@ public class BulkJobManager implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(BulkJobManager.class); private String jobID; - private Map jobStatusMap; private BulkInput bulkInput; + private BulkJobStatus jobData; - - public BulkJobManager(String jobID, Map jobStatusMap, BulkInput payload) { + public BulkJobManager(String jobID, BulkJobStatus jobData, BulkInput payload) { this.jobID = jobID; - this.jobStatusMap = jobStatusMap; + this.jobData = jobData; this.bulkInput = payload; } @@ -118,112 +119,138 @@ private static Map parseLabelString(String labelString) { @Override public void run() { try { - BulkJobStatus jobData = jobStatusMap.get(jobID); String labelString = getLabels(this.bulkInput.getFilter()); if (null == this.bulkInput.getDatasource()) { this.bulkInput.setDatasource(CREATE_EXPERIMENT_CONFIG_BEAN.getDatasourceName()); } DataSourceMetadataInfo metadataInfo = null; DataSourceManager dataSourceManager = new DataSourceManager(); - DataSourceInfo datasource = CommonUtils.getDataSourceInfo(this.bulkInput.getDatasource()); - JSONObject daterange = processDateRange(this.bulkInput.getTime_range()); - if (null != daterange) - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get("start_time"), (Long) daterange.get("end_time"), (Integer) daterange.get("steps")); - else { - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, 0); + DataSourceInfo datasource = null; + try { + datasource = CommonUtils.getDataSourceInfo(this.bulkInput.getDatasource()); + } catch (Exception e) { + LOGGER.error(e.getMessage()); + e.printStackTrace(); + jobData.setStatus(FAILED); + BulkJobStatus.Notification notification = DATASOURCE_NOT_REG_INFO; + notification.setMessage(String.format(notification.getMessage(), e.getMessage())); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_BAD_REQUEST), notification); } - if (null == metadataInfo) { - jobData.setStatus(COMPLETED); - jobData.setMessage(NOTHING); - } else { - Map createExperimentAPIObjectMap = getExperimentMap(labelString, jobData, metadataInfo, datasource); //Todo Store this map in buffer and use it if BulkAPI pods restarts and support experiment_type - jobData.setTotal_experiments(createExperimentAPIObjectMap.size()); - jobData.setProcessed_experiments(0); - if (jobData.getTotal_experiments() > KruizeDeploymentInfo.BULK_API_LIMIT) { - jobStatusMap.get(jobID).setStatus(FAILED); - jobStatusMap.get(jobID).setMessage(String.format(LIMIT_MESSAGE, KruizeDeploymentInfo.BULK_API_LIMIT)); + if (null != datasource) { + JSONObject daterange = processDateRange(this.bulkInput.getTime_range()); + if (null != daterange) + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get("start_time"), (Long) daterange.get("end_time"), (Integer) daterange.get("steps")); + else { + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, 0); + } + if (null == metadataInfo) { + jobData.setStatus(COMPLETED); + jobData.setEndTime(Instant.now()); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_OK), NOTHING_INFO); } else { - ExecutorService createExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); - ExecutorService generateExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size); - for (CreateExperimentAPIObject apiObject : createExperimentAPIObjectMap.values()) { - createExecutor.submit(() -> { + Map 
createExperimentAPIObjectMap = getExperimentMap(labelString, jobData, metadataInfo, datasource); //Todo Store this map in buffer and use it if BulkAPI pods restarts and support experiment_type
+                    jobData.setTotal_experiments(createExperimentAPIObjectMap.size());
+                    jobData.setProcessed_experiments(0);
+                    if (jobData.getTotal_experiments() > KruizeDeploymentInfo.BULK_API_LIMIT) {
+                        jobData.setStatus(FAILED);
+                        jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_BAD_REQUEST), LIMIT_INFO);
+                    } else {
+                        ExecutorService createExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size);
+                        ExecutorService generateExecutor = Executors.newFixedThreadPool(bulk_thread_pool_size);
+                        for (CreateExperimentAPIObject apiObject : createExperimentAPIObjectMap.values()) {
                            String experiment_name = apiObject.getExperimentName();
-                        BulkJobStatus.Experiments newExperiments = jobData.getData().getExperiments();
-                        BulkJobStatus.RecommendationData recommendationData = jobData.getData().getRecommendations().getData();
                            try {
-                            // send request to createExperiment API for experiment creation
-                            GenericRestApiClient apiClient = new GenericRestApiClient(datasource);
-                            apiClient.setBaseURL(KruizeDeploymentInfo.experiments_url);
-                            int responseCode;
-                            boolean expriment_exists = false;
+                            BulkJobStatus.Experiment experiment = jobData.addExperiment(experiment_name);
+                            DataSourceInfo finalDatasource = datasource;
+                            createExecutor.submit(() -> {
                                try {
-                                responseCode = apiClient.callKruizeAPI("[" + new Gson().toJson(apiObject) + "]");
-                                LOGGER.debug("API Response code: {}", responseCode);
-                                if (responseCode == HttpURLConnection.HTTP_CREATED) {
-                                    newExperiments.setNewExperiments(
-                                            appendExperiments(newExperiments.getNewExperiments(), experiment_name));
-                                    expriment_exists = true;
-                                } else if (responseCode == HttpURLConnection.HTTP_CONFLICT) {
-                                    expriment_exists = true;
-                                } else {
-                                    newExperiments.setFailedExperiments(
-                                            appendExperiments(newExperiments.getFailedExperiments(), experiment_name));
+                                    // send request to createExperiment API for experiment creation
+                                    GenericRestApiClient apiClient = new GenericRestApiClient(finalDatasource);
+                                    apiClient.setBaseURL(KruizeDeploymentInfo.experiments_url);
+                                    GenericRestApiClient.HttpResponseWrapper responseCode;
+                                    boolean experiment_exists = false;
+                                    try {
+                                        responseCode = apiClient.callKruizeAPI("[" + new Gson().toJson(apiObject) + "]");
+                                        LOGGER.debug("API Response code: {}", responseCode);
+                                        if (responseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) {
+                                            experiment_exists = true;
+                                        } else if (responseCode.getStatusCode() == HttpURLConnection.HTTP_CONFLICT) {
+                                            experiment_exists = true;
+                                        } else {
+                                            jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
+                                            experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, responseCode.getResponseBody().toString(), responseCode.getStatusCode()));
+                                        }
+                                    } catch (Exception e) {
+                                        e.printStackTrace();
                                        jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
-                                }
-                            } catch (FetchMetricsError e) {
-                                e.printStackTrace();
-                                newExperiments.setFailedExperiments(
-                                        appendExperiments(newExperiments.getFailedExperiments(), experiment_name));
-                                jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
-                            }
-
-                            if (expriment_exists) {
-                                recommendationData.setUnprocessed(
-                                        appendExperiments(recommendationData.getUnprocessed(), experiment_name)
-                                );
-                                generateExecutor.submit(() -> {
-
-                                    // send request to generateRecommendations API
-                                    GenericRestApiClient recommendationApiClient = new GenericRestApiClient(datasource);
-                                    String encodedExperimentName;
-                                    encodedExperimentName = URLEncoder.encode(experiment_name, StandardCharsets.UTF_8);
-                                    recommendationApiClient.setBaseURL(String.format(KruizeDeploymentInfo.recommendations_url, encodedExperimentName));
-                                    int recommendationResponseCode = 0;
-                                    try {
-                                        recommendationData.moveToProgress(experiment_name);
-                                        recommendationResponseCode = recommendationApiClient.callKruizeAPI(null);
-                                        LOGGER.debug("API Response code: {}", recommendationResponseCode);
-                                    } catch (Exception | FetchMetricsError e) {
-                                        e.printStackTrace();
+                                        experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_BAD_REQUEST));
+                                    } finally {
+                                        if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) {
+                                            jobData.setStatus(COMPLETED);
+                                            jobData.setEndTime(Instant.now());
                                        }
-                                    if (recommendationResponseCode == HttpURLConnection.HTTP_CREATED) {
-                                        recommendationData.moveToCompleted(experiment_name);
-                                        jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
+                                    }
-                                        if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) {
-                                            jobData.setStatus(COMPLETED);
-                                            jobStatusMap.get(jobID).setEndTime(Instant.now());
+                                    if (experiment_exists) {
+                                        generateExecutor.submit(() -> {
+                                            // send request to generateRecommendations API
+                                            GenericRestApiClient recommendationApiClient = new GenericRestApiClient(finalDatasource);
+                                            String encodedExperimentName;
+                                            encodedExperimentName = URLEncoder.encode(experiment_name, StandardCharsets.UTF_8);
+                                            recommendationApiClient.setBaseURL(String.format(KruizeDeploymentInfo.recommendations_url, encodedExperimentName));
+                                            GenericRestApiClient.HttpResponseWrapper recommendationResponseCode = null;
+                                            try {
+                                                recommendationResponseCode = recommendationApiClient.callKruizeAPI(null);
+                                                LOGGER.debug("API Response code: {}", recommendationResponseCode);
+                                                if (recommendationResponseCode.getStatusCode() == HttpURLConnection.HTTP_CREATED) {
+                                                    jobData.setProcessed_experiments(jobData.getProcessed_experiments() + 1);
+                                                    experiment.getRecommendations().setStatus(NotificationConstants.Status.PROCESSED);
+                                                } else {
+                                                    experiment.getRecommendations().setStatus(NotificationConstants.Status.FAILED);
+                                                    experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, recommendationResponseCode.getResponseBody().toString(), recommendationResponseCode.getStatusCode()));
+                                                }
+                                            } catch (Exception e) {
+                                                e.printStackTrace();
+                                                experiment.getRecommendations().setStatus(NotificationConstants.Status.FAILED);
+                                                experiment.getRecommendations().setNotifications(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR));
+                                            } finally {
+                                                if (jobData.getTotal_experiments() == jobData.getProcessed_experiments()) {
+                                                    jobData.setStatus(COMPLETED);
+                                                    jobData.setEndTime(Instant.now());
+                                                }
                                            }
+                                        });
+                                    }
                                } catch (Exception e) {
+                                    e.printStackTrace();
+                                    experiment.setNotification(new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR));
+                                } finally {
-                                    } else {
-                                        recommendationData.moveToFailed(experiment_name);
-                                    }
-                                });
                                }
-
-                        } catch (Exception e) {
-                            e.printStackTrace();
-                            recommendationData.moveToFailed(experiment_name);
-                        }
-                    });
+                            });
+                        }
                    }
                }
            }
+        } catch (IOException e) {
+            LOGGER.error(e.getMessage());
+            jobData.setStatus("FAILED");
+            jobData.setEndTime(Instant.now());
+
BulkJobStatus.Notification notification; + if (e instanceof SocketTimeoutException) { + notification = DATASOURCE_GATEWAY_TIMEOUT_INFO; + } else if (e instanceof ConnectTimeoutException) { + notification = DATASOURCE_CONNECT_TIMEOUT_INFO; + } else { + notification = DATASOURCE_DOWN_INFO; + } + notification.setMessage(String.format(notification.getMessage(), e.getMessage())); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_UNAVAILABLE), notification); } catch (Exception e) { LOGGER.error(e.getMessage()); e.printStackTrace(); - jobStatusMap.get(jobID).setStatus("FAILED"); - jobStatusMap.get(jobID).setMessage(e.getMessage()); + jobData.setStatus("FAILED"); + jobData.setEndTime(Instant.now()); + jobData.setNotification(String.valueOf(HttpURLConnection.HTTP_INTERNAL_ERROR), new BulkJobStatus.Notification(BulkJobStatus.NotificationType.ERROR, e.getMessage(), HttpURLConnection.HTTP_INTERNAL_ERROR)); } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java index f5f3b2c49..9daf555a3 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceCollection.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceCollection.java @@ -16,8 +16,8 @@ package com.autotune.common.datasource; import com.autotune.common.auth.AuthenticationConfig; -import com.autotune.common.exceptions.datasource.*; import com.autotune.common.data.ValidationOutputData; +import com.autotune.common.exceptions.datasource.*; import com.autotune.common.utils.CommonUtils; import com.autotune.database.service.ExperimentDBService; import com.autotune.utils.KruizeConstants; @@ -31,9 +31,15 @@ import java.io.InputStream; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.List; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.*; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED; + public class DataSourceCollection { private static final Logger LOGGER = LoggerFactory.getLogger(DataSourceCollection.class); private static DataSourceCollection dataSourceCollectionInstance = new DataSourceCollection(); @@ -43,13 +49,22 @@ private DataSourceCollection() { this.dataSourceCollection = new HashMap<>(); } + /** + * Returns the instance of dataSourceCollection class + * + * @return DataSourceCollection instance + */ + public static DataSourceCollection getInstance() { + return dataSourceCollectionInstance; + } + public void loadDataSourcesFromDB() { try { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.CHECKING_AVAILABLE_DATASOURCE_FROM_DB); List availableDataSources = new ExperimentDBService().loadAllDataSources(); if (null == availableDataSources) { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.NO_DATASOURCE_FOUND_IN_DB); - }else { + } else { for (DataSourceInfo dataSourceInfo : availableDataSources) { LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_FOUND + dataSourceInfo.getName()); dataSourceCollection.put(dataSourceInfo.getName(), dataSourceInfo); @@ -61,16 +76,10 @@ public void loadDataSourcesFromDB() { } } - /** - * Returns the instance of dataSourceCollection class - * @return DataSourceCollection instance - */ - public static DataSourceCollection 
getInstance() {
-        return dataSourceCollectionInstance;
-    }

    /**
     * Returns the hashmap of data sources
+     *
     * @return HashMap containing dataSourceInfo objects
     */
    public HashMap getDataSourcesCollection() {
@@ -79,102 +88,105 @@ public HashMap getDataSourcesCollection() {

    /**
     * Adds datasource to collection
+     *
     * @param datasource DataSourceInfo object containing details of datasource
     */
-    public void addDataSource(DataSourceInfo datasource) {
+    public void addDataSource(DataSourceInfo datasource) throws DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, DataSourceNotServiceable, UnsupportedDataSourceProvider {
        final String name = datasource.getName();
        final String provider = datasource.getProvider();
        ValidationOutputData addedToDB = null;
        LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.ADDING_DATASOURCE + name);
-        try {
-            if (dataSourceCollection.containsKey(name)) {
-                throw new DataSourceAlreadyExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_ALREADY_EXIST);
-            }
-            if (provider.equalsIgnoreCase(KruizeConstants.SupportedDatasources.PROMETHEUS)) {
-                LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.VERIFYING_DATASOURCE_REACHABILITY + name);
-                DataSourceOperatorImpl op = DataSourceOperatorImpl.getInstance().getOperator(KruizeConstants.SupportedDatasources.PROMETHEUS);
-                if (op.isServiceable(datasource) == CommonUtils.DatasourceReachabilityStatus.REACHABLE) {
-                    LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_SERVICEABLE);
-                    // add the data source to DB
-                    addedToDB = new ExperimentDBService().addDataSourceToDB(datasource);
-                    if (addedToDB.isSuccess()) {
-                        LOGGER.info("Datasource added to the DB successfully.");
-                    } else {
-                        LOGGER.error("Failed to add datasource to DB: {}", addedToDB.getMessage());
-                    }
-                    dataSourceCollection.put(name, datasource);
-                    LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_ADDED);
+        if (dataSourceCollection.containsKey(name)) {
+            throw new DataSourceAlreadyExist(DATASOURCE_ALREADY_EXIST);
+        }
+
+        if (provider.equalsIgnoreCase(KruizeConstants.SupportedDatasources.PROMETHEUS)) {
+            LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceInfoMsgs.VERIFYING_DATASOURCE_REACHABILITY + name);
+            DataSourceOperatorImpl op = DataSourceOperatorImpl.getInstance().getOperator(KruizeConstants.SupportedDatasources.PROMETHEUS);
+            if (op.isServiceable(datasource) == CommonUtils.DatasourceReachabilityStatus.REACHABLE) {
+                LOGGER.info(KruizeConstants.DataSourceConstants.DataSourceSuccessMsgs.DATASOURCE_SERVICEABLE);
+                // add the data source to DB
+                addedToDB = new ExperimentDBService().addDataSourceToDB(datasource);
+                if (addedToDB.isSuccess()) {
+                    LOGGER.info(DATASOURCE_ADDED);
                } else {
-                    throw new DataSourceNotServiceable(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_SERVICEABLE);
+                    LOGGER.error("Failed to add datasource to DB: {}", addedToDB.getMessage());
                }
+                dataSourceCollection.put(name, datasource);
+                LOGGER.info(DATASOURCE_ADDED);
            } else {
-                throw new UnsupportedDataSourceProvider(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.UNSUPPORTED_DATASOURCE_PROVIDER);
+                throw new DataSourceNotServiceable(DATASOURCE_NOT_SERVICEABLE);
            }
-        } catch (UnsupportedDataSourceProvider e) {
-            LOGGER.error(e.getMessage());
-        } catch (DataSourceNotServiceable e) {
-            LOGGER.error(e.getMessage());
-        } catch (DataSourceAlreadyExist e) {
-            LOGGER.error(e.getMessage());
+        } else {
+            throw new
UnsupportedDataSourceProvider(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.UNSUPPORTED_DATASOURCE_PROVIDER); } + } /** * Loads the data sources available at installation time + * * @param configFileName name of the config file mounted */ - public void addDataSourcesFromConfigFile(String configFileName) { - try { - String configFile = System.getenv(configFileName); - JSONObject configObject = null; + public void addDataSourcesFromConfigFile(String configFileName) throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { - InputStream is = new FileInputStream(configFile); - String jsonTxt = new String(is.readAllBytes(), StandardCharsets.UTF_8); - configObject = new JSONObject(jsonTxt); - JSONArray dataSourceArr = configObject.getJSONArray(KruizeConstants.DataSourceConstants.KRUIZE_DATASOURCE); + String configFile = System.getenv(configFileName); + JSONObject configObject = null; - for (Object dataSourceObj: dataSourceArr) { - JSONObject dataSourceObject = (JSONObject) dataSourceObj; - String name = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_NAME); - // check the DB if the datasource already exists - try { - DataSourceInfo dataSourceInfo = new ExperimentDBService().loadDataSourceFromDBByName(name); - if (null != dataSourceInfo) { - LOGGER.error("Datasource: {} already exists!", name); - // add the auth details to local object - AuthenticationConfig authConfig = getAuthenticationDetails(dataSourceObject, name); - dataSourceInfo.setAuthenticationConfig(authConfig); - dataSourceCollection.put(name, dataSourceInfo); - continue; - } - } catch (Exception e) { - LOGGER.error("Loading saved datasource {} failed: {} ", name, e.getMessage()); - } - String provider = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_PROVIDER); - String serviceName = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAME); - String namespace = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAMESPACE); - String dataSourceURL = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_URL); - AuthenticationConfig authConfig = getAuthenticationDetails(dataSourceObject, name); - DataSourceInfo datasource; - // Validate input - if (!validateInput(name, provider, serviceName, dataSourceURL, namespace)) { + InputStream is = new FileInputStream(configFile); + String jsonTxt = new String(is.readAllBytes(), StandardCharsets.UTF_8); + configObject = new JSONObject(jsonTxt); + JSONArray dataSourceArr = configObject.getJSONArray(KruizeConstants.DataSourceConstants.KRUIZE_DATASOURCE); + + for (Object dataSourceObj : dataSourceArr) { + JSONObject dataSourceObject = (JSONObject) dataSourceObj; + String name = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_NAME); + // check the DB if the datasource already exists + try { + DataSourceInfo dataSourceInfo = new ExperimentDBService().loadDataSourceFromDBByName(name); + if (null != dataSourceInfo) { + LOGGER.error("Datasource: {} already exists!", name); + // add the auth details to local object + AuthenticationConfig authConfig = getAuthenticationDetails(dataSourceObject, name); + dataSourceInfo.setAuthenticationConfig(authConfig); + dataSourceCollection.put(name, dataSourceInfo); continue; } - if (dataSourceURL.isEmpty()) { - datasource = new DataSourceInfo(name, provider, serviceName, namespace, null); - } 
else { - datasource = new DataSourceInfo(name, provider, serviceName, namespace, new URL(dataSourceURL)); - } - // set the authentication config - datasource.setAuthenticationConfig(authConfig); - addDataSource(datasource); + } catch (Exception e) { + LOGGER.error(DATASOURCE_DB_LOAD_FAILED, name, e.getMessage()); } - } catch (IOException e) { - LOGGER.error(e.getMessage()); + String provider = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_PROVIDER); + String serviceName = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAME); + String namespace = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_SERVICE_NAMESPACE); + String dataSourceURL = dataSourceObject.getString(KruizeConstants.DataSourceConstants.DATASOURCE_URL); + AuthenticationConfig authConfig = getAuthenticationDetails(dataSourceObject, name); + try { + JSONObject authenticationObj = dataSourceObject.optJSONObject(KruizeConstants.AuthenticationConstants.AUTHENTICATION); + // create the corresponding authentication object + authConfig = AuthenticationConfig.createAuthenticationConfigObject(authenticationObj); + } catch (Exception e) { + LOGGER.warn(DATASOURCE_DB_AUTH_LOAD_FAILED, name, e.getMessage()); + authConfig = AuthenticationConfig.noAuth(); + } + + DataSourceInfo datasource; + // Validate input + if (!validateInput(name, provider, serviceName, dataSourceURL, namespace)) { + continue; + } + if (dataSourceURL.isEmpty()) { + datasource = new DataSourceInfo(name, provider, serviceName, namespace, null); + } else { + datasource = new DataSourceInfo(name, provider, serviceName, namespace, new URL(dataSourceURL)); + } + // set the authentication config + datasource.setAuthenticationConfig(authConfig); + addDataSource(datasource); } + } private AuthenticationConfig getAuthenticationDetails(JSONObject dataSourceObject, String name) { @@ -192,11 +204,12 @@ private AuthenticationConfig getAuthenticationDetails(JSONObject dataSourceObjec /** * validates the input parameters before creating dataSourceInfo objects - * @param name String containing name of the datasource - * @param provider String containing provider of the datasource + * + * @param name String containing name of the datasource + * @param provider String containing provider of the datasource * @param servicename String containing service name for the datasource - * @param url String containing URL of the data source - * @param namespace String containing namespace for the datasource service + * @param url String containing URL of the data source + * @param namespace String containing namespace for the datasource service * @return boolean returns true if validation is successful otherwise return false */ public boolean validateInput(String name, String provider, String servicename, String url, String namespace) { @@ -222,42 +235,38 @@ public boolean validateInput(String name, String provider, String servicename, S /** * deletes the datasource from the Hashmap + * * @param name String containing the name of the datasource to be deleted - * TODO: add db related operations + * TODO: add db related operations */ - public void deleteDataSource(String name) { - try { - if (name == null) { - throw new DataSourceMissingRequiredFiled(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); - } - if (dataSourceCollection.containsKey(name)) { - dataSourceCollection.remove(name); - } else { - throw new 
DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); - } - } catch (DataSourceMissingRequiredFiled e) { - LOGGER.error(e.getMessage()); - } catch (DataSourceDoesNotExist e) { - LOGGER.error(e.getMessage()); + public void deleteDataSource(String name) throws DataSourceMissingRequiredFiled, DataSourceDoesNotExist { + + if (name == null) { + throw new DataSourceMissingRequiredFiled(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); + } + if (dataSourceCollection.containsKey(name)) { + dataSourceCollection.remove(name); + } else { + throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); } + } /** * updates the existing datasource in the Hashmap - * @param name String containing the name of the datasource to be updated + * + * @param name String containing the name of the datasource to be updated * @param newDataSource DataSourceInfo object with updated values - * TODO: add db related operations + * TODO: add db related operations */ - public void updateDataSource(String name, DataSourceInfo newDataSource) { - try { - if (dataSourceCollection.containsKey(name)) { - dataSourceCollection.remove(name); - addDataSource(newDataSource); - } else { - throw new DataSourceDoesNotExist(name + ": " + KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); - } - } catch (DataSourceDoesNotExist e) { - LOGGER.error(e.getMessage()); + public void updateDataSource(String name, DataSourceInfo newDataSource) throws UnsupportedDataSourceProvider, DataSourceNotServiceable, DataSourceAlreadyExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, DataSourceDoesNotExist { + + if (dataSourceCollection.containsKey(name)) { + dataSourceCollection.remove(name); + addDataSource(newDataSource); + } else { + throw new DataSourceDoesNotExist(name + ": " + KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_NOT_EXIST); } + } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceManager.java b/src/main/java/com/autotune/common/datasource/DataSourceManager.java index cf28c73b4..a8401970c 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceManager.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceManager.java @@ -17,25 +17,34 @@ import com.autotune.analyzer.utils.AnalyzerErrorConstants; import com.autotune.common.data.ValidationOutputData; +import com.autotune.common.data.dataSourceMetadata.DataSource; +import com.autotune.common.data.dataSourceMetadata.DataSourceCluster; +import com.autotune.common.data.dataSourceMetadata.DataSourceMetadataInfo; import com.autotune.common.exceptions.datasource.DataSourceDoesNotExist; -import com.autotune.common.data.dataSourceMetadata.*; import com.autotune.database.dao.ExperimentDAOImpl; import com.autotune.database.service.ExperimentDBService; import com.autotune.utils.KruizeConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.HashMap; import java.util.Map; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.*; +import static com.autotune.utils.KruizeConstants.DataSourceConstants.DataSourceMetadataSuccessMsgs.METADATA_ADDED; + /** * DataSourceManager is an interface to manage (create and update) metadata 
* of data sources - * - * + * <p> + * <p>
* Currently Supported Implementations: - * - importMetadataFromDataSource - * - getMetadataFromDataSource + * - importMetadataFromDataSource + * - getMetadataFromDataSource * TODO - DB integration for update and delete functionalities */ public class DataSourceManager { @@ -47,29 +56,24 @@ public DataSourceManager() { /** * Imports Metadata for a specific data source using associated DataSourceInfo. + * * @param dataSourceInfo - * @param uniqueKey this is used as labels in query example container="xyz" namespace="abc" - * @param startTime Get metadata from starttime to endtime - * @param endTime Get metadata from starttime to endtime - * @param steps the interval between data points in a range query + * @param uniqueKey this is used as labels in query example container="xyz" namespace="abc" + * @param startTime Get metadata from starttime to endtime + * @param endTime Get metadata from starttime to endtime + * @param steps the interval between data points in a range query * @return */ - public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo,String uniqueKey,long startTime,long endTime,int steps) throws Exception { - try { - if (null == dataSourceInfo) { - throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); - } - DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo,uniqueKey, startTime, endTime, steps); - if (null == dataSourceMetadataInfo) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); - return null; - } - return dataSourceMetadataInfo; - } catch (Exception e) { - LOGGER.error(e.getMessage()); - throw e; + public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws DataSourceDoesNotExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + if (null == dataSourceInfo) { + throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); } - + DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo, uniqueKey, startTime, endTime, steps); + if (null == dataSourceMetadataInfo) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); + return null; + } + return dataSourceMetadataInfo; } /** @@ -93,7 +97,7 @@ public DataSourceMetadataInfo getMetadataFromDataSource(DataSourceInfo dataSourc return dataSourceMetadataInfo; } catch (DataSourceDoesNotExist e) { LOGGER.error(e.getMessage()); - }catch (Exception e) { + } catch (Exception e) { LOGGER.error("Loading saved datasource metadata failed: {} ", e.getMessage()); } return null; @@ -101,9 +105,10 @@ public DataSourceMetadataInfo getMetadataFromDataSource(DataSourceInfo dataSourc /** * Updates metadata of the specified data source and metadata object - * @param dataSource The information about the data source to be updated. + * + * @param dataSource The information about the data source to be updated. * @param dataSourceMetadataInfo The existing DataSourceMetadataInfo object containing the current - * metadata information of the data source. + * metadata information of the data source. 
*/ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMetadataInfo dataSourceMetadataInfo) { try { @@ -113,7 +118,7 @@ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMe if (null == dataSourceMetadataInfo) { throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE); } - dataSourceMetadataOperator.updateDataSourceMetadata(dataSource,"",0,0,0); + dataSourceMetadataOperator.updateDataSourceMetadata(dataSource, "", 0, 0, 0); } catch (Exception e) { LOGGER.error(e.getMessage()); } @@ -121,6 +126,7 @@ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMe /** * Deletes metadata of the specified data source + * * @param dataSource The metadata associated with the specified data source to be deleted. */ public void deleteMetadataFromDataSource(DataSourceInfo dataSource) { @@ -137,8 +143,9 @@ public void deleteMetadataFromDataSource(DataSourceInfo dataSource) { /** * Adds Metadata object to DB + * * @param dataSourceMetadataInfo DataSourceMetadataInfo object - * Note - It's assumed that metadata will be added to database after validating dataSourceMetadataInfo object + * Note - It's assumed that metadata will be added to database after validating dataSourceMetadataInfo object */ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { ValidationOutputData addedToDB = null; @@ -146,9 +153,9 @@ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { // add the data source to DB addedToDB = new ExperimentDBService().addMetadataToDB(dataSourceMetadataInfo); if (addedToDB.isSuccess()) { - LOGGER.debug("Metadata added to the DB successfully."); + LOGGER.debug(METADATA_ADDED); } else { - LOGGER.error("Failed to add metadata to DB: {}", addedToDB.getMessage()); + LOGGER.error(LOAD_DATASOURCE_METADATA_TO_DB_ERROR, addedToDB.getMessage()); } } catch (Exception e) { LOGGER.error("Exception occurred while adding metadata : {} ", e.getMessage()); @@ -159,19 +166,20 @@ public void addMetadataToDB(DataSourceMetadataInfo dataSourceMetadataInfo) { private boolean checkIfDataSourceMetadataExists(String dataSourceName) { boolean isPresent = false; try { - DataSourceMetadataInfo dataSourceMetadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName,"false"); + DataSourceMetadataInfo dataSourceMetadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, "false"); if (null != dataSourceMetadataInfo) { - LOGGER.error("Metadata already exists for datasource: {}!", dataSourceName); + LOGGER.error(METADATA_EXIST, dataSourceName); isPresent = true; } } catch (Exception e) { - LOGGER.error("Failed to load metadata for the datasource: {}: {} ", dataSourceName, e.getMessage()); + LOGGER.error(METADATA_LOAD_FROM_DB, dataSourceName, e.getMessage()); } return isPresent; } /** * Fetches and deletes DataSourceMetadata of the specified datasource from Database + * * @param dataSourceInfo DataSourceInfo object */ public void deleteMetadataFromDBByDataSource(DataSourceInfo dataSourceInfo) { @@ -195,6 +203,7 @@ public void deleteMetadataFromDBByDataSource(DataSourceInfo dataSourceInfo) { /** * Deletes DataSourceMetadata entry from Database + * * @param dataSourceName datasource name */ public void deleteMetadataFromDB(String dataSourceName) { @@ -215,12 +224,13 @@ public void deleteMetadataFromDB(String dataSourceName) { /** * Fetches Datasource details from Database by name + * * @param 
dataSourceName Name of the datasource to be fetched * @return DataSourceInfo object of the specified datasource name */ public DataSourceInfo fetchDataSourceFromDBByName(String dataSourceName) { try { - if(null == dataSourceName || dataSourceName.isEmpty()) { + if (null == dataSourceName || dataSourceName.isEmpty()) { throw new Exception(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); } DataSourceInfo datasource = new ExperimentDBService().loadDataSourceFromDBByName(dataSourceName); @@ -233,13 +243,14 @@ public DataSourceInfo fetchDataSourceFromDBByName(String dataSourceName) { /** * Fetches Datasource metadata details from Database by name - * @param dataSourceName Name of the datasource to be fetched - * @param verbose Flag indicating granularity of metadata to be fetched + * + * @param dataSourceName Name of the datasource to be fetched + * @param verbose Flag indicating granularity of metadata to be fetched * @return DataSourceMetadataInfo object of the specified datasource name */ public DataSourceMetadataInfo fetchDataSourceMetadataFromDBByName(String dataSourceName, String verbose) { try { - if(null == dataSourceName || dataSourceName.isEmpty()) { + if (null == dataSourceName || dataSourceName.isEmpty()) { throw new Exception(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_NAME); } DataSourceMetadataInfo metadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, verbose); @@ -255,13 +266,13 @@ public DataSourceMetadataInfo fetchDataSourceMetadataFromDBByName(String dataSou * This method processes the provided metadata includes only the datasource * names and their associated cluster names, pruning all other details. * - * @param dataSourceName Datasource name - * @param dataSourceMetadataInfo DataSourceMetadataInfo object containing granular metadata + * @param dataSourceName Datasource name + * @param dataSourceMetadataInfo DataSourceMetadataInfo object containing granular metadata * @return A new DataSourceMetadataInfo object containing only the cluster details. - * + *
<p>
* Note - It's assumed that Cluster view will be requested after validating dataSourceMetadataInfo object */ - public DataSourceMetadataInfo DataSourceMetadataClusterView(String dataSourceName, DataSourceMetadataInfo dataSourceMetadataInfo){ + public DataSourceMetadataInfo DataSourceMetadataClusterView(String dataSourceName, DataSourceMetadataInfo dataSourceMetadataInfo) { try { HashMap filteredDataSourceHashMap = new HashMap<>(); diff --git a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java index 99c663402..f2f3a4b63 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java @@ -22,6 +22,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.HashMap; /** @@ -55,9 +59,9 @@ public static DataSourceMetadataOperator getInstance() { * @param startTime Get metadata from starttime to endtime * @param endTime Get metadata from starttime to endtime * @param steps the interval between data points in a range query - * TODO - support multiple data sources + * TODO - support multiple data sources */ - public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { + public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); } @@ -97,8 +101,8 @@ public DataSourceMetadataInfo getDataSourceMetadataInfo(DataSourceInfo dataSourc * @param dataSourceInfo The DataSourceInfo object containing information about the * data source to be updated. *
<p>
- * TODO - Currently Create and Update functions have identical functionalities, based on UI workflow and requirements - * need to further enhance updateDataSourceMetadata() to support namespace, workload level granular updates + * TODO - Currently Create and Update functions have identical functionalities, based on UI workflow and requirements + * need to further enhance updateDataSourceMetadata() to support namespace, workload level granular updates */ public DataSourceMetadataInfo updateDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); @@ -141,7 +145,7 @@ public void deleteDataSourceMetadata(DataSourceInfo dataSourceInfo) { * @return DataSourceMetadataInfo object with populated metadata fields * todo rename processQueriesAndFetchClusterMetadataInfo */ - public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { + public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { DataSourceMetadataHelper dataSourceDetailsHelper = new DataSourceMetadataHelper(); /** * Get DataSourceOperatorImpl instance on runtime based on dataSource provider @@ -152,37 +156,34 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_OPERATOR_RETRIEVAL_FAILURE, dataSourceInfo.getProvider()); return null; } - /** * For the "prometheus" data source, fetches and processes data related to namespaces, workloads, and containers, * creating a comprehensive DataSourceMetadataInfo object that is then added to a list. 
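The hunk that follows threads the optional `uniqueKey` into the PromQL templates by rewriting an `ADDITIONAL_LABEL` placeholder. A minimal, runnable sketch of that substitution; the template string is invented, the real ones live in `PromQLDataSourceQueries`:

```java
public class LabelSubstitutionSketch {
    public static void main(String[] args) {
        // Invented template; the placeholder sits after an existing label,
        // which is why a non-empty uniqueKey is prefixed with a comma.
        String namespaceQuery =
                "sum by (namespace) (kube_namespace_status_phase{phase=\"Active\" ADDITIONAL_LABEL})";
        String uniqueKey = "container=\"kruize\",namespace=\"monitoring\"";

        if (null != uniqueKey && !uniqueKey.isEmpty()) {
            namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey);
        } else {
            // no filter requested: drop the placeholder entirely
            namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "");
        }
        System.out.println(namespaceQuery);
    }
}
```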
* TODO - Process cluster metadata using a custom query */ - try { - String dataSourceName = dataSourceInfo.getName(); - String namespaceQuery = PromQLDataSourceQueries.NAMESPACE_QUERY; - String workloadQuery = PromQLDataSourceQueries.WORKLOAD_QUERY; - String containerQuery = PromQLDataSourceQueries.CONTAINER_QUERY; - if (null != uniqueKey && !uniqueKey.isEmpty()) { - LOGGER.debug("uniquekey: {}", uniqueKey); - namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - containerQuery = containerQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); - } else { - namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", ""); - workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", ""); - containerQuery = containerQuery.replace("ADDITIONAL_LABEL", ""); - } - LOGGER.info("namespaceQuery: {}", namespaceQuery); - LOGGER.info("workloadQuery: {}", workloadQuery); - LOGGER.info("containerQuery: {}", containerQuery); - - JsonArray namespacesDataResultArray = op.getResultArrayForQuery(dataSourceInfo, namespaceQuery); - if (!op.validateResultArray(namespacesDataResultArray)) { - dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, null); - throw new Exception(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.NAMESPACE_QUERY_VALIDATION_FAILED); - } + String dataSourceName = dataSourceInfo.getName(); + String namespaceQuery = PromQLDataSourceQueries.NAMESPACE_QUERY; + String workloadQuery = PromQLDataSourceQueries.WORKLOAD_QUERY; + String containerQuery = PromQLDataSourceQueries.CONTAINER_QUERY; + if (null != uniqueKey && !uniqueKey.isEmpty()) { + LOGGER.debug("uniquekey: {}", uniqueKey); + namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + containerQuery = containerQuery.replace("ADDITIONAL_LABEL", "," + uniqueKey); + } else { + namespaceQuery = namespaceQuery.replace("ADDITIONAL_LABEL", ""); + workloadQuery = workloadQuery.replace("ADDITIONAL_LABEL", ""); + containerQuery = containerQuery.replace("ADDITIONAL_LABEL", ""); + } + LOGGER.info("namespaceQuery: {}", namespaceQuery); + LOGGER.info("workloadQuery: {}", workloadQuery); + LOGGER.info("containerQuery: {}", containerQuery); + + JsonArray namespacesDataResultArray = op.getResultArrayForQuery(dataSourceInfo, namespaceQuery); + if (!op.validateResultArray(namespacesDataResultArray)) { + dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, null); + } else { /** * Key: Name of namespace * Value: DataSourceNamespace object corresponding to a namespace @@ -228,11 +229,10 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da } dataSourceDetailsHelper.updateContainerDataSourceMetadataInfoObject(dataSourceName, dataSourceMetadataInfo, datasourceWorkloads, datasourceContainers); - return getDataSourceMetadataInfo(dataSourceInfo); - } catch (Exception e) { - LOGGER.error(e.getMessage()); - throw e; } + + return null; + } } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceOperator.java index f4fa77f86..ec7dcafff 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceOperator.java @@ -19,26 +19,33 @@ import com.google.gson.JsonArray; import 
org.json.JSONObject; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; + /** * DataSourceOperator is an abstraction which has a generic and implementation, * and it can also be implemented by each data source provider type. - * + *
<p>
* Currently Supported Implementations: - * - Prometheus - * - * The Implementation should have helper functions to perform operations related - * to datasource + * - Prometheus + *
<p>
+ * The Implementation should have helper functions to perform operations related + * to datasource */ public interface DataSourceOperator { /** * Returns the default service port for provider + * * @return String containing the port number */ String getDefaultServicePortForProvider(); /** * Returns the instance of specific operator class based on provider type + * * @param provider String containing the name of provider * @return instance of specific operator */ @@ -51,7 +58,7 @@ public interface DataSourceOperator { * @param dataSource DatasourceInfo object containing the datasource details * @return DatasourceReachabilityStatus */ - CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource); + CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the result value @@ -60,7 +67,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return Object containing the result value for the specified query */ - Object getValueForQuery(DataSourceInfo dataSource, String query); + Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the JSON Object @@ -69,7 +76,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return JSONObject for the specified query */ - JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query); + JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * executes specified query on datasource and returns the result array @@ -78,7 +85,7 @@ public interface DataSourceOperator { * @param query String containing the query to be executed * @return JsonArray containing the result array for the specified query */ - public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query); + public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException; /** * Validates a JSON array to ensure it is not null, not a JSON null, and has at least one element. 
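For reference, the `validateResultArray` contract described above amounts to this check; a sketch against Gson's `JsonArray`, not the project's Prometheus implementation:

```java
import com.google.gson.JsonArray;

public class ResultArrayCheckSketch {
    // valid = not null, not a JSON null, and at least one element
    static boolean isValidResultArray(JsonArray resultArray) {
        return resultArray != null
                && !resultArray.isJsonNull()
                && resultArray.size() > 0;
    }
}
```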
@@ -90,6 +97,7 @@ public interface DataSourceOperator { /** * returns query endpoint for datasource + * * @return String containing query endpoint */ String getQueryEndpoint(); diff --git a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java index 5404a9d4b..ab3f5d535 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceOperatorImpl.java @@ -1,12 +1,8 @@ package com.autotune.common.datasource; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.exceptions.MonitoringAgentNotFoundException; import com.autotune.analyzer.exceptions.TooManyRecursiveCallsException; import com.autotune.analyzer.utils.AnalyzerConstants; -import com.autotune.common.auth.AuthenticationConfig; -import com.autotune.common.auth.AuthenticationStrategy; -import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.prometheus.PrometheusDataOperatorImpl; import com.autotune.common.exceptions.datasource.ServiceNotFound; import com.autotune.common.target.kubernetes.service.KubernetesServices; @@ -20,6 +16,7 @@ import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.LoggerFactory; + import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -35,11 +32,13 @@ public class DataSourceOperatorImpl implements DataSourceOperator { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DataSourceOperatorImpl.class); private static DataSourceOperatorImpl dataSourceOperator = null; + protected DataSourceOperatorImpl() { } /** * Returns the instance of DataSourceOperatorImpl class + * * @return DataSourceOperatorImpl instance */ public static DataSourceOperatorImpl getInstance() { @@ -49,8 +48,119 @@ public static DataSourceOperatorImpl getInstance() { return dataSourceOperator; } + /** + * TODO: monitoring agent will be replaced by default datasource later + * returns DataSourceInfo objects for default datasource which is currently monitoring agent + * + * @return DataSourceInfo objects + */ + public static DataSourceInfo getMonitoringAgent(String dataSource) throws MonitoringAgentNotFoundException, MalformedURLException { + String monitoringAgentEndpoint; + DataSourceInfo monitoringAgent = null; + + if (dataSource.toLowerCase().equals(KruizeDeploymentInfo.monitoring_agent)) { + monitoringAgentEndpoint = KruizeDeploymentInfo.monitoring_agent_endpoint; + // Monitoring agent endpoint not set in the configmap + if (monitoringAgentEndpoint == null || monitoringAgentEndpoint.isEmpty()) { + monitoringAgentEndpoint = getServiceEndpoint(KruizeDeploymentInfo.monitoring_service); + } + if (dataSource.equals(AnalyzerConstants.PROMETHEUS_DATA_SOURCE)) { + monitoringAgent = new DataSourceInfo(KruizeDeploymentInfo.monitoring_agent, AnalyzerConstants.PROMETHEUS_DATA_SOURCE, null, null, new URL(monitoringAgentEndpoint)); + } + } + + if (monitoringAgent == null) { + LOGGER.error("Datasource " + dataSource + " not supported"); + } + + return monitoringAgent; + } + + /** + * TODO: To find a suitable place for this function later + * Gets the service endpoint for the datasource service through the cluster IP + * of the service. + * + * @return Endpoint of the service. 
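For orientation, the endpoint this helper assembles is simply the service's cluster IP plus its first port, with the protocol picked by cluster type; a sketch with invented values:

```java
public class ServiceEndpointSketch {
    public static void main(String[] args) {
        String clusterIP = "10.96.0.12"; // invented cluster IP
        int port = 9090;                 // invented first port of the service
        // KruizeConstants.MINIKUBE -> HTTP, KruizeConstants.OPENSHIFT -> HTTPS
        System.out.println("minikube : http://" + clusterIP + ":" + port);
        System.out.println("openshift: https://" + clusterIP + ":" + port);
    }
}
```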
+ * @throws ServiceNotFound + */ + private static String getServiceEndpoint(String serviceName) { + //No endpoint was provided in the configmap, find the endpoint from the service. + KubernetesServices kubernetesServices = new KubernetesServicesImpl(); + List serviceList = kubernetesServices.getServicelist(null); + kubernetesServices.shutdownClient(); + String serviceEndpoint = null; + + try { + if (serviceName == null) { + throw new ServiceNotFound(); + } + + for (Service service : serviceList) { + String name = service.getMetadata().getName(); + if (name.toLowerCase().equals(serviceName)) { + String clusterIP = service.getSpec().getClusterIP(); + int port = service.getSpec().getPorts().get(0).getPort(); + LOGGER.debug(KruizeDeploymentInfo.cluster_type); + if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.MINIKUBE)) { + serviceEndpoint = AnalyzerConstants.HTTP_PROTOCOL + "://" + clusterIP + ":" + port; + } + if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.OPENSHIFT)) { + serviceEndpoint = AnalyzerConstants.HTTPS_PROTOCOL + "://" + clusterIP + ":" + port; + } + } + } + } catch (ServiceNotFound e) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.SERVICE_NOT_FOUND); + } + + if (serviceEndpoint == null) { + LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.ENDPOINT_NOT_FOUND); + } + + return serviceEndpoint; + } + + /** + * TODO: To find a suitable place for this function later + * + * @param jsonObj The JSON that needs to be parsed + * @param key The key to search in the JSON + * @param values ArrayList to hold the key values in the JSON + * @param level Level of recursion + */ + static void parseJsonForKey(JSONObject jsonObj, String key, ArrayList values, int level) throws TooManyRecursiveCallsException { + level += 1; + + if (level > 30) + throw new TooManyRecursiveCallsException(); + + for (String keyStr : jsonObj.keySet()) { + Object keyvalue = jsonObj.get(keyStr); + + if (keyStr.equals(key)) + values.add(keyvalue.toString()); + + //for nested objects + if (keyvalue instanceof JSONObject) + parseJsonForKey((JSONObject) keyvalue, key, values, level); + + //for json array, iterate and recursively get values + if (keyvalue instanceof JSONArray) { + JSONArray jsonArray = (JSONArray) keyvalue; + for (int index = 0; index < jsonArray.length(); index++) { + Object jsonObject = jsonArray.get(index); + if (jsonObject instanceof JSONObject) { + parseJsonForKey((JSONObject) jsonObject, key, values, level); + } + } + } + } + } + /** * Returns the instance of specific operator class based on provider type + * * @param provider String containg the name of provider * @return instance of specific operator */ @@ -64,6 +174,7 @@ public DataSourceOperatorImpl getOperator(String provider) { /** * Returns the default service port for prometheus + * * @return String containing the port number */ @Override @@ -79,7 +190,7 @@ public String getDefaultServicePortForProvider() { * @return DatasourceReachabilityStatus */ @Override - public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) { + public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -91,18 +202,20 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat * @return Object containing the result value for the specified query */ @Override - public Object 
getValueForQuery(DataSourceInfo dataSource, String query) { + public Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } /** * returns query endpoint for datasource + * * @return String containing query endpoint */ @Override public String getQueryEndpoint() { return null; } + /** * executes specified query on datasource and returns the JSON Object * @@ -111,7 +224,7 @@ public String getQueryEndpoint() { * @return JSONObject for the specified query */ @Override - public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) { + public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -123,7 +236,7 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) * @return JsonArray containing the result array for the specified query */ @Override - public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) { + public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { return null; } @@ -134,15 +247,18 @@ public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) * @return True if the JSON array is valid (not null, not a JSON null, and has at least one element), otherwise false. */ @Override - public boolean validateResultArray(JsonArray resultArray) { return false;} + public boolean validateResultArray(JsonArray resultArray) { + return false; + } /** * TODO: To find a suitable place for this function later * returns authentication token for datasource + * * @return String containing token */ public String getToken() throws IOException { - String fileName = KruizeConstants.AUTH_MOUNT_PATH+"token"; + String fileName = KruizeConstants.AUTH_MOUNT_PATH + "token"; String authToken = new String(Files.readAllBytes(Paths.get(fileName))); return authToken; } @@ -150,13 +266,14 @@ public String getToken() throws IOException { /** * TODO: To find a suitable place for this function later * Run the getAppsForLayer and return the list of applications matching the layer. 
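A hypothetical call site for `getAppsForLayer` after this refactor: the checked exceptions now reach the caller instead of being logged internally. The query and key are stand-ins, not Kruize's actual layer queries:

```java
import com.autotune.common.datasource.DataSourceInfo;
import com.autotune.common.datasource.DataSourceOperatorImpl;

import java.io.IOException;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;

public class AppsForLayerSketch {
    static ArrayList collectApps(DataSourceInfo dataSource) {
        DataSourceOperatorImpl op = DataSourceOperatorImpl.getInstance();
        try {
            // every "container" value found anywhere in the response JSON is collected
            return op.getAppsForLayer(dataSource,
                    "container_cpu_usage_seconds_total", // stand-in layer query
                    "container");                        // key handed to parseJsonForKey
        } catch (IOException | NoSuchAlgorithmException
                 | KeyStoreException | KeyManagementException e) {
            return new ArrayList();
        }
    }
}
```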
+ * * @param dataSource - * @param query getAppsForLayer query for the layer - * @param key The key to search for in the response + * @param query getAppsForLayer query for the layer + * @param key The key to search for in the response * @return ArrayList of all applications from the query * @throws MalformedURLException */ - public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query, String key) throws MalformedURLException { + public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query, String key) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String dataSourceURL = dataSource.getUrl().toString(); String provider = dataSource.getProvider(); DataSourceOperator op = this.getOperator(provider); @@ -165,128 +282,19 @@ public ArrayList getAppsForLayer(DataSourceInfo dataSource, String query ArrayList valuesList = new ArrayList<>(); String queryURL = dataSourceURL + queryEndpoint + query; LOGGER.debug("Query URL is: {}", queryURL); - try { - // Create the client - GenericRestApiClient genericRestApiClient = new GenericRestApiClient(dataSource); - genericRestApiClient.setBaseURL(dataSourceURL + queryEndpoint); - JSONObject responseJson = genericRestApiClient.fetchMetricsJson("GET", query); - int level = 0; - try { - parseJsonForKey(responseJson, key, valuesList, level); - LOGGER.debug("Applications for the query: {}", valuesList.toString()); - } catch (TooManyRecursiveCallsException e) { - e.printStackTrace(); - } - } catch (IOException | NoSuchAlgorithmException | KeyStoreException | KeyManagementException | FetchMetricsError e) { - LOGGER.error("Unable to proceed due to invalid connection to URL: "+ queryURL); - } - return valuesList; - } - - /** - * TODO: monitoring agent will be replaced by default datasource later - * returns DataSourceInfo objects for default datasource which is currently monitoring agent - * @return DataSourceInfo objects - */ - public static DataSourceInfo getMonitoringAgent(String dataSource) throws MonitoringAgentNotFoundException, MalformedURLException { - String monitoringAgentEndpoint; - DataSourceInfo monitoringAgent = null; - - if (dataSource.toLowerCase().equals(KruizeDeploymentInfo.monitoring_agent)) { - monitoringAgentEndpoint = KruizeDeploymentInfo.monitoring_agent_endpoint; - // Monitoring agent endpoint not set in the configmap - if (monitoringAgentEndpoint == null || monitoringAgentEndpoint.isEmpty()) { - monitoringAgentEndpoint = getServiceEndpoint(KruizeDeploymentInfo.monitoring_service); - } - if (dataSource.equals(AnalyzerConstants.PROMETHEUS_DATA_SOURCE)) { - monitoringAgent = new DataSourceInfo(KruizeDeploymentInfo.monitoring_agent, AnalyzerConstants.PROMETHEUS_DATA_SOURCE, null, null, new URL(monitoringAgentEndpoint)); - } - } - - if (monitoringAgent == null) { - LOGGER.error("Datasource " + dataSource + " not supported"); - } - - return monitoringAgent; - } - - /** - * TODO: To find a suitable place for this function later - * Gets the service endpoint for the datasource service through the cluster IP - * of the service. - * @return Endpoint of the service. - * @throws ServiceNotFound - */ - private static String getServiceEndpoint(String serviceName) { - //No endpoint was provided in the configmap, find the endpoint from the service. 
- KubernetesServices kubernetesServices = new KubernetesServicesImpl(); - List serviceList = kubernetesServices.getServicelist(null); - kubernetesServices.shutdownClient(); - String serviceEndpoint = null; + // Create the client + GenericRestApiClient genericRestApiClient = new GenericRestApiClient(dataSource); + genericRestApiClient.setBaseURL(dataSourceURL + queryEndpoint); + JSONObject responseJson = genericRestApiClient.fetchMetricsJson("GET", query); + int level = 0; try { - if (serviceName == null) { - throw new ServiceNotFound(); - } - - for (Service service : serviceList) { - String name = service.getMetadata().getName(); - if (name.toLowerCase().equals(serviceName)) { - String clusterIP = service.getSpec().getClusterIP(); - int port = service.getSpec().getPorts().get(0).getPort(); - LOGGER.debug(KruizeDeploymentInfo.cluster_type); - if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.MINIKUBE)) { - serviceEndpoint = AnalyzerConstants.HTTP_PROTOCOL + "://" + clusterIP + ":" + port; - } - if (KruizeDeploymentInfo.k8s_type.equalsIgnoreCase(KruizeConstants.OPENSHIFT)) { - serviceEndpoint = AnalyzerConstants.HTTPS_PROTOCOL + "://" + clusterIP + ":" + port; - } - } - } - } catch (ServiceNotFound e) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.SERVICE_NOT_FOUND); + parseJsonForKey(responseJson, key, valuesList, level); + LOGGER.debug("Applications for the query: {}", valuesList.toString()); + } catch (TooManyRecursiveCallsException e) { + e.printStackTrace(); } - if (serviceEndpoint == null) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.ENDPOINT_NOT_FOUND); - } - - return serviceEndpoint; - } - - /** - * TODO: To find a suitable place for this function later - * @param jsonObj The JSON that needs to be parsed - * @param key The key to search in the JSON - * @param values ArrayList to hold the key values in the JSON - * @param level Level of recursion - */ - static void parseJsonForKey(JSONObject jsonObj, String key, ArrayList values, int level) throws TooManyRecursiveCallsException { - level += 1; - - if (level > 30) - throw new TooManyRecursiveCallsException(); - - for (String keyStr : jsonObj.keySet()) { - Object keyvalue = jsonObj.get(keyStr); - - if (keyStr.equals(key)) - values.add(keyvalue.toString()); - - //for nested objects - if (keyvalue instanceof JSONObject) - parseJsonForKey((JSONObject) keyvalue, key, values, level); - - //for json array, iterate and recursively get values - if (keyvalue instanceof JSONArray) { - JSONArray jsonArray = (JSONArray) keyvalue; - for (int index = 0; index < jsonArray.length(); index++) { - Object jsonObject = jsonArray.get(index); - if (jsonObject instanceof JSONObject) { - parseJsonForKey((JSONObject) jsonObject, key, values, level); - } - } - } - } + return valuesList; } } diff --git a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java index 56614d24a..9e86a4d50 100644 --- a/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java +++ b/src/main/java/com/autotune/common/datasource/prometheus/PrometheusDataOperatorImpl.java @@ -15,20 +15,16 @@ *******************************************************************************/ package com.autotune.common.datasource.prometheus; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.analyzer.utils.AnalyzerConstants; -import 
com.autotune.common.auth.AuthenticationStrategy; -import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.DataSourceInfo; import com.autotune.common.datasource.DataSourceOperatorImpl; import com.autotune.common.utils.CommonUtils; import com.autotune.operator.KruizeDeploymentInfo; -import com.autotune.utils.KruizeConstants; import com.autotune.utils.GenericRestApiClient; -import com.google.gson.*; -import org.apache.http.conn.HttpHostConnectException; -import org.json.JSONArray; -import org.json.JSONException; +import com.autotune.utils.KruizeConstants; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; import org.json.JSONObject; import org.slf4j.LoggerFactory; @@ -38,19 +34,22 @@ import java.security.NoSuchAlgorithmException; /** - * PrometheusDataOperatorImpl extends DataSourceOperatorImpl class - * This class provides Prometheus specific implementation for DataSourceOperator functions + * PrometheusDataOperatorImpl extends DataSourceOperatorImpl class + * This class provides Prometheus specific implementation for DataSourceOperator functions */ public class PrometheusDataOperatorImpl extends DataSourceOperatorImpl { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(PrometheusDataOperatorImpl.class); - private static PrometheusDataOperatorImpl prometheusDataOperator = null;; + private static PrometheusDataOperatorImpl prometheusDataOperator = null; + ; + private PrometheusDataOperatorImpl() { super(); } /** * Returns the instance of PrometheusDataOperatorImpl class + * * @return PrometheusDataOperatorImpl instance */ public static PrometheusDataOperatorImpl getInstance() { @@ -62,6 +61,7 @@ public static PrometheusDataOperatorImpl getInstance() { /** * Returns the default service port for prometheus + * * @return String containing the port number */ @Override @@ -80,7 +80,7 @@ public String getDefaultServicePortForProvider() { * @return DatasourceReachabilityStatus */ @Override - public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) { + public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dataSource) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String dataSourceStatus; Object queryResult; @@ -89,13 +89,13 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat queryResult = this.getValueForQuery(dataSource, query); - if (queryResult != null){ + if (queryResult != null) { dataSourceStatus = queryResult.toString(); } else { dataSourceStatus = "0"; } - if (dataSourceStatus.equalsIgnoreCase("1")){ + if (dataSourceStatus.equalsIgnoreCase("1")) { reachabilityStatus = CommonUtils.DatasourceReachabilityStatus.REACHABLE; } else { reachabilityStatus = CommonUtils.DatasourceReachabilityStatus.NOT_REACHABLE; @@ -111,23 +111,17 @@ public CommonUtils.DatasourceReachabilityStatus isServiceable(DataSourceInfo dat * @return Object containing the result value for the specified query */ @Override - public Object getValueForQuery(DataSourceInfo dataSource, String query) { - try { - JSONObject jsonObject = getJsonObjectForQuery(dataSource, query); - - if (null == jsonObject) { - return null; - } else { - return "1"; //if it returns HTTP STATUS_OK 200 - } + public Object getValueForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + JSONObject jsonObject = 
getJsonObjectForQuery(dataSource, query); - } catch (JSONException e) { - LOGGER.error(e.getMessage()); - } catch (NullPointerException e) { - LOGGER.error(e.getMessage()); + if (null == jsonObject) { + return null; + } else { + return "1"; //if it returns HTTP STATUS_OK 200 } - return null; + + } /** @@ -138,7 +132,7 @@ public Object getValueForQuery(DataSourceInfo dataSource, String query) { * @return JSONObject for the specified query */ @Override - public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) { + public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { // Create the client GenericRestApiClient apiClient = new GenericRestApiClient(dataSource); apiClient.setBaseURL(CommonUtils.getBaseDataSourceUrl( @@ -150,10 +144,10 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) return null; } - try { - JSONObject jsonObject = apiClient.fetchMetricsJson( - KruizeConstants.HttpConstants.MethodType.GET, - query); + + JSONObject jsonObject = apiClient.fetchMetricsJson( + KruizeConstants.HttpConstants.MethodType.GET, + query); /* TODO need to separate it out this logic form here if (!jsonObject.has(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.STATUS)) return null; @@ -168,26 +162,14 @@ public JSONObject getJsonObjectForQuery(DataSourceInfo dataSource, String query) */ - return jsonObject; - - } catch (HttpHostConnectException e) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.DATASOURCE_CONNECTION_FAILED); - } catch (IOException e) { - e.printStackTrace(); - } catch (NoSuchAlgorithmException e) { - e.printStackTrace(); - } catch (KeyStoreException e) { - e.printStackTrace(); - } catch (KeyManagementException e) { - e.printStackTrace(); - } catch (FetchMetricsError e) { - e.printStackTrace(); - } - return null; + return jsonObject; + + } /** * returns query endpoint for prometheus datasource + * * @return String containing query endpoint */ @Override @@ -201,45 +183,37 @@ public String getQueryEndpoint() { * @param dataSource DatasourceInfo object containing the datasource details * @param query String containing the query to be executed * @return JsonArray containing the result array for the specified query - * + *
<p>
* Example output JsonArray - * [ - * { - * "metric": { - * "__name__": "exampleMetric" - * }, - * "value": [1642612628.987, "1"] - * } + * { + * "metric": { + * "__name__": "exampleMetric" + * }, + * "value": [1642612628.987, "1"] + * } * ] */ @Override - public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) { - try { - JSONObject jsonObject = getJsonObjectForQuery(dataSource, query); + public JsonArray getResultArrayForQuery(DataSourceInfo dataSource, String query) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { - if (null == jsonObject) { - return null; - } + JSONObject jsonObject = getJsonObjectForQuery(dataSource, query); + if (null == jsonObject) { + return null; + } else { String jsonString = jsonObject.toString(); JsonObject parsedJsonObject = JsonParser.parseString(jsonString).getAsJsonObject(); JsonObject dataObject = parsedJsonObject.get(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.DATA).getAsJsonObject(); - if (dataObject.has(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT) && dataObject.get(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT).isJsonArray()) { JsonArray resultArray = dataObject.getAsJsonArray(KruizeConstants.DataSourceConstants.DataSourceQueryJSONKeys.RESULT); - if (null != resultArray) { return resultArray; } } - } catch (JsonParseException e) { - LOGGER.error(e.getMessage()); - throw e; - } catch (NullPointerException e) { - LOGGER.error(e.getMessage()); - throw e; } + return null; } diff --git a/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java b/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java index 0aa8c41d7..ad3a4e70d 100644 --- a/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java +++ b/src/main/java/com/autotune/experimentManager/handler/MetricCollectionHandler.java @@ -42,6 +42,10 @@ import org.slf4j.LoggerFactory; import javax.servlet.ServletContext; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.sql.Timestamp; import java.text.DecimalFormat; import java.util.ArrayList; @@ -121,8 +125,19 @@ public void execute(ExperimentTrial experimentTrial, TrialDetails trialDetails, if (null == ado) { // TODO: Return an error saying unsupported datasource } - String queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap() - .get(podMetric.getDatasource()), updatedPodQuery); + String queryResult = null; + try { + queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap() + .get(podMetric.getDatasource()), updatedPodQuery); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } catch (KeyStoreException e) { + throw new RuntimeException(e); + } catch (KeyManagementException e) { + throw new RuntimeException(e); + } if (null != queryResult && !queryResult.isEmpty() && !queryResult.isBlank()) { try { queryResult = queryResult.trim(); @@ -159,8 +174,19 @@ public void execute(ExperimentTrial experimentTrial, TrialDetails trialDetails, } if (null != updatedContainerQuery) { LOGGER.debug("Updated Query - " + updatedContainerQuery); - String queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap() - .get(containerMetric.getDatasource()), updatedContainerQuery); + String 
queryResult = null; + try { + queryResult = (String) ado.getValueForQuery(experimentTrial.getDatasourceInfoHashMap() + .get(containerMetric.getDatasource()), updatedContainerQuery); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } catch (KeyStoreException e) { + throw new RuntimeException(e); + } catch (KeyManagementException e) { + throw new RuntimeException(e); + } if (null != queryResult && !queryResult.isEmpty() && !queryResult.isBlank()) { try { queryResult = queryResult.trim(); diff --git a/src/main/java/com/autotune/utils/GenericRestApiClient.java b/src/main/java/com/autotune/utils/GenericRestApiClient.java index 99a3e8e13..42e1edf59 100644 --- a/src/main/java/com/autotune/utils/GenericRestApiClient.java +++ b/src/main/java/com/autotune/utils/GenericRestApiClient.java @@ -15,13 +15,14 @@ *******************************************************************************/ package com.autotune.utils; -import com.autotune.analyzer.exceptions.FetchMetricsError; import com.autotune.common.auth.AuthenticationStrategy; import com.autotune.common.auth.AuthenticationStrategyFactory; import com.autotune.common.datasource.DataSourceInfo; import com.autotune.utils.authModels.APIKeysAuthentication; import com.autotune.utils.authModels.BasicAuthentication; import com.autotune.utils.authModels.BearerAccessToken; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; @@ -37,9 +38,11 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; +import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import javax.net.ssl.SSLContext; import java.io.IOException; import java.net.URLEncoder; @@ -64,6 +67,7 @@ public class GenericRestApiClient { /** * constructor to set the authentication based on the datasourceInfo object + * * @param dataSourceInfo object containing the datasource details */ public GenericRestApiClient(DataSourceInfo dataSourceInfo) { @@ -74,12 +78,13 @@ public GenericRestApiClient(DataSourceInfo dataSourceInfo) { /** * This method appends queryString with baseURL and returns response in JSON using specified authentication. - * @param methodType Http methods like GET,POST,PATCH etc + * + * @param methodType Http methods like GET,POST,PATCH etc * @param queryString * @return Json object which contains API response. 
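The rewritten `fetchMetricsJson` body below also inspects the Prometheus payload: an empty `data.result` combined with a server-side throttling warning is converted into an `IOException`, so callers can retry instead of silently seeing no data. A self-contained sketch of that check, using Jackson as the patch does; the response string is invented:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class WarningCheckSketch {
    public static void main(String[] args) throws IOException {
        // Invented Prometheus response: empty result plus a throttling warning
        String jsonResponse = "{\"status\":\"success\","
                + "\"data\":{\"resultType\":\"vector\",\"result\":[]},"
                + "\"warnings\":[\"Please reduce your request rate\"]}";

        ObjectMapper objectMapper = new ObjectMapper();
        JsonNode rootNode = objectMapper.readTree(jsonResponse);
        JsonNode resultNode = rootNode.path("data").path("result");
        JsonNode warningsNode = rootNode.path("warnings");

        // same condition as the patch: empty result array plus known warning text
        if (resultNode.isArray() && resultNode.size() == 0) {
            for (JsonNode warning : warningsNode) {
                String msg = warning.asText();
                if (msg.contains("error reading from server")
                        || msg.contains("Please reduce your request rate")) {
                    throw new IOException(msg);
                }
            }
        }
    }
}
```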
* @throws IOException */ - public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError { + public JSONObject fetchMetricsJson(String methodType, String queryString) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { String jsonResponse; try (CloseableHttpClient httpclient = setupHttpClient()) { @@ -95,8 +100,32 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws LOGGER.debug("Executing Prometheus metrics request: {}", httpRequestBase.getRequestLine()); - // Execute the request - jsonResponse = httpclient.execute(httpRequestBase, new StringResponseHandler()); + // Execute the request and get the HttpResponse + HttpResponse response = httpclient.execute(httpRequestBase); + + // Get and print the response code + int responseCode = response.getStatusLine().getStatusCode(); + LOGGER.debug("Response code: {}", responseCode); + + // Get the response body if needed + jsonResponse = new StringResponseHandler().handleResponse(response); + + // Parse the JSON response + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode rootNode = objectMapper.readTree(jsonResponse); + JsonNode resultNode = rootNode.path("data").path("result"); + JsonNode warningsNode = rootNode.path("warnings"); + + // Check if the result is empty and if there are specific warnings + if (resultNode.isArray() && resultNode.size() == 0) { + for (JsonNode warning : warningsNode) { + String warningMessage = warning.asText(); + if (warningMessage.contains("error reading from server") || warningMessage.contains("Please reduce your request rate")) { + LOGGER.warn("Warning detected: {}", warningMessage); + throw new IOException(warningMessage); + } + } + } } return new JSONObject(jsonResponse); } @@ -104,6 +133,7 @@ public JSONObject fetchMetricsJson(String methodType, String queryString) throws /** * Common method to setup SSL context for trust-all certificates. + * * @return CloseableHttpClient */ private CloseableHttpClient setupHttpClient() throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException { @@ -115,6 +145,7 @@ private CloseableHttpClient setupHttpClient() throws NoSuchAlgorithmException, K /** * Common method to apply authentication to the HTTP request. + * * @param httpRequestBase the HTTP request (GET, POST, etc.) */ private void applyAuthentication(HttpRequestBase httpRequestBase) { @@ -126,39 +157,51 @@ private void applyAuthentication(HttpRequestBase httpRequestBase) { /** * Method to call the Experiment API (e.g., to create an experiment) using POST request. 
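A hypothetical caller of the reworked `callKruizeAPI`, which now returns an `HttpResponseWrapper` (status code plus parsed body) instead of a bare status int; the URL and payload here are invented:

```java
import com.autotune.common.datasource.DataSourceInfo;
import com.autotune.utils.GenericRestApiClient;

import java.io.IOException;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;

public class CreateExperimentSketch {
    static void createExperiment(DataSourceInfo dataSourceInfo)
            throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException {
        GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
        client.setBaseURL("http://kruize-service:8080/createExperiment"); // invented URL
        GenericRestApiClient.HttpResponseWrapper response =
                client.callKruizeAPI("[{\"experiment_name\":\"demo\"}]"); // invented payload
        if (response != null) {
            int code = response.getStatusCode();      // e.g. 201 on success
            Object body = response.getResponseBody(); // JSONObject when parseable, else raw String
        }
    }
}
```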
+     *
      * @param payload JSON payload containing the experiment details
-     * @return API response code
+     * @return HttpResponseWrapper holding the HTTP status code and the response body, as a parsed JSONObject or as the raw String when JSON parsing fails
      * @throws IOException
      */
-    public int callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException, FetchMetricsError {
-
+    public HttpResponseWrapper callKruizeAPI(String payload) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException {
+        HttpResponseWrapper httpResponseWrapper = null;
         // Create an HTTP client
         try (CloseableHttpClient httpclient = setupHttpClient()) {
             // Prepare the HTTP POST request
             HttpPost httpPost = new HttpPost(baseURL);
             httpPost.setHeader("Content-Type", "application/json");
             httpPost.setHeader("Accept", "application/json");
-
             // If payload is present, set it in the request body
             if (payload != null) {
                 StringEntity entity = new StringEntity(payload, StandardCharsets.UTF_8);
                 httpPost.setEntity(entity);
             }
-
-            // Execute the request and return the response code
+            // Execute the request and wrap the response code and body
             try (CloseableHttpResponse response = httpclient.execute(httpPost)) {
                 // Get the status code from the response
                 int responseCode = response.getStatusLine().getStatusCode();
                 LOGGER.debug("Response code: {}", responseCode);
-                return responseCode;
-            } catch (Exception e) {
-                LOGGER.error("Error occurred while calling Kruize API: {}", e.getMessage());
-                throw new FetchMetricsError(e.getMessage());
+                if (response.getEntity() != null) {
+                    // Convert response entity to string
+                    String responseBody = EntityUtils.toString(response.getEntity(), "UTF-8");
+                    try {
+                        // Attempt to parse as JSON
+                        JSONObject json = new JSONObject(responseBody);
+                        httpResponseWrapper = new HttpResponseWrapper(responseCode, json);
+                    } catch (JSONException e) {
+                        // If JSON parsing fails, return as plain string
+                        httpResponseWrapper = new HttpResponseWrapper(responseCode, responseBody);
+                    }
+                }
             }
         }
+        return httpResponseWrapper;
     }
 
+    public void setBaseURL(String baseURL) {
+        this.baseURL = baseURL;
+    }
+
     private static class StringResponseHandler implements ResponseHandler<String> {
         @Override
         public String handleResponse(HttpResponse response) throws IOException {
@@ -174,7 +217,29 @@ public String handleResponse(HttpResponse response) throws IOException {
 
     }
 
-    public void setBaseURL(String baseURL) {
-        this.baseURL = baseURL;
+    public static class HttpResponseWrapper {
+        private int statusCode;
+        private Object responseBody;
+
+        public HttpResponseWrapper(int statusCode, Object responseBody) {
+            this.statusCode = statusCode;
+            this.responseBody = responseBody;
+        }
+
+        public int getStatusCode() {
+            return statusCode;
+        }
+
+        public Object getResponseBody() {
+            return responseBody;
+        }
+
+        @Override
+        public String toString() {
+            return "HttpResponseWrapper{" +
+                    "statusCode=" + statusCode +
+                    ", responseBody=" + responseBody +
+                    '}';
+        }
     }
 }
diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java
index a081087b7..e407d9e1b 100644
--- a/src/main/java/com/autotune/utils/KruizeConstants.java
+++ b/src/main/java/com/autotune/utils/KruizeConstants.java
@@ -18,6 +18,7 @@
 package com.autotune.utils;
 
 import com.autotune.analyzer.kruizeObject.CreateExperimentConfigBean;
+import com.autotune.analyzer.serviceObjects.BulkJobStatus;
 import com.autotune.analyzer.utils.AnalyzerConstants;
 
 import java.text.SimpleDateFormat;
@@ -449,6 +450,8 @@ public static class DataSourceErrorMsgs {
         public static final String UNSUPPORTED_DATASOURCE_PROVIDER = "Datasource provider is invalid.";
         public static final String DATASOURCE_NOT_SERVICEABLE = "Datasource is not serviceable.";
"Datasource is not serviceable."; public static final String DATASOURCE_CONNECTION_FAILED = "Datasource connection refused or timed out."; + public static final String DATASOURCE_DB_LOAD_FAILED = "Loading saved datasource {} details from db failed: {}"; + public static final String DATASOURCE_DB_AUTH_LOAD_FAILED = "Loading datasource {} AUTH details failed: {}"; public static final String DATASOURCE_ALREADY_EXIST = "Datasource with the name already exist."; public static final String DATASOURCE_NOT_EXIST = "Datasource with the name does not exist."; public static final String INVALID_DATASOURCE_URL = "Datasource url is not valid."; @@ -502,7 +505,21 @@ private DataSourceMetadataInfoConstants() { } } + public static class DataSourceMetadataSuccessMsgs { + public static final String METADATA_ADDED = "Metadata added to the DB successfully."; + public static final String DATASOURCE_DELETED = "Successfully deleted datasource: "; + public static final String DATASOURCE_FOUND = "Datasource found: "; + public static final String DATASOURCE_SERVICEABLE = "Datasource is serviceable."; + public static final String DATASOURCE_NOT_SERVICEABLE = "Datasource is not serviceable."; + + private DataSourceMetadataSuccessMsgs() { + + } + } + public static class DataSourceMetadataErrorMsgs { + public static final String METADATA_EXIST = "Metadata already exists for datasource: {}!"; + public static final String METADATA_LOAD_FROM_DB = "Failed to load metadata for the datasource: {}: {} "; public static final String MISSING_DATASOURCE_METADATA_DATASOURCE_NAME = "DataSourceMetadata Datasource name cannot be empty"; public static final String MISSING_DATASOURCE_METADATA_WORKLOAD_MAP = "DataSourceMetadata Workload data cannot be empty or null"; public static final String MISSING_DATASOURCE_METADATA_CONTAINER_MAP = "DataSourceMetadata Container data cannot be empty or null"; @@ -530,6 +547,7 @@ public static class DataSourceMetadataErrorMsgs { public static final String SET_CONTAINER_MAP_ERROR = "containerHashMap is null, no containers provided for workload: "; public static final String SET_NAMESPACE_MAP_ERROR = "namespaceHashMap is null, no namespaces provided for cluster: "; public static final String LOAD_DATASOURCE_FROM_DB_ERROR = "Error loading datasource - %s from DB: %s"; + public static final String LOAD_DATASOURCE_METADATA_TO_DB_ERROR = "Failed to add metadata to DB: {}"; public static final String LOAD_DATASOURCE_METADATA_FROM_DB_ERROR = "Error loading datasource - %s from DB: %s"; public static final String DATASOURCE_METADATA_VALIDATION_FAILURE_MSG = "Validation of imported metadata failed, mandatory fields missing: %s"; public static final String NAMESPACE_QUERY_VALIDATION_FAILED = "Validation failed for namespace data query."; @@ -795,5 +813,71 @@ public static final class KRUIZE_BULK_API { CREATE_EXPERIMENT_CONFIG_BEAN.setMeasurementDurationStr("15min"); CREATE_EXPERIMENT_CONFIG_BEAN.setMeasurementDuration(15); } + + public static class NotificationConstants { + + public static final BulkJobStatus.Notification JOB_NOT_FOUND_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.WARNING, + JOB_NOT_FOUND_MSG, + 404 + ); + public static final BulkJobStatus.Notification LIMIT_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.INFO, + LIMIT_MESSAGE, + 400 + ); + public static final BulkJobStatus.Notification NOTHING_INFO = new BulkJobStatus.Notification( + BulkJobStatus.NotificationType.INFO, + NOTHING, + 400 + ); + public static final BulkJobStatus.Notification 
+            public static final BulkJobStatus.Notification FETCH_METRIC_FAILURE = new BulkJobStatus.Notification(
+                    BulkJobStatus.NotificationType.ERROR,
+                    "Unable to fetch metrics",
+                    400
+            );
+            public static final BulkJobStatus.Notification DATASOURCE_NOT_REG_INFO = new BulkJobStatus.Notification(
+                    BulkJobStatus.NotificationType.ERROR,
+                    "Datasource not registered with Kruize. (%s)",
+                    400
+            );
+            public static final BulkJobStatus.Notification DATASOURCE_DOWN_INFO = new BulkJobStatus.Notification(
+                    BulkJobStatus.NotificationType.ERROR,
+                    "HttpHostConnectException: Unable to connect to the data source. Please try again later. (%s)",
+                    503
+            );
+            public static final BulkJobStatus.Notification DATASOURCE_GATEWAY_TIMEOUT_INFO = new BulkJobStatus.Notification(
+                    BulkJobStatus.NotificationType.ERROR,
+                    "SocketTimeoutException: request timed out waiting for a data source response. (%s)",
+                    504
+            );
+            public static final BulkJobStatus.Notification DATASOURCE_CONNECT_TIMEOUT_INFO = new BulkJobStatus.Notification(
+                    BulkJobStatus.NotificationType.ERROR,
+                    "ConnectTimeoutException: cannot establish a data source connection within the given time frame due to connectivity issues. (%s)",
+                    503
+            );
+
+
+            // More notification constants can be added here as needed
+
+            public enum Status {
+                PROCESSED("PROCESSED"),
+                UNPROCESSED("UNPROCESSED"),
+                PROCESSING("PROCESSING"),
+                FAILED("FAILED");
+
+                private final String status;
+
+                Status(String status) {
+                    this.status = status;
+                }
+
+                public String getStatus() {
+                    return status;
+                }
+            }
+
+
+        }
     }
 }
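Reviewer note: the two behavioural changes above (the empty-result/warning check in `fetchMetricsJson` and the `HttpResponseWrapper` now returned by `callKruizeAPI`) can be exercised with short sketches. Both are illustrative only and not part of this patch; the class names, sample payload, and endpoint URL are invented for the example.

First, the warning check in isolation: a minimal, runnable sketch (assuming only Jackson on the classpath) that feeds in the kind of Prometheus response the new code guards against.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class WarningCheckSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical Prometheus response: an empty result set plus a throttling warning.
        String jsonResponse = "{\"status\":\"success\","
                + "\"data\":{\"resultType\":\"matrix\",\"result\":[]},"
                + "\"warnings\":[\"Please reduce your request rate\"]}";

        JsonNode rootNode = new ObjectMapper().readTree(jsonResponse);
        // path() returns a MissingNode (never null) for absent fields, so these
        // lookups are safe for responses that carry no "warnings" array at all.
        JsonNode resultNode = rootNode.path("data").path("result");
        JsonNode warningsNode = rootNode.path("warnings");

        if (resultNode.isArray() && resultNode.size() == 0) {
            for (JsonNode warning : warningsNode) {
                String warningMessage = warning.asText();
                if (warningMessage.contains("error reading from server")
                        || warningMessage.contains("Please reduce your request rate")) {
                    // fetchMetricsJson throws IOException at this point, so a throttled
                    // or flaky datasource is surfaced as a retryable failure rather than
                    // being mistaken for a genuinely empty query result.
                    System.out.println("Retryable warning detected: " + warningMessage);
                }
            }
        }
    }
}
```

Second, a sketch of how a caller might consume the wrapper returned by `callKruizeAPI` in place of the bare status code it used to return; `dataSourceInfo` and the endpoint URL here are assumptions, not values from this patch.

```java
import com.autotune.common.datasource.DataSourceInfo;
import com.autotune.utils.GenericRestApiClient;
import org.json.JSONObject;

public class KruizeApiCallSketch {
    // dataSourceInfo: a datasource already registered with Kruize (assumed to exist).
    static void createExperiment(DataSourceInfo dataSourceInfo, String payload) throws Exception {
        GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo);
        client.setBaseURL("http://kruize:8080/createExperiment"); // hypothetical endpoint

        GenericRestApiClient.HttpResponseWrapper response = client.callKruizeAPI(payload);
        if (response == null) {
            return; // the HTTP response carried no entity
        }
        System.out.println("Status: " + response.getStatusCode());
        Object body = response.getResponseBody();
        if (body instanceof JSONObject) {
            System.out.println("JSON body: " + body); // parsed successfully
        } else {
            System.out.println("Raw body: " + body);  // response was not valid JSON
        }
    }
}
```

The Object-typed body keeps the wrapper simple at the cost of an instanceof check; callers that require JSON can treat a String body as an error case.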