diff --git a/Dockerfile.autotune b/Dockerfile.autotune
index 4eb6020de..4cb7f414f 100644
--- a/Dockerfile.autotune
+++ b/Dockerfile.autotune
@@ -89,6 +89,9 @@ COPY --chown=${UID}:0 --from=mvnbuild-jdk17 ${AUTOTUNE_HOME}/src/autotune/jre/ $
# Copy the app binaries
COPY --chown=${UID}:0 --from=mvnbuild-jdk17 ${AUTOTUNE_HOME}/src/autotune/target/ ${AUTOTUNE_HOME}/app/target/
+# Grant execute permission
+RUN chmod -R +x $AUTOTUNE_HOME/app/target/bin/
+
EXPOSE 8080
ENV JAVA_HOME=${AUTOTUNE_HOME}/app/jre \
diff --git a/design/MonitoringModeAPI.md b/design/MonitoringModeAPI.md
index f937822dd..53f9f52d5 100644
--- a/design/MonitoringModeAPI.md
+++ b/design/MonitoringModeAPI.md
@@ -6,7 +6,65 @@ Documentation still in progress stay tuned.
**Note :** The ISO 8601 standard underpins all timestamp formats. An example of a valid timestamp in this format is
2022-01-23T18:25:43.511Z, which represents January 23, 2022, at 18:25:43.511 UTC.
-## CreateExperiment
+# Table of Contents
+
+1. [Resource Analysis Terms and Defaults](#resource-analysis-terms-and-defaults)
+ - [Terms, Duration & Threshold Table](#terms-duration--threshold-table)
+
+2. [APIs](#apis)
+ - [Create Experiment API](#create-experiment-api)
+ - Introduction
+ - Example Request and Response
+ - Invalid Scenarios
+
+ - [Update Results API](#update-results-api)
+ - Introduction
+ - Example Request and Response
+ - Invalid Scenarios
+
+ - [List Experiments API](#list-experiments-api)
+ - Introduction
+ - Example Request and Response
+ - Invalid Scenarios
+
+ - [List Recommendations API](#list-recommendations-api)
+ - Introduction
+ - Example Request and Response
+ - Invalid Scenarios
+
+ - [Update Recommendations API](#update-recommendations-api)
+ - Introduction
+ - Example Request and Response
+ - Invalid Scenarios
+
+
+
+## Resource Analysis Terms and Defaults
+
+When analyzing resource utilization in Kubernetes, it's essential to define terms that specify the duration of past data considered for recommendations and the threshold for obtaining additional data. These terms help in categorizing and fine-tuning resource allocation.
+
+Below are the default terms used in resource analysis, along with their respective durations and thresholds:
+
+
+### Terms, Duration & Threshold Table
+
+| Term | Duration | Threshold |
+|------------|----------|-----------|
+| Short | 1 day | 6 hours |
+| Medium | 7 days | 6 hours |
+| Long | 15 days | 6 hours |
+
+**Duration**: The "duration" in the term analysis refers to the amount of historical data taken into account when assessing resource utilization.
+
+**Threshold**: The "threshold" is an additional buffer period that extends beyond the term duration. This buffer accommodates any potential data gaps or misses within the term window, ensuring a comprehensive analysis.
+
+
+
+## APIs
+
+
+### Create Experiment API
+
This is quick guide instructions to create experiments using input JSON as follows. For a more detailed guide,
see [Create Experiment](/design/CreateExperiment.md)
@@ -16,7 +74,13 @@ see [Create Experiment](/design/CreateExperiment.md)
`curl -H 'Accept: application/json' -X POST --data 'copy paste below JSON' http://:/createExperiment`
-```
+
+
+Example Request
+
+### Example Request
+
+```json
[
{
"version": "1.0",
@@ -51,10 +115,17 @@ see [Create Experiment](/design/CreateExperiment.md)
}
]
```
+
+
**Response**
-```
+
+Example Response
+
+### Example Response
+
+```json
{
"message": "Experiment registered successfully with Autotune. View registered experiments at /listExperiments",
"httpcode": 201,
@@ -62,8 +133,10 @@ see [Create Experiment](/design/CreateExperiment.md)
"status": "SUCCESS"
}
```
+
-## Update Metric Results
+
+### Update Results API
Update metric results using input JSON as follows. For a more detailed guide,
see [Update results](/design/UpdateResults.md)
@@ -82,7 +155,12 @@ see [Update results](/design/UpdateResults.md)
`curl -H 'Accept: application/json' -X POST --data 'copy paste below JSON' http://:/updateResults`
-```
+
+Example Request
+
+### Example Request
+
+```json
[
{
"version": "1.0",
@@ -314,12 +392,17 @@ see [Update results](/design/UpdateResults.md)
]
}
]
-
```
+
**Response**
-```
+
+Example Response
+
+### Example Response
+
+```json
{
"message": "Updated metrics results successfully with Autotune. View update results at /listExperiments",
"httpcode": 201,
@@ -327,6 +410,8 @@ see [Update results](/design/UpdateResults.md)
"status": "SUCCESS"
}
```
+
+
The UpdateResults API has been enhanced to support bulk uploads of up to 100 records at once. When all records are
successfully processed, the API will return the same success response as depicted above. However, if any or all of the
@@ -335,7 +420,12 @@ structure outlined below for handling duplicate records:
**Response**
-```
+
+Example Response
+
+### Example Response
+
+```json
{
"message": "Out of a total of 3 records, 3 failed to save",
"httpcode": 400,
@@ -385,6 +475,8 @@ structure outlined below for handling duplicate records:
]
}
```
+
+
**Response**
@@ -392,7 +484,12 @@ In the response below, among the three records, one record was successfully save
The failed records are indicated in the 'data' attribute using the 'error' attribute, allowing you to identify the
specific attribute causing the failures.
-```
+
+Example Response
+
+### Example Response
+
+```json
{
"message": "Out of a total of 3 records, 2 failed to save",
"httpcode": 400,
@@ -430,6 +527,10 @@ specific attribute causing the failures.
]
}
```
+
+
+
+## List Experiments API
**Request with experiment name parameter**
@@ -451,7 +552,12 @@ Returns the latest result of all the experiments
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"version": "1.0",
@@ -634,6 +740,9 @@ Returns the latest result of all the experiments
]
```
+
+
+
**Request with results set to true and with experiment name parameter**
@@ -655,7 +764,12 @@ Returns all the results of all the experiments
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"version": "1.0",
@@ -882,6 +996,9 @@ Returns all the results of all the experiments
]
```
+
+
+
**Request with results set to true, latest set to false and with experiment name parameter**
@@ -902,7 +1019,12 @@ Returns the latest recommendations of all the experiments
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"version": "1.0",
@@ -1141,6 +1263,9 @@ Returns the latest recommendations of all the experiments
]
```
+
+
+
**Request with recommendations set to true with experiment name parameter**
@@ -1161,7 +1286,12 @@ Returns all the recommendations of all the experiments
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"version": "1.0",
@@ -1438,6 +1568,9 @@ Returns all the recommendations of all the experiments
]
```
+
+
+
**Request with recommendations set to true, latest set to false and with experiment name parameter**
@@ -1472,7 +1605,8 @@ name parameter**
Returns all the recommendations and all the results of the specified experiment.
-## Recommendations
+
+### List Recommendations API
List recommendations output JSON as follows. Some parameters like CPU limit , ENV are optional.
@@ -1496,7 +1630,12 @@ If no parameter is passed API returns all the latest recommendations available f
**Response**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"experiment_name": "experiment_1",
@@ -1899,6 +2038,9 @@ If no parameter is passed API returns all the latest recommendations available f
]
```
+
+
+
**Request with experiment name parameter**
`GET /listRecommendations`
@@ -1909,7 +2051,12 @@ Returns the latest result of that experiment
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"cluster_name": "cluster-one-division-bell",
@@ -2129,6 +2276,9 @@ Returns the latest result of that experiment
]
```
+
+
+
**Request with experiment name parameter and latest set to false**
`GET /listRecommendations`
@@ -2139,7 +2289,12 @@ Returns all the results of that experiment
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0`**
-```
+
+Example Response
+
+### Example Response
+
+```json
[
{
"cluster_name": "cluster-one-division-bell",
@@ -2482,6 +2637,9 @@ Returns all the results of that experiment
]
```
+
+
+
**Request with experiment name parameter and monitoring end time set to a valid timestamp**
`GET /listRecommendations`
@@ -2493,7 +2651,13 @@ Returns the recommendation at a particular timestamp if it exists
**Response for experiment name - `quarkus-resteasy-kruize-min-http-response-time-db_0` and Monitoring End
Time - `2022-12-20T17:55:05.000Z`**
-```
+
+
+Example Response
+
+### Example Response
+
+```json
[
{
"cluster_name": "cluster-one-division-bell",
@@ -2698,13 +2862,17 @@ Time - `2022-12-20T17:55:05.000Z`**
]
```
+
+
+
### Invalid Scenarios:
-**Invalid experiment name**
+
+Invalid experiment name
`experiment_name=stub-experiment`
-```
+```json
{
"message": "Given experiment name - \" stub-experiment \" is not valid",
"httpcode": 400,
@@ -2712,12 +2880,14 @@ Time - `2022-12-20T17:55:05.000Z`**
"status": "ERROR"
}
```
+
-**Invalid Timestamp format**
+
+Invalid Timestamp format
`monitoring_end_time=Tony Stark` (Invalid Timestamp)
-```
+```json
{
"message": "Given timestamp - \" Tony Stark \" is not a valid timestamp format",
"httpcode": 400,
@@ -2725,12 +2895,15 @@ Time - `2022-12-20T17:55:05.000Z`**
"status": "ERROR"
}
```
+
+
-**Non Existing Timestamp**
+
+Non Existing Timestamp
`monitoring_end_time=2022-12-20T17:55:07.000Z`
-```
+```json
{
"message": "Recommendation for timestamp - \" 2022-12-20T17:55:07.000Z \" does not exist",
"httpcode": 400,
@@ -2738,8 +2911,12 @@ Time - `2022-12-20T17:55:05.000Z`**
"status": "ERROR"
}
```
+
+
-## Update Recommendations API
+
+
+### Update Recommendations API
Generate the recommendations for a specific experiment based on provided parameters.
@@ -2780,7 +2957,8 @@ success status code : 201
The response will contain a array of JSON object with the updated recommendations for the specified experiment.
-Example Response Body:
+
+Example Response Body
```json
[
@@ -2894,6 +3072,10 @@ Example Response Body:
]
```
+
+
+
+
**Error Responses**
| HTTP Status Code | Description |
@@ -2906,3 +3088,149 @@ Example Response Body:
| 400 | The Start time should precede the End time! | |
| 500 | Internal Server Error |
+---
+
+## Implementing Retry Mechanism for Kruize API Consumers
+
+When consuming a REST API, it's essential to handle scenarios where the API may respond with errors or encounter
+temporary issues such as a 504 Gateway Timeout. To ensure robustness and reliability, implementing a retry mechanism
+with exponential backoff is a good practice. In this guide, we'll discuss how to implement a retry mechanism with up
+to three retry attempts for the following scenarios:
+
+```POST /createExperiment```
+
+If the API responds with "Profile Name not found," implement retry logic.
+
+
+Example Response
+
+### Example Response
+
+```json
+{
+ "message": "Not Found: performance_profile does not exist: resource-optimization-openshift",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR"
+}
+```
+
+
+
+```POST /updateResults```
+
+If the API responds with:
+
+- "Experiment_name not found"
+- "Profile_name not found"
+
+Implement retry logic.
+
+
+Example Response
+
+### Example Response
+
+```json
+{
+ "message": "Out of a total of 2 records, 1 failed to save",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR",
+ "data": [
+ {
+ "interval_start_time": "2023-04-01T00:00:00.000Z",
+ "interval_end_time": "2023-04-01T00:15:00.000Z",
+ "errors": [
+ {
+ "message": "Not Found: experiment_name does not exist: quarkus-resteasy-kruize-min-http-response-time-db_1_1_1",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR"
+ }
+ ],
+ "version": "3.0",
+ "experiment_name": "quarkus-resteasy-kruize-min-http-response-time-db_1_1_1"
+ }
+ ]
+}
+```
+
+```json
+{
+ "message": "Out of a total of 2 records, 1 failed to save",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR",
+ "data": [
+ {
+ "interval_start_time": "2023-04-01T00:00:00.000Z",
+ "interval_end_time": "2023-04-01T00:15:00.000Z",
+ "errors": [
+ {
+ "message": "Not Found: performance_profile does not exist: resource-optimization-openshift",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR"
+ }
+ ],
+ "version": "3.0",
+ "experiment_name": "quarkus-resteasy-kruize-min-http-response-time-db_1_1"
+ }
+ ]
+}
+```
+
+
+
+
+
+```POST /updateRecommendations?interval_end_time=&experiment_name=```
+
+If the API responds with:
+
+- "Experiment_name not found"
+- "interval_end_time not found"
+
+Implement retry logic.
+
+
+Example Response
+
+### Example Response
+
+```json
+{
+ "message": "Not Found: experiment_name does not exist: quarkus-resteasy-kruize-min-http-response-time-db_1_2",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR"
+}
+```
+
+```json
+{
+ "message": "Not Found: interval_end_time does not exist: 2023-02-02T00:00:00.000Z",
+ "httpcode": 400,
+ "documentationLink": "",
+ "status": "ERROR"
+}
+```
+
+
+
+
+```POST /*```
+
+- Common Scenario
+
+If any of the APIs respond with a "504 Gateway Timeout" error, implement retry logic.
+
+**Retry Mechanism with Exponential Backoff**
+
+The retry mechanism should follow these steps:
+
+- Send the initial API request.
+- If the response indicates an error condition (as mentioned above), initiate the retry logic.
+- Perform a maximum of three retry attempts.
+- Use exponential backoff with jitter to determine the delay before each retry.
diff --git a/manifests/crc/BYODB-installation/minikube/kruize-crc-minikube.yaml b/manifests/crc/BYODB-installation/minikube/kruize-crc-minikube.yaml
index 9c88285a3..a3c858a58 100644
--- a/manifests/crc/BYODB-installation/minikube/kruize-crc-minikube.yaml
+++ b/manifests/crc/BYODB-installation/minikube/kruize-crc-minikube.yaml
@@ -65,7 +65,7 @@ spec:
spec:
containers:
- name: kruize
- image: kruize/autotune_operator:0.0.18_rm
+ image: kruize/autotune_operator:0.0.19.2_rm
imagePullPolicy: Always
volumeMounts:
- name: config-volume
@@ -188,7 +188,7 @@ metadata:
spec:
containers:
- name: kruize-ui-nginx-container
- image: quay.io/kruize/kruize-ui:0.0.1
+ image: quay.io/kruize/kruize-ui:0.0.2
imagePullPolicy: Always
env:
- name: KRUIZE_UI_ENV
diff --git a/manifests/crc/BYODB-installation/openshift/kruize-crc-openshift.yaml b/manifests/crc/BYODB-installation/openshift/kruize-crc-openshift.yaml
index 71c6c5965..ff1b8a8e0 100644
--- a/manifests/crc/BYODB-installation/openshift/kruize-crc-openshift.yaml
+++ b/manifests/crc/BYODB-installation/openshift/kruize-crc-openshift.yaml
@@ -65,7 +65,7 @@ spec:
spec:
containers:
- name: kruize
- image: kruize/autotune_operator:0.0.18_rm
+ image: kruize/autotune_operator:0.0.19.2_rm
imagePullPolicy: Always
volumeMounts:
- name: config-volume
@@ -194,7 +194,7 @@ metadata:
spec:
containers:
- name: kruize-ui-nginx-container
- image: quay.io/kruize/kruize-ui:0.0.1
+ image: quay.io/kruize/kruize-ui:0.0.2
imagePullPolicy: Always
env:
- name: KRUIZE_UI_ENV
diff --git a/manifests/crc/default-db-included-installation/minikube/kruize-crc-minikube.yaml b/manifests/crc/default-db-included-installation/minikube/kruize-crc-minikube.yaml
index 6e8f13272..f511eae52 100644
--- a/manifests/crc/default-db-included-installation/minikube/kruize-crc-minikube.yaml
+++ b/manifests/crc/default-db-included-installation/minikube/kruize-crc-minikube.yaml
@@ -143,7 +143,7 @@ spec:
spec:
containers:
- name: kruize
- image: kruize/autotune_operator:0.0.18_rm
+ image: kruize/autotune_operator:0.0.19.2_rm
imagePullPolicy: Always
volumeMounts:
- name: config-volume
@@ -195,6 +195,47 @@ spec:
port: 8080
targetPort: 8080
---
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+ name: create-partition-cronjob
+ namespace: monitoring
+spec:
+ schedule: "0 0 25 * *" # Run on 25th of every month at midnight
+ jobTemplate:
+ spec:
+ template:
+ spec:
+ containers:
+ - name: kruizecronjob
+ image: kruize/autotune_operator:0.0.19.2_rm
+ imagePullPolicy: Always
+ volumeMounts:
+ - name: config-volume
+ mountPath: /etc/config
+ command:
+ - sh
+ - -c
+ - |
+ /home/autotune/app/target/bin/CreatePartition
+ args: [ "" ]
+ env:
+ - name: START_AUTOTUNE
+ value: "false"
+ - name: LOGGING_LEVEL
+ value: "info"
+ - name: ROOT_LOGGING_LEVEL
+ value: "error"
+ - name: DB_CONFIG_FILE
+ value: "/etc/config/dbconfigjson"
+ - name: KRUIZE_CONFIG_FILE
+ value: "/etc/config/kruizeconfigjson"
+ volumes:
+ - name: config-volume
+ configMap:
+ name: kruizeconfig
+ restartPolicy: OnFailure
+---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
diff --git a/manifests/crc/default-db-included-installation/openshift/kruize-crc-openshift.yaml b/manifests/crc/default-db-included-installation/openshift/kruize-crc-openshift.yaml
index 1c452216c..f0f9d46b0 100644
--- a/manifests/crc/default-db-included-installation/openshift/kruize-crc-openshift.yaml
+++ b/manifests/crc/default-db-included-installation/openshift/kruize-crc-openshift.yaml
@@ -189,7 +189,7 @@ spec:
serviceAccountName: kruize-sa
containers:
- name: kruize
- image: kruize/autotune_operator:0.0.18_rm
+ image: kruize/autotune_operator:0.0.19.2_rm
imagePullPolicy: Always
volumeMounts:
- name: config-volume
@@ -248,6 +248,47 @@ spec:
port: 8080
targetPort: 8080
---
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+ name: create-partition-cronjob
+ namespace: openshift-tuning
+spec:
+ schedule: "0 0 25 * *" # Run on 25th of every month at midnight
+ jobTemplate:
+ spec:
+ template:
+ spec:
+ containers:
+ - name: kruizecronjob
+ image: kruize/autotune_operator:0.0.19.2_rm
+ imagePullPolicy: Always
+ volumeMounts:
+ - name: config-volume
+ mountPath: /etc/config
+ command:
+ - sh
+ - -c
+ - |
+ /home/autotune/app/target/bin/CreatePartition
+ args: [ "" ]
+ env:
+ - name: START_AUTOTUNE
+ value: "false"
+ - name: LOGGING_LEVEL
+ value: "info"
+ - name: ROOT_LOGGING_LEVEL
+ value: "error"
+ - name: DB_CONFIG_FILE
+ value: "/etc/config/dbconfigjson"
+ - name: KRUIZE_CONFIG_FILE
+ value: "/etc/config/kruizeconfigjson"
+ volumes:
+ - name: config-volume
+ configMap:
+ name: kruizeconfig
+ restartPolicy: OnFailure
+---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
diff --git a/migrations/kruize_experiments_ddl.sql b/migrations/kruize_experiments_ddl.sql
index 0d94195a3..8d9002b71 100644
--- a/migrations/kruize_experiments_ddl.sql
+++ b/migrations/kruize_experiments_ddl.sql
@@ -1,11 +1,9 @@
create table IF NOT EXISTS kruize_experiments (experiment_id varchar(255) not null, cluster_name varchar(255), datasource jsonb, experiment_name varchar(255), extended_data jsonb, meta_data jsonb, mode varchar(255), performance_profile varchar(255), status varchar(255), target_cluster varchar(255), version varchar(255), primary key (experiment_id));
create table IF NOT EXISTS kruize_performance_profiles (name varchar(255) not null, k8s_type varchar(255), profile_version float(53) not null, slo jsonb, primary key (name));
-create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) not null, extended_data jsonb, version varchar(255), primary key (cluster_name, experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time);
-create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) not null, duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (cluster_name, experiment_name, interval_end_time, interval_start_time)) PARTITION BY RANGE (interval_end_time);
+create table IF NOT EXISTS kruize_recommendations (interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255), extended_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time)) PARTITION BY RANGE (interval_end_time);
+create table IF NOT EXISTS kruize_results (interval_start_time timestamp(6) not null, interval_end_time timestamp(6) not null, experiment_name varchar(255) not null, cluster_name varchar(255) , duration_minutes float(53) not null, extended_data jsonb, meta_data jsonb, version varchar(255), primary key (experiment_name, interval_end_time, interval_start_time)) PARTITION BY RANGE (interval_end_time);
alter table if exists kruize_experiments add constraint UK_experiment_name unique (experiment_name);
create index IF NOT EXISTS idx_recommendation_experiment_name on kruize_recommendations (experiment_name);
-create index IF NOT EXISTS idx_recommendation_cluster_name on kruize_recommendations (cluster_name);
create index IF NOT EXISTS idx_recommendation_interval_end_time on kruize_recommendations (interval_end_time);
create index IF NOT EXISTS idx_result_experiment_name on kruize_results (experiment_name);
-create index IF NOT EXISTS idx_result_cluster_name on kruize_results (cluster_name);
create index IF NOT EXISTS idx_result_interval_end_time on kruize_results (interval_end_time);
diff --git a/pom.xml b/pom.xml
index f9266c548..6a10f3db3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
org.autotune
autotune
- 0.0.19_mvp
+ 0.0.19.2_mvp
4.13.0
20201115
@@ -187,6 +187,10 @@
com.autotune.Autotune
Autotune
+
+ com.autotune.jobs.CreatePartition
+ CreatePartition
+
diff --git a/scripts/common_utils.sh b/scripts/common_utils.sh
index 3e75ac3a5..ab4c2c0e7 100755
--- a/scripts/common_utils.sh
+++ b/scripts/common_utils.sh
@@ -96,14 +96,17 @@ kruize_crc_start() {
if ($2=="name:") {
prev=$3;
print
- } else if ($1=="image:" && prev=="kruize") {
+ } else if ($1=="image:" && prev=="kruizecronjob") {
+ $2=image_name;
+ printf" %s %s\n", $1, $2;
+ } else if ($1=="image:" && prev=="kruize") {
$2=image_name;
printf" %s %s\n", $1, $2;
} else if ($1=="image:" && prev=="kruize-ui-nginx-container") {
- $2=ui_image_name;
- printf" %s %s\n", $1, $2;
+ $2=ui_image_name;
+ printf" %s %s\n", $1, $2;
} else { print }
- }' ${CRC_MANIFEST_FILE_OLD} >${CRC_MANIFEST_FILE}
+ }' ${CRC_MANIFEST_FILE_OLD} >${CRC_MANIFEST_FILE}
${kubectl_cmd} apply -f ${CRC_MANIFEST_FILE}
check_running kruize ${autotune_ns} kruize-ui
if [ "${err}" != "0" ]; then
diff --git a/src/main/java/com/autotune/Autotune.java b/src/main/java/com/autotune/Autotune.java
index c4710769d..029f84ea9 100644
--- a/src/main/java/com/autotune/Autotune.java
+++ b/src/main/java/com/autotune/Autotune.java
@@ -125,8 +125,11 @@ public static void main(String[] args) {
}
try {
- server.start();
- server.join();
+ String startAutotune = System.getenv("START_AUTOTUNE");
+ if (startAutotune == null || startAutotune.equalsIgnoreCase("true")) {
+ server.start();
+ server.join();
+ }
} catch (Exception e) {
LOGGER.error("Could not start the server!");
e.printStackTrace();
diff --git a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java
index a1c4b7ad4..f2bce065e 100644
--- a/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java
+++ b/src/main/java/com/autotune/analyzer/experiment/ExperimentInitiator.java
@@ -38,6 +38,9 @@
import java.lang.reflect.Field;
import java.sql.Timestamp;
import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT_NAME;
/**
* Initiates new experiment data validations and push into queue for worker to
@@ -76,7 +79,7 @@ public static List getErrorMap(List errorMessages) {
}
);
responses = new ArrayList<>();
- groupSimilarMap.forEach((httpCode,errorText) ->
+ groupSimilarMap.forEach((httpCode, errorText) ->
{
responses.add(
new KruizeResponse(errorText, httpCode, "", "ERROR", null)
@@ -140,21 +143,37 @@ public void validateAndAddExperimentResults(List updateR
.failFast(false)
.buildValidatorFactory()
.getValidator();
-
+ Map mainKruizeExperimentMAP = new ConcurrentHashMap();
for (UpdateResultsAPIObject object : updateResultsAPIObjects) {
- Set> violations = validator.validate(object, UpdateResultsAPIObject.FullValidationSequence.class);
- if (violations.isEmpty()) {
- successUpdateResultsAPIObjects.add(object);
- } else {
- List errorReasons = new ArrayList<>();
- for (ConstraintViolation violation : violations) {
- String propertyPath = violation.getPropertyPath().toString();
- if (null != propertyPath && propertyPath.length() != 0) {
- errorReasons.add(getSerializedName(propertyPath, UpdateResultsAPIObject.class) + ": " + violation.getMessage());
- } else {
- errorReasons.add(violation.getMessage());
+ String experimentName = object.getExperimentName();
+ if (!mainKruizeExperimentMAP.containsKey(experimentName)) {
+ try {
+ new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, experimentName); // TODO try to avoid DB
+ } catch (Exception e) {
+ LOGGER.error(e.getMessage());
+ }
+ }
+ if (mainKruizeExperimentMAP.containsKey(experimentName)) {
+ object.setKruizeObject(mainKruizeExperimentMAP.get(object.getExperimentName()));
+ Set> violations = validator.validate(object, UpdateResultsAPIObject.FullValidationSequence.class);
+ if (violations.isEmpty()) {
+ successUpdateResultsAPIObjects.add(object);
+ } else {
+ List errorReasons = new ArrayList<>();
+ for (ConstraintViolation violation : violations) {
+ String propertyPath = violation.getPropertyPath().toString();
+ if (null != propertyPath && propertyPath.length() != 0) {
+ errorReasons.add(getSerializedName(propertyPath, UpdateResultsAPIObject.class) + ": " + violation.getMessage());
+ } else {
+ errorReasons.add(violation.getMessage());
+ }
}
+ object.setErrors(getErrorMap(errorReasons));
+ failedUpdateResultsAPIObjects.add(object);
}
+ } else {
+ List errorReasons = new ArrayList<>();
+ errorReasons.add(String.format("%s%s", MISSING_EXPERIMENT_NAME, experimentName));
object.setErrors(getErrorMap(errorReasons));
failedUpdateResultsAPIObjects.add(object);
}
diff --git a/src/main/java/com/autotune/analyzer/performanceProfiles/PerformanceProfileInterface/ResourceOptimizationOpenshiftImpl.java b/src/main/java/com/autotune/analyzer/performanceProfiles/PerformanceProfileInterface/ResourceOptimizationOpenshiftImpl.java
index 2d9758d39..1f76d5fd8 100644
--- a/src/main/java/com/autotune/analyzer/performanceProfiles/PerformanceProfileInterface/ResourceOptimizationOpenshiftImpl.java
+++ b/src/main/java/com/autotune/analyzer/performanceProfiles/PerformanceProfileInterface/ResourceOptimizationOpenshiftImpl.java
@@ -43,6 +43,7 @@
import static com.autotune.utils.KruizeConstants.TimeConv.*;
+
/**
* Util class to validate the performance profile metrics with the experiment results metrics.
*/
@@ -62,7 +63,7 @@ private void init() {
CostRecommendationEngine costRecommendationEngine = new CostRecommendationEngine();
// TODO: Create profile based engine
AnalyzerConstants.RegisterRecommendationEngineStatus _unused_status = registerEngine(costRecommendationEngine);
- PerformanceRecommendationEngine performanceRecommendationEngine = new PerformanceRecommendationEngine();
+ PerformanceRecommendationEngine performanceRecommendationEngine = new PerformanceRecommendationEngine();
_unused_status = registerEngine(performanceRecommendationEngine);
// TODO: Add profile based once recommendation algos are available
}
@@ -89,35 +90,26 @@ public List getEngines() {
@Override
public void generateRecommendation(KruizeObject kruizeObject, List experimentResultDataList, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception {
/*
- To restrict the number of rows in the result set, the Load results operation involves locating the appropriate method and configuring the desired limitation.
- It's important to note that in order for the Limit rows feature to function correctly,
- the CreateExperiment API must adhere strictly to the trail settings' measurement duration and should not allow arbitrary values
+            Load the results needed for recommendations in a single window: from (interval_end_time - long-term duration - threshold) to interval_end_time.
*/
- String experiment_name = kruizeObject.getExperimentName();
- int theoriticalLimitRows = (int) ((
- KruizeConstants.RecommendationEngineConstants.DurationBasedEngine.DurationAmount.LONG_TERM_DURATION_DAYS *
- KruizeConstants.DateFormats.MINUTES_FOR_DAY)
- / kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble());
-
- // Adding a 10% of value to make sure there are enough data points to gather the duration required
- // Will be deprecated once we extract exact number of rows from DB based on duration in minutes needed for each term
- int practicalLimitRows = RecommendationUtils.getThreshold(theoriticalLimitRows, 10, true);
-
- if (null != interval_start_time) {
- long diffMilliseconds = interval_end_time.getTime() - interval_start_time.getTime();
- long minutes = diffMilliseconds / (60 * 1000);
- int addToLimitRows = (int) (minutes / kruizeObject.getTrial_settings().getMeasurement_durationMinutes_inDouble());
- LOGGER.debug("add to limit rows set to {}", addToLimitRows);
- practicalLimitRows = practicalLimitRows + addToLimitRows;
- }
- LOGGER.debug("Limit rows set to {}", practicalLimitRows);
-
+ // Convert the Timestamp to a Calendar instance in UTC time zone
+ Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+ cal.setTimeInMillis(interval_end_time.getTime());
+ /*
+         * interval_start_time = interval_end_time - (LONG_TERM_DURATION_DAYS + THRESHOLD) days.
+ * Incorporate a buffer period of "threshold days" to account for potential remote cluster downtime.
+ * This adjustment aims to align the cumulative hours' duration with LONG_TERM_DURATION_DAYS.
+ */
+ cal.add(Calendar.DAY_OF_MONTH, -(KruizeConstants.RecommendationEngineConstants.DurationBasedEngine.DurationAmount.LONG_TERM_DURATION_DAYS +
+ KruizeConstants.RecommendationEngineConstants.DurationBasedEngine.DurationAmount.LONG_TERM_DURATION_DAYS_THRESHOLD));
+        // Get the new Timestamp after subtracting (LONG_TERM_DURATION_DAYS + threshold) days
+ Timestamp calculated_start_time = new Timestamp(cal.getTimeInMillis());
Map mainKruizeExperimentMap = new HashMap<>();
+ String experiment_name = kruizeObject.getExperimentName();
mainKruizeExperimentMap.put(experiment_name, kruizeObject);
new ExperimentDBService().loadResultsFromDBByName(mainKruizeExperimentMap,
experiment_name,
- interval_end_time,
- practicalLimitRows);
+ calculated_start_time, interval_end_time);
//TODO: Will be updated once algo is completed
for (ExperimentResultData experimentResultData : experimentResultDataList) {
if (null != kruizeObject && null != experimentResultData) {
@@ -193,10 +185,10 @@ public void generateRecommendation(KruizeObject kruizeObject, List filteredResultsMap,
- Timestamp timestampToExtract,
- AnalyzerConstants.ResourceSetting resourceSetting,
- AnalyzerConstants.RecommendationItem recommendationItem,
- ArrayList notifications) {
+ Timestamp timestampToExtract,
+ AnalyzerConstants.ResourceSetting resourceSetting,
+ AnalyzerConstants.RecommendationItem recommendationItem,
+ ArrayList notifications) {
Double currentValue = null;
String format = null;
RecommendationConfigItem recommendationConfigItem = null;
@@ -120,8 +120,8 @@ public static double getDurationSummation(ContainerData containerData) {
}
public static Timestamp getMonitoringStartTime(HashMap resultsHashMap,
- Timestamp endTime,
- Double durationInHrs) {
+ Timestamp endTime,
+ Double durationInHrs) {
// Convert the HashMap to a TreeMap to maintain sorted order based on IntervalEndTime
TreeMap sortedResultsHashMap = new TreeMap<>(Collections.reverseOrder());
@@ -155,17 +155,12 @@ public static RecommendationNotification getNotificationForTermAvailability(Reco
recommendationNotification = new RecommendationNotification(RecommendationConstants.RecommendationNotification.INFO_SHORT_TERM_RECOMMENDATIONS_AVAILABLE);
} else if (recommendationTerm.getValue().equalsIgnoreCase(RecommendationConstants.RecommendationTerms.MEDIUM_TERM.getValue())) {
recommendationNotification = new RecommendationNotification(RecommendationConstants.RecommendationNotification.INFO_MEDIUM_TERM_RECOMMENDATIONS_AVAILABLE);
- } else if (recommendationTerm.getValue().equalsIgnoreCase(RecommendationConstants.RecommendationTerms.LONG_TERM.getValue())){
+ } else if (recommendationTerm.getValue().equalsIgnoreCase(RecommendationConstants.RecommendationTerms.LONG_TERM.getValue())) {
recommendationNotification = new RecommendationNotification(RecommendationConstants.RecommendationNotification.INFO_LONG_TERM_RECOMMENDATIONS_AVAILABLE);
}
return recommendationNotification;
}
- public static int getThreshold(int value, int failoverPercentage, boolean direction) {
- if (direction) {
- return Math.round(value + value * (failoverPercentage / 100.0f));
- } else {
- return Math.round(value - value * (failoverPercentage / 100.0f));
- }
- }
+
}
+
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/BaseSO.java b/src/main/java/com/autotune/analyzer/serviceObjects/BaseSO.java
index bdea150e6..ce33262a0 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/BaseSO.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/BaseSO.java
@@ -15,7 +15,6 @@
*******************************************************************************/
package com.autotune.analyzer.serviceObjects;
-import com.autotune.analyzer.serviceObjects.verification.annotators.ExperimentNameExist;
import com.google.gson.annotations.SerializedName;
import org.hibernate.validator.constraints.NotBlank;
@@ -25,7 +24,6 @@ public abstract class BaseSO {
@SerializedName("version")
private String apiVersion;
@NotBlank(groups = InitialValidation.class)
- @ExperimentNameExist(groups = ExperimentNameExistValidation.class)
@SerializedName("experiment_name")
private String experimentName;
@@ -48,7 +46,4 @@ public void setExperimentName(String experimentName) {
public interface InitialValidation {
}
- public interface ExperimentNameExistValidation {
- }
-
}
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
index a5b11e4c3..156dce9e8 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/Converters.java
@@ -182,109 +182,110 @@ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendati
}
return listRecommendationsAPIObject;
}
- public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendationSO(
- KruizeObject kruizeObject,
- boolean getLatest,
- boolean checkForTimestamp,
- String monitoringEndTimestamp) {
- ListRecommendationsAPIObject listRecommendationsAPIObject = new ListRecommendationsAPIObject();
- try {
- listRecommendationsAPIObject.setApiVersion(kruizeObject.getApiVersion());
- listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName());
- listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName());
- List kubernetesAPIObjects = new ArrayList<>();
- KubernetesAPIObject kubernetesAPIObject;
- for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
- kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace());
- HashMap containerDataMap = new HashMap<>();
- List containerAPIObjects = new ArrayList<>();
- for (ContainerData containerData : k8sObject.getContainerDataMap().values()) {
- ContainerAPIObject containerAPIObject;
- // if a Time stamp is passed it holds the priority than latest
- if (checkForTimestamp) {
- // This step causes a performance degradation, need to be replaced with a better flow of creating SO's
- ContainerData clonedContainerData = Utils.getClone(containerData, ContainerData.class);
- if (null != clonedContainerData) {
- HashMap recommendations = clonedContainerData.getContainerRecommendations().getData();
- Date medDate = Utils.DateUtils.getDateFrom(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, monitoringEndTimestamp);
- Timestamp givenTimestamp = new Timestamp(medDate.getTime());
- if (recommendations.containsKey(givenTimestamp)) {
- List tempList = new ArrayList<>();
- for (Timestamp timestamp : recommendations.keySet()) {
- if (!timestamp.equals(givenTimestamp))
- tempList.add(timestamp);
- }
- for (Timestamp timestamp : tempList) {
- recommendations.remove(timestamp);
- }
- clonedContainerData.getContainerRecommendations().setData(recommendations);
- containerAPIObject = new ContainerAPIObject(clonedContainerData.getContainer_name(),
- clonedContainerData.getContainer_image_name(),
- clonedContainerData.getContainerRecommendations(),
- new ArrayList<>(clonedContainerData.getMetrics().values()));
- containerAPIObjects.add(containerAPIObject);
- }
- }
- } else if (getLatest) {
- // This step causes a performance degradation, need to be replaced with a better flow of creating SO's
- containerData = getLatestRecommendations(containerData);
- containerAPIObject = new ContainerAPIObject(containerData.getContainer_name(),
- containerData.getContainer_image_name(),
- containerData.getContainerRecommendations(),
- new ArrayList<>(containerData.getMetrics().values()));
- containerAPIObjects.add(containerAPIObject);
- } else {
- containerAPIObject = new ContainerAPIObject(containerData.getContainer_name(),
- containerData.getContainer_image_name(),
- containerData.getContainerRecommendations(),
- new ArrayList<>(containerData.getMetrics().values()));
- containerAPIObjects.add(containerAPIObject);
- containerDataMap.put(containerData.getContainer_name(), containerData);
- }
- }
- kubernetesAPIObject.setContainerAPIObjects(containerAPIObjects);
- kubernetesAPIObjects.add(kubernetesAPIObject);
- }
- listRecommendationsAPIObject.setKubernetesObjects(kubernetesAPIObjects);
- } catch (Exception e) {
- e.printStackTrace();
- }
- return listRecommendationsAPIObject;
- }
+
+ public static ListRecommendationsAPIObject convertKruizeObjectToListRecommendationSO(
+ KruizeObject kruizeObject,
+ boolean getLatest,
+ boolean checkForTimestamp,
+ String monitoringEndTimestamp) {
+ ListRecommendationsAPIObject listRecommendationsAPIObject = new ListRecommendationsAPIObject();
+ try {
+ listRecommendationsAPIObject.setApiVersion(kruizeObject.getApiVersion());
+ listRecommendationsAPIObject.setExperimentName(kruizeObject.getExperimentName());
+ listRecommendationsAPIObject.setClusterName(kruizeObject.getClusterName());
+ List kubernetesAPIObjects = new ArrayList<>();
+ KubernetesAPIObject kubernetesAPIObject;
+ for (K8sObject k8sObject : kruizeObject.getKubernetes_objects()) {
+ kubernetesAPIObject = new KubernetesAPIObject(k8sObject.getName(), k8sObject.getType(), k8sObject.getNamespace());
+ HashMap containerDataMap = new HashMap<>();
+ List containerAPIObjects = new ArrayList<>();
+ for (ContainerData containerData : k8sObject.getContainerDataMap().values()) {
+ ContainerAPIObject containerAPIObject;
+ // if a Time stamp is passed it holds the priority than latest
+ if (checkForTimestamp) {
+ // This step causes a performance degradation, need to be replaced with a better flow of creating SO's
+ ContainerData clonedContainerData = Utils.getClone(containerData, ContainerData.class);
+ if (null != clonedContainerData) {
+ HashMap recommendations = clonedContainerData.getContainerRecommendations().getData();
+ Date medDate = Utils.DateUtils.getDateFrom(KruizeConstants.DateFormats.STANDARD_JSON_DATE_FORMAT, monitoringEndTimestamp);
+ Timestamp givenTimestamp = new Timestamp(medDate.getTime());
+ if (recommendations.containsKey(givenTimestamp)) {
+ List tempList = new ArrayList<>();
+ for (Timestamp timestamp : recommendations.keySet()) {
+ if (!timestamp.equals(givenTimestamp))
+ tempList.add(timestamp);
+ }
+ for (Timestamp timestamp : tempList) {
+ recommendations.remove(timestamp);
+ }
+ clonedContainerData.getContainerRecommendations().setData(recommendations);
+ containerAPIObject = new ContainerAPIObject(clonedContainerData.getContainer_name(),
+ clonedContainerData.getContainer_image_name(),
+ clonedContainerData.getContainerRecommendations(),
+ new ArrayList<>(clonedContainerData.getMetrics().values()));
+ containerAPIObjects.add(containerAPIObject);
+ }
+ }
+ } else if (getLatest) {
+ // This step causes a performance degradation, need to be replaced with a better flow of creating SO's
+ containerData = getLatestRecommendations(containerData);
+ containerAPIObject = new ContainerAPIObject(containerData.getContainer_name(),
+ containerData.getContainer_image_name(),
+ containerData.getContainerRecommendations(),
+ new ArrayList<>(containerData.getMetrics().values()));
+ containerAPIObjects.add(containerAPIObject);
+ } else {
+ containerAPIObject = new ContainerAPIObject(containerData.getContainer_name(),
+ containerData.getContainer_image_name(),
+ containerData.getContainerRecommendations(),
+ new ArrayList<>(containerData.getMetrics().values()));
+ containerAPIObjects.add(containerAPIObject);
+ containerDataMap.put(containerData.getContainer_name(), containerData);
+ }
+ }
+ kubernetesAPIObject.setContainerAPIObjects(containerAPIObjects);
+ kubernetesAPIObjects.add(kubernetesAPIObject);
+ }
+ listRecommendationsAPIObject.setKubernetesObjects(kubernetesAPIObjects);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return listRecommendationsAPIObject;
+ }
+
/**
- *
* @param containerData
* @return
*/
public static ContainerData getLatestRecommendations(ContainerData containerData) {
- ContainerData clonedContainerData = Utils.getClone(containerData, ContainerData.class);
- if (null != clonedContainerData) {
- HashMap recommendations = clonedContainerData.getContainerRecommendations().getData();
- Timestamp latestTimestamp = null;
- List tempList = new ArrayList<>();
- for (Timestamp timestamp : recommendations.keySet()) {
- if (null == latestTimestamp) {
- latestTimestamp = timestamp;
- } else {
- if (timestamp.after(latestTimestamp)) {
- tempList.add(latestTimestamp);
- latestTimestamp = timestamp;
- } else {
- tempList.add(timestamp);
- }
- }
- }
- for (Timestamp timestamp : tempList) {
- recommendations.remove(timestamp);
- }
- clonedContainerData.getContainerRecommendations().setData(recommendations);
- }
- return clonedContainerData;
- }
+ ContainerData clonedContainerData = Utils.getClone(containerData, ContainerData.class);
+ if (null != clonedContainerData) {
+ HashMap recommendations = clonedContainerData.getContainerRecommendations().getData();
+ Timestamp latestTimestamp = null;
+ List tempList = new ArrayList<>();
+ for (Timestamp timestamp : recommendations.keySet()) {
+ if (null == latestTimestamp) {
+ latestTimestamp = timestamp;
+ } else {
+ if (timestamp.after(latestTimestamp)) {
+ tempList.add(latestTimestamp);
+ latestTimestamp = timestamp;
+ } else {
+ tempList.add(timestamp);
+ }
+ }
+ }
+ for (Timestamp timestamp : tempList) {
+ recommendations.remove(timestamp);
+ }
+ clonedContainerData.getContainerRecommendations().setData(recommendations);
+ }
+ return clonedContainerData;
+ }
+
/**
- *
* @param containerData
*/
public static void getLatestResults(ContainerData containerData) {
@@ -317,6 +318,7 @@ public static ExperimentResultData convertUpdateResultsAPIObjToExperimentResultD
experimentResultData.setIntervalStartTime(updateResultsAPIObject.getStartTimestamp());
experimentResultData.setIntervalEndTime(updateResultsAPIObject.getEndTimestamp());
experimentResultData.setExperiment_name(updateResultsAPIObject.getExperimentName());
+ experimentResultData.setCluster_name(updateResultsAPIObject.getKruizeObject().getClusterName());
List kubernetesAPIObjectList = updateResultsAPIObject.getKubernetesObjects();
List k8sObjectList = new ArrayList<>();
for (KubernetesAPIObject kubernetesAPIObject : kubernetesAPIObjectList) {
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/UpdateResultsAPIObject.java b/src/main/java/com/autotune/analyzer/serviceObjects/UpdateResultsAPIObject.java
index 43bc57867..9a9c728ec 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/UpdateResultsAPIObject.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/UpdateResultsAPIObject.java
@@ -16,6 +16,7 @@
package com.autotune.analyzer.serviceObjects;
import com.autotune.analyzer.exceptions.KruizeResponse;
+import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.serviceObjects.verification.annotators.CompareDate;
import com.autotune.analyzer.serviceObjects.verification.annotators.KubernetesElementsCheck;
import com.autotune.analyzer.serviceObjects.verification.annotators.PerformanceProfileCheck;
@@ -52,6 +53,8 @@ public class UpdateResultsAPIObject extends BaseSO {
private List errors;
+ private KruizeObject kruizeObject;
+
public Timestamp getStartTimestamp() {
return startTimestamp;
}
@@ -93,12 +96,18 @@ public String toString() {
'}';
}
- public interface EvaluateRemainingConstraints {
+ public KruizeObject getKruizeObject() {
+ return kruizeObject;
}
- @GroupSequence({UpdateResultsAPIObject.class, InitialValidation.class, ExperimentNameExistValidation.class, EvaluateRemainingConstraints.class})
- public interface FullValidationSequence {
+ public void setKruizeObject(KruizeObject kruizeObject) {
+ this.kruizeObject = kruizeObject;
}
+ public interface EvaluateRemainingConstraints {
+ }
+ @GroupSequence({UpdateResultsAPIObject.class, InitialValidation.class, EvaluateRemainingConstraints.class})
+ public interface FullValidationSequence {
+ }
}
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/annotators/ExperimentNameExist.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/annotators/ExperimentNameExist.java
deleted file mode 100644
index 8d0724ecb..000000000
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/annotators/ExperimentNameExist.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2023 Red Hat, IBM Corporation and others.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *******************************************************************************/
-package com.autotune.analyzer.serviceObjects.verification.annotators;
-
-import com.autotune.analyzer.serviceObjects.verification.validators.ExperimentNameExistValidator;
-import jakarta.validation.Constraint;
-import jakarta.validation.Payload;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-@Target({ElementType.FIELD})
-@Retention(RetentionPolicy.RUNTIME)
-@Constraint(validatedBy = ExperimentNameExistValidator.class)
-public @interface ExperimentNameExist {
- String message() default "Data does not match DB records";
-
- Class>[] groups() default {};
-
- Class extends Payload>[] payload() default {};
-
-
-}
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/CompareDateValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/CompareDateValidator.java
index caef8701e..6565113e6 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/CompareDateValidator.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/CompareDateValidator.java
@@ -33,11 +33,15 @@ public void initialize(CompareDate constraintAnnotation) {
@Override
public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, ConstraintValidatorContext context) {
boolean success = false;
- if (null != updateResultsAPIObject.getStartTimestamp() && null != updateResultsAPIObject.getEndTimestamp()) {
- int comparisonResult = updateResultsAPIObject.getStartTimestamp().compareTo(updateResultsAPIObject.getEndTimestamp());
- if (comparisonResult < 0) {
- success = true;
+ try {
+ if (null != updateResultsAPIObject.getStartTimestamp() && null != updateResultsAPIObject.getEndTimestamp()) {
+ int comparisonResult = updateResultsAPIObject.getStartTimestamp().compareTo(updateResultsAPIObject.getEndTimestamp());
+ if (comparisonResult < 0) {
+ success = true;
+ }
}
+ } catch (Exception e) {
+ LOGGER.error(e.getMessage());
}
return success;
}
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/ExperimentNameExistValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/ExperimentNameExistValidator.java
deleted file mode 100644
index 2d88053d8..000000000
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/ExperimentNameExistValidator.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2023 Red Hat, IBM Corporation and others.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *******************************************************************************/
-package com.autotune.analyzer.serviceObjects.verification.validators;
-
-import com.autotune.analyzer.serviceObjects.verification.annotators.ExperimentNameExist;
-import com.autotune.analyzer.services.UpdateResults;
-import com.autotune.database.service.ExperimentDBService;
-import jakarta.validation.ConstraintValidator;
-import jakarta.validation.ConstraintValidatorContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ExperimentNameExistValidator implements ConstraintValidator {
- private static final Logger LOGGER = LoggerFactory.getLogger(ExperimentNameExistValidator.class);
-
- // You can inject your database access/repository here to fetch the data
-
- @Override
- public boolean isValid(String experimentName, ConstraintValidatorContext context) {
- boolean success = false;
- String errorMessage = "";
- if (!UpdateResults.mainKruizeExperimentMAP.containsKey(experimentName)) {
- // Retrieve the data from the database
- try {
- new ExperimentDBService().loadExperimentFromDBByName(UpdateResults.mainKruizeExperimentMAP, experimentName);
- } catch (Exception e) {
- LOGGER.error("Loading saved experiment {} failed: {} ", experimentName, e.getMessage());
- errorMessage = String.format("failed to load from DB due to %s", e.getMessage());
- }
- }
-
- if (UpdateResults.mainKruizeExperimentMAP.containsKey(experimentName)) {
- success = true;
- } else {
- context.disableDefaultConstraintViolation();
- context.buildConstraintViolationWithTemplate(String.format("%s not found %s", experimentName, errorMessage))
- .addPropertyNode("")
- .addConstraintViolation();
- }
- return success;
- }
-}
-
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java
index 486df2245..4b577ef42 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/KubernetesElementsValidator.java
@@ -26,6 +26,7 @@
import jakarta.validation.ConstraintValidatorContext;
public class KubernetesElementsValidator implements ConstraintValidator {
+
@Override
public void initialize(KubernetesElementsCheck constraintAnnotation) {
ConstraintValidator.super.initialize(constraintAnnotation);
@@ -34,8 +35,9 @@ public void initialize(KubernetesElementsCheck constraintAnnotation) {
@Override
public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, ConstraintValidatorContext context) {
boolean success = false;
+ String errorMessage = "";
try {
- KruizeObject kruizeObject = UpdateResults.mainKruizeExperimentMAP.get(updateResultsAPIObject.getExperimentName());
+ KruizeObject kruizeObject = updateResultsAPIObject.getKruizeObject();
PerformanceProfile performanceProfile = UpdateResults.performanceProfilesMap.get(kruizeObject.getPerformanceProfile());
ExperimentResultData resultData = Converters.KruizeObjectConverters.convertUpdateResultsAPIObjToExperimentResultData(updateResultsAPIObject);
String expName = kruizeObject.getExperimentName();
@@ -95,6 +97,7 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint
} else {
success = true;
}
+
} catch (Exception e) {
context.disableDefaultConstraintViolation();
context.buildConstraintViolationWithTemplate(e.getMessage())
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java
index b8f7c0f34..a34de275b 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/PerformanceProfileValidator.java
@@ -31,6 +31,8 @@
import java.util.concurrent.ConcurrentHashMap;
+import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_PERF_PROFILE;
+
public class PerformanceProfileValidator implements ConstraintValidator {
private static final Logger LOGGER = LoggerFactory.getLogger(PerformanceProfileValidator.class);
@@ -46,13 +48,19 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint
and then validate the Performance Profile data
*/
try {
- KruizeObject kruizeObject = UpdateResults.mainKruizeExperimentMAP.get(updateResultsAPIObject.getExperimentName());
+ KruizeObject kruizeObject = updateResultsAPIObject.getKruizeObject();
if (UpdateResults.performanceProfilesMap.isEmpty() || !UpdateResults.performanceProfilesMap.containsKey(kruizeObject.getPerformanceProfile())) {
ConcurrentHashMap tempPerformanceProfilesMap = new ConcurrentHashMap<>();
new ExperimentDBService().loadAllPerformanceProfiles(tempPerformanceProfilesMap);
UpdateResults.performanceProfilesMap.putAll(tempPerformanceProfilesMap);
}
- PerformanceProfile performanceProfile = UpdateResults.performanceProfilesMap.get(kruizeObject.getPerformanceProfile());
+ PerformanceProfile performanceProfile = null;
+ if (UpdateResults.performanceProfilesMap.containsKey(kruizeObject.getPerformanceProfile())) {
+ performanceProfile = UpdateResults.performanceProfilesMap.get(kruizeObject.getPerformanceProfile());
+ } else {
+ throw new Exception(String.format("%s%s", MISSING_PERF_PROFILE, kruizeObject.getPerformanceProfile()));
+ }
+
ExperimentResultData resultData = Converters.KruizeObjectConverters.convertUpdateResultsAPIObjToExperimentResultData(updateResultsAPIObject);
// validate the 'resultdata' with the performance profile
String errorMsg = PerformanceProfileUtil.validateResults(performanceProfile, resultData);
@@ -64,10 +72,11 @@ public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, Constraint
.addPropertyNode("Performance profile")
.addConstraintViolation();
}
+
} catch (Exception e) {
context.disableDefaultConstraintViolation();
context.buildConstraintViolationWithTemplate(e.getMessage())
- .addPropertyNode("Performance profile")
+ .addPropertyNode("")
.addConstraintViolation();
}
return success;
diff --git a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/TimeDifferenceValidator.java b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/TimeDifferenceValidator.java
index 37c7603d2..31e98a841 100644
--- a/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/TimeDifferenceValidator.java
+++ b/src/main/java/com/autotune/analyzer/serviceObjects/verification/validators/TimeDifferenceValidator.java
@@ -38,19 +38,26 @@ public void initialize(TimeDifferenceCheck constraintAnnotation) {
@Override
public boolean isValid(UpdateResultsAPIObject updateResultsAPIObject, ConstraintValidatorContext context) {
boolean success = false;
-
- KruizeObject kruizeObject = UpdateResults.mainKruizeExperimentMAP.get(updateResultsAPIObject.getExperimentName());
-
- IntervalResults intervalResults = new IntervalResults(updateResultsAPIObject.getStartTimestamp(), updateResultsAPIObject.getEndTimestamp());
- Double durationInSeconds = intervalResults.getDuration_in_seconds();
- String measurementDurationInMins = kruizeObject.getTrial_settings().getMeasurement_durationMinutes();
- Double parsedMeasurementDuration = Double.parseDouble(measurementDurationInMins.substring(0, measurementDurationInMins.length() - 3));
- // Calculate the lower and upper bounds for the acceptable range i.e. +-5 seconds
- double lowerRange = Math.abs((parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) - (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS));
- double upperRange = (parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) + (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS);
- if ((durationInSeconds >= lowerRange && durationInSeconds <= upperRange))
- success = true;
+ try {
+ KruizeObject kruizeObject = updateResultsAPIObject.getKruizeObject();
+ IntervalResults intervalResults = new IntervalResults(updateResultsAPIObject.getStartTimestamp(), updateResultsAPIObject.getEndTimestamp());
+ Double durationInSeconds = intervalResults.getDuration_in_seconds();
+ String measurementDurationInMins = kruizeObject.getTrial_settings().getMeasurement_durationMinutes();
+ Double parsedMeasurementDuration = Double.parseDouble(measurementDurationInMins.substring(0, measurementDurationInMins.length() - 3));
+ // Calculate the lower and upper bounds for the acceptable range i.e. +-5 seconds
+ double lowerRange = Math.abs((parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) - (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS));
+ double upperRange = (parsedMeasurementDuration * KruizeConstants.TimeConv.NO_OF_SECONDS_PER_MINUTE) + (KruizeConstants.TimeConv.MEASUREMENT_DURATION_THRESHOLD_SECONDS);
+ if ((durationInSeconds >= lowerRange && durationInSeconds <= upperRange))
+ success = true;
+ } catch (Exception e) {
+ LOGGER.error(e.getMessage());
+ e.printStackTrace();
+ context.disableDefaultConstraintViolation();
+ context.buildConstraintViolationWithTemplate(e.getMessage())
+ .addPropertyNode("Time : ")
+ .addConstraintViolation();
+ }
return success;
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
index 9721aae7c..6997367a9 100644
--- a/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
+++ b/src/main/java/com/autotune/analyzer/services/UpdateRecommendations.java
@@ -38,7 +38,6 @@
import io.micrometer.core.instrument.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import io.micrometer.core.instrument.Timer;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
@@ -48,12 +47,17 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.Timestamp;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.CHARACTER_ENCODING;
import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.JSON_CONTENT_TYPE;
+import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_EXPERIMENT_NAME;
+import static com.autotune.analyzer.utils.AnalyzerErrorConstants.AutotuneObjectErrors.MISSING_INTERVAL_END_TIME;
/**
*
@@ -151,19 +155,18 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
List experimentResultDataList = new ArrayList<>();
ExperimentResultData experimentResultData = null;
Map mainKruizeExperimentMAP = new ConcurrentHashMap<>();
+ KruizeObject kruizeObject = null;
try {
- String clusterName = null;
- if (mainKruizeExperimentMAP.containsKey(experiment_name)) {
- clusterName = mainKruizeExperimentMAP.get(experiment_name).getClusterName();
- } else {
- new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, experiment_name);
- request.getServletContext().setAttribute(AnalyzerConstants.EXPERIMENT_MAP, mainKruizeExperimentMAP);
- if (null != mainKruizeExperimentMAP.get(experiment_name)) {
- clusterName = mainKruizeExperimentMAP.get(experiment_name).getClusterName();
- }
+ new ExperimentDBService().loadExperimentFromDBByName(mainKruizeExperimentMAP, experiment_name);
+ if (null != mainKruizeExperimentMAP.get(experiment_name)) {
+ kruizeObject = mainKruizeExperimentMAP.get(experiment_name);
+ }
+ if (null != kruizeObject)
+ experimentResultDataList = new ExperimentDBService().getExperimentResultData(experiment_name, kruizeObject, interval_start_time, interval_end_time); // Todo this object is not required
+ else {
+ sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, String.format("%s%s", MISSING_EXPERIMENT_NAME, experiment_name));
+ return;
}
- if (null != clusterName)
- experimentResultDataList = new ExperimentDBService().getExperimentResultData(experiment_name, clusterName, interval_start_time, interval_end_time); // Todo this object is not required
} catch (Exception e) {
sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
return;
@@ -172,7 +175,6 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
if (experimentResultDataList.size() > 0) {
//generate recommendation
try {
- KruizeObject kruizeObject = mainKruizeExperimentMAP.get(experiment_name);
new ExperimentInitiator().generateAndAddRecommendations(kruizeObject, experimentResultDataList, interval_start_time, interval_end_time); // TODO: experimentResultDataList not required
ValidationOutputData validationOutputData = new ExperimentDBService().addRecommendationToDB(mainKruizeExperimentMAP, experimentResultDataList);
if (validationOutputData.isSuccess()) {
@@ -191,10 +193,10 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
return;
}
} else {
- sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, AnalyzerErrorConstants.APIErrors.UpdateRecommendationsAPI.DATA_NOT_FOUND);
+ sendErrorResponse(response, null, HttpServletResponse.SC_BAD_REQUEST, String.format("%s%s", MISSING_INTERVAL_END_TIME, intervalEndTimeStr));
return;
}
- }catch (Exception e){
+ } catch (Exception e) {
LOGGER.error("Exception: " + e.getMessage());
e.printStackTrace();
sendErrorResponse(response, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
@@ -253,6 +255,7 @@ public boolean shouldSkipClass(Class<?> clazz) {
public void sendErrorResponse(HttpServletResponse response, Exception e, int httpStatusCode, String errorMsg) throws
IOException {
if (null != e) {
+ e.printStackTrace();
LOGGER.error(e.toString());
if (null == errorMsg) errorMsg = e.getMessage();
}
diff --git a/src/main/java/com/autotune/analyzer/services/UpdateResults.java b/src/main/java/com/autotune/analyzer/services/UpdateResults.java
index 341c83fc4..54b34ec05 100644
--- a/src/main/java/com/autotune/analyzer/services/UpdateResults.java
+++ b/src/main/java/com/autotune/analyzer/services/UpdateResults.java
@@ -18,7 +18,6 @@
import com.autotune.analyzer.exceptions.KruizeResponse;
import com.autotune.analyzer.experiment.ExperimentInitiator;
-import com.autotune.analyzer.kruizeObject.KruizeObject;
import com.autotune.analyzer.performanceProfiles.PerformanceProfile;
import com.autotune.analyzer.serviceObjects.FailedUpdateResultsAPIObject;
import com.autotune.analyzer.serviceObjects.UpdateResultsAPIObject;
@@ -43,7 +42,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -58,25 +56,17 @@ public class UpdateResults extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = LoggerFactory.getLogger(UpdateResults.class);
public static ConcurrentHashMap performanceProfilesMap = new ConcurrentHashMap<>();
- public static ConcurrentHashMap mainKruizeExperimentMAP;
@Override
public void init(ServletConfig config) throws ServletException {
super.init(config);
- mainKruizeExperimentMAP = (ConcurrentHashMap) config.getServletContext().getAttribute(AnalyzerConstants.EXPERIMENT_MAP);
- if (mainKruizeExperimentMAP == null)
- mainKruizeExperimentMAP = new ConcurrentHashMap<>();
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String statusValue = "failure";
Timer.Sample timerUpdateResults = Timer.start(MetricsConfig.meterRegistry());
- Map mKruizeExperimentMap = new ConcurrentHashMap();
try {
- int initialSize = mainKruizeExperimentMAP.size();
- performanceProfilesMap = (ConcurrentHashMap) getServletContext()
- .getAttribute(AnalyzerConstants.PerformanceProfileConstants.PERF_PROFILE_MAP);
String inputData = request.getReader().lines().collect(Collectors.joining());
List experimentResultDataList = new ArrayList<>();
List updateResultsAPIObjects = Arrays.asList(new Gson().fromJson(inputData, UpdateResultsAPIObject[].class));
@@ -89,8 +79,6 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
ExperimentInitiator experimentInitiator = new ExperimentInitiator();
experimentInitiator.validateAndAddExperimentResults(updateResultsAPIObjects);
List failureAPIObjs = experimentInitiator.getFailedUpdateResultsAPIObjects();
- if (initialSize != mainKruizeExperimentMAP.size())
- request.getServletContext().setAttribute(AnalyzerConstants.EXPERIMENT_MAP, mainKruizeExperimentMAP);
List jsonObjectList = new ArrayList<>();
if (failureAPIObjs.size() > 0) {
failureAPIObjs.forEach(
diff --git a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
index 7a521d056..6f3fa013e 100644
--- a/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
+++ b/src/main/java/com/autotune/analyzer/utils/AnalyzerErrorConstants.java
@@ -75,7 +75,9 @@ public static final class AutotuneObjectErrors {
public static final String MISSING_SLO_DATA = "No Performance Profile or SLO data is Present!";
public static final String SLO_REDUNDANCY_ERROR = "SLO Data and Performance Profile cannot exist simultaneously!";
public static final String DUPLICATE_PERF_PROFILE = "Performance Profile already exists: ";
- public static final String MISSING_PERF_PROFILE = "Performance Profile doesn't exist : ";
+ public static final String MISSING_PERF_PROFILE = "Not Found: performance_profile does not exist: ";
+ public static final String MISSING_EXPERIMENT_NAME = "Not Found: experiment_name does not exist: ";
+ public static final String MISSING_INTERVAL_END_TIME = "Not Found: interval_end_time does not exist: ";
public static final String UNSUPPORTED_EXPERIMENT = String.format("At present, the system does not support bulk entries!");
public static final String UNSUPPORTED_EXPERIMENT_RESULTS = String.format("At present, the system does not support bulk entries exceeding %s in quantity!", KruizeDeploymentInfo.bulk_update_results_limit);
public static final String UNSUPPORTED_BULK_KUBERNETES = "Bulk Kubernetes objects are currently unsupported!";
diff --git a/src/main/java/com/autotune/common/annotations/json/Exclude.java b/src/main/java/com/autotune/common/annotations/json/Exclude.java
index f78b39f1a..30fe81098 100644
--- a/src/main/java/com/autotune/common/annotations/json/Exclude.java
+++ b/src/main/java/com/autotune/common/annotations/json/Exclude.java
@@ -24,3 +24,4 @@
@Target(ElementType.FIELD)
public @interface Exclude {
}
+
diff --git a/src/main/java/com/autotune/common/data/result/ExperimentResultData.java b/src/main/java/com/autotune/common/data/result/ExperimentResultData.java
index 44905b074..94d7950bf 100644
--- a/src/main/java/com/autotune/common/data/result/ExperimentResultData.java
+++ b/src/main/java/com/autotune/common/data/result/ExperimentResultData.java
@@ -41,6 +41,8 @@ public class ExperimentResultData {
private ValidationOutputData validationOutputData;
private List kubernetes_objects;
+ private String cluster_name;
+
public String getExperiment_name() {
return experiment_name;
@@ -141,4 +143,12 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(experiment_name, intervalEndTime);
}
+
+ public String getCluster_name() {
+ return cluster_name;
+ }
+
+ public void setCluster_name(String cluster_name) {
+ this.cluster_name = cluster_name;
+ }
}
diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAO.java b/src/main/java/com/autotune/database/dao/ExperimentDAO.java
index 9c60d84ce..1978ae7c4 100644
--- a/src/main/java/com/autotune/database/dao/ExperimentDAO.java
+++ b/src/main/java/com/autotune/database/dao/ExperimentDAO.java
@@ -50,7 +50,7 @@ public interface ExperimentDAO {
// Load all results for a particular experimentName
- List loadResultsByExperimentName(String experimentName, String cluster_name, Timestamp interval_start_time, Integer limitRows) throws Exception;
+ List loadResultsByExperimentName(String experimentName, String cluster_name, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception;
// Load all recommendations of a particular experiment
List loadRecommendationsByExperimentName(String experimentName) throws Exception;
diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
index 369b2030b..e4ee3bc80 100644
--- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
+++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java
@@ -26,12 +26,15 @@
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.YearMonth;
-import java.time.format.DateTimeFormatter;
+import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.IntStream;
+import static com.autotune.database.helper.DBConstants.DB_MESSAGES.DUPLICATE_KEY;
+import static com.autotune.database.helper.DBConstants.DB_MESSAGES.DUPLICATE_KEY_ALT;
import static com.autotune.database.helper.DBConstants.SQLQUERY.*;
+import static com.autotune.utils.KruizeConstants.JSONKeys.CLUSTER_NAME;
public class ExperimentDAOImpl implements ExperimentDAO {
private static final long serialVersionUID = 1L;
@@ -77,7 +80,7 @@ public void addPartitions(String tableName, String month, String year, int dayOf
Transaction tx;
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
tx = session.beginTransaction();
- // Create a YearMonth object and get the current month and current year
+ // Create a YearMonth object
YearMonth yearMonth = YearMonth.of(Integer.parseInt(year), Integer.parseInt(month));
// check the partition type and create corresponding query
@@ -90,11 +93,11 @@ public void addPartitions(String tableName, String month, String year, int dayOf
session.createNativeQuery(daterange).executeUpdate();
});
} else if (partitionType.equalsIgnoreCase(DBConstants.PARTITION_TYPES.BY_15_DAYS)) {
- IntStream.range(1, 16).forEach(i -> {
- String daterange = String.format(DB_PARTITION_DATERANGE, tableName, year, month, String.format("%02d", i), tableName,
- year, month, String.format("%02d", i), year, month, String.format("%02d", i));
- session.createNativeQuery(daterange).executeUpdate();
- });
+ IntStream.range(1, 16).forEach(i -> {
+ String daterange = String.format(DB_PARTITION_DATERANGE, tableName, year, month, String.format("%02d", i), tableName,
+ year, month, String.format("%02d", i), year, month, String.format("%02d", i));
+ session.createNativeQuery(daterange).executeUpdate();
+ });
} else if (partitionType.equalsIgnoreCase(DBConstants.PARTITION_TYPES.BY_DAY)) {
String daterange = String.format(DB_PARTITION_DATERANGE, tableName, year, month, String.format("%02d", 1), tableName,
year, month, String.format("%02d", 1), year, month, String.format("%02d", 1));
@@ -166,10 +169,16 @@ public List addToDBAndFetchFailedResults(List addToDBAndFetchFailedResults(List addToDBAndFetchFailedResults(List addToDBAndFetchFailedResults(List loadExperimentByName(String experimentName) t
}
@Override
- public List loadResultsByExperimentName(String experimentName, String cluster_name, Timestamp interval_end_time, Integer limitRows) throws Exception {
+ public List loadResultsByExperimentName(String experimentName, String cluster_name, Timestamp calculated_start_time, Timestamp interval_end_time) throws Exception {
// TODO: load only experimentStatus=inProgress , playback may not require completed experiments
List kruizeResultsEntries = null;
String statusValue = "failure";
+ String clusterCondtionSql = "";
+ if (cluster_name != null)
+ clusterCondtionSql = String.format(" and k.%s = :%s ", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.CLUSTER_NAME);
+ else
+ clusterCondtionSql = String.format(" and k.%s is null ", KruizeConstants.JSONKeys.CLUSTER_NAME);
Timer.Sample timerLoadResultsExpName = Timer.start(MetricsConfig.meterRegistry());
+ LOGGER.debug("startTime : {} , endTime : {}", calculated_start_time, interval_end_time);
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
- if (null != limitRows && null != interval_end_time) {
- kruizeResultsEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_RANGE_AND_LIMIT, KruizeResultsEntry.class)
- .setParameter(KruizeConstants.JSONKeys.CLUSTER_NAME, cluster_name)
+ if (null != calculated_start_time && null != interval_end_time) {
+ Query kruizeResultsEntryQuery = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_RANGE_AND_LIMIT + clusterCondtionSql, KruizeResultsEntry.class)
.setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experimentName)
- .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time)
- .setMaxResults(limitRows)
- .list();
+ .setParameter(KruizeConstants.JSONKeys.CALCULATED_START_TIME, calculated_start_time)
+ .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time);
+ if (cluster_name != null)
+ kruizeResultsEntryQuery.setParameter(CLUSTER_NAME, cluster_name);
+ kruizeResultsEntries = kruizeResultsEntryQuery.list();
statusValue = "success";
} else {
kruizeResultsEntries = session.createQuery(DBConstants.SQLQUERY.SELECT_FROM_RESULTS_BY_EXP_NAME, KruizeResultsEntry.class)
@@ -524,16 +565,23 @@ public List loadRecommendationsByExperimentName(Strin
public KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(String experimentName, String cluster_name, Timestamp interval_end_time) throws Exception {
KruizeRecommendationEntry recommendationEntries = null;
String statusValue = "failure";
+ String clusterCondtionSql = "";
+ if (cluster_name != null)
+ clusterCondtionSql = String.format(" and k.%s = :%s ", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.CLUSTER_NAME);
+ else
+ clusterCondtionSql = String.format(" and k.%s is null ", KruizeConstants.JSONKeys.CLUSTER_NAME);
+
Timer.Sample timerLoadRecExpNameDate = Timer.start(MetricsConfig.meterRegistry());
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
- recommendationEntries = session.createQuery(SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME, KruizeRecommendationEntry.class)
- .setParameter(KruizeConstants.JSONKeys.CLUSTER_NAME, cluster_name)
+ Query kruizeRecommendationEntryQuery = session.createQuery(SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME + clusterCondtionSql, KruizeRecommendationEntry.class)
.setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experimentName)
- .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time)
- .getSingleResult();
+ .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time);
+ if (cluster_name != null)
+ kruizeRecommendationEntryQuery.setParameter(CLUSTER_NAME, cluster_name);
+ recommendationEntries = kruizeRecommendationEntryQuery.getSingleResult();
statusValue = "success";
} catch (NoResultException e) {
- LOGGER.debug("Generating mew recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time);
+ LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time);
} catch (Exception e) {
LOGGER.error("Not able to load recommendations due to {}", e.getMessage());
recommendationEntries = null;
@@ -571,27 +619,29 @@ public List loadPerformanceProfileByName(String p
@Override
public List getKruizeResultsEntry(String experiment_name, String cluster_name, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception {
List kruizeResultsEntryList = new ArrayList<>();
+ String clusterCondtionSql = "";
+ if (cluster_name != null)
+ clusterCondtionSql = String.format(" and k.%s = :%s ", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.CLUSTER_NAME);
+ else
+ clusterCondtionSql = String.format(" and k.%s is null ", KruizeConstants.JSONKeys.CLUSTER_NAME);
try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
-
+ Query kruizeResultsEntryQuery = null;
if (interval_start_time != null && interval_end_time != null) {
- kruizeResultsEntryList = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_START_END_TIME, KruizeResultsEntry.class)
- .setParameter(KruizeConstants.JSONKeys.CLUSTER_NAME, cluster_name)
+ kruizeResultsEntryQuery = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_START_END_TIME + clusterCondtionSql, KruizeResultsEntry.class)
.setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experiment_name)
.setParameter(KruizeConstants.JSONKeys.INTERVAL_START_TIME, interval_start_time)
- .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time)
- .getResultList();
+ .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time);
} else if (interval_end_time != null) {
- kruizeResultsEntryList = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_END_TIME, KruizeResultsEntry.class)
- .setParameter(KruizeConstants.JSONKeys.CLUSTER_NAME, cluster_name)
+ kruizeResultsEntryQuery = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_END_TIME + clusterCondtionSql, KruizeResultsEntry.class)
.setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experiment_name)
- .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time)
- .getResultList();
+ .setParameter(KruizeConstants.JSONKeys.INTERVAL_END_TIME, interval_end_time);
} else {
- kruizeResultsEntryList = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_MAX_END_TIME, KruizeResultsEntry.class)
- .setParameter(KruizeConstants.JSONKeys.CLUSTER_NAME, cluster_name)
- .setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experiment_name)
- .getResultList();
+ kruizeResultsEntryQuery = session.createQuery(SELECT_FROM_RESULTS_BY_EXP_NAME_AND_MAX_END_TIME + clusterCondtionSql, KruizeResultsEntry.class)
+ .setParameter(KruizeConstants.JSONKeys.EXPERIMENT_NAME, experiment_name);
}
+ if (cluster_name != null)
+ kruizeResultsEntryQuery.setParameter(CLUSTER_NAME, cluster_name);
+ kruizeResultsEntryList = kruizeResultsEntryQuery.getResultList();
} catch (NoResultException e) {
LOGGER.error(DBConstants.DB_MESSAGES.DATA_NOT_FOUND_KRUIZE_RESULTS, experiment_name, interval_end_time);
kruizeResultsEntryList = null;
@@ -602,4 +652,15 @@ public List getKruizeResultsEntry(String experiment_name, St
}
return kruizeResultsEntryList;
}
+
+ public YearMonth buildDateForNextMonth(YearMonth yearMonth) {
+ int year = yearMonth.getYear();
+ int month = yearMonth.getMonthValue() + 1; // increment by one as we need to create the partition for the next month
+ if (month > 12) {
+ month = 1;
+ year += 1;
+ }
+ yearMonth = YearMonth.of(year, month);
+ return yearMonth;
+ }
}
diff --git a/src/main/java/com/autotune/database/helper/DBConstants.java b/src/main/java/com/autotune/database/helper/DBConstants.java
index ba5341808..0e9f8cfc5 100644
--- a/src/main/java/com/autotune/database/helper/DBConstants.java
+++ b/src/main/java/com/autotune/database/helper/DBConstants.java
@@ -9,22 +9,34 @@ public static final class SQLQUERY {
public static final String SELECT_FROM_EXPERIMENTS_BY_EXP_NAME = "from KruizeExperimentEntry k WHERE k.experiment_name = :experimentName";
public static final String SELECT_FROM_RESULTS = "from KruizeResultsEntry";
public static final String SELECT_FROM_RESULTS_BY_EXP_NAME = "from KruizeResultsEntry k WHERE k.experiment_name = :experimentName";
- public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_AND_LIMIT = String.format("from KruizeResultsEntry k " +
- "WHERE k.cluster_name = :%s and k.experiment_name = :%s and" +
- " k.interval_end_time <= :%s ORDER BY k.interval_end_time DESC",
- KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
- public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_RANGE_AND_LIMIT = String.format("from KruizeResultsEntry k " +
- "WHERE k.cluster_name = :%s and k.experiment_name = :%s and k.interval_end_time <= :%s " +
- "ORDER BY k.interval_end_time DESC",
- KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
+ public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_DATE_RANGE_AND_LIMIT =
+ String.format("from KruizeResultsEntry k " +
+ "WHERE k.experiment_name = :%s and " +
+ "k.interval_end_time >= :%s and " +
+ "k.interval_end_time <= :%s ",
+ KruizeConstants.JSONKeys.EXPERIMENT_NAME,
+ KruizeConstants.JSONKeys.CALCULATED_START_TIME,
+ KruizeConstants.JSONKeys.INTERVAL_END_TIME);
public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_START_END_TIME = String.format("from KruizeResultsEntry k " +
- "WHERE k.cluster_name = :%s and k.experiment_name = :%s and k.interval_start_time >= :%s and " +
- "k.interval_end_time <= :%s",
- KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_START_TIME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
- public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_END_TIME = String.format("from KruizeResultsEntry k WHERE k.cluster_name = :%s and k.experiment_name = :%s and k.interval_end_time = :%s", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
- public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_MAX_END_TIME = String.format("from KruizeResultsEntry k WHERE k.cluster_name = :%s and k.experiment_name = :%s and k.interval_end_time = (SELECT MAX(e.interval_end_time) FROM KruizeResultsEntry e where e.experiment_name = :%s )", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME);
+ "WHERE k.experiment_name = :%s and k.interval_start_time >= :%s and " +
+ "k.interval_end_time <= :%s ",
+ KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_START_TIME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
+ public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_END_TIME = String.format(
+ "from KruizeResultsEntry k WHERE " +
+ "k.experiment_name = :%s " +
+ "and k.interval_end_time = :%s ",
+ KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
+ public static final String SELECT_FROM_RESULTS_BY_EXP_NAME_AND_MAX_END_TIME = String.format(
+ "from KruizeResultsEntry k WHERE " +
+ "k.experiment_name = :%s and " +
+ "k.interval_end_time = (SELECT MAX(e.interval_end_time) FROM KruizeResultsEntry e where e.experiment_name = :%s ) ",
+ KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME);
public static final String SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME = String.format("from KruizeRecommendationEntry k WHERE k.experiment_name = :experimentName");
- public static final String SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME = String.format("from KruizeRecommendationEntry k WHERE k.cluster_name= :%s and k.experiment_name = :%s and k.interval_end_time= :%s", KruizeConstants.JSONKeys.CLUSTER_NAME, KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
+ public static final String SELECT_FROM_RECOMMENDATIONS_BY_EXP_NAME_AND_END_TIME = String.format(
+ "from KruizeRecommendationEntry k WHERE " +
+ "k.experiment_name = :%s and " +
+ "k.interval_end_time= :%s ",
+ KruizeConstants.JSONKeys.EXPERIMENT_NAME, KruizeConstants.JSONKeys.INTERVAL_END_TIME);
public static final String SELECT_FROM_RECOMMENDATIONS = "from KruizeRecommendationEntry";
public static final String SELECT_FROM_PERFORMANCE_PROFILE = "from KruizePerformanceProfileEntry";
public static final String SELECT_FROM_PERFORMANCE_PROFILE_BY_NAME = "from KruizePerformanceProfileEntry k WHERE k.name = :name";
@@ -46,10 +58,14 @@ public static final class PARTITION_TYPES {
public static final String BY_MONTH = "by_month";
public static final String BY_15_DAYS = "by_fifteen_days";
public static final String BY_DAY = "by_day";
+ public static final int PARTITION_DAY = 25;
+ public static final int LAST_N_DAYS = 15;
}
+
public static final class DB_MESSAGES {
public static final String RECORD_ALREADY_EXISTS = "A record with the name %s already exists within the timestamp range starting from %s and ending on %s.";
public static final String DUPLICATE_KEY = "duplicate key value";
+ public static final String DUPLICATE_KEY_ALT = "A different object with the same identifier value was already associated with the session";
public static final String NO_PARTITION_RELATION = "no partition of relation";
public static final String CREATE_PARTITION_RETRY = "Create partition and retry !";
public static final String INVALID_PARTITION_TYPE = "Invalid Partition Type";
diff --git a/src/main/java/com/autotune/database/helper/DBHelpers.java b/src/main/java/com/autotune/database/helper/DBHelpers.java
index 9f3c43e45..ff52c785a 100644
--- a/src/main/java/com/autotune/database/helper/DBHelpers.java
+++ b/src/main/java/com/autotune/database/helper/DBHelpers.java
@@ -305,7 +305,7 @@ public static KruizeResultsEntry convertExperimentResultToExperimentResultsTable
kruizeResultsEntry = new KruizeResultsEntry();
kruizeResultsEntry.setVersion(experimentResultData.getVersion());
kruizeResultsEntry.setExperiment_name(experimentResultData.getExperiment_name());
- kruizeResultsEntry.setCluster_name(UpdateResults.mainKruizeExperimentMAP.get(experimentResultData.getExperiment_name()).getClusterName());
+ kruizeResultsEntry.setCluster_name(experimentResultData.getCluster_name());
kruizeResultsEntry.setInterval_start_time(experimentResultData.getIntervalStartTime());
kruizeResultsEntry.setInterval_end_time(experimentResultData.getIntervalEndTime());
kruizeResultsEntry.setDuration_minutes(
@@ -324,6 +324,7 @@ public static KruizeResultsEntry convertExperimentResultToExperimentResultsTable
} catch (JsonProcessingException e) {
throw new Exception("Error while creating Extended data due to : " + e.getMessage());
}
+
} catch (Exception e) {
kruizeResultsEntry = null;
LOGGER.error("Error while converting ExperimentResultData to ExperimentResultsTable due to {}", e.getMessage());
diff --git a/src/main/java/com/autotune/database/service/ExperimentDBService.java b/src/main/java/com/autotune/database/service/ExperimentDBService.java
index 7f790e9de..7a2422ac8 100644
--- a/src/main/java/com/autotune/database/service/ExperimentDBService.java
+++ b/src/main/java/com/autotune/database/service/ExperimentDBService.java
@@ -142,16 +142,17 @@ public void loadAllPerformanceProfiles(Map performan
}
}
- public void loadResultsFromDBByName(Map mainKruizeExperimentMap, String experimentName, Timestamp interval_end_time, Integer limitRows) throws Exception {
+ public void loadResultsFromDBByName(Map mainKruizeExperimentMap, String experimentName, Timestamp calculated_start_time, Timestamp interval_end_time) throws Exception {
ExperimentInterface experimentInterface = new ExperimentInterfaceImpl();
KruizeObject kruizeObject = mainKruizeExperimentMap.get(experimentName);
// Load results from the DB and save to local
- List kruizeResultsEntries = experimentDAO.loadResultsByExperimentName(experimentName, kruizeObject.getClusterName(), interval_end_time, limitRows);
+ List kruizeResultsEntries = experimentDAO.loadResultsByExperimentName(experimentName, kruizeObject.getClusterName(), calculated_start_time, interval_end_time);
if (null != kruizeResultsEntries && !kruizeResultsEntries.isEmpty()) {
List updateResultsAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertResultEntryToUpdateResultsAPIObject(kruizeResultsEntries);
if (null != updateResultsAPIObjects && !updateResultsAPIObjects.isEmpty()) {
List resultDataList = new ArrayList<>();
for (UpdateResultsAPIObject updateResultsAPIObject : updateResultsAPIObjects) {
+ updateResultsAPIObject.setKruizeObject(kruizeObject);
try {
ExperimentResultData experimentResultData = Converters.KruizeObjectConverters.convertUpdateResultsAPIObjToExperimentResultData(updateResultsAPIObject);
if (experimentResultData != null)
@@ -204,11 +205,16 @@ public ValidationOutputData addExperimentToDB(CreateExperimentAPIObject createEx
public List addResultsToDB(List resultDataList) {
List kruizeResultsEntryList = new ArrayList<>();
List failedUpdateResultsAPIObjects = new ArrayList<>();
+ List failedResultsEntries = new ArrayList<>();
for (ExperimentResultData resultData : resultDataList) {
KruizeResultsEntry kruizeResultsEntry = DBHelpers.Converters.KruizeObjectConverters.convertExperimentResultToExperimentResultsTable(resultData);
- kruizeResultsEntryList.add(kruizeResultsEntry);
+ if (null != kruizeResultsEntry.getErrorReasons() && kruizeResultsEntry.getErrorReasons().size() > 0) {
+ failedResultsEntries.add(kruizeResultsEntry);
+ } else {
+ kruizeResultsEntryList.add(kruizeResultsEntry);
+ }
}
- List failedResultsEntries = experimentDAO.addToDBAndFetchFailedResults(kruizeResultsEntryList);
+ failedResultsEntries.addAll(experimentDAO.addToDBAndFetchFailedResults(kruizeResultsEntryList));
failedUpdateResultsAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertResultEntryToUpdateResultsAPIObject(failedResultsEntries);
return failedUpdateResultsAPIObjects;
}
@@ -342,12 +348,13 @@ public boolean updateExperimentStatus(KruizeObject kruizeObject, AnalyzerConstan
}
- public List getExperimentResultData(String experiment_name, String clusterName, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception {
+ public List getExperimentResultData(String experiment_name, KruizeObject kruizeObject, Timestamp interval_start_time, Timestamp interval_end_time) throws Exception {
List experimentResultDataList = new ArrayList<>();
- List kruizeResultsEntryList = experimentDAO.getKruizeResultsEntry(experiment_name, clusterName, interval_start_time, interval_end_time);
+ List kruizeResultsEntryList = experimentDAO.getKruizeResultsEntry(experiment_name, kruizeObject.getClusterName(), interval_start_time, interval_end_time);
if (null != kruizeResultsEntryList) {
List updateResultsAPIObjects = DBHelpers.Converters.KruizeObjectConverters.convertResultEntryToUpdateResultsAPIObject(kruizeResultsEntryList);
for (UpdateResultsAPIObject updateObject : updateResultsAPIObjects) {
+ updateObject.setKruizeObject(kruizeObject);
experimentResultDataList.add(
Converters.KruizeObjectConverters.convertUpdateResultsAPIObjToExperimentResultData(updateObject)
);
diff --git a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
index 845bd30c1..9dfd076b1 100644
--- a/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
+++ b/src/main/java/com/autotune/database/table/KruizeRecommendationEntry.java
@@ -13,10 +13,6 @@
name = "idx_recommendation_experiment_name",
columnList = "experiment_name",
unique = false),
- @Index(
- name = "idx_recommendation_cluster_name",
- columnList = "cluster_name",
- unique = false),
@Index(
name = "idx_recommendation_interval_end_time",
columnList = "interval_end_time",
diff --git a/src/main/java/com/autotune/database/table/KruizeResultsEntry.java b/src/main/java/com/autotune/database/table/KruizeResultsEntry.java
index 3c99d61fb..4e2082d1f 100644
--- a/src/main/java/com/autotune/database/table/KruizeResultsEntry.java
+++ b/src/main/java/com/autotune/database/table/KruizeResultsEntry.java
@@ -33,9 +33,6 @@
@Index(
name = "idx_result_experiment_name",
columnList = "experiment_name"),
- @Index(
- name = "idx_result_cluster_name",
- columnList = "experiment_name"),
@Index(
name = "idx_result_interval_end_time",
columnList = "interval_end_time")
diff --git a/src/main/java/com/autotune/jobs/CreatePartition.java b/src/main/java/com/autotune/jobs/CreatePartition.java
index 72d0d1154..347d7d7a3 100644
--- a/src/main/java/com/autotune/jobs/CreatePartition.java
+++ b/src/main/java/com/autotune/jobs/CreatePartition.java
@@ -3,31 +3,55 @@
import com.autotune.analyzer.exceptions.K8sTypeNotSupportedException;
import com.autotune.analyzer.exceptions.MonitoringAgentNotFoundException;
import com.autotune.analyzer.exceptions.MonitoringAgentNotSupportedException;
+import com.autotune.database.dao.ExperimentDAOImpl;
+import com.autotune.database.helper.DBConstants;
import com.autotune.database.init.KruizeHibernateUtil;
import com.autotune.operator.InitializeDeployment;
+import com.autotune.utils.MetricsConfig;
+import io.micrometer.core.instrument.Timer;
import org.hibernate.Session;
-import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.time.YearMonth;
+
public class CreatePartition {
private static final Logger LOGGER = LoggerFactory.getLogger(CreatePartition.class);
public static void main(String[] args) {
LOGGER.info("Checking Liveliness probe DB connection...");
+ Transaction tx = null;
+ String statusValue = "failure";
+ Timer.Sample timerAddBulkResultsDB = Timer.start(MetricsConfig.meterRegistry());
try {
InitializeDeployment.setup_deployment_info();
+ // create partitions
+ try (Session session = KruizeHibernateUtil.getSessionFactory().openSession()) {
+ tx = session.beginTransaction();
+ // Get the current year and month
+ YearMonth yearMonth = new ExperimentDAOImpl().buildDateForNextMonth(YearMonth.now());
+ // Fixing the partition type to 'by_month'
+ new ExperimentDAOImpl().addPartitions(DBConstants.TABLE_NAMES.KRUIZE_RESULTS, String.format("%02d", yearMonth.getMonthValue()), String.valueOf(yearMonth.getYear()), 1, DBConstants.PARTITION_TYPES.BY_MONTH);
+ new ExperimentDAOImpl().addPartitions(DBConstants.TABLE_NAMES.KRUIZE_RECOMMENDATIONS, String.format("%02d", yearMonth.getMonthValue()), String.valueOf(yearMonth.getYear()), 1, DBConstants.PARTITION_TYPES.BY_MONTH);
+ statusValue = "success";
+ tx.commit();
+ LOGGER.info("Partition creation successful!");
+ } catch (Exception partitionException) {
+                LOGGER.error("Partition creation failed: {}", partitionException.getMessage(), partitionException);
+                if (null != tx) tx.rollback();
+ } finally {
+ if (null != timerAddBulkResultsDB) {
+ MetricsConfig.timerAddBulkResultsDB = MetricsConfig.timerBAddBulkResultsDB.tag("status", statusValue).register(MetricsConfig.meterRegistry());
+ timerAddBulkResultsDB.stop(MetricsConfig.timerAddBulkResultsDB);
+ }
+ }
} catch (Exception | K8sTypeNotSupportedException | MonitoringAgentNotSupportedException |
MonitoringAgentNotFoundException e) {
e.printStackTrace();
System.exit(1);
}
- SessionFactory factory = KruizeHibernateUtil.getSessionFactory();
-
- Session session = factory.openSession();
-
- session.close();
LOGGER.info("DB Liveliness probe connection successful!");
}
}
diff --git a/src/main/java/com/autotune/utils/KruizeConstants.java b/src/main/java/com/autotune/utils/KruizeConstants.java
index 8e4012cf2..f11ea8675 100644
--- a/src/main/java/com/autotune/utils/KruizeConstants.java
+++ b/src/main/java/com/autotune/utils/KruizeConstants.java
@@ -187,6 +187,8 @@ public static final class JSONKeys {
public static final String CONTAINER_IMAGE_NAME = "container_image_name";
public static final String RECOMMENDATION_SETTINGS = "recommendation_settings";
public static final String INTERVAL_START_TIME = "interval_start_time";
+
+ public static final String CALCULATED_START_TIME = "calculated_start_time";
public static final String INTERVAL_END_TIME = "interval_end_time";
public static final String DURATION_IN_MINUTES = "duration_in_minutes";
public static final String DURATION_IN_HOURS = "duration_in_hours";
@@ -442,7 +444,7 @@ public static final class DurationAmount {
public static final int SHORT_TERM_DURATION_DAYS = 1;
public static final int MEDIUM_TERM_DURATION_DAYS = 7;
public static final int LONG_TERM_DURATION_DAYS = 15;
-
+ public static final int LONG_TERM_DURATION_DAYS_THRESHOLD = 2;
private DurationAmount() {
}