From 8bf74c84036b123d3e86f7dde39da902f8544baa Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Tue, 26 Nov 2024 02:10:13 +0530 Subject: [PATCH 1/5] add filtration changes in the bulk API Signed-off-by: Saad Khan --- .../analyzer/services/DSMetadataService.java | 2 +- .../analyzer/workerimpl/BulkJobManager.java | 34 +++++++-- .../dataSourceQueries/DataSourceQueries.java | 8 +- .../common/datasource/DataSourceManager.java | 21 ++++-- .../DataSourceMetadataOperator.java | 75 ++++++++++++++++--- 5 files changed, 113 insertions(+), 27 deletions(-) diff --git a/src/main/java/com/autotune/analyzer/services/DSMetadataService.java b/src/main/java/com/autotune/analyzer/services/DSMetadataService.java index 4f786b419..f24bf190a 100644 --- a/src/main/java/com/autotune/analyzer/services/DSMetadataService.java +++ b/src/main/java/com/autotune/analyzer/services/DSMetadataService.java @@ -133,7 +133,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) return; } - DataSourceMetadataInfo metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource,"",0,0,0); + DataSourceMetadataInfo metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource,"",0,0,0, null, null); // Validate imported metadataInfo object DataSourceMetadataValidation validationObject = new DataSourceMetadataValidation(); diff --git a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java index c4eb77237..ebf2a6aa4 100644 --- a/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java +++ b/src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java @@ -50,6 +50,7 @@ import java.util.concurrent.Executors; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static com.autotune.operator.KruizeDeploymentInfo.bulk_thread_pool_size; import static com.autotune.utils.KruizeConstants.KRUIZE_BULK_API.*; @@ -121,8 +122,15 
@@ public void run() { DataSourceMetadataInfo metadataInfo = null; DataSourceManager dataSourceManager = new DataSourceManager(); DataSourceInfo datasource = null; + String labelString = null; + Map includeResourcesMap = null; + Map excludeResourcesMap = null; try { - String labelString = getLabels(this.bulkInput.getFilter()); + if (this.bulkInput.getFilter() != null) { + labelString = getLabels(this.bulkInput.getFilter()); + includeResourcesMap = buildRegexFilters(this.bulkInput.getFilter().getInclude()); + excludeResourcesMap = buildRegexFilters(this.bulkInput.getFilter().getExclude()); + } if (null == this.bulkInput.getDatasource()) { this.bulkInput.setDatasource(CREATE_EXPERIMENT_CONFIG_BEAN.getDatasourceName()); } @@ -137,10 +145,13 @@ public void run() { } if (null != datasource) { JSONObject daterange = processDateRange(this.bulkInput.getTime_range()); - if (null != daterange) - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get(START_TIME), (Long) daterange.get(END_TIME), (Integer) daterange.get(STEPS)); + if (null != daterange) { + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, (Long) daterange.get(START_TIME), + (Long) daterange.get(END_TIME), (Integer) daterange.get(STEPS), includeResourcesMap, excludeResourcesMap); + } else { - metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, 0); + metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource, labelString, 0, 0, + 0, includeResourcesMap, excludeResourcesMap); } if (null == metadataInfo) { setFinalJobStatus(COMPLETED,String.valueOf(HttpURLConnection.HTTP_OK),NOTHING_INFO,datasource); @@ -314,7 +325,7 @@ private String getLabels(BulkInput.FilterWrapper filter) { String uniqueKey = null; try { // Process labels in the 'include' section - if (filter != null && filter.getInclude() != null) { + if (filter.getInclude() != null) { // Initialize StringBuilder for 
uniqueKey StringBuilder includeLabelsBuilder = new StringBuilder(); Map includeLabels = filter.getInclude().getLabels(); @@ -337,6 +348,19 @@ private String getLabels(BulkInput.FilterWrapper filter) { return uniqueKey; } + private Map buildRegexFilters(BulkInput.Filter filter) { + Map resourceFilters = new HashMap<>(); + if (filter != null) { + resourceFilters.put("namespaceRegex", filter.getNamespace() != null ? + filter.getNamespace().stream().map(String::trim).collect(Collectors.joining("|")) : ""); + resourceFilters.put("workloadRegex", filter.getWorkload() != null ? + filter.getWorkload().stream().map(String::trim).collect(Collectors.joining("|")) : ""); + resourceFilters.put("containerRegex", filter.getContainers() != null ? + filter.getContainers().stream().map(String::trim).collect(Collectors.joining("|")) : ""); + } + return resourceFilters; + } + private JSONObject processDateRange(BulkInput.TimeRange timeRange) { //TODO: add validations for the time range JSONObject dateRange = null; diff --git a/src/main/java/com/autotune/common/data/dataSourceQueries/DataSourceQueries.java b/src/main/java/com/autotune/common/data/dataSourceQueries/DataSourceQueries.java index a06d016aa..a889566c4 100644 --- a/src/main/java/com/autotune/common/data/dataSourceQueries/DataSourceQueries.java +++ b/src/main/java/com/autotune/common/data/dataSourceQueries/DataSourceQueries.java @@ -7,12 +7,12 @@ */ public class DataSourceQueries { public enum PromQLQuery { - NAMESPACE_QUERY("sum by (namespace) ( avg_over_time(kube_namespace_status_phase{namespace!=\"\" ADDITIONAL_LABEL}[15d]))"), - WORKLOAD_INFO_QUERY("sum by (namespace, workload, workload_type) ( avg_over_time(namespace_workload_pod:kube_pod_owner:relabel{workload!=\"\" ADDITIONAL_LABEL}[15d]))"), + NAMESPACE_QUERY("sum by (namespace) ( avg_over_time(kube_namespace_status_phase{%s ADDITIONAL_LABEL}[15d]))"), + WORKLOAD_INFO_QUERY("sum by (namespace, workload, workload_type) ( 
avg_over_time(namespace_workload_pod:kube_pod_owner:relabel{%s ADDITIONAL_LABEL}[15d]))"), CONTAINER_INFO_QUERY("sum by (container, image, workload, workload_type, namespace) (" + - " avg_over_time(kube_pod_container_info{container!=\"\" ADDITIONAL_LABEL}[15d]) *" + + " avg_over_time(kube_pod_container_info{%s ADDITIONAL_LABEL}[15d]) *" + " on (pod, namespace,prometheus_replica) group_left(workload, workload_type)" + - " avg_over_time(namespace_workload_pod:kube_pod_owner:relabel{workload!=\"\" ADDITIONAL_LABEL}[15d])" + + " avg_over_time(namespace_workload_pod:kube_pod_owner:relabel{workload!~\"\" ADDITIONAL_LABEL}[15d])" + ")"); private final String query; diff --git a/src/main/java/com/autotune/common/datasource/DataSourceManager.java b/src/main/java/com/autotune/common/datasource/DataSourceManager.java index a8401970c..142f97c4b 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceManager.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceManager.java @@ -62,15 +62,20 @@ public DataSourceManager() { * @param startTime Get metadata from starttime to endtime * @param endTime Get metadata from starttime to endtime * @param steps the interval between data points in a range query + * @param includeResources + * @param excludeResources * @return */ - public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws DataSourceDoesNotExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + public DataSourceMetadataInfo importMetadataFromDataSource(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, + long endTime, int steps, Map includeResources, + Map excludeResources) throws DataSourceDoesNotExist, IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { if (null == dataSourceInfo) { throw new 
DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); } - DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo, uniqueKey, startTime, endTime, steps); + DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.createDataSourceMetadata(dataSourceInfo, + uniqueKey, startTime, endTime, steps, includeResources, excludeResources); if (null == dataSourceMetadataInfo) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); + LOGGER.error(DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceInfo.getName()); return null; } return dataSourceMetadataInfo; @@ -91,7 +96,7 @@ public DataSourceMetadataInfo getMetadataFromDataSource(DataSourceInfo dataSourc String dataSourceName = dataSource.getName(); DataSourceMetadataInfo dataSourceMetadataInfo = dataSourceMetadataOperator.getDataSourceMetadataInfo(dataSource); if (null == dataSourceMetadataInfo) { - LOGGER.error(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceName); + LOGGER.error(DATASOURCE_METADATA_INFO_NOT_AVAILABLE, "for datasource {}" + dataSourceName); return null; } return dataSourceMetadataInfo; @@ -116,9 +121,9 @@ public void updateMetadataFromDataSource(DataSourceInfo dataSource, DataSourceMe throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceErrorMsgs.MISSING_DATASOURCE_INFO); } if (null == dataSourceMetadataInfo) { - throw new DataSourceDoesNotExist(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.DATASOURCE_METADATA_INFO_NOT_AVAILABLE); + throw new DataSourceDoesNotExist(DATASOURCE_METADATA_INFO_NOT_AVAILABLE); } - dataSourceMetadataOperator.updateDataSourceMetadata(dataSource, "", 0, 0, 0); + 
dataSourceMetadataOperator.updateDataSourceMetadata(dataSource, "", 0, 0, 0, null, null); } catch (Exception e) { LOGGER.error(e.getMessage()); } @@ -236,7 +241,7 @@ public DataSourceInfo fetchDataSourceFromDBByName(String dataSourceName) { DataSourceInfo datasource = new ExperimentDBService().loadDataSourceFromDBByName(dataSourceName); return datasource; } catch (Exception e) { - LOGGER.error(String.format(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.LOAD_DATASOURCE_FROM_DB_ERROR, dataSourceName, e.getMessage())); + LOGGER.error(String.format(LOAD_DATASOURCE_FROM_DB_ERROR, dataSourceName, e.getMessage())); } return null; } @@ -256,7 +261,7 @@ public DataSourceMetadataInfo fetchDataSourceMetadataFromDBByName(String dataSou DataSourceMetadataInfo metadataInfo = new ExperimentDBService().loadMetadataFromDBByName(dataSourceName, verbose); return metadataInfo; } catch (Exception e) { - LOGGER.error(String.format(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.LOAD_DATASOURCE_METADATA_FROM_DB_ERROR, dataSourceName, e.getMessage())); + LOGGER.error(String.format(LOAD_DATASOURCE_METADATA_FROM_DB_ERROR, dataSourceName, e.getMessage())); } return null; } diff --git a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java index ff50be82d..b74b4c63e 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java @@ -31,7 +31,10 @@ import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; +import java.util.Map; import static com.autotune.analyzer.utils.AnalyzerConstants.ServiceConstants.CHARACTER_ENCODING; @@ -66,10 +69,14 @@ public static DataSourceMetadataOperator getInstance() { * 
@param startTime Get metadata from starttime to endtime * @param endTime Get metadata from starttime to endtime * @param steps the interval between data points in a range query - * TODO - support multiple data sources + * TODO - support multiple data sources + * @param includeResources + * @param excludeResources */ - public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { - return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); + public DataSourceMetadataInfo createDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, + long endTime, int steps, Map includeResources, + Map excludeResources) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps, includeResources, excludeResources); } /** @@ -111,8 +118,10 @@ public DataSourceMetadataInfo getDataSourceMetadataInfo(DataSourceInfo dataSourc * TODO - Currently Create and Update functions have identical functionalities, based on UI workflow and requirements * need to further enhance updateDataSourceMetadata() to support namespace, workload level granular updates */ - public DataSourceMetadataInfo updateDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws Exception { - return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, uniqueKey, startTime, endTime, steps); + public DataSourceMetadataInfo updateDataSourceMetadata(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, + long endTime, int steps, Map includeResources, + Map excludeResources) throws Exception { + return processQueriesAndPopulateDataSourceMetadataInfo(dataSourceInfo, 
uniqueKey, startTime, endTime, steps, includeResources, excludeResources); } /** @@ -149,10 +158,15 @@ public void deleteDataSourceMetadata(DataSourceInfo dataSourceInfo) { * @param startTime Get metadata from starttime to endtime * @param endTime Get metadata from starttime to endtime * @param steps the interval between data points in a range query + * @param includeResources + * @param excludeResources * @return DataSourceMetadataInfo object with populated metadata fields * todo rename processQueriesAndFetchClusterMetadataInfo */ - public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { + public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(DataSourceInfo dataSourceInfo, String uniqueKey, + long startTime, long endTime, int steps, + Map includeResources, + Map excludeResources) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { DataSourceMetadataHelper dataSourceDetailsHelper = new DataSourceMetadataHelper(); /** * Get DataSourceOperatorImpl instance on runtime based on dataSource provider @@ -168,11 +182,26 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da * creating a comprehensive DataSourceMetadataInfo object that is then added to a list. 
* TODO - Process cluster metadata using a custom query */ + // Keys for the map + List fields = Arrays.asList("namespace", "workload", "container"); + // Map for storing queries + Map queries = new HashMap<>(); + + // Populate filters for each field + fields.forEach(field -> { + String includeRegex = includeResources.getOrDefault(field + "Regex", ""); + String excludeRegex = excludeResources.getOrDefault(field + "Regex", ""); + String filter = constructDynamicFilter(field, includeRegex, excludeRegex); + String queryTemplate = getQueryTemplate(field); // Helper to map fields to PromQL queries + queries.put(field, String.format(queryTemplate, filter)); + }); + + // Construct queries + String namespaceQuery = queries.get("namespace"); + String workloadQuery = queries.get("workload"); + String containerQuery = queries.get("container"); String dataSourceName = dataSourceInfo.getName(); - String namespaceQuery = PromQLDataSourceQueries.NAMESPACE_QUERY; - String workloadQuery = PromQLDataSourceQueries.WORKLOAD_QUERY; - String containerQuery = PromQLDataSourceQueries.CONTAINER_QUERY; if (null != uniqueKey && !uniqueKey.isEmpty()) { LOGGER.debug("uniquekey: {}", uniqueKey); namespaceQuery = namespaceQuery.replace(KruizeConstants.KRUIZE_BULK_API.ADDITIONAL_LABEL, "," + uniqueKey); @@ -244,6 +273,34 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da } + // Helper function to map fields to query templates + private String getQueryTemplate(String field) { + return switch (field) { + case "namespace" -> PromQLDataSourceQueries.NAMESPACE_QUERY; + case "workload" -> PromQLDataSourceQueries.WORKLOAD_QUERY; + case "container" -> PromQLDataSourceQueries.CONTAINER_QUERY; + default -> throw new IllegalArgumentException("Unknown field: " + field); + }; + } + + String constructDynamicFilter(String field, String includeRegex, String excludeRegex) { + StringBuilder filterBuilder = new StringBuilder(); + if (includeRegex.isEmpty() && excludeRegex.isEmpty()) 
{ + filterBuilder.append(String.format("%s!=''", field)); + } + if (!includeRegex.isEmpty()) { + filterBuilder.append(String.format("%s=~\"%s\"", field, includeRegex)); + } + if (!excludeRegex.isEmpty()) { + if (!filterBuilder.isEmpty()) { + filterBuilder.append(","); + } + filterBuilder.append(String.format("%s!~\"%s\"", field, excludeRegex)); + } + LOGGER.info("filterBuilder: {}", filterBuilder); + return filterBuilder.toString(); + } + private JsonArray fetchQueryResults(DataSourceInfo dataSourceInfo, String query, long startTime, long endTime, int steps) throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException { GenericRestApiClient client = new GenericRestApiClient(dataSourceInfo); String metricsUrl; From b2383c7df563f57e33a16d80287649e73cf7ba31 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Mon, 9 Dec 2024 10:42:10 +0530 Subject: [PATCH 2/5] docs update and minor fix for filter Signed-off-by: Saad Khan --- design/BulkAPI.md | 29 ++++++++++++------- .../analyzer/services/DSMetadataService.java | 2 +- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 2d73b8d0c..566dd5de1 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -28,18 +28,23 @@ progress of the job. 
{ "filter": { "exclude": { - "namespace": [], - "workload": [], - "containers": [], - "labels": {} + "namespace": ["cadvisor", "openshift-tuning", "openshift-monitoring", "thanos-bench"], + "workload": ["osd-rebalance-infra-nodes-28887030", "blackbox-exporter", "thanos-query"], + "containers": ["tfb-0", "alertmanager"], + "labels": { + "org_id": "ABCOrga", + "source_id": "ZZZ", + "cluster_id": "ABG" + } }, "include": { - "namespace": [], - "workload": [], - "containers": [], + "namespace": ["cadvisor", "openshift-tuning", "openshift-monitoring", "thanos-bench"], + "workload": ["osd-rebalance-infra-nodes-28887030", "blackbox-exporter", "thanos-query"], + "containers": ["tfb-0", "alertmanager"], "labels": { - "key1": "value1", - "key2": "value2" + "org_id": "ABCOrga", + "source_id": "ZZZ", + "cluster_id": "ABG" } } }, @@ -105,10 +110,12 @@ The specified time range determines the period over which the data is analyzed t - The `start` timestamp precedes the `end` timestamp. #### 2. **Request Payload with `exclude` filter specified:** -TBA + +- **`exclude`** filters out namespaces like `"cadvisor"` and workloads like `"blackbox-exporter"`, along with containers and labels that match the specified values. So, we'll generate create experiments and generate recommendations for every namespace, workload and containers except those. #### 3. **Request Payload with `include` filter specified:** -TBA + +- **`include`** explicitly selects the namespaces, workloads, containers, and labels to be queried. So, for only those we'll create experiments and get the recommendations. 
### GET Request: diff --git a/src/main/java/com/autotune/analyzer/services/DSMetadataService.java b/src/main/java/com/autotune/analyzer/services/DSMetadataService.java index f24bf190a..762a99d7a 100644 --- a/src/main/java/com/autotune/analyzer/services/DSMetadataService.java +++ b/src/main/java/com/autotune/analyzer/services/DSMetadataService.java @@ -133,7 +133,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) return; } - DataSourceMetadataInfo metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource,"",0,0,0, null, null); + DataSourceMetadataInfo metadataInfo = dataSourceManager.importMetadataFromDataSource(datasource,"",0,0,0, new HashMap<>(), new HashMap<>()); // Validate imported metadataInfo object DataSourceMetadataValidation validationObject = new DataSourceMetadataValidation(); From 88742cf9a05d4608919e50a853fe773795cc75bb Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Fri, 13 Dec 2024 14:46:20 +0530 Subject: [PATCH 3/5] fix conflicts Signed-off-by: Saad Khan --- .../autotune/common/datasource/DataSourceMetadataOperator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java index b74b4c63e..4fb1552e1 100644 --- a/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java +++ b/src/main/java/com/autotune/common/datasource/DataSourceMetadataOperator.java @@ -217,7 +217,7 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da LOGGER.info("containerQuery: {}", containerQuery); JsonArray namespacesDataResultArray = fetchQueryResults(dataSourceInfo, namespaceQuery, startTime, endTime, steps); - LOGGER.debug("namespacesDataResultArray: {}", namespacesDataResultArray); + LOGGER.info("namespacesDataResultArray: {}", namespacesDataResultArray); if (!op.validateResultArray(namespacesDataResultArray)) 
{ dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, null); } else { @@ -226,6 +226,7 @@ public DataSourceMetadataInfo processQueriesAndPopulateDataSourceMetadataInfo(Da * Value: DataSourceNamespace object corresponding to a namespace */ HashMap datasourceNamespaces = dataSourceDetailsHelper.getActiveNamespaces(namespacesDataResultArray); + LOGGER.info("datasourceNamespaces: {}", datasourceNamespaces.keySet()); dataSourceMetadataInfo = dataSourceDetailsHelper.createDataSourceMetadataInfoObject(dataSourceName, datasourceNamespaces); /** From 9d484afec6ff6c25bb289e0aff22a389eec057f3 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Fri, 13 Dec 2024 18:13:22 +0530 Subject: [PATCH 4/5] update filter examples in the design doc Signed-off-by: Saad Khan --- design/BulkAPI.md | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/design/BulkAPI.md b/design/BulkAPI.md index 566dd5de1..d9d1864c4 100644 --- a/design/BulkAPI.md +++ b/design/BulkAPI.md @@ -28,9 +28,9 @@ progress of the job. { "filter": { "exclude": { - "namespace": ["cadvisor", "openshift-tuning", "openshift-monitoring", "thanos-bench"], - "workload": ["osd-rebalance-infra-nodes-28887030", "blackbox-exporter", "thanos-query"], - "containers": ["tfb-0", "alertmanager"], + "namespace": ["openshift-.*"], + "workload": [], + "containers": [], "labels": { "org_id": "ABCOrga", "source_id": "ZZZ", @@ -38,9 +38,9 @@ progress of the job. } }, "include": { - "namespace": ["cadvisor", "openshift-tuning", "openshift-monitoring", "thanos-bench"], - "workload": ["osd-rebalance-infra-nodes-28887030", "blackbox-exporter", "thanos-query"], - "containers": ["tfb-0", "alertmanager"], + "namespace": ["openshift-tuning"], + "workload": [], + "containers": [], "labels": { "org_id": "ABCOrga", "source_id": "ZZZ", @@ -111,11 +111,15 @@ The specified time range determines the period over which the data is analyzed t #### 2. 
**Request Payload with `exclude` filter specified:** -- **`exclude`** filters out namespaces like `"cadvisor"` and workloads like `"blackbox-exporter"`, along with containers and labels that match the specified values. So, we'll generate create experiments and generate recommendations for every namespace, workload and containers except those. +- **`exclude`** As shown in the example above, it filters out all namespaces starting with the name `openshift-`. So, we'll create experiments and generate recommendations for every namespace except those. #### 3. **Request Payload with `include` filter specified:** -- **`include`** explicitly selects the namespaces, workloads, containers, and labels to be queried. So, for only those we'll create experiments and get the recommendations. +- **`include`** As shown in the example above, it selects the namespace `openshift-tuning`. So, we'll create experiments and generate recommendations only for the namespaces matching the specified name. + +#### 4. **Request Payload with both `include` and `exclude` filter specified:** + +- **`include` and `exclude`** As shown in the example above, the `exclude` filter drops all namespaces starting with the name `openshift-` while the `include` filter retains the `openshift-tuning` one. So, we'll create experiments and generate recommendations for the `openshift-tuning` namespace. 
### GET Request: From e7c18b6c61062d76fbca36bd66f218f3d5ad3133 Mon Sep 17 00:00:00 2001 From: Saad Khan Date: Mon, 16 Dec 2024 14:26:08 +0530 Subject: [PATCH 5/5] fix issue when all experiments creation is skipped in case of some null namespaces Signed-off-by: Saad Khan --- .../data/dataSourceMetadata/DataSourceMetadataHelper.java | 2 +- src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceMetadataHelper.java b/src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceMetadataHelper.java index 18a22bd57..c562cf1b4 100644 --- a/src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceMetadataHelper.java +++ b/src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceMetadataHelper.java @@ -427,7 +427,7 @@ public void updateContainerDataSourceMetadataInfoObject(String dataSourceName, D if (null == dataSourceNamespace) { LOGGER.debug(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.INVALID_DATASOURCE_METADATA_NAMESPACE); - return; + continue; } // Iterate over workloads in namespaceWorkloadMap diff --git a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java index a2a1de630..ca2094838 100644 --- a/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java +++ b/src/main/java/com/autotune/database/dao/ExperimentDAOImpl.java @@ -985,7 +985,7 @@ public KruizeRecommendationEntry loadRecommendationsByExperimentNameAndDate(Stri getExperimentTypeInSingleKruizeRecommendationsEntry(recommendationEntries); statusValue = "success"; } catch (NoResultException e) { - LOGGER.debug("Generating new recommendation for Experiment name : %s interval_end_time: %S", experimentName, interval_end_time); + LOGGER.debug("Generating new recommendation for Experiment name : {} interval_end_time: {}", experimentName, 
interval_end_time); } catch (Exception e) { LOGGER.error("Not able to load recommendations due to {}", e.getMessage()); recommendationEntries = null;