From 6c56b19e2ec9dd7c67dba068af6b27dfe8d2c497 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Wed, 14 Feb 2024 11:12:25 +0100 Subject: [PATCH 01/19] Dump metric data to .csv --- main.py | 4 +- requirements.txt | Bin 646 -> 776 bytes src/metrics/scrapper.py | 111 ++++++++++++++++++++++++++++++++++++---- 3 files changed, 102 insertions(+), 13 deletions(-) diff --git a/main.py b/main.py index f247a04..bb331fd 100644 --- a/main.py +++ b/main.py @@ -14,8 +14,8 @@ def main(): v1 = client.CoreV1Api() - scrapper = Scrapper(url, namespace, metrics) - scrapper.make_queries() + scrapper = Scrapper(url, namespace, "test/", metrics) + scrapper.query_and_dump_metrics() if __name__ == '__main__': diff --git a/requirements.txt b/requirements.txt index 21b628cd0c23c40667bd0d7871de6c7b0e7657f1..dc4a0f374e800e718c6255e12c2733709f74830c 100644 GIT binary patch delta 139 zcmZo;?O@x`##CRxkjRk7kOCx&8R{5p8H^b87z}~fn1Ppp3n-e&kk638Py&PnV0mMp zoF!bom?0IYCJCsf2xtmOzX4c3NR str: + query = self._template.replace('$metric', metric) + query = query.replace('$namespace', self._namespace) + promql = scrape_utils.create_promql(self._url, query, 1, 60) + + return promql + + def _make_query(self, promql: str) -> Result[Response, str]: + try: + response = requests.get(promql, timeout=30) + except requests.exceptions.Timeout: + return Err(f'Timeout error.') + + if response.ok: + return Ok(response) + return Err(f'Error in query. Status code {response.status_code}. {response.content}') + + def _dump_data(self, metric: str, data: Dict): + df = self._create_dataframe_from_data(data) + df = self._sort_dataframe(df) + + result = self._prepare_path(metric) + if result.is_err(): + logger.error(f'{result.err_value}') + exit(1) + + df.to_csv(result.ok_value) + logger.info(f'{metric} data dumped') + + def _prepare_path(self, metric: str) -> Result[Path, str]: + output_file = f'{metric}.csv' + output_dir = Path(self._out_folder + output_file) + + try: + output_dir.mkdir(parents=True) + except OSError as e: + return Err(f'Error creating {output_dir}. 
{e}') + + return Ok(output_dir) + + def _create_dataframe_from_data(self, data: Dict) -> pd.DataFrame: + final_df = pd.DataFrame() + for pod_result_dict in data['result']: + column_name = pod_result_dict['metric']['pod'] + '_' + pod_result_dict['metric']['node'] + values = pod_result_dict['values'] + + pod_df = self._create_pod_df(column_name, values) + + final_df = pd.merge(final_df, pod_df, how='outer', left_index=True, right_index=True) + + return final_df + + def _sort_dataframe(self, df) -> pd.DataFrame: + columns = self._order(df.columns.tolist()) + df = df[columns] + + return df + + def _create_pod_df(self, column_name, values) -> pd.DataFrame: + pod_df = pd.DataFrame(values, columns=['Unix Timestamp', column_name]) + pod_df['Unix Timestamp'] = pd.to_datetime(pod_df['Unix Timestamp'], unit='s') + pod_df.set_index('Unix Timestamp', inplace=True) + + return pod_df + + # TODO this depends on pods name assigned in deployment + def _order(self, column_names: List) -> List: + def get_default_format_id(val): + return int(val.split('-')[1].split('_')[0]) + + columns_without_nodes = [] + columns_without_bootstrap = [] + nodes = [item if item.startswith('nodes') else columns_without_nodes.append(item) + for item in column_names] + bootstrap = [item if item.startswith('bootstrap') else columns_without_bootstrap.append(item) + for item in columns_without_nodes] + nodes.sort(key=get_default_format_id) + bootstrap.sort(key=get_default_format_id) + + return list(chain(columns_without_bootstrap, bootstrap, nodes)) From 3fac79a38947e152a4933c19d1657d1ecb2d6009 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Wed, 14 Feb 2024 12:47:34 +0100 Subject: [PATCH 02/19] Added result to requirements.txt --- requirements.txt | Bin 776 -> 808 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/requirements.txt b/requirements.txt index dc4a0f374e800e718c6255e12c2733709f74830c..a8d1c7ad8b7cc1d0f9b07fcf652e4567602f8734 100644 GIT binary patch delta 40 ucmeBRTfw%0gIR%>fs3JtA(f$+p_Czqp@gB1!Ir^*L65 delta 7 OcmZ3%*1@)cgBbt`rUCc> From b7391ead2c6525658aad6ad8b17c6ea57d55e009 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 15 Feb 2024 18:00:52 +0100 Subject: [PATCH 03/19] Fixed dumping --- main.py | 4 ++-- src/metrics/scrapper.py | 24 ++++++++++++++---------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/main.py b/main.py index bb331fd..16069e9 100644 --- a/main.py +++ b/main.py @@ -1,8 +1,8 @@ # Python Imports +import src.logging.logger from kubernetes import client, config # Project Imports - from src.metrics.scrapper import Scrapper @@ -10,7 +10,7 @@ def main(): config.load_kube_config("your_kubeconfig.yaml") url = "your_url" namespace = "'zerotesting'" - metrics = ["container_network_receive_bytes_total", "container_network_sent_bytes_total"] + metrics = ["container_network_receive_bytes_total", "container_network_transmit_bytes_total"] v1 = client.CoreV1Api() diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py index 8401507..d8c0155 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -70,14 +70,14 @@ class Scrapper: def _prepare_path(self, metric: str) -> Result[Path, str]: output_file = f'{metric}.csv' - output_dir = Path(self._out_folder + output_file) + output_dir = Path(self._out_folder) try: - output_dir.mkdir(parents=True) + output_dir.mkdir(parents=True, exist_ok=True) except OSError as e: return Err(f'Error creating {output_dir}. 
{e}') - return Ok(output_dir) + return Ok(output_dir / output_file) def _create_dataframe_from_data(self, data: Dict) -> pd.DataFrame: final_df = pd.DataFrame() @@ -109,13 +109,17 @@ class Scrapper: def get_default_format_id(val): return int(val.split('-')[1].split('_')[0]) - columns_without_nodes = [] - columns_without_bootstrap = [] - nodes = [item if item.startswith('nodes') else columns_without_nodes.append(item) - for item in column_names] - bootstrap = [item if item.startswith('bootstrap') else columns_without_bootstrap.append(item) - for item in columns_without_nodes] + nodes = [] + bootstrap = [] + others = [] + for column in column_names: + if column.startswith('nodes'): + nodes.append(column) + elif column.startswith('bootstrap'): + bootstrap.append(column) + else: + others.append(column) nodes.sort(key=get_default_format_id) bootstrap.sort(key=get_default_format_id) - return list(chain(columns_without_bootstrap, bootstrap, nodes)) + return list(chain(others, bootstrap, nodes)) From 06b5f48db066802efbf1652d55785fb4e10828dd Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 16 Feb 2024 12:55:13 +0100 Subject: [PATCH 04/19] Update src/metrics/scrapper.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Álex Cabeza Romero --- src/metrics/scrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py index d8c0155..f7471d1 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -82,7 +82,7 @@ class Scrapper: def _create_dataframe_from_data(self, data: Dict) -> pd.DataFrame: final_df = pd.DataFrame() for pod_result_dict in data['result']: - column_name = pod_result_dict['metric']['pod'] + '_' + pod_result_dict['metric']['node'] +column_name = f"{pod_result_dict['metric']['pod']}_{pod_result_dict['metric']['node']}" values = pod_result_dict['values'] pod_df = self._create_pod_df(column_name, values) From 9d6954ac909e21951ba3db0d11bf1ee7f844966c Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 16 Feb 2024 13:00:04 +0100 Subject: [PATCH 05/19] Continue to next metric if current one fails --- src/metrics/scrapper.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py index d8c0155..cfdbcb9 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -31,6 +31,7 @@ class Scrapper: result = self._make_query(promql) if result.is_err(): logger.warning(f'Error querying {metric}. 
{result.err_value}') + continue response = result.ok_value logger.info(f'Response: {response.status_code}') From 8670536a9ca95a5af6a3b46b423d56a28bdb77a7 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 16 Feb 2024 13:04:03 +0100 Subject: [PATCH 06/19] Fixed weird commit identation from Github --- src/metrics/scrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py index a1431d0..91391a0 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -83,7 +83,7 @@ class Scrapper: def _create_dataframe_from_data(self, data: Dict) -> pd.DataFrame: final_df = pd.DataFrame() for pod_result_dict in data['result']: -column_name = f"{pod_result_dict['metric']['pod']}_{pod_result_dict['metric']['node']}" + column_name = f"{pod_result_dict['metric']['pod']}_{pod_result_dict['metric']['node']}" values = pod_result_dict['values'] pod_df = self._create_pod_df(column_name, values) From 6e91884e8997f6b211a8ca6776bbb1eba8696d62 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Wed, 21 Feb 2024 19:18:01 +0100 Subject: [PATCH 07/19] Changed scrapping configuration to .yaml file --- main.py | 5 ++--- scrape.yaml | 13 +++++++++++++ src/metrics/scrapper.py | 38 ++++++++++++++++++++++---------------- src/utils/__init__.py | 0 src/utils/file_utils.py | 14 ++++++++++++++ 5 files changed, 51 insertions(+), 19 deletions(-) create mode 100644 scrape.yaml create mode 100644 src/utils/__init__.py create mode 100644 src/utils/file_utils.py diff --git a/main.py b/main.py index 16069e9..f37f97b 100644 --- a/main.py +++ b/main.py @@ -7,14 +7,13 @@ from src.metrics.scrapper import Scrapper def main(): - config.load_kube_config("your_kubeconfig.yaml") - url = "your_url" namespace = "'zerotesting'" metrics = ["container_network_receive_bytes_total", "container_network_transmit_bytes_total"] + scrape_config = "scrape.yaml" v1 = client.CoreV1Api() - scrapper = Scrapper(url, namespace, "test/", metrics) + scrapper = Scrapper(url, scrape_config, "test/") scrapper.query_and_dump_metrics() diff --git a/scrape.yaml b/scrape.yaml new file mode 100644 index 0000000..e7a8194 --- /dev/null +++ b/scrape.yaml @@ -0,0 +1,13 @@ +scrape_config: + $__rate_interval: "60s" + step: "60s" + until_hours_ago: 1 +metrics_to_scrape: + - "libp2p_peers": "instance" + - "libp2p_open_streams": "instance-type-dir" + - "rate(libp2p_network_bytes_total{direction='in'}[$__rate_interval])": "instance-direction" + - "rate(libp2p_network_bytes_total{direction='out'}[$__rate_interval])": "instance-direction" + - "rate(container_network_receive_bytes_total{namespace='zerotesting'}[$__rate_interval])": "pod-node" + - "rate(container_network_transmit_bytes_total{namespace='zerotesting'}[$__rate_interval])": "pod-node" + - "sum by(job) (libp2p_gossipsub_low_peers_topics)" : "job" + - "sum by(job) (libp2p_gossipsub_healthy_peers_topics)": "job" \ No newline at end of file diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py index 91391a0..36e73b7 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -10,23 +10,25 @@ from pathlib import Path # Project Imports from src.metrics import scrape_utils from result import Ok, Err, Result +from src.utils.file_utils import read_yaml_file logger = logging.getLogger(__name__) class Scrapper: - def __init__(self, url: str, namespace: str, out_folder: str, metrics: List): + def __init__(self, url: str, query_config_file: str, out_folder: str): self._url = url - self._namespace = namespace + self._query_config_file 
= query_config_file self._out_folder = out_folder - self._metrics = metrics # TODO make interval match value in cluster - self._template = 'irate($metric{namespace=$namespace}[3m])' def query_and_dump_metrics(self): - for metric in self._metrics: + query_config = read_yaml_file(self._query_config_file) + + for metric_dict_item in query_config['metrics_to_scrape']: + metric, column_name = next(iter(metric_dict_item.items())) logger.info(f'Querying {metric}') - promql = self._create_query(metric) + promql = self._create_query(metric, query_config['scrape_config']) result = self._make_query(promql) if result.is_err(): @@ -38,12 +40,14 @@ class Scrapper: data = response.json()['data'] logger.info(f'Dumping {metric} data to .csv') - self._dump_data(metric, data) + self._dump_data(metric, column_name, data) - def _create_query(self, metric: str) -> str: - query = self._template.replace('$metric', metric) - query = query.replace('$namespace', self._namespace) - promql = scrape_utils.create_promql(self._url, query, 1, 60) + def _create_query(self, metric: str, scrape_config: Dict) -> str: + if '__rate_interval' in metric: + metric = metric.replace('$__rate_interval', scrape_config['$__rate_interval']) + promql = scrape_utils.create_promql(self._url, metric, + scrape_config['until_hours_ago'], + scrape_config['step']) return promql @@ -57,8 +61,8 @@ class Scrapper: return Ok(response) return Err(f'Error in query. Status code {response.status_code}. {response.content}') - def _dump_data(self, metric: str, data: Dict): - df = self._create_dataframe_from_data(data) + def _dump_data(self, metric: str, column_name: str, data: Dict): + df = self._create_dataframe_from_data(data, column_name) df = self._sort_dataframe(df) result = self._prepare_path(metric) @@ -80,13 +84,15 @@ class Scrapper: return Ok(output_dir / output_file) - def _create_dataframe_from_data(self, data: Dict) -> pd.DataFrame: + def _create_dataframe_from_data(self, data: Dict, column_name: str) -> pd.DataFrame: final_df = pd.DataFrame() for pod_result_dict in data['result']: - column_name = f"{pod_result_dict['metric']['pod']}_{pod_result_dict['metric']['node']}" + column_name_items = column_name.split('-') + metric_result_info = pod_result_dict['metric'] + result_string = '_'.join(metric_result_info[key] for key in column_name_items) values = pod_result_dict['values'] - pod_df = self._create_pod_df(column_name, values) + pod_df = self._create_pod_df(result_string, values) final_df = pd.merge(final_df, pod_df, how='outer', left_index=True, right_index=True) diff --git a/src/utils/__init__.py b/src/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/utils/file_utils.py b/src/utils/file_utils.py new file mode 100644 index 0000000..641ee53 --- /dev/null +++ b/src/utils/file_utils.py @@ -0,0 +1,14 @@ +# Python Imports +import yaml +from pathlib import Path + +# Project Imports + + +def read_yaml_file(file_path: str): + path = Path(file_path) + + with open(path, 'r') as file: + data = yaml.safe_load(file) + + return data From b9c2d2ae57552fb36fa14aaec1fe7bcc03aa24a3 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Wed, 21 Feb 2024 19:22:13 +0100 Subject: [PATCH 08/19] IDE error in previous commit. 
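
Patch 07 accidentally dropped the kubeconfig loading and the Prometheus URL
from main() while moving the metric list into scrape.yaml; this restores both
lines so the Scrapper can be constructed again.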
--- main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index f37f97b..c096798 100644 --- a/main.py +++ b/main.py @@ -7,8 +7,8 @@ from src.metrics.scrapper import Scrapper def main(): - namespace = "'zerotesting'" - metrics = ["container_network_receive_bytes_total", "container_network_transmit_bytes_total"] + config.load_kube_config("your_kubeconfig.yaml") + url = "your_url" scrape_config = "scrape.yaml" v1 = client.CoreV1Api() From 76a228be2284a1cfe1e86a011fe17427772ff299 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 22 Feb 2024 18:32:47 +0100 Subject: [PATCH 09/19] Renamed logger --- main.py | 2 +- src/{logging => logger}/__init__.py | 0 src/{logging => logger}/logger.py | 4 ++-- src/{logging => logger}/logger_config.yaml | 0 4 files changed, 3 insertions(+), 3 deletions(-) rename src/{logging => logger}/__init__.py (100%) rename src/{logging => logger}/logger.py (54%) rename src/{logging => logger}/logger_config.yaml (100%) diff --git a/main.py b/main.py index c096798..c5f3771 100644 --- a/main.py +++ b/main.py @@ -1,5 +1,5 @@ # Python Imports -import src.logging.logger +import src.logger.logger from kubernetes import client, config # Project Imports diff --git a/src/logging/__init__.py b/src/logger/__init__.py similarity index 100% rename from src/logging/__init__.py rename to src/logger/__init__.py diff --git a/src/logging/logger.py b/src/logger/logger.py similarity index 54% rename from src/logging/logger.py rename to src/logger/logger.py index ba7b90f..b172122 100644 --- a/src/logging/logger.py +++ b/src/logger/logger.py @@ -1,8 +1,8 @@ import logging.config import yaml -with open('src/logging/logger_config.yaml', 'r') as f: +with open('src/logger/logger_config.yaml', 'r') as f: config = yaml.safe_load(f.read()) logging.config.dictConfig(config) -logger = logging.getLogger(__name__) +log = logging.getLogger(__name__) diff --git a/src/logging/logger_config.yaml b/src/logger/logger_config.yaml similarity index 100% rename from src/logging/logger_config.yaml rename to src/logger/logger_config.yaml From e091dcc0b432d541ef6cf4be975eae82e2873eb1 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 22 Feb 2024 18:33:35 +0100 Subject: [PATCH 10/19] Extracted datetime.now() to facilitate testing --- src/metrics/scrape_utils.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/metrics/scrape_utils.py b/src/metrics/scrape_utils.py index c80dcec..ff864c9 100644 --- a/src/metrics/scrape_utils.py +++ b/src/metrics/scrape_utils.py @@ -2,12 +2,17 @@ import datetime +# Having now in an external function allows us to mock it in the tests +def _get_datetime_now() -> datetime.datetime: + return datetime.datetime.now() + + def create_promql(address: str, query: str, hours_passed: int, step: int) -> str: promql = address + "query_range?query=" + query start = datetime.datetime.timestamp( - datetime.datetime.now() - datetime.timedelta(hours=hours_passed)) - now = datetime.datetime.timestamp(datetime.datetime.now()) + _get_datetime_now() - datetime.timedelta(hours=hours_passed)) + now = datetime.datetime.timestamp(_get_datetime_now()) promql = (promql + "&start=" + str(start) + From 8a6d425e5e6425416c262dfc731d3a6d6f322744 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 22 Feb 2024 18:34:17 +0100 Subject: [PATCH 11/19] Cleaned scrapper class --- src/metrics/scrapper.py | 47 ++++++++++++++++------------------------- 1 file changed, 18 insertions(+), 29 deletions(-) diff --git a/src/metrics/scrapper.py 
b/src/metrics/scrapper.py index 36e73b7..58150f1 100644 --- a/src/metrics/scrapper.py +++ b/src/metrics/scrapper.py @@ -1,16 +1,15 @@ # Python Imports -import requests import logging import pandas as pd from itertools import chain from typing import List, Dict -from requests import Response from pathlib import Path # Project Imports from src.metrics import scrape_utils from result import Ok, Err, Result from src.utils.file_utils import read_yaml_file +from src.utils.queries import get_query_data logger = logging.getLogger(__name__) @@ -20,28 +19,28 @@ class Scrapper: self._url = url self._query_config_file = query_config_file self._out_folder = out_folder + self._set_query_config() # TODO make interval match value in cluster def query_and_dump_metrics(self): - query_config = read_yaml_file(self._query_config_file) - - for metric_dict_item in query_config['metrics_to_scrape']: + for metric_dict_item in self._query_config['metrics_to_scrape']: metric, column_name = next(iter(metric_dict_item.items())) logger.info(f'Querying {metric}') - promql = self._create_query(metric, query_config['scrape_config']) + promql = self._create_query(metric, self._query_config['scrape_config']) - result = self._make_query(promql) - if result.is_err(): - logger.warning(f'Error querying {metric}. {result.err_value}') - continue - - response = result.ok_value - logger.info(f'Response: {response.status_code}') - data = response.json()['data'] + match get_query_data(promql): + case Ok(data): + logger.info(f'Successfully extracted {metric} data from response') + case Err(err): + logger.info(err) + continue logger.info(f'Dumping {metric} data to .csv') self._dump_data(metric, column_name, data) + def _set_query_config(self): + self._query_config = read_yaml_file(self._query_config_file) + def _create_query(self, metric: str, scrape_config: Dict) -> str: if '__rate_interval' in metric: metric = metric.replace('$__rate_interval', scrape_config['$__rate_interval']) @@ -51,25 +50,15 @@ class Scrapper: return promql - def _make_query(self, promql: str) -> Result[Response, str]: - try: - response = requests.get(promql, timeout=30) - except requests.exceptions.Timeout: - return Err(f'Timeout error.') - - if response.ok: - return Ok(response) - return Err(f'Error in query. Status code {response.status_code}. 
{response.content}') - def _dump_data(self, metric: str, column_name: str, data: Dict): - df = self._create_dataframe_from_data(data, column_name) - df = self._sort_dataframe(df) - result = self._prepare_path(metric) if result.is_err(): logger.error(f'{result.err_value}') exit(1) + df = self._create_dataframe_from_data(data, column_name) + df = self._sort_dataframe(df) + df.to_csv(result.ok_value) logger.info(f'{metric} data dumped') @@ -98,13 +87,13 @@ class Scrapper: return final_df - def _sort_dataframe(self, df) -> pd.DataFrame: + def _sort_dataframe(self, df: pd.DataFrame) -> pd.DataFrame: columns = self._order(df.columns.tolist()) df = df[columns] return df - def _create_pod_df(self, column_name, values) -> pd.DataFrame: + def _create_pod_df(self, column_name: str, values: List) -> pd.DataFrame: pod_df = pd.DataFrame(values, columns=['Unix Timestamp', column_name]) pod_df['Unix Timestamp'] = pd.to_datetime(pod_df['Unix Timestamp'], unit='s') pod_df.set_index('Unix Timestamp', inplace=True) From ccb8b31fa1995588bf8b7ca6330dce8cd4eac9d5 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 22 Feb 2024 18:34:59 +0100 Subject: [PATCH 12/19] Moved get query data to external file --- src/utils/queries.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 src/utils/queries.py diff --git a/src/utils/queries.py b/src/utils/queries.py new file mode 100644 index 0000000..11036c8 --- /dev/null +++ b/src/utils/queries.py @@ -0,0 +1,21 @@ +# Python Imports +import logging +import requests +from typing import Dict +from result import Result, Err, Ok + + +logger = logging.getLogger(__name__) + + +def get_query_data(request: str) -> Result[Dict, str]: + try: + response = requests.get(request, timeout=30) + except requests.exceptions.Timeout: + return Err(f'Timeout error.') + + if response.ok: + logger.info(f'Response: {response.status_code}') + data = response.json()['data'] + return Ok(data) + return Err(f'Error in query. Status code {response.status_code}. 
{response.content}') From 68000ce923598136c577ebbb8e6745424514e0f8 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Thu, 22 Feb 2024 18:36:07 +0100 Subject: [PATCH 13/19] Added tests --- src/metrics/tests/__init__.py | 0 src/metrics/tests/single_test_scrape.yaml | 6 + src/metrics/tests/test_scrape.yaml | 7 + src/metrics/tests/test_scrape_utils.py | 27 ++ src/metrics/tests/test_scrapper.py | 307 ++++++++++++++++++++++ 5 files changed, 347 insertions(+) create mode 100644 src/metrics/tests/__init__.py create mode 100644 src/metrics/tests/single_test_scrape.yaml create mode 100644 src/metrics/tests/test_scrape.yaml create mode 100644 src/metrics/tests/test_scrape_utils.py create mode 100644 src/metrics/tests/test_scrapper.py diff --git a/src/metrics/tests/__init__.py b/src/metrics/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/metrics/tests/single_test_scrape.yaml b/src/metrics/tests/single_test_scrape.yaml new file mode 100644 index 0000000..0d072c0 --- /dev/null +++ b/src/metrics/tests/single_test_scrape.yaml @@ -0,0 +1,6 @@ +scrape_config: + $__rate_interval: "60s" + step: "60s" + until_hours_ago: 1 +metrics_to_scrape: + - "metric1": "instance" \ No newline at end of file diff --git a/src/metrics/tests/test_scrape.yaml b/src/metrics/tests/test_scrape.yaml new file mode 100644 index 0000000..e6db3fb --- /dev/null +++ b/src/metrics/tests/test_scrape.yaml @@ -0,0 +1,7 @@ +scrape_config: + $__rate_interval: "60s" + step: "60s" + until_hours_ago: 1 +metrics_to_scrape: + - "metric1": "instance" + - "metric2[$__rate_interval])": "instance-direction" \ No newline at end of file diff --git a/src/metrics/tests/test_scrape_utils.py b/src/metrics/tests/test_scrape_utils.py new file mode 100644 index 0000000..6fc01f6 --- /dev/null +++ b/src/metrics/tests/test_scrape_utils.py @@ -0,0 +1,27 @@ +# Python Imports +import unittest +import datetime +from unittest.mock import patch, Mock + +# Project Imports +from src.metrics.scrape_utils import create_promql + + +class TestScrapeUtils(unittest.TestCase): + + @patch('src.metrics.scrape_utils._get_datetime_now') + def test_create_promql(self, mock_datetime_now): + address = "0.0.0.0:9090/api/" + query = "bandwidth" + hours_passed = 1 + step = 60 + + return_value_first = datetime.datetime(2024, 2, 22, 11, 0, 0) + return_value_second = datetime.datetime(2024, 2, 22, 12, 0, 0) + mock_datetime_now.side_effect = [return_value_first, return_value_second] + + result = create_promql(address, query, hours_passed, step) + expected_result = ("0.0.0.0:9090/api/query_range?query=bandwidth&start=1708592400.0&end" + "=1708599600.0&step=60") + + self.assertEqual(expected_result, result) diff --git a/src/metrics/tests/test_scrapper.py b/src/metrics/tests/test_scrapper.py new file mode 100644 index 0000000..46c3674 --- /dev/null +++ b/src/metrics/tests/test_scrapper.py @@ -0,0 +1,307 @@ +# Python Imports +import datetime +import os +import unittest +import logging +from pathlib import Path +from unittest.mock import patch +from result import Ok, Err +import pandas as pd + +# Project Imports +from src.metrics import scrapper + +logger = logging.getLogger(__name__) + + +@patch('src.metrics.scrapper.get_query_data') +class TestScrapper(unittest.TestCase): + + @classmethod + def tearDownClass(cls): + os.rmdir('test_results') + + def test_query_and_dump_metrics_single(self, mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + data = {'result': [{'metric': 
{'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}]} + + mock_get_query_data.side_effect = [Ok(data)] + + test_scrapper.query_and_dump_metrics() + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5 + } + + expected_df = pd.DataFrame(expected_data) + + result = pd.read_csv('test_results/metric1.csv') + # Convert data type since it is lost when reading from a file + result['Unix Timestamp'] = pd.to_datetime(result['Unix Timestamp']) + + self.assertTrue(result.equals(expected_df)) + + os.remove('test_results/metric1.csv') + + def test_query_and_dump_metrics_multiple_column(self, mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}, + {'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], + [4, 6], [5, 6]]} + ]} + + mock_get_query_data.side_effect = [Ok(data)] + + test_scrapper.query_and_dump_metrics() + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5, + 'nodes-2': [6] * 5 + } + expected_df = pd.DataFrame(expected_data) + + result = pd.read_csv('test_results/metric1.csv') + # Convert data type since it is lost when reading from a file + result['Unix Timestamp'] = pd.to_datetime(result['Unix Timestamp']) + + self.assertTrue(result.equals(expected_df)) + + os.remove('test_results/metric1.csv') + + def test_query_and_dump_metrics_multiple_column_unordered(self, mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + data = {'result': [{'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], + [4, 6], [5, 6]]}, + {'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]} + ]} + + mock_get_query_data.side_effect = [Ok(data)] + + test_scrapper.query_and_dump_metrics() + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5, + 'nodes-2': [6] * 5 + } + expected_df = pd.DataFrame(expected_data) + + result = pd.read_csv('test_results/metric1.csv') + # Convert data type since it is lost when reading from a file + result['Unix Timestamp'] = pd.to_datetime(result['Unix Timestamp']) + + self.assertTrue(result.equals(expected_df)) + + os.remove('test_results/metric1.csv') + + def test_query_and_dump_metrics_multiple_data(self, mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "test_scrape.yaml", + "test_results/") + + data1 = {'result': [{'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], + [4, 6], [5, 6]]}, + {'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]} + ]} + data2 = {'result': [ + {'metric': {'instance': 'nodes-2', 'direction': 'in'}, + 'values': [[1, 6], [2, 6], [3, 6], + [4, 6], [5, 6]]}, + {'metric': {'instance': 'nodes-1', 'direction': 'out'}, + 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]} + ]} + + mock_get_query_data.side_effect = [Ok(data1), Ok(data2)] + + test_scrapper.query_and_dump_metrics() 
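+        # test_scrape.yaml defines two metrics, so the two Ok(...) side effects
+        # above are consumed in order, one mocked payload per queried metric.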
+ + expected_data_1 = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5, + 'nodes-2': [6] * 5 + } + expected_df1 = pd.DataFrame(expected_data_1) + + expected_data_2 = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1_out': [5] * 5, + 'nodes-2_in': [6] * 5 + } + expected_df2 = pd.DataFrame(expected_data_2) + + result1 = pd.read_csv('test_results/metric1.csv') + # Convert data type since it is lost when reading from a file + result1['Unix Timestamp'] = pd.to_datetime(result1['Unix Timestamp']) + + self.assertTrue(result1.equals(expected_df1)) + + result2 = pd.read_csv('test_results/metric2[$__rate_interval]).csv') + # Convert data type since it is lost when reading from a file + result2['Unix Timestamp'] = pd.to_datetime(result2['Unix Timestamp']) + + self.assertTrue(result2.equals(expected_df2)) + + os.remove('test_results/metric1.csv') + os.remove('test_results/metric2[$__rate_interval]).csv') + + @patch('src.metrics.scrapper.Scrapper._dump_data') + def test_query_and_dump_metrics_multiple_fail(self, mock_dump, mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "test_scrape.yaml", + "test_results/") + + err1 = "Err1" + err2 = "Err2" + + mock_get_query_data.side_effect = [Err(err1), Err(err2)] + + test_scrapper.query_and_dump_metrics() + + self.assertEqual(0, mock_dump.call_count) + + def test__set_query_config(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + test_scrapper._set_query_config() + + expected_config = {'scrape_config': {'until_hours_ago': 1, 'step': "60s", + '$__rate_interval': '60s'}, + 'metrics_to_scrape': [{'metric1': 'instance'}]} + + self.assertEqual(expected_config, test_scrapper._query_config) + + @patch('src.metrics.scrape_utils._get_datetime_now') + def test__create_query(self, mock_datetime_now, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + metric = "bandwidth" + scrape_config = {'until_hours_ago': 1, 'step': "60s", '$__rate_interval': '60s'} + + return_value_first = datetime.datetime(2024, 2, 22, 11, 0, 0) + return_value_second = datetime.datetime(2024, 2, 22, 12, 0, 0) + mock_datetime_now.side_effect = [return_value_first, return_value_second] + + result = test_scrapper._create_query(metric, scrape_config) + + expected_result = ('http://myurl:9090/api/v1/query_range?query=bandwidth&start=1708592400' + '.0&end=1708599600.0&step=60s') + + self.assertEqual(expected_result, result) + + @patch('src.metrics.scrape_utils._get_datetime_now') + def test__create_query_with_rate(self, mock_datetime_now, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + metric = "bandwidth[$__rate_interval]" + scrape_config = {'until_hours_ago': 1, 'step': "60s", '$__rate_interval': '60s'} + + return_value_first = datetime.datetime(2024, 2, 22, 11, 0, 0) + return_value_second = datetime.datetime(2024, 2, 22, 12, 0, 0) + mock_datetime_now.side_effect = [return_value_first, return_value_second] + + result = test_scrapper._create_query(metric, scrape_config) + + expected_result = ( + 
'http://myurl:9090/api/v1/query_range?query=bandwidth[60s]&start=1708592400' + '.0&end=1708599600.0&step=60s') + + self.assertEqual(expected_result, result) + + def test__dump_data(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", + "single_test_scrape.yaml", + "test_results/") + + data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}]} + + test_scrapper._dump_data('metric1', 'instance', data) + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5 + } + expected_df = pd.DataFrame(expected_data) + + result = pd.read_csv('test_results/metric1.csv') + # Convert data type since it is lost when reading from a file + result['Unix Timestamp'] = pd.to_datetime(result['Unix Timestamp']) + + self.assertTrue(result.equals(expected_df)) + + os.remove('test_results/metric1.csv') + + @patch('src.metrics.scrapper.Scrapper._prepare_path') + def test__dump_data_err(self, mock_prepare_path, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "/") + + mock_prepare_path.return_value = Err("Error") + data = {} + + with self.assertRaises(SystemExit) as cm: + test_scrapper._dump_data('', '', data) + + self.assertEqual(cm.exception.code, 1) + + def test__prepare_path(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path/") + + result = test_scrapper._prepare_path('metric1') + + self.assertEqual(Path('test_path/metric1.csv'), result.ok_value) + + os.rmdir('test_path/') + + def test__prepare_path_multiple(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2") + + result = test_scrapper._prepare_path('metric1') + + self.assertEqual(Path('test_path_1/test_path_2/metric1.csv'), result.ok_value) + + os.rmdir('test_path_1/test_path_2/') + os.rmdir('test_path_1') + + @patch('src.metrics.scrapper.Path.mkdir') + def test__prepare_path_err(self, mock_mkdir, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2") + + mock_mkdir.side_effect = OSError("Error") + + result = test_scrapper._prepare_path('metric1') + + self.assertIsInstance(result, Err) From 7426b7d845433524172a0fa37fc28d6de19f448c Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 23 Feb 2024 11:09:42 +0100 Subject: [PATCH 14/19] Finished tests in scrapper --- src/metrics/tests/test_scrapper.py | 122 +++++++++++++++++++++++++++++ 1 file changed, 122 insertions(+) diff --git a/src/metrics/tests/test_scrapper.py b/src/metrics/tests/test_scrapper.py index 46c3674..f0ccc35 100644 --- a/src/metrics/tests/test_scrapper.py +++ b/src/metrics/tests/test_scrapper.py @@ -305,3 +305,125 @@ class TestScrapper(unittest.TestCase): result = test_scrapper._prepare_path('metric1') self.assertIsInstance(result, Err) + + def test__create_dataframe_from_data(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}]} + + result = test_scrapper._create_dataframe_from_data(data, 'instance') + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 
'nodes-1': [5] * 5 + } + expected_df = pd.DataFrame(expected_data) + expected_df.set_index('Unix Timestamp', inplace=True) + + self.assertTrue(result.equals(expected_df)) + + def test__create_dataframe_from_data_multiple(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}, + {'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], + [4, 6], [5, 6]]} + ]} + + result = test_scrapper._create_dataframe_from_data(data, 'instance') + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5, + 'nodes-2': [6] * 5 + } + expected_df = pd.DataFrame(expected_data) + expected_df.set_index('Unix Timestamp', inplace=True) + + self.assertTrue(result.equals(expected_df)) + + def test__create_dataframe_from_data_not_matching_times(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [3, 5], [5, 5]]}, + {'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [4, 6]]} + ]} + + result = test_scrapper._create_dataframe_from_data(data, 'instance') + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5, None, 5, None, 5], + 'nodes-2': [6, 6, None, 6, None] + } + expected_df = pd.DataFrame(expected_data) + expected_df.set_index('Unix Timestamp', inplace=True) + + self.assertTrue(result.equals(expected_df)) + + def test__sort_dataframe(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + data = {'result': [{'metric': {'instance': 'nodes-4'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}, + {'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]}, + {'metric': {'instance': 'nodes-3'}, 'values': [[1, 5], [2, 5], [3, 5], + [4, 5], [5, 5]]} + ]} + + df = test_scrapper._create_dataframe_from_data(data, 'instance') + + result = test_scrapper._sort_dataframe(df) + + expected_columns = ['nodes-1', 'nodes-3', 'nodes-4'] + + self.assertEqual(expected_columns, result.columns.tolist()) + + def test__create_pod_df(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + values = [[1, 5], [2, 5], [3, 5], [4, 5], [5, 5]] + + result = test_scrapper._create_pod_df('nodes-1', values) + + expected_data = { + 'Unix Timestamp': pd.to_datetime( + ['1970-01-01 00:00:01', '1970-01-01 00:00:02', '1970-01-01 00:00:03', + '1970-01-01 00:00:04', '1970-01-01 00:00:05']), + 'nodes-1': [5] * 5 + } + expected_df = pd.DataFrame(expected_data) + expected_df.set_index('Unix Timestamp', inplace=True) + + self.assertTrue(result.equals(expected_df)) + + def test__order(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + columns = ['nodes-4', 'nodes-1', 'nodes-3'] + + result = test_scrapper._order(columns) + + expected_columns = ['nodes-1', 'nodes-3', 'nodes-4'] + + self.assertEqual(expected_columns, result) + + def test__order_bootstrap(self, _mock_get_query_data): + test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") + + columns = ['nodes-4', 'nodes-1', 'nodes-3', 
'bootstrap-2'] + + result = test_scrapper._order(columns) + + expected_columns = ['bootstrap-2', 'nodes-1', 'nodes-3', 'nodes-4'] + + self.assertEqual(expected_columns, result) \ No newline at end of file From e49f11a7218f101f91cf0c70f2fc7ed4c9a6fde4 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 23 Feb 2024 11:10:42 +0100 Subject: [PATCH 15/19] Added test for queries.py --- src/utils/queries.py | 1 + src/utils/tests/__init__.py | 0 src/utils/tests/test_queries.py | 36 +++++++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+) create mode 100644 src/utils/tests/__init__.py create mode 100644 src/utils/tests/test_queries.py diff --git a/src/utils/queries.py b/src/utils/queries.py index 11036c8..b7a9694 100644 --- a/src/utils/queries.py +++ b/src/utils/queries.py @@ -18,4 +18,5 @@ def get_query_data(request: str) -> Result[Dict, str]: logger.info(f'Response: {response.status_code}') data = response.json()['data'] return Ok(data) + return Err(f'Error in query. Status code {response.status_code}. {response.content}') diff --git a/src/utils/tests/__init__.py b/src/utils/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/utils/tests/test_queries.py b/src/utils/tests/test_queries.py new file mode 100644 index 0000000..477554f --- /dev/null +++ b/src/utils/tests/test_queries.py @@ -0,0 +1,36 @@ +# Python Imports +import unittest +from unittest.mock import patch + +import requests.exceptions + +# Project Imports +from src.utils.queries import get_query_data + + +@patch('src.utils.queries.requests.get') +class TestQueries(unittest.TestCase): + + def test_get_query_data_correct(self, mock_requests_get): + mock_requests_get.return_value.ok = True + mock_requests_get.return_value.json.return_value = {'data': 'foo'} + + result = get_query_data('https://foo/bar/1') + + self.assertEqual(result.ok_value, 'foo') + + def test_get_query_data_timeout(self, mock_requests_get): + mock_requests_get.side_effect = requests.exceptions.Timeout + + result = get_query_data('https://foo/bar/1') + + self.assertEqual(result.err_value, 'Timeout error.') + + def test_get_query_data_error(self, mock_requests_get): + mock_requests_get.return_value.ok = False + mock_requests_get.return_value.status_code = 404 + mock_requests_get.return_value.content = 'bar' + + result = get_query_data('https://foo/bar/1') + + self.assertEqual(result.err_value, 'Error in query. Status code 404. 
bar') From 923bf4bf1ecc2b58a6eba14dd498d5d84592d04b Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 23 Feb 2024 11:16:31 +0100 Subject: [PATCH 16/19] Added typing for tests --- src/metrics/tests/test_scrape_utils.py | 4 +-- src/metrics/tests/test_scrapper.py | 42 +++++++++++++------------- src/utils/tests/test_queries.py | 8 ++--- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/metrics/tests/test_scrape_utils.py b/src/metrics/tests/test_scrape_utils.py index 6fc01f6..592ad55 100644 --- a/src/metrics/tests/test_scrape_utils.py +++ b/src/metrics/tests/test_scrape_utils.py @@ -1,7 +1,7 @@ # Python Imports import unittest import datetime -from unittest.mock import patch, Mock +from unittest.mock import patch, MagicMock # Project Imports from src.metrics.scrape_utils import create_promql @@ -10,7 +10,7 @@ from src.metrics.scrape_utils import create_promql class TestScrapeUtils(unittest.TestCase): @patch('src.metrics.scrape_utils._get_datetime_now') - def test_create_promql(self, mock_datetime_now): + def test_create_promql(self, mock_datetime_now: MagicMock): address = "0.0.0.0:9090/api/" query = "bandwidth" hours_passed = 1 diff --git a/src/metrics/tests/test_scrapper.py b/src/metrics/tests/test_scrapper.py index f0ccc35..91083b6 100644 --- a/src/metrics/tests/test_scrapper.py +++ b/src/metrics/tests/test_scrapper.py @@ -4,7 +4,7 @@ import os import unittest import logging from pathlib import Path -from unittest.mock import patch +from unittest.mock import patch, MagicMock from result import Ok, Err import pandas as pd @@ -21,7 +21,7 @@ class TestScrapper(unittest.TestCase): def tearDownClass(cls): os.rmdir('test_results') - def test_query_and_dump_metrics_single(self, mock_get_query_data): + def test_query_and_dump_metrics_single(self, mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -50,7 +50,7 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') - def test_query_and_dump_metrics_multiple_column(self, mock_get_query_data): + def test_query_and_dump_metrics_multiple_column(self, mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -82,7 +82,7 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') - def test_query_and_dump_metrics_multiple_column_unordered(self, mock_get_query_data): + def test_query_and_dump_metrics_multiple_column_unordered(self, mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -114,7 +114,7 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') - def test_query_and_dump_metrics_multiple_data(self, mock_get_query_data): + def test_query_and_dump_metrics_multiple_data(self, mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "test_scrape.yaml", "test_results/") @@ -171,7 +171,7 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric2[$__rate_interval]).csv') @patch('src.metrics.scrapper.Scrapper._dump_data') - def test_query_and_dump_metrics_multiple_fail(self, mock_dump, mock_get_query_data): + def test_query_and_dump_metrics_multiple_fail(self, mock_dump: MagicMock, mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "test_scrape.yaml", "test_results/") @@ -185,7 +185,7 @@ class 
TestScrapper(unittest.TestCase): self.assertEqual(0, mock_dump.call_count) - def test__set_query_config(self, _mock_get_query_data): + def test__set_query_config(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -199,7 +199,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(expected_config, test_scrapper._query_config) @patch('src.metrics.scrape_utils._get_datetime_now') - def test__create_query(self, mock_datetime_now, _mock_get_query_data): + def test__create_query(self, mock_datetime_now: MagicMock, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -219,7 +219,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(expected_result, result) @patch('src.metrics.scrape_utils._get_datetime_now') - def test__create_query_with_rate(self, mock_datetime_now, _mock_get_query_data): + def test__create_query_with_rate(self, mock_datetime_now: MagicMock, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -239,7 +239,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(expected_result, result) - def test__dump_data(self, _mock_get_query_data): + def test__dump_data(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", "single_test_scrape.yaml", "test_results/") @@ -266,7 +266,7 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') @patch('src.metrics.scrapper.Scrapper._prepare_path') - def test__dump_data_err(self, mock_prepare_path, _mock_get_query_data): + def test__dump_data_err(self, mock_prepare_path: MagicMock, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "/") mock_prepare_path.return_value = Err("Error") @@ -277,7 +277,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(cm.exception.code, 1) - def test__prepare_path(self, _mock_get_query_data): + def test__prepare_path(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path/") result = test_scrapper._prepare_path('metric1') @@ -286,7 +286,7 @@ class TestScrapper(unittest.TestCase): os.rmdir('test_path/') - def test__prepare_path_multiple(self, _mock_get_query_data): + def test__prepare_path_multiple(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2") result = test_scrapper._prepare_path('metric1') @@ -297,7 +297,7 @@ class TestScrapper(unittest.TestCase): os.rmdir('test_path_1') @patch('src.metrics.scrapper.Path.mkdir') - def test__prepare_path_err(self, mock_mkdir, _mock_get_query_data): + def test__prepare_path_err(self, mock_mkdir: MagicMock, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2") mock_mkdir.side_effect = OSError("Error") @@ -306,7 +306,7 @@ class TestScrapper(unittest.TestCase): self.assertIsInstance(result, Err) - def test__create_dataframe_from_data(self, _mock_get_query_data): + def test__create_dataframe_from_data(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], @@ -325,7 +325,7 @@ class TestScrapper(unittest.TestCase): 
self.assertTrue(result.equals(expected_df)) - def test__create_dataframe_from_data_multiple(self, _mock_get_query_data): + def test__create_dataframe_from_data_multiple(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], @@ -348,7 +348,7 @@ class TestScrapper(unittest.TestCase): self.assertTrue(result.equals(expected_df)) - def test__create_dataframe_from_data_not_matching_times(self, _mock_get_query_data): + def test__create_dataframe_from_data_not_matching_times(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [3, 5], [5, 5]]}, @@ -369,7 +369,7 @@ class TestScrapper(unittest.TestCase): self.assertTrue(result.equals(expected_df)) - def test__sort_dataframe(self, _mock_get_query_data): + def test__sort_dataframe(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") data = {'result': [{'metric': {'instance': 'nodes-4'}, 'values': [[1, 5], [2, 5], [3, 5], @@ -388,7 +388,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(expected_columns, result.columns.tolist()) - def test__create_pod_df(self, _mock_get_query_data): + def test__create_pod_df(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") values = [[1, 5], [2, 5], [3, 5], [4, 5], [5, 5]] @@ -406,7 +406,7 @@ class TestScrapper(unittest.TestCase): self.assertTrue(result.equals(expected_df)) - def test__order(self, _mock_get_query_data): + def test__order(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") columns = ['nodes-4', 'nodes-1', 'nodes-3'] @@ -417,7 +417,7 @@ class TestScrapper(unittest.TestCase): self.assertEqual(expected_columns, result) - def test__order_bootstrap(self, _mock_get_query_data): + def test__order_bootstrap(self, _mock_get_query_data: MagicMock): test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "") columns = ['nodes-4', 'nodes-1', 'nodes-3', 'bootstrap-2'] diff --git a/src/utils/tests/test_queries.py b/src/utils/tests/test_queries.py index 477554f..53e1a40 100644 --- a/src/utils/tests/test_queries.py +++ b/src/utils/tests/test_queries.py @@ -1,6 +1,6 @@ # Python Imports import unittest -from unittest.mock import patch +from unittest.mock import patch, MagicMock import requests.exceptions @@ -11,7 +11,7 @@ from src.utils.queries import get_query_data @patch('src.utils.queries.requests.get') class TestQueries(unittest.TestCase): - def test_get_query_data_correct(self, mock_requests_get): + def test_get_query_data_correct(self, mock_requests_get: MagicMock): mock_requests_get.return_value.ok = True mock_requests_get.return_value.json.return_value = {'data': 'foo'} @@ -19,14 +19,14 @@ class TestQueries(unittest.TestCase): self.assertEqual(result.ok_value, 'foo') - def test_get_query_data_timeout(self, mock_requests_get): + def test_get_query_data_timeout(self, mock_requests_get: MagicMock): mock_requests_get.side_effect = requests.exceptions.Timeout result = get_query_data('https://foo/bar/1') self.assertEqual(result.err_value, 'Timeout error.') - def test_get_query_data_error(self, mock_requests_get): + def test_get_query_data_error(self, mock_requests_get: MagicMock): mock_requests_get.return_value.ok = False 
mock_requests_get.return_value.status_code = 404 mock_requests_get.return_value.content = 'bar' From 780474b0013955e03663f293147ebbd371d7d754 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 23 Feb 2024 11:18:51 +0100 Subject: [PATCH 17/19] - --- src/utils/tests/test_queries.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/utils/tests/test_queries.py b/src/utils/tests/test_queries.py index 53e1a40..d60cebb 100644 --- a/src/utils/tests/test_queries.py +++ b/src/utils/tests/test_queries.py @@ -1,8 +1,7 @@ # Python Imports import unittest -from unittest.mock import patch, MagicMock - import requests.exceptions +from unittest.mock import patch, MagicMock # Project Imports from src.utils.queries import get_query_data From c6f0280676c94202945134a6aa5f7dd21eb98670 Mon Sep 17 00:00:00 2001 From: Alberto Soutullo Date: Fri, 23 Feb 2024 11:35:27 +0100 Subject: [PATCH 18/19] Changed way to open file, so works either with python -m unittest discover src or running from IDE --- src/metrics/tests/test_scrapper.py | 64 ++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/src/metrics/tests/test_scrapper.py b/src/metrics/tests/test_scrapper.py index 91083b6..c7bc603 100644 --- a/src/metrics/tests/test_scrapper.py +++ b/src/metrics/tests/test_scrapper.py @@ -17,13 +17,17 @@ logger = logging.getLogger(__name__) @patch('src.metrics.scrapper.get_query_data') class TestScrapper(unittest.TestCase): + def setUp(self): + self.current_directory = os.path.dirname(os.path.abspath(__file__)) + @classmethod def tearDownClass(cls): os.rmdir('test_results') def test_query_and_dump_metrics_single(self, mock_get_query_data: MagicMock): + file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml') test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", - "single_test_scrape.yaml", + file_path, "test_results/") data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], @@ -51,8 +55,9 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') def test_query_and_dump_metrics_multiple_column(self, mock_get_query_data: MagicMock): + file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml') test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", - "single_test_scrape.yaml", + file_path, "test_results/") data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5], @@ -83,8 +88,9 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') def test_query_and_dump_metrics_multiple_column_unordered(self, mock_get_query_data: MagicMock): + file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml') test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", - "single_test_scrape.yaml", + file_path, "test_results/") data = {'result': [{'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], @@ -115,8 +121,9 @@ class TestScrapper(unittest.TestCase): os.remove('test_results/metric1.csv') def test_query_and_dump_metrics_multiple_data(self, mock_get_query_data: MagicMock): + file_path = os.path.join(self.current_directory, 'test_scrape.yaml') test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/", - "test_scrape.yaml", + file_path, "test_results/") data1 = {'result': [{'metric': {'instance': 'nodes-2'}, 'values': [[1, 6], [2, 6], [3, 6], @@ -172,9 +179,11 @@ class TestScrapper(unittest.TestCase): @patch('src.metrics.scrapper.Scrapper._dump_data') def 
+        file_path = os.path.join(self.current_directory, 'test_scrape.yaml')
         test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/",
-                                          "test_scrape.yaml",
+                                          file_path,
                                           "test_results/")
 
         err1 = "Err1"
         err2 = "Err2"
@@ -186,8 +194,9 @@ class TestScrapper(unittest.TestCase):
         self.assertEqual(0, mock_dump.call_count)
 
     def test__set_query_config(self, _mock_get_query_data: MagicMock):
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
         test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/",
-                                          "single_test_scrape.yaml",
+                                          file_path,
                                           "test_results/")
 
         test_scrapper._set_query_config()
@@ -200,8 +209,9 @@ class TestScrapper(unittest.TestCase):
 
     @patch('src.metrics.scrape_utils._get_datetime_now')
     def test__create_query(self, mock_datetime_now: MagicMock, _mock_get_query_data: MagicMock):
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
         test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/",
-                                          "single_test_scrape.yaml",
+                                          file_path,
                                           "test_results/")
 
         metric = "bandwidth"
@@ -220,8 +230,9 @@ class TestScrapper(unittest.TestCase):
 
     @patch('src.metrics.scrape_utils._get_datetime_now')
     def test__create_query_with_rate(self, mock_datetime_now: MagicMock, _mock_get_query_data: MagicMock):
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
         test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/",
-                                          "single_test_scrape.yaml",
+                                          file_path,
                                           "test_results/")
 
         metric = "bandwidth[$__rate_interval]"
@@ -240,8 +251,9 @@ class TestScrapper(unittest.TestCase):
         self.assertEqual(expected_result, result)
 
     def test__dump_data(self, _mock_get_query_data: MagicMock):
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
         test_scrapper = scrapper.Scrapper("http://myurl:9090/api/v1/",
-                                          "single_test_scrape.yaml",
+                                          file_path,
                                           "test_results/")
 
         data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5],
@@ -267,7 +279,8 @@ class TestScrapper(unittest.TestCase):
 
     @patch('src.metrics.scrapper.Scrapper._prepare_path')
     def test__dump_data_err(self, mock_prepare_path: MagicMock, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "/")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "/")
 
         mock_prepare_path.return_value = Err("Error")
         data = {}
@@ -278,7 +291,8 @@ class TestScrapper(unittest.TestCase):
         self.assertEqual(cm.exception.code, 1)
 
     def test__prepare_path(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path/")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "test_path/")
 
         result = test_scrapper._prepare_path('metric1')
 
@@ -287,7 +301,8 @@ class TestScrapper(unittest.TestCase):
         os.rmdir('test_path/')
 
     def test__prepare_path_multiple(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "test_path_1/test_path_2")
 
         result = test_scrapper._prepare_path('metric1')
 
@@ -298,7 +313,8 @@ class TestScrapper(unittest.TestCase):
 
     @patch('src.metrics.scrapper.Path.mkdir')
     def test__prepare_path_err(self, mock_mkdir: MagicMock, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "test_path_1/test_path_2")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "test_path_1/test_path_2")
 
         mock_mkdir.side_effect = OSError("Error")
 
@@ -307,7 +323,8 @@ class TestScrapper(unittest.TestCase):
         self.assertIsInstance(result, Err)
 
     def test__create_dataframe_from_data(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5],
                                                                           [4, 5], [5, 5]]}]}
@@ -326,7 +343,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertTrue(result.equals(expected_df))
 
     def test__create_dataframe_from_data_multiple(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [2, 5], [3, 5],
                                                                           [4, 5], [5, 5]]},
@@ -349,7 +367,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertTrue(result.equals(expected_df))
 
     def test__create_dataframe_from_data_not_matching_times(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         data = {'result': [{'metric': {'instance': 'nodes-1'}, 'values': [[1, 5], [3, 5],
                                                                           [5, 5]]},
@@ -370,7 +389,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertTrue(result.equals(expected_df))
 
     def test__sort_dataframe(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         data = {'result': [{'metric': {'instance': 'nodes-4'}, 'values': [[1, 5], [2, 5], [3, 5],
                                                                           [4, 5], [5, 5]]},
@@ -389,7 +409,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertEqual(expected_columns, result.columns.tolist())
 
     def test__create_pod_df(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         values = [[1, 5], [2, 5], [3, 5], [4, 5], [5, 5]]
 
@@ -407,7 +428,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertTrue(result.equals(expected_df))
 
     def test__order(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         columns = ['nodes-4', 'nodes-1', 'nodes-3']
 
@@ -418,7 +440,8 @@ class TestScrapper(unittest.TestCase):
 
         self.assertEqual(expected_columns, result)
 
     def test__order_bootstrap(self, _mock_get_query_data: MagicMock):
-        test_scrapper = scrapper.Scrapper("", "single_test_scrape.yaml", "")
+        file_path = os.path.join(self.current_directory, 'single_test_scrape.yaml')
+        test_scrapper = scrapper.Scrapper("", file_path, "")
 
         columns = ['nodes-4', 'nodes-1', 'nodes-3', 'bootstrap-2']
 

From a16399506b9bd631210e3f45f5ba26e39ea8751e Mon Sep 17 00:00:00 2001
From: Alberto Soutullo
Date: Mon, 26 Feb 2024 15:55:42 +0100
Subject: [PATCH 19/19] Added query_config attribute to init for better
 readability

---
 src/metrics/scrapper.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/metrics/scrapper.py b/src/metrics/scrapper.py
index 58150f1..a134f02 100644
--- a/src/metrics/scrapper.py
+++ b/src/metrics/scrapper.py
@@ -17,6 +17,7 @@ logger = logging.getLogger(__name__)
 class Scrapper:
     def __init__(self, url: str, query_config_file: str, out_folder: str):
         self._url = url
+        self._query_config = None
         self._query_config_file = query_config_file
         self._out_folder = out_folder
         self._set_query_config()
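
The pattern PATCH 18 settles on - resolving fixture paths against the test module's own location instead of the current working directory - generalizes to any test suite that ships data files next to its tests. Below is a minimal, self-contained sketch of the same idea; the fixture name 'example.yaml' is hypothetical and not a file from this series.

# Hypothetical illustration, not part of the patches above: shows the
# __file__-relative path resolution that PATCH 18 applies to the Scrapper tests.
import os
import unittest


class TestFixturePath(unittest.TestCase):
    def setUp(self):
        # Anchor on the directory containing this test module, not on the CWD,
        # so the test behaves the same under 'python -m unittest discover src'
        # and when launched from an IDE.
        self.current_directory = os.path.dirname(os.path.abspath(__file__))

    def test_fixture_path_is_cwd_independent(self):
        # 'example.yaml' is an assumed fixture name used only for illustration.
        fixture = os.path.join(self.current_directory, 'example.yaml')
        self.assertTrue(os.path.isabs(fixture))


if __name__ == '__main__':
    unittest.main()

PATCH 19 follows the same readability instinct at the class level: initializing self._query_config = None in __init__ makes the attribute visible at construction time, rather than having it appear only as a side effect of _set_query_config().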