Skip to content
This repository has been archived by the owner on Jun 26, 2024. It is now read-only.

Commit

Permalink
Cherry-pick #1331, #1335 and #1363 (#1372)
Browse files Browse the repository at this point in the history
* Adding support to upload performance kpi data to opensearch (#1331)
* Fixed the environment issue in CI (#1335)
* Added metadata with performance kpi data upload (#1363)

Signed-off-by: Pavel Macík <[email protected]>

Signed-off-by: Pavel Macík <[email protected]>
Co-authored-by: Sushanta Das <[email protected]>
  • Loading branch information
pmacik and tisutisu committed Jan 19, 2023
1 parent b82c512 commit e23698a
Show file tree
Hide file tree
Showing 5 changed files with 213 additions and 0 deletions.
8 changes: 8 additions & 0 deletions make/performance.mk
Original file line number Diff line number Diff line change
Expand Up @@ -49,3 +49,11 @@ test-performance-thresholds: yq
@[ $$($(YQ) eval '(.kpi[] | select(.name == "usage").metrics.[] | select(.name == "CPU_millicores").average) < $(TEST_PERFORMANCE_AVG_CPU)' $(TEST_PERFORMANCE_OUTPUT_DIR)/results/kpi.yaml) == "true" ]
@echo "Checking if maximal value of CPU "$$($(YQ) eval '(.kpi[] | select(.name == "usage").metrics.[] | select(.name == "CPU_millicores").maximum)' $(TEST_PERFORMANCE_OUTPUT_DIR)/results/kpi.yaml)" < $(TEST_PERFORMANCE_MAX_CPU) milicores of vCPU"
@[ $$($(YQ) eval '(.kpi[] | select(.name == "usage").metrics.[] | select(.name == "CPU_millicores").maximum) < $(TEST_PERFORMANCE_MAX_CPU)' $(TEST_PERFORMANCE_OUTPUT_DIR)/results/kpi.yaml) == "true" ]

# Upload the collected performance KPI report (kpi.yaml) to an AWS
# OpenSearch instance using test/performance/upload_data.py, installing
# its Python requirements into the project virtualenv first.
# NOTE(review): assumes OPENSEARCH_HOST and OPENSEARCH_REGION are set by
# the caller/CI environment -- confirm before invoking this target.
.PHONY: test-performance-upload-kpi
## Upload KPI data to the open search instance
test-performance-upload-kpi: setup-venv
	@echo Uploading kpi to the open search instance
	$(Q)$(PYTHON_VENV_DIR)/bin/pip install -q -r ./test/performance/requirements.txt
	OS_HOST=$(OPENSEARCH_HOST) OS_REGION=$(OPENSEARCH_REGION) KPI_YAML_FILE=$(TEST_PERFORMANCE_OUTPUT_DIR)/results/kpi.yaml TEST_PERFORMANCE_AVG_MEMORY=$(TEST_PERFORMANCE_AVG_MEMORY) TEST_PERFORMANCE_MAX_MEMORY=$(TEST_PERFORMANCE_MAX_MEMORY) TEST_PERFORMANCE_AVG_CPU=$(TEST_PERFORMANCE_AVG_CPU) TEST_PERFORMANCE_MAX_CPU=$(TEST_PERFORMANCE_MAX_CPU) $(PYTHON_VENV_DIR)/bin/python3 ./test/performance/upload_data.py

Empty file added test/performance/__init__.py
Empty file.
15 changes: 15 additions & 0 deletions test/performance/collect-kpi.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,13 @@ NS_PREFIX=${NS_PREFIX:-entanglement}

SBO_METRICS=$(find $METRICS -type f -name 'pod-info.service-binding-operator-*.csv')

# Metadata describing this test run; appended to the KPI report below so the
# uploader can attach it to each OpenSearch document.
SBO_VERSION=$(make operator-version --no-print-directory)
OPENSHIFT_RELEASE=$(oc version -o yaml | yq e '.openshiftVersion')
# Extract the bare x.y.z version. Dots are escaped: an unescaped '.' in the
# PCRE would match any character, not just a literal dot.
OPENSHIFT_VERSION=$(oc version -o yaml | yq e '.openshiftVersion' | grep -oP '^\d{1,2}\.\d{1,2}\.\d{1,2}')
RUN_TYPE=${RUN_TYPE:-default}       # e.g. "default" or a CI-specific run label
PULL_NUMBER=${PULL_NUMBER:-n/a}     # CI pull request number, if any
PULL_PULL_SHA=${PULL_PULL_SHA:-n/a} # CI commit SHA under test, if any

SCENARIOS="nosb-inv nosb-val sb-inc sb-inv sb-val"
#SCENARIOS="nosb-val"

Expand Down Expand Up @@ -52,3 +59,11 @@ for scenario in $SCENARIOS; do
cat $output >>$kpi_yaml
done
done

# Append the run metadata gathered above to the aggregated KPI report.
cat >>$kpi_yaml <<EOF
execution_timestamp: $(date +%F\ %T)
sbo_version: $SBO_VERSION
openshift_version: $OPENSHIFT_VERSION
openshift_release: $OPENSHIFT_RELEASE
run_type: $RUN_TYPE
pull_number: $PULL_NUMBER
commit_id: $PULL_PULL_SHA
EOF
5 changes: 5 additions & 0 deletions test/performance/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
boto3==1.23.10
botocore==1.26.10
requests==2.27.1
opensearch-py==2.0.1
PyYAML==6.0
185 changes: 185 additions & 0 deletions test/performance/upload_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,185 @@
"""
Python Module to interact with aws opensearch instance
"""

import os
import string
import random
from datetime import datetime

import yaml
import boto3
from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth


def create_index(os_client, index_name, number_of_shards=4):
    """Create an index on the OpenSearch instance.

    :param os_client: OpenSearch client (see setup_os_client)
    :param index_name: name of the index to create
    :param number_of_shards: shard count for the new index (default 4)
    """
    settings = {'settings': {'index': {'number_of_shards': number_of_shards}}}
    result = os_client.indices.create(index_name, body=settings)
    print('\nCreating index:')
    print(result)


def delete_index(os_client, index_name):
    """Drop an index (and all its documents) from the OpenSearch instance."""
    result = os_client.indices.delete(index=index_name)
    print('\nDeleting index:')
    print(result)


def add_document_to_index(os_client, index_name, doc_id, document):
    """Index a single document under doc_id.

    refresh=True makes the document searchable immediately after indexing.
    """
    result = os_client.index(index=index_name, body=document, id=doc_id, refresh=True)
    print('\nAdding document:')
    print(result)


def delete_a_document(os_client, index_name, doc_id):
    """Remove the document with the given id from the index."""
    result = os_client.delete(index=index_name, id=doc_id)
    print('\nDeleting document:')
    print(result)


def search_document(os_client, index_name, query_text='miller', size=5):
    """Run a multi_match search against the index and print the results.

    Generalized: the query text and result size were hard-coded ('miller', 5);
    they are now parameters whose defaults preserve the original behavior, so
    existing callers are unaffected.

    :param os_client: OpenSearch client (see setup_os_client)
    :param index_name: index to search
    :param query_text: text matched against the 'title' and 'director' fields
                       ('title' is boosted 2x via 'title^2')
    :param size: maximum number of hits to return
    """
    query = {
        'size': size,
        'query': {
            'multi_match': {
                'query': query_text,
                'fields': ['title^2', 'director']
            }
        }
    }
    response = os_client.search(
        body=query,
        index=index_name
    )
    print('\nSearch results:')
    print(response)


def setup_os_client():
    """Build an OpenSearch client authenticated with AWS SigV4 credentials.

    Reads the cluster endpoint from the OS_HOST environment variable (e.g.
    my-domain.us-east-1.es.amazonaws.com) and the AWS region from OS_REGION;
    raises KeyError if either is missing. Credentials come from the default
    boto3 session (environment/instance profile).
    """
    endpoint = os.environ['OS_HOST']
    aws_region = os.environ['OS_REGION']
    signer = AWSV4SignerAuth(boto3.Session().get_credentials(), aws_region)
    return OpenSearch(
        hosts=[{'host': endpoint, 'port': 443}],
        http_auth=signer,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection
    )


def read_metric_data(file_name):
    """Read the KPI YAML report and build the document to upload.

    :param file_name: path to kpi.yaml produced by collect-kpi.sh; must contain
        a 'kpi' list with a 'usage' entry holding 'Memory_MiB' and
        'CPU_millicores' metrics, plus the run-metadata keys appended by the
        collector script.
    :return: flat dict of metrics, thresholds and run metadata ready to index.
    :raises KeyError: if a metadata key is missing from the report.
    :raises NameError: if the expected 'usage' metrics are absent.
    """
    with open(file_name, encoding="utf-8") as file_d:
        # safe_load instead of yaml.load(FullLoader): the report holds only
        # plain scalars/lists, and safe_load cannot instantiate arbitrary
        # Python objects from untrusted input.
        content = yaml.safe_load(file_d)

    kpi_data = content['kpi']
    execution_timestamp = content['execution_timestamp']
    sbo_version = content['sbo_version']
    openshift_version = content['openshift_version']
    openshift_release = content['openshift_release']
    run_type = content['run_type']
    pull_number = content['pull_number']
    commit_id = content['commit_id']

    # Pull the average/maximum for memory and CPU out of the 'usage' KPI entry.
    for data in kpi_data:
        if data['name'] == 'usage':
            for metric in data['metrics']:
                if metric['name'] == 'Memory_MiB':
                    memory_average = metric['average']
                    memory_maximum = metric['maximum']
                elif metric['name'] == 'CPU_millicores':
                    cpu_average = metric['average']
                    cpu_maximum = metric['maximum']
    dt_string = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return {'upload_date': dt_string,
            'memory_average': float(memory_average),
            'memory_maximum': float(memory_maximum),
            'cpu_average': float(cpu_average),
            'cpu_maximum': float(cpu_maximum),
            'memory_average_threshold': get_average_threshold_mem(),
            'memory_maximum_threshold': get_maximum_threshold_mem(),
            'cpu_average_threshold': get_average_threshold_cpu(),
            'cpu_maximum_threshold': get_maximum_threshold_cpu(),
            'run_type': run_type,
            'pull_number': pull_number,
            'commit_id': commit_id,
            'execution_timestamp': execution_timestamp,
            'sbo_version': sbo_version,
            'openshift_version': openshift_version,
            'openshift_release': openshift_release}


def generate_id():
    """Return a random 6-character document id drawn from [a-z0-9]."""
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(6))


def get_average_threshold_mem():
    """Average-memory KPI threshold (MiB) from TEST_PERFORMANCE_AVG_MEMORY (currently 150)."""
    raw = os.environ['TEST_PERFORMANCE_AVG_MEMORY']
    return float(raw)


def get_maximum_threshold_mem():
    """Maximum-memory KPI threshold (MiB) from TEST_PERFORMANCE_MAX_MEMORY (currently 200)."""
    raw = os.environ['TEST_PERFORMANCE_MAX_MEMORY']
    return float(raw)


def get_average_threshold_cpu():
    """Average-CPU KPI threshold (millicores) from TEST_PERFORMANCE_AVG_CPU (currently 20)."""
    raw = os.environ['TEST_PERFORMANCE_AVG_CPU']
    return float(raw)


def get_maximum_threshold_cpu():
    """Maximum-CPU KPI threshold (millicores) from TEST_PERFORMANCE_MAX_CPU (currently 100)."""
    raw = os.environ['TEST_PERFORMANCE_MAX_CPU']
    return float(raw)


if __name__ == '__main__':
    # Read the KPI report named by KPI_YAML_FILE and index it as a new
    # document (random id) in the 'sbo-perf-data' OpenSearch index.
    OS_INDEX_NAME = 'sbo-perf-data'
    client = setup_os_client()

    metric_file_name = os.environ['KPI_YAML_FILE']
    doc = read_metric_data(metric_file_name)

    RANDOM_DOC_ID = generate_id()
    print(f"Random Generated ID: {RANDOM_DOC_ID}")
    add_document_to_index(client, OS_INDEX_NAME, RANDOM_DOC_ID, doc)

0 comments on commit e23698a

Please sign in to comment.