Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ci] Fix: perf maybe failed when run locally, issue #3689 #3690

Merged
merged 2 commits into from
Dec 29, 2021
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions scripts/ci/ci-run-performance-tests-standalone.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,16 @@ d_release="/tmp/perf_${tag}"

mkdir -p "${d_pull}"

python3 -m pip install coscmd PyYAML
python3 -m pip install coscmd PyYAML configargparse
## run perf for current
python perfs.py --output "${d_pull}" --bin "${BASE_DIR}/target/release/databend-benchmark" --host 127.0.0.1 --port 9001
python3 perfs.py --output "${d_pull}" --bin "${BASE_DIR}/target/release/databend-benchmark" --host 127.0.0.1 --port 9001

## run perf for latest release
if [ ! -d "${d_release}" ]; then
mkdir -p "${d_release}"
${BASE_DIR}/scripts/deploy/databend-query-standalone-from-release.sh "${tag}"
python perfs.py --output "${d_release}" --bin "${BASE_DIR}/target/release/databend-benchmark" --host 127.0.0.1 --port 9001
python3 perfs.py --output "${d_release}" --bin "${BASE_DIR}/target/release/databend-benchmark" --host 127.0.0.1 --port 9001
fi

## run comparison scripts
python compare.py -r "${d_release}" -p "${d_pull}"
python3 compare.py -r "${d_release}" -p "${d_pull}"
7 changes: 0 additions & 7 deletions tests/perfs/compare.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,10 @@
#!coding: utf-8
import yaml
import re
import subprocess
import os
import sys
import json

from datetime import datetime
from time import time, sleep

from argparse import ArgumentParser
from qcloud_cos import CosConfig
from qcloud_cos import CosS3Client
Expand Down Expand Up @@ -210,9 +206,6 @@ def compare_suit(releaser, pull, suit_file, suit_name, type, releaser_suit_url,
global stable
global stats

r = {}
p = {}

with open(os.path.join(releaser, suit_file)) as json_file:
releaser_result = json.load(json_file)

Expand Down
19 changes: 7 additions & 12 deletions tests/perfs/perfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,18 @@
import configargparse
import yaml
import re
import subprocess
import os

from datetime import datetime
from time import time, sleep
from time import sleep

from subprocess import Popen
from subprocess import PIPE
from subprocess import CalledProcessError
from subprocess import TimeoutExpired
from argparse import ArgumentParser
from qcloud_cos import CosConfig
from qcloud_cos import CosS3Client
import sys
import logging

from qcloud_cos.cos_exception import CosException, CosServiceError
from qcloud_cos.cos_exception import CosServiceError

failures = 0
passed = 0
Expand Down Expand Up @@ -111,7 +106,7 @@ def execute(suit, bin_path, host, port, concurrency, iterations, output_dir,
json_path = os.path.join(output_dir, file_name)
S3key = os.path.join(S3path, file_name)
if type == "COS":
if rerun == "False":
if not rerun:
COScli = build_COSclient(secretID, secretKey, region, endpoint)
try:
response = COScli.get_object(
Expand All @@ -128,11 +123,10 @@ def execute(suit, bin_path, host, port, concurrency, iterations, output_dir,
logging.info("other issue occurred, {}".format(
e.get_error_code()))
except ConnectionError as ce:
logging.info("timeout for {}".format(S3key))
logging.info("timeout for {}, with error {}".format(S3key, str(ce)))
else:
# S3 key exists in given bucket just return
index = json.load(response['Body'].get_raw_stream())
file_dict = {}
for elem in index['Contents']:
if elem['path'] == S3key:
logging.info(
Expand Down Expand Up @@ -175,6 +169,7 @@ def execute(suit, bin_path, host, port, concurrency, iterations, output_dir,
failures += 1
else:
passed += 1
print("Total time: {}s".format(total_time))


if __name__ == '__main__':
Expand Down Expand Up @@ -234,8 +229,8 @@ def execute(suit, bin_path, host, port, concurrency, iterations, output_dir,
env_var='ENDPOINT')
parser.add_argument(
'--rerun',
default="False",
help='if rerun set as true, it will rerun all perfs.yaml completely')
action='store_true',
help='if `--rerun` is set, it will rerun all of perfs.yaml completely')
args = parser.parse_args()

for suit in conf['perfs']:
Expand Down