Skip to content

Commit

Permalink
Merge pull request #1504 from lhupfeldt/master
Browse files Browse the repository at this point in the history
Download full history - see issue 1468
  • Loading branch information
cyberw authored Aug 18, 2020
2 parents 2d0357c + 65752e2 commit 12bfb26
Show file tree
Hide file tree
Showing 7 changed files with 168 additions and 42 deletions.
12 changes: 8 additions & 4 deletions locust/argument_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,17 +287,17 @@ def setup_parser_arguments(parser):

stats_group = parser.add_argument_group("Request statistics options")
stats_group.add_argument(
'--csv',
'--csv', # Name repeated in 'parse_options'
dest="csv_prefix",
help="Store current request stats to files in CSV format. Setting this option will generate three files: [CSV_PREFIX]_stats.csv, [CSV_PREFIX]_stats_history.csv and [CSV_PREFIX]_failures.csv",
env_var="LOCUST_CSV",
)
stats_group.add_argument(
'--csv-full-history',
'--csv-full-history', # Name repeated in 'parse_options'
action='store_true',
default=False,
dest='stats_history_enabled',
help="Store each stats entry in CSV format to _stats_history.csv file",
help="Store each stats entry in CSV format to _stats_history.csv file. You must also specify the '--csv' argument to enable this.",
env_var="LOCUST_CSV_FULL_HISTORY",
)
stats_group.add_argument(
Expand Down Expand Up @@ -419,4 +419,8 @@ def get_parser(default_config_files=DEFAULT_CONFIG_FILES):


def parse_options(args=None):
    """Parse the command line and validate option combinations.

    Exits via ``parser.error`` (SystemExit) when '--csv-full-history' is
    given without '--csv', since the history file needs a csv prefix.
    """
    parser = get_parser()
    options = parser.parse_args(args=args)
    # '--csv-full-history' writes to [CSV_PREFIX]_stats_history.csv, so a prefix is mandatory
    if options.stats_history_enabled and options.csv_prefix is None:
        parser.error("'--csv-full-history' requires '--csv'.")
    return options
4 changes: 2 additions & 2 deletions locust/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from .env import Environment
from .log import setup_logging, greenlet_exception_logger
from .stats import (print_error_report, print_percentile_stats, print_stats,
stats_printer, stats_writer, write_csv_files)
stats_printer, stats_writer, write_csv_files, write_stats_history_csv_header)
from .user import User
from .user.inspectuser import get_task_ratio_dict, print_task_ratio
from .util.timespan import parse_timespan
Expand Down Expand Up @@ -322,9 +322,9 @@ def timelimit_stop():
stats_printer_greenlet.link_exception(greenlet_exception_handler)

if options.csv_prefix:
write_stats_history_csv_header(options.csv_prefix) # Write file now to ensure full-history-file exists for download
gevent.spawn(stats_writer, environment, options.csv_prefix, full_history=options.stats_history_enabled).link_exception(greenlet_exception_handler)


def shutdown():
"""
Shut down locust by firing quitting event, printing/writing stats and exiting
Expand Down
22 changes: 15 additions & 7 deletions locust/stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -745,8 +745,7 @@ def stats_printer_func():

def stats_writer(environment, base_filepath, full_history=False):
    """Writes the csv files for the locust run.

    Creates the stats-history header file up front, then loops forever,
    rewriting the csv files every CSV_STATS_INTERVAL_SEC seconds.
    Intended to run in its own greenlet.
    """
    write_stats_history_csv_header(base_filepath)
    while True:
        # write first so a file exists immediately, then wait for the next interval
        write_csv_files(environment, base_filepath, full_history)
        gevent.sleep(CSV_STATS_INTERVAL_SEC)
Expand All @@ -758,8 +757,8 @@ def write_csv_files(environment, base_filepath, full_history=False):
csv_writer = csv.writer(f)
requests_csv(environment.stats, csv_writer)

with open(base_filepath + '_stats_history.csv', 'a') as f:
f.write(stats_history_csv(environment, full_history) + "\n")
with open(stats_history_file_name(base_filepath), 'a') as f:
f.write(stats_history_csv_rows(environment, full_history) + "\n")

with open(base_filepath + '_failures.csv', 'w') as f:
csv_writer = csv.writer(f)
Expand Down Expand Up @@ -809,10 +808,11 @@ def requests_csv(stats, csv_writer):

csv_writer.writerow(stats_row + percentile_row)

def stats_history_csv_header():

def write_stats_history_csv_header(base_filepath):
"""Headers for the stats history CSV"""

return ",".join((
header = ",".join((
"Timestamp",
"User Count",
"Type",
Expand All @@ -829,7 +829,10 @@ def stats_history_csv_header():
"Total Average Content Size",
)) + '\n'

def stats_history_csv(environment, all_entries=False):
with open(stats_history_file_name(base_filepath), 'w') as f:
f.write(header)

def stats_history_csv_rows(environment, all_entries=False):
"""
Return a string of CSV rows with the *current* stats. By default only includes the
Aggregated stats entry, but if all_entries is set to True, a row for each entry will
Expand Down Expand Up @@ -868,6 +871,11 @@ def stats_history_csv(environment, all_entries=False):

return "\n".join(rows)


def stats_history_file_name(base_filepath):
    """Return the stats-history csv file path for the given '--csv' prefix."""
    return "{}_stats_history.csv".format(base_filepath)


def failures_csv(stats, csv_writer):
""""Return the contents of the 'failures' tab as a CSV."""
csv_writer.writerow([
Expand Down
3 changes: 3 additions & 0 deletions locust/templates/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,9 @@ <h2>Edit running load test</h2>
<div style="display:none;">
<div style="margin-top:20px;">
<a href="./stats/requests/csv">Download request statistics CSV</a><br>
{% if stats_history_enabled %}
<a href="./stats/requests_full_history/csv">Download full request statistics history CSV</a><br>
{% endif %}
<a href="./stats/failures/csv">Download failures CSV</a><br>
<a href="./exceptions/csv">Download exceptions CSV</a>
</div>
Expand Down
10 changes: 9 additions & 1 deletion locust/test/test_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,4 +148,12 @@ def _(parser, **kw):
out.seek(0)
stdout = out.read()
self.assertIn("Custom boolean flag", stdout)
self.assertIn("Custom string arg", stdout)
self.assertIn("Custom string arg", stdout)

def test_csv_full_history_requires_csv(self):
    # parser.error prints to stderr and raises SystemExit; silence the former
    # and assert the latter when '--csv-full-history' is given without '--csv'
    with mock.patch("sys.stderr", new=StringIO()), self.assertRaises(SystemExit):
        parse_options(args=["-f", "locustfile.py", "--csv-full-history"])
90 changes: 86 additions & 4 deletions locust/test/test_web.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import requests
from pyquery import PyQuery as pq

import locust
from locust import constant
from locust.argument_parser import get_parser, parse_options
from locust.user import User, task
Expand All @@ -23,12 +24,24 @@
from .util import create_tls_cert


class TestWebUI(LocustTestCase):
class _HeaderCheckMixin:
    """Shared header assertions for csv download responses.

    Mix into a TestCase subclass (supplies assertIn).
    """

    def _check_csv_headers(self, headers, exp_fn_prefix):
        # The download must be served as csv ...
        self.assertIn('Content-Type', headers)
        self.assertIn('text/csv', headers['Content-Type'])

        # ... and offered as an attachment whose suggested filename starts with the
        # expected prefix, e.g.: 'attachment; filename=requests_full_history_1597586811.5084946.csv'
        self.assertIn('Content-disposition', headers)
        self.assertIn(exp_fn_prefix, headers['Content-disposition'])


class TestWebUI(LocustTestCase, _HeaderCheckMixin):
def setUp(self):
super(TestWebUI, self).setUp()

parser = get_parser(default_config_files=[])
self.environment.options = parser.parse_args([])
self.environment.parsed_options = parser.parse_args([])
self.stats = self.environment.stats

self.web_ui = self.environment.create_web_ui("127.0.0.1", 0)
Expand Down Expand Up @@ -130,16 +143,23 @@ def test_stats_rounding(self):
data = json.loads(response.text)
self.assertEqual(1, data["stats"][0]["min_response_time"])
self.assertEqual(1000, data["stats"][0]["max_response_time"])

def test_request_stats_csv(self):
    # Log one request so the stats csv has a data row, then download it
    self.stats.log_request("GET", "/test2", 120, 5612)
    response = requests.get("http://127.0.0.1:%i/stats/requests/csv" % self.web_port)
    self.assertEqual(200, response.status_code)
    # common csv download headers: text/csv content type, attachment filename prefix
    self._check_csv_headers(response.headers, 'requests')

def test_request_stats_full_history_csv_not_present(self):
    # Without '--csv-full-history' the endpoint must respond 404, not serve a file
    self.stats.log_request("GET", "/test2", 120, 5612)
    response = requests.get("http://127.0.0.1:%i/stats/requests_full_history/csv" % self.web_port)
    self.assertEqual(404, response.status_code)

def test_failure_stats_csv(self):
    # Log one error so the failures csv has a data row, then download it
    self.stats.log_error("GET", "/", Exception("Error1337"))
    response = requests.get("http://127.0.0.1:%i/stats/failures/csv" % self.web_port)
    self.assertEqual(200, response.status_code)
    # common csv download headers: text/csv content type, attachment filename prefix
    self._check_csv_headers(response.headers, 'failures')

def test_request_stats_with_errors(self):
self.stats.log_error("GET", "/", Exception("Error1337"))
Expand Down Expand Up @@ -195,7 +215,8 @@ def test_exceptions_csv(self):

response = requests.get("http://127.0.0.1:%i/exceptions/csv" % self.web_port)
self.assertEqual(200, response.status_code)

self._check_csv_headers(response.headers, 'exceptions')

reader = csv.reader(StringIO(response.text))
rows = []
for row in reader:
Expand Down Expand Up @@ -345,3 +366,64 @@ def test_index_with_https(self):
from urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
self.assertEqual(200, requests.get("https://127.0.0.1:%i/" % self.web_port, verify=False).status_code)


class TestWebUIFullHistory(LocustTestCase, _HeaderCheckMixin):
    """Web UI tests with '--csv' and '--csv-full-history' enabled."""

    # Prefix passed via '--csv'; the three csv file names below are derived from it
    STATS_BASE_NAME = "web_test"
    STATS_FILENAME = "{}_stats.csv".format(STATS_BASE_NAME)
    STATS_HISTORY_FILENAME = "{}_stats_history.csv".format(STATS_BASE_NAME)
    STATS_FAILURES_FILENAME = "{}_failures.csv".format(STATS_BASE_NAME)

    def setUp(self):
        """Start a web UI configured as if started with --csv and --csv-full-history."""
        super(TestWebUIFullHistory, self).setUp()
        self.remove_files_if_exists()

        parser = get_parser(default_config_files=[])
        self.environment.parsed_options = parser.parse_args(["--csv", self.STATS_BASE_NAME, "--csv-full-history"])
        self.stats = self.environment.stats
        self.stats.CSV_STATS_INTERVAL_SEC = 0.02

        # web UI must be created after parsed_options is set, so routes see the options
        self.web_ui = self.environment.create_web_ui("127.0.0.1", 0)
        self.web_ui.app.view_functions["request_stats"].clear_cache()
        gevent.sleep(0.01)  # give the server greenlet a chance to bind
        self.web_port = self.web_ui.server.server_port

    def tearDown(self):
        """Stop the web UI and remove the csv files written during the test."""
        super(TestWebUIFullHistory, self).tearDown()
        self.web_ui.stop()
        self.runner.quit()
        self.remove_files_if_exists()

    def remove_file_if_exists(self, filename):
        # best-effort cleanup; missing file is fine
        if os.path.exists(filename):
            os.remove(filename)

    def remove_files_if_exists(self):
        # remove all three csv outputs so each test starts from a clean slate
        self.remove_file_if_exists(self.STATS_FILENAME)
        self.remove_file_if_exists(self.STATS_HISTORY_FILENAME)
        self.remove_file_if_exists(self.STATS_FAILURES_FILENAME)

    def test_request_stats_full_history_csv(self):
        self.stats.log_request("GET", "/test", 1.39764125, 2)
        self.stats.log_request("GET", "/test", 999.9764125, 1000)
        self.stats.log_request("GET", "/test2", 120, 5612)

        # Call these two methods instead of the 'stats_writer' so that we avoid gevent wait loop
        locust.stats.write_stats_history_csv_header(self.STATS_BASE_NAME)
        locust.stats.write_csv_files(self.environment, self.STATS_BASE_NAME, full_history=True)

        response = requests.get("http://127.0.0.1:%i/stats/requests_full_history/csv" % self.web_port)
        self.assertEqual(200, response.status_code)
        self._check_csv_headers(response.headers, 'requests_full_history')
        self.assertIn('Content-Length', response.headers)

        reader = csv.reader(StringIO(response.text))
        rows = [r for r in reader]

        # header + one row per entry ('/test', '/test2') + the Aggregated row
        self.assertEqual(4, len(rows))
        self.assertEqual("Timestamp", rows[0][0])
        self.assertEqual("GET", rows[1][2])
        self.assertEqual("/test", rows[1][3])
        self.assertEqual("/test2", rows[2][3])
        self.assertEqual("", rows[3][2])  # Aggregated row has no method
        self.assertEqual("Aggregated", rows[3][3])
69 changes: 45 additions & 24 deletions locust/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,15 @@
from time import time

import gevent
from flask import Flask, make_response, jsonify, render_template, request
from flask import Flask, make_response, jsonify, render_template, request, send_file
from flask_basicauth import BasicAuth
from gevent import pywsgi

from locust import __version__ as version
from .exception import AuthCredentialsError
from .runners import MasterRunner
from .log import greenlet_exception_logger
from .stats import failures_csv, requests_csv, sort_stats
from .stats import failures_csv, requests_csv, stats_history_file_name, sort_stats
from .util.cache import memoize
from .util.rounding import proper_round
from .util.timespan import parse_timespan
Expand Down Expand Up @@ -136,6 +136,7 @@ def index():
step_time=options and options.step_time,
worker_count=worker_count,
is_step_load=environment.step_load,
stats_history_enabled=options and options.stats_history_enabled,
)

@app.route('/swarm', methods=["POST"])
Expand Down Expand Up @@ -172,33 +173,58 @@ def reset_stats():
environment.runner.stats.reset_all()
environment.runner.exceptions = {}
return "ok"


def _download_csv_suggest_file_name(suggest_filename_prefix):
"""Generate csv file download attachment filename suggestion.
Arguments:
suggest_filename_prefix: Prefix of the filename to suggest for saving the download. Will be appended with timestamp.
"""

return f"{suggest_filename_prefix}_{time()}.csv"

def _download_csv_response(csv_data, filename_prefix):
    """Generate csv file download response with 'csv_data'.

    Arguments:
    csv_data: CSV header and data rows.
    filename_prefix: Prefix of the filename to suggest for saving the download.
        Will be appended with timestamp.
    """
    suggested_name = _download_csv_suggest_file_name(filename_prefix)
    response = make_response(csv_data)
    response.headers["Content-type"] = "text/csv"
    # tell the browser to download rather than render the csv
    response.headers["Content-disposition"] = "attachment;filename=" + suggested_name
    return response

@app.route("/stats/requests/csv")
@self.auth_required_if_enabled
def request_stats_csv():
    # Serialize the current request stats into an in-memory csv buffer
    buffer = StringIO()
    requests_csv(self.environment.runner.stats, csv.writer(buffer))
    return _download_csv_response(buffer.getvalue(), "requests")

@app.route("/stats/requests_full_history/csv")
@self.auth_required_if_enabled
def request_stats_full_history_csv():
    # The history file only exists on disk when '--csv-full-history' was given;
    # otherwise there is nothing to serve, so answer 404.
    options = self.environment.parsed_options
    if options and options.stats_history_enabled:
        # Stream the csv file the stats writer appends to, straight from disk.
        # conditional=True lets flask handle range/etag requests for large files.
        return send_file(
            os.path.abspath(stats_history_file_name(options.csv_prefix)),
            mimetype="text/csv",
            as_attachment=True, attachment_filename=_download_csv_suggest_file_name("requests_full_history"),
            add_etags=True, cache_timeout=None, conditional=True, last_modified=None)

    return make_response("Error: Server was not started with option to generate full history.", 404)

@app.route("/stats/failures/csv")
@self.auth_required_if_enabled
def failures_stats_csv():
    # Serialize the current failure stats into an in-memory csv buffer
    buffer = StringIO()
    failures_csv(self.environment.runner.stats, csv.writer(buffer))
    return _download_csv_response(buffer.getvalue(), "failures")

@app.route('/stats/requests')
@self.auth_required_if_enabled
@memoize(timeout=DEFAULT_CACHE_TIME, dynamic_timeout=True)
Expand Down Expand Up @@ -278,13 +304,8 @@ def exceptions_csv():
nodes = ", ".join(exc["nodes"])
writer.writerow([exc["count"], exc["msg"], exc["traceback"], nodes])

response = make_response(data.getvalue())
file_name = "exceptions_{0}.csv".format(time())
disposition = "attachment;filename={0}".format(file_name)
response.headers["Content-type"] = "text/csv"
response.headers["Content-disposition"] = disposition
return response

return _download_csv_response(data.getvalue(), "exceptions")

# start the web server
self.greenlet = gevent.spawn(self.start)
self.greenlet.link_exception(greenlet_exception_handler)
Expand Down

0 comments on commit 12bfb26

Please sign in to comment.