New argument --csv-append
Adds a --csv-append flag that makes Locust append to the metrics CSV logs instead of overwriting them on each stats interval. This makes it possible to track how response times change over the course of long test runs.
mehta-ankit committed Nov 15, 2019
1 parent fcb040c commit fb358ef
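
In practice the new flag would be combined with the existing CSV option that sets csvfilebase, for example something like: locust -f locustfile.py --no-web --csv=example --csv-append (the --csv spelling is assumed here; only that option's help text and dest appear in the diff below). With appending enabled, example_requests.csv accumulates one block of rows per stats interval instead of being rewritten each time. As a rough, hypothetical illustration of the "tracking response times over long runs" use case, a small post-processing script could turn the appended file into a per-endpoint series of average response times; the file name and the "Average response time" column name below are assumptions, not part of this commit:

# Hypothetical post-processing sketch (not part of this commit): read an
# appended <csvfilebase>_requests.csv and collect the successive
# "Average response time" values per endpoint. Column names are assumed to
# match the header written by requests_csv().
import csv
from collections import defaultdict

def response_time_series(path):
    series = defaultdict(list)  # (method, name) -> one value per stats interval
    with open(path) as f:
        for row in csv.DictReader(f):
            series[(row["Method"], row["Name"])].append(float(row["Average response time"]))
    return series

for (method, name), values in response_time_series("example_requests.csv").items():
    print(method, name, values)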
Showing 2 changed files with 72 additions and 46 deletions.
locust/main.py: 31 changes (20 additions & 11 deletions)
@@ -45,14 +45,14 @@ def parse_options():
default="",
help="Host to bind the web interface to. Defaults to '' (all interfaces)"
)

parser.add_argument(
'-P', '--port', '--web-port',
type=int,
default=8089,
help="Port on which to run web host"
)

parser.add_argument(
'-f', '--locustfile',
default='locustfile',
@@ -66,6 +66,15 @@ def parse_options():
help="Store current request stats to files in CSV format.",
)

# if locust should append the CSV file instead of replacing it.
parser.add_argument(
'--csv-append',
action='store_true',
dest='csvappend',
default=False,
help="Set locust to append CSV file at set interval instead of replacing.",
)

# if locust should be run in distributed mode as master
parser.add_argument(
'--master',
@@ -79,14 +88,14 @@ def parse_options():
action='store_true',
help="Set locust to run in distributed mode with this process as slave"
)

# master host options
parser.add_argument(
'--master-host',
default="127.0.0.1",
help="Host or IP address of locust master for distributed load testing. Only used when running with --slave. Defaults to 127.0.0.1."
)

parser.add_argument(
'--master-port',
type=int,
@@ -99,7 +108,7 @@ def parse_options():
default="*",
help="Interfaces (hostname, ip) that locust master should bind to. Only used when running with --master. Defaults to * (all available interfaces)."
)

parser.add_argument(
'--master-bind-port',
type=int,
@@ -491,15 +500,15 @@ def timelimit_stop():
except socket.error as e:
logger.error("Failed to connect to the Locust master: %s", e)
sys.exit(-1)

if not options.only_summary and (options.print_stats or (options.no_web and not options.slave)):
# spawn stats printing greenlet
gevent.spawn(stats_printer)

if options.csvfilebase:
gevent.spawn(stats_writer, options.csvfilebase)
gevent.spawn(stats_writer, options.csvfilebase, options.csvappend)



def shutdown(code=0):
"""
Shut down locust by firing quitting event, printing/writing stats and exiting
@@ -514,16 +523,16 @@ def shutdown(code=0):
print_stats(runners.locust_runner.stats)
print_percentile_stats(runners.locust_runner.stats)
if options.csvfilebase:
write_stat_csvs(options.csvfilebase)
write_stat_csvs(options.csvfilebase, options.csvappend)
print_error_report()
sys.exit(code)

# install SIGTERM handler
def sig_term_handler():
logger.info("Got SIGTERM signal")
shutdown(0)
gevent.signal(signal.SIGTERM, sig_term_handler)

try:
logger.info("Starting Locust %s" % version)
main_greenlet.join()
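The main.py side of the change is plain wiring: the new store_true option lands on options.csvappend and is forwarded to stats_writer and write_stat_csvs alongside options.csvfilebase. A stripped-down, standalone sketch of that wiring (not Locust's full parser; the --csv / --csv-base-name spelling of the existing option is an assumption here):

# Stripped-down sketch of the new option wiring (not Locust's actual parser).
import argparse

parser = argparse.ArgumentParser()
# Existing option that sets the CSV base file name (flag spelling assumed).
parser.add_argument('--csv', '--csv-base-name', dest='csvfilebase', default=None,
                    help="Store current request stats to files in CSV format.")
# New option added by this commit: append at each interval instead of replacing.
parser.add_argument('--csv-append', action='store_true', dest='csvappend', default=False,
                    help="Set locust to append CSV file at set interval instead of replacing.")

options = parser.parse_args(['--csv', 'example', '--csv-append'])
assert options.csvfilebase == 'example'
assert options.csvappend is True
# main() then forwards both values, e.g. gevent.spawn(stats_writer, options.csvfilebase, options.csvappend)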
locust/stats.py: 87 changes (52 additions & 35 deletions)
@@ -712,32 +712,39 @@ def stats_printer():
print_stats(runners.locust_runner.stats)
gevent.sleep(CONSOLE_STATS_INTERVAL_SEC)

def stats_writer(base_filepath):
def stats_writer(base_filepath, append_file=False):
"""Writes the csv files for the locust run."""
write_stat_csvs(base_filepath, include_totals=(not append_file))
while True:
write_stat_csvs(base_filepath)
gevent.sleep(CSV_STATS_INTERVAL_SEC)
write_stat_csvs(base_filepath, append_file, include_totals=(not append_file))

def write_stat_csvs(base_filepath, append_file=False, include_totals=True):
"""
Writes the requests and distribution CSV files.
By default overwrites the existing file,
optionally appends the existing file
"""
file_mode = "a" if append_file else "w"

def write_stat_csvs(base_filepath):
"""Writes the requests and distribution csvs."""
with open(base_filepath + '_requests.csv', "w") as f:
f.write(requests_csv())
with open(base_filepath + '_requests.csv', file_mode) as f:
f.write(requests_csv(append_file, include_totals) + "\n")

with open(base_filepath + '_distribution.csv', 'w') as f:
f.write(distribution_csv())
with open(base_filepath + '_distribution.csv', file_mode) as f:
f.write(distribution_csv(append_file, include_totals) + "\n")


def sort_stats(stats):
return [stats[key] for key in sorted(six.iterkeys(stats))]


def requests_csv():
def requests_csv(append_file=False, include_totals=True):
from . import runners

"""Returns the contents of the 'requests' tab as CSV."""
rows = [
",".join([
rows = []
if not append_file:
rows.append(",".join([
'"Method"',
'"Name"',
'"# requests"',
@@ -748,10 +755,14 @@ def requests_csv():
'"Max response time"',
'"Average Content Size"',
'"Requests/s"',
])
]
]))

maybe_agg_stats = []

for s in chain(sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.total]):
if include_totals:
maybe_agg_stats = [runners.locust_runner.stats.total]

for s in chain(sort_stats(runners.locust_runner.request_stats), maybe_agg_stats):
rows.append('"%s","%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % (
s.method,
s.name,
@@ -766,30 +777,36 @@ def distribution_csv():
))
return "\n".join(rows)

def distribution_csv():
def distribution_csv(append_file=False, include_totals=True):
"""Returns the contents of the 'distribution' tab as CSV."""
from . import runners

rows = [",".join((
'"Name"',
'"# requests"',
'"50%"',
'"66%"',
'"75%"',
'"80%"',
'"90%"',
'"95%"',
'"98%"',
'"99%"',
'"99.9%"',
'"99.99%"',
'"100%"',
))]
for s in chain(sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.total]):
if s.num_requests:
rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
else:
rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)
rows = []
if not append_file:
rows.append(",".join((
'"Name"',
'"# requests"',
'"50%"',
'"66%"',
'"75%"',
'"80%"',
'"90%"',
'"95%"',
'"98%"',
'"99%"',
'"99.9%"',
'"99.99%"',
'"100%"',
)))

maybe_agg_stats = []
if include_totals:
maybe_agg_stats = [runners.locust_runner.stats.total]
for s in chain(sort_stats(runners.locust_runner.request_stats), maybe_agg_stats):
if s.num_requests:
rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
else:
rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)

return "\n".join(rows)

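Taken together, the stats.py changes give --csv-append the following write pattern: stats_writer first performs one overwriting write that emits the header row (and, because include_totals is set to (not append_file), omits the aggregated total), then appends header-less data rows every CSV_STATS_INTERVAL_SEC, and shutdown() finishes with a final append that does include the totals. A minimal, self-contained sketch of that overwrite-then-append pattern, using made-up rows and a trimmed header:

# Self-contained illustration of the overwrite-then-append pattern
# (made-up rows and a trimmed header; no gevent or Locust internals).
def write_snapshot(base_filepath, rows, append_file=False, include_header=True):
    file_mode = "a" if append_file else "w"
    with open(base_filepath + "_requests.csv", file_mode) as f:
        if include_header:
            f.write('"Method","Name","# requests","Average response time"\n')
        f.write("\n".join(rows) + "\n")

base = "example"
# Initial write: truncate the file and lay down the header row.
write_snapshot(base, ['"GET","/",10,42'], append_file=False)
# Each later interval: append data rows only, keeping earlier ones.
write_snapshot(base, ['"GET","/",25,40'], append_file=True, include_header=False)
write_snapshot(base, ['"GET","/",60,45'], append_file=True, include_header=False)
# example_requests.csv now holds one header row plus one data row per interval.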
