Merge pull request #612 from aldenpeterson-wf/stats-file
Add ability to write csv stats files
Mark Beacom authored Sep 7, 2017
2 parents 15b1296 + 985a584 commit 0fd5c5c
Showing 4 changed files with 128 additions and 60 deletions.
14 changes: 14 additions & 0 deletions docs/quickstart.rst
@@ -101,6 +101,20 @@ host defaults to 127.0.0.1)::

locust -f locust_files/my_locust_file.py --slave --master-host=192.168.0.100 --host=http://example.com

You may wish to consume your Locust results via CSV files. There are two ways to do this.

First, when running Locust with the web UI, you can retrieve CSV files from ``localhost:8089/stats/requests/csv`` and ``localhost:8089/stats/distribution/csv`` (a fetch example appears further below).
Second, you can run Locust with the ``--csv`` flag, which periodically saves the CSV files. This is particularly useful
if you plan on running Locust in an automated way with the ``--no-web`` flag::

locust -f locust_files/my_locust_file.py --csv=foobar --no-web -n10 -c1
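
The run above produces two files named after the ``--csv`` base name: ``foobar_requests.csv`` and ``foobar_distribution.csv``. As a minimal sketch of post-processing, the requests file can be read with Python's standard ``csv`` module (the column names match the web UI's requests tab, and the final row is the aggregated "Total")::

    import csv

    # Print per-endpoint request counts and throughput from the written stats file
    with open("foobar_requests.csv") as f:
        for row in csv.DictReader(f):
            print(row["Name"], row["# requests"], row["Requests/s"])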

You can also customize how frequently this is written if you desire faster (or slower) writing::

import locust.stats
locust.stats.CSV_STATS_INTERVAL_SEC = 5 # default is 2 seconds
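
Returning to the first approach, the same CSV content can also be fetched over HTTP from a running Locust web UI. A short sketch using the ``requests`` library, assuming the default ``localhost:8089`` address::

    import requests

    # Download the current stats from a Locust instance running with the web UI
    stats_csv = requests.get("http://localhost:8089/stats/requests/csv").text
    dist_csv = requests.get("http://localhost:8089/stats/distribution/csv").text

    # Save to local files (the names here are arbitrary)
    with open("requests.csv", "w") as f:
        f.write(stats_csv)
    with open("distribution.csv", "w") as f:
        f.write(dist_csv)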

.. note::

To see all available options type::
21 changes: 18 additions & 3 deletions locust/main.py
Expand Up @@ -13,7 +13,7 @@

from . import web
from .log import setup_logging, console_logger
from .stats import stats_printer, print_percentile_stats, print_error_report, print_stats
from .stats import stats_printer, print_percentile_stats, print_error_report, print_stats, stats_writer, write_stat_csvs
from .inspectlocust import print_task_ratio, get_task_ratio_dict
from .core import Locust, HttpLocust
from .runners import MasterLocustRunner, SlaveLocustRunner, LocalLocustRunner
@@ -61,6 +61,16 @@ def parse_options():
help="Python module file to import, e.g. '../other.py'. Default: locustfile"
)

# A file that contains the current request stats.
parser.add_option(
'--csv', '--csv-base-name',
action='store',
type='str',
dest='csvfilebase',
default=None,
help="Store current request stats to files in CSV format.",
)

# if locust should be run in distributed mode as master
parser.add_option(
'--master',
@@ -443,17 +453,22 @@ def main():
if not options.only_summary and (options.print_stats or (options.no_web and not options.slave)):
# spawn stats printing greenlet
gevent.spawn(stats_printer)

if options.csvfilebase:
gevent.spawn(stats_writer, options.csvfilebase)


def shutdown(code=0):
"""
Shut down locust by firing quitting event, printing stats and exiting
Shut down locust by firing quitting event, printing/writing stats and exiting
"""
logger.info("Shutting down (exit code %s), bye." % code)

events.quitting.fire()
print_stats(runners.locust_runner.request_stats)
print_percentile_stats(runners.locust_runner.request_stats)

if options.csvfilebase:
write_stat_csvs(options.csvfilebase)
print_error_report()
sys.exit(code)

93 changes: 90 additions & 3 deletions locust/stats.py
@@ -3,13 +3,21 @@
import hashlib
import six
from six.moves import xrange
from itertools import chain

from . import events
from .exception import StopLocust
from .log import console_logger

STATS_NAME_WIDTH = 60

"""Default interval for how frequently the CSV file is written if this option
is configured."""
CSV_STATS_INTERVAL_SEC = 2

"""Default interval for how frequently results are written to console."""
CONSOLE_STATS_INTERVAL_SEC = 2

class RequestStatsAdditionError(Exception):
pass

@@ -513,7 +521,86 @@ def print_error_report():
console_logger.info("")

def stats_printer():
from .runners import locust_runner
from . import runners
while True:
print_stats(runners.locust_runner.request_stats)
gevent.sleep(CONSOLE_STATS_INTERVAL_SEC)

def stats_writer(base_filepath):
    """Writes the csv files for the locust run."""
    while True:
        write_stat_csvs(base_filepath)
        gevent.sleep(CSV_STATS_INTERVAL_SEC)


def write_stat_csvs(base_filepath):
"""Writes the requests and distribution csvs."""
with open(base_filepath + '_requests.csv', "w") as f:
f.write(requests_csv())

with open(base_filepath + '_distribution.csv', 'w') as f:
f.write(distribution_csv())


def sort_stats(stats):
return [stats[key] for key in sorted(six.iterkeys(stats))]


def requests_csv():
    """Returns the contents of the 'requests' tab as CSV."""
    from . import runners

rows = [
",".join([
'"Method"',
'"Name"',
'"# requests"',
'"# failures"',
'"Median response time"',
'"Average response time"',
'"Min response time"',
'"Max response time"',
'"Average Content Size"',
'"Requests/s"',
])
]

for s in chain(sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total", full_request_history=True)]):
rows.append('"%s","%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % (
s.method,
s.name,
s.num_requests,
s.num_failures,
s.median_response_time,
s.avg_response_time,
s.min_response_time or 0,
s.max_response_time,
s.avg_content_length,
s.total_rps,
))
return "\n".join(rows)

def distribution_csv():
"""Returns the contents of the 'distribution' tab as CSV."""
from . import runners

rows = [",".join((
'"Name"',
'"# requests"',
'"50%"',
'"66%"',
'"75%"',
'"80%"',
'"90%"',
'"95%"',
'"98%"',
'"99%"',
'"100%"',
))]
for s in chain(sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total", full_request_history=True)]):
if s.num_requests:
rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
else:
rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)

return "\n".join(rows)
60 changes: 6 additions & 54 deletions locust/web.py
@@ -15,7 +15,7 @@
from . import runners
from .cache import memoize
from .runners import MasterLocustRunner
from locust.stats import median_from_dict
from .stats import sort_stats, median_from_dict, requests_csv, distribution_csv
from locust import __version__ as version

import logging
@@ -77,36 +77,8 @@ def reset_stats():

@app.route("/stats/requests/csv")
def request_stats_csv():
rows = [
",".join([
'"Method"',
'"Name"',
'"# requests"',
'"# failures"',
'"Median response time"',
'"Average response time"',
'"Min response time"',
'"Max response time"',
'"Average Content Size"',
'"Requests/s"',
])
]

for s in chain(_sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total", full_request_history=True)]):
rows.append('"%s","%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % (
s.method,
s.name,
s.num_requests,
s.num_failures,
s.median_response_time,
s.avg_response_time,
s.min_response_time or 0,
s.max_response_time,
s.avg_content_length,
s.total_rps,
))

response = make_response("\n".join(rows))
response = make_response(requests_csv())

file_name = "requests_{0}.csv".format(time())
disposition = "attachment;filename={0}".format(file_name)
response.headers["Content-type"] = "text/csv"
@@ -115,26 +87,8 @@ def request_stats_csv():

@app.route("/stats/distribution/csv")
def distribution_stats_csv():
rows = [",".join((
'"Name"',
'"# requests"',
'"50%"',
'"66%"',
'"75%"',
'"80%"',
'"90%"',
'"95%"',
'"98%"',
'"99%"',
'"100%"',
))]
for s in chain(_sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total", full_request_history=True)]):
if s.num_requests:
rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
else:
rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)

response = make_response("\n".join(rows))

response = make_response(distribution_csv())
file_name = "distribution_{0}.csv".format(time())
disposition = "attachment;filename={0}".format(file_name)
response.headers["Content-type"] = "text/csv"
@@ -145,7 +99,7 @@ def distribution_stats_csv():
@memoize(timeout=DEFAULT_CACHE_TIME, dynamic_timeout=True)
def request_stats():
stats = []
for s in chain(_sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total")]):
for s in chain(sort_stats(runners.locust_runner.request_stats), [runners.locust_runner.stats.aggregated_stats("Total")]):
stats.append({
"method": s.method,
"name": s.name,
@@ -223,5 +177,3 @@ def exceptions_csv():
def start(locust, options):
wsgi.WSGIServer((options.web_host, options.port), app, log=None).serve_forever()

def _sort_stats(stats):
return [stats[key] for key in sorted(six.iterkeys(stats))]
