run --append --concurrency=multiprocessing didn't use a suffix for the main process. #880
nedbat committed Dec 17, 2019
1 parent 3875113 commit 842f585
Showing 3 changed files with 36 additions and 9 deletions.
CHANGES.rst: 7 changes (6 additions & 1 deletion)
@@ -24,7 +24,12 @@ want to know what's different in 5.0 since 4.5.x, see :ref:`whatsnew5x`.
 Unreleased
 ----------
 
-Nothing yet.
+- When using ``coverage run --concurrency=multiprocessing``, all data files
+  should be named with parallel-ready suffixes. 5.0 mistakenly named the main
+  process' file with no suffix when using ``--append``. This is now fixed,
+  closing `issue 880`_.
+
+.. _issue 880: https://github.com/nedbat/coveragepy/issues/880
 
 
 .. _changes_50:
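For illustration only (not part of the commit), here is a minimal sketch of the behavior the changelog entry describes, driven through the coverage CLI; ``prog.py`` stands in for any script that spawns multiprocessing workers:

    import glob
    import subprocess

    # Illustration only: "prog.py" is a placeholder program; the coverage CLI
    # must be on PATH.  Run it once, then again with --append.
    subprocess.run(["coverage", "run", "--concurrency=multiprocessing", "prog.py"], check=True)
    subprocess.run(
        ["coverage", "run", "--append", "--concurrency=multiprocessing", "prog.py"],
        check=True,
    )

    # With the fix, every process, including the main one, writes a suffixed
    # data file such as .coverage.<host>.<pid>.<random>.  Assuming no combined
    # data file is left over from an earlier run, no bare .coverage should
    # exist until "coverage combine" merges the suffixed files.
    assert glob.glob(".coverage.*")
    assert not glob.glob(".coverage")

    subprocess.run(["coverage", "combine"], check=True)

Before this fix, the ``--append`` run wrote the main process' data to a plain ``.coverage`` file with no suffix.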
coverage/control.py: 10 changes (6 additions & 4 deletions)
@@ -252,6 +252,11 @@ def _init(self):
         # to.
         self._debug = DebugControl(self.config.debug, self._debug_file)
 
+        if "multiprocessing" in (self.config.concurrency or ()):
+            # Multi-processing uses parallel for the subprocesses, so also use
+            # it for the main process.
+            self.config.parallel = True
+
         # _exclude_re is a dict that maps exclusion list names to compiled regexes.
         self._exclude_re = {}
 
@@ -393,16 +398,13 @@ def load(self):
     def _init_for_start(self):
         """Initialization for start()"""
         # Construct the collector.
-        concurrency = self.config.concurrency or []
+        concurrency = self.config.concurrency or ()
         if "multiprocessing" in concurrency:
             if not patch_multiprocessing:
                 raise CoverageException(            # pragma: only jython
                     "multiprocessing is not supported on this Python"
                 )
             patch_multiprocessing(rcfile=self.config.config_file)
-            # Multi-processing uses parallel for the subprocesses, so also use
-            # it for the main process.
-            self.config.parallel = True
 
         dycon = self.config.dynamic_context
         if not dycon or dycon == "none":
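The fix moves the ``config.parallel = True`` assignment from ``_init_for_start()`` into ``_init()``. Under ``coverage run --append`` the existing data is loaded before measurement starts, and the data file's name is decided on that early path, so the parallel setting has to already be in place when ``load()`` runs, not only when ``start()`` runs. A rough sketch of the ordering, using coverage's public Python API as an approximation of what the command line does (not the commit's own code):

    import coverage

    # Rough approximation of "coverage run --append --concurrency=multiprocessing prog.py".
    cov = coverage.Coverage(concurrency="multiprocessing")
    cov.load()     # the --append path reads existing data first, which triggers _init()
    cov.start()    # _init_for_start() runs here; before the fix, parallel was set only at this point
    # ... run the measured program, which may spawn multiprocessing workers ...
    cov.stop()
    cov.save()     # with parallel set early enough, the main process writes a suffixed data file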
tests/test_concurrency.py: 28 changes (24 additions & 4 deletions)
@@ -3,6 +3,7 @@
 
 """Tests for concurrency libraries."""
 
+import glob
 import os
 import random
 import sys
@@ -360,6 +361,12 @@ def process_worker_main(args):
 """
 
 
+def remove_files(*patterns):
+    for pattern in patterns:
+        for fname in glob.glob(pattern):
+            os.remove(fname)
+
+
 @flaky(max_runs=30)         # Sometimes a test fails due to inherent randomness. Try more times.
 class MultiprocessingTest(CoverageTest):
     """Test support of the multiprocessing module."""
@@ -370,7 +377,7 @@ def setUp(self):
         super(MultiprocessingTest, self).setUp()
 
     def try_multiprocessing_code(
-            self, code, expected_out, the_module, concurrency="multiprocessing"
+            self, code, expected_out, the_module, nprocs, concurrency="multiprocessing", args=""
     ):
         """Run code using multiprocessing, it should produce `expected_out`."""
         self.make_file("multi.py", code)
@@ -389,13 +396,18 @@ def try_multiprocessing_code(
             if start_method and start_method not in multiprocessing.get_all_start_methods():
                 continue
 
-            out = self.run_command("coverage run multi.py %s" % (start_method,))
+            remove_files(".coverage", ".coverage.*")
+            cmd = "coverage run {args} multi.py {start_method}".format(
+                args=args, start_method=start_method,
+            )
+            out = self.run_command(cmd)
             expected_cant_trace = cant_trace_msg(concurrency, the_module)
 
             if expected_cant_trace is not None:
                 self.assertEqual(out, expected_cant_trace)
             else:
                 self.assertEqual(out.rstrip(), expected_out)
+                self.assertEqual(len(glob.glob(".coverage.*")), nprocs + 1)
 
                 out = self.run_command("coverage combine")
                 self.assertEqual(out, "")
@@ -410,7 +422,15 @@ def test_multiprocessing_simple(self):
         code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
         total = sum(x*x if x%2 else x*x*x for x in range(upto))
         expected_out = "{nprocs} pids, total = {total}".format(nprocs=nprocs, total=total)
-        self.try_multiprocessing_code(code, expected_out, threading)
+        self.try_multiprocessing_code(code, expected_out, threading, nprocs)
+
+    def test_multiprocessing_append(self):
+        nprocs = 3
+        upto = 30
+        code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
+        total = sum(x*x if x%2 else x*x*x for x in range(upto))
+        expected_out = "{nprocs} pids, total = {total}".format(nprocs=nprocs, total=total)
+        self.try_multiprocessing_code(code, expected_out, threading, nprocs, args="--append")
 
     def test_multiprocessing_and_gevent(self):
         nprocs = 3
@@ -421,7 +441,7 @@ def test_multiprocessing_and_gevent(self):
         total = sum(sum(range((x + 1) * 100)) for x in range(upto))
         expected_out = "{nprocs} pids, total = {total}".format(nprocs=nprocs, total=total)
         self.try_multiprocessing_code(
-            code, expected_out, eventlet, concurrency="multiprocessing,eventlet"
+            code, expected_out, eventlet, nprocs, concurrency="multiprocessing,eventlet"
         )
 
     def try_multiprocessing_code_with_branching(self, code, expected_out):
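The test changes follow the same idea: ``remove_files`` clears stale data files between start-method iterations, and the new assertion counts the suffixed files a run should leave behind. As a standalone illustration (the names below are placeholders, not coverage.py API), the count check amounts to:

    import glob

    nprocs = 3  # number of worker processes spawned by the measured program

    # After "coverage run --concurrency=multiprocessing ..." there should be one
    # suffixed data file per worker plus one for the main process, and no bare
    # ".coverage" until "coverage combine" is run.
    data_files = glob.glob(".coverage.*")
    assert len(data_files) == nprocs + 1, data_files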
