Skip to content

Commit

Permalink
tests: fix coverage for eval tests (multiprocessing workaround)
Browse files Browse the repository at this point in the history
  • Loading branch information
ErikBjare committed Aug 14, 2024
1 parent 855a46b commit 9ef1ec4
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 0 deletions.
9 changes: 9 additions & 0 deletions gptme/eval/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,15 @@ def run_evals(
"""
Run evals for a list of tests.
"""
# For coverage to work with multiprocessing
# https://pytest-cov.readthedocs.io/en/latest/subprocess-support.html
try:
from pytest_cov.embed import cleanup_on_sigterm # fmt: skip
except ImportError:
pass
else:
cleanup_on_sigterm()

model_results = defaultdict(list)
with ProcessPoolExecutor(parallel) as executor:
model_futures_to_test = {
Expand Down
2 changes: 2 additions & 0 deletions tests/test_eval.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,13 @@
@pytest.mark.slow
def test_eval_cli():
    """Smoke-test the eval CLI end-to-end: run the 'hello' eval against
    openai/gpt-4o through Click's CliRunner and verify it exits cleanly."""
    runner = CliRunner()
    test_set = ["hello"]
    result = runner.invoke(
        main,
        [
            "--model",
            "openai/gpt-4o",
            *test_set,
        ],
    )
    # CliRunner.invoke always returns a Result object, so the previous
    # `assert result` was vacuously true and could never fail.
    # Assert on the exit code to actually detect a failed CLI run.
    assert result.exit_code == 0
Expand Down

0 comments on commit 9ef1ec4

Please sign in to comment.