# tox.ini — tox configuration for the
# cookiecutter-cruft-poetry-tox-pre-commit-ci-cd instance project.
# Read by tox (Python configparser dialect) and the tox-gh-actions plugin.
# tox-gh-actions plugin: maps the GitHub Actions Python version to the tox
# environments run under that interpreter. Fixed: 3.9/3.10 previously re-ran
# py38, and 3.11 ran py39; each version now runs its own matching env.
# `coverage` is combined/reported only on the newest interpreter (3.11).
[gh-actions]
python =
    3.8: py38, c_library, nbqaxdoctest
    3.9: py39, c_library, nbqaxdoctest
    3.10: py310, c_library, nbqaxdoctest
    3.11: py311, c_library, coverage, nbqaxdoctest
# Core tox settings.
[tox]
# Don't fail the run when a listed interpreter is absent on this machine.
skip_missing_interpreters = true
# Build the package via PEP 517/518 (pyproject.toml/poetry) in an isolated env.
isolated_build = true
# Environments executed by a bare `tox` invocation.
envlist =
    py3{8,9,10,11},
    c_library,
    coverage,
    nbqaxdoctest,
    package
# Shared defaults; not a tox-recognized section name — other sections pull
# these values in via {[base_configs]<key>} ini substitution.
[base_configs]
# Default: install the project from sdist, not a built wheel.
wheel = false
# Show output of parallel runs (`tox -p`) for these envs.
parallel_show_output = true
# Base test environment; the py3{8,9,10,11} envs run this directly, and the
# derived envs below reuse its settings via {[testenv]<key>} substitution.
[testenv]
description = Run test suite under {basepython}
setenv =
    PIP_DISABLE_VERSION_CHECK = 1
    COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}}
    PY_COLORS = 1
deps = -r requirements-dev.txt
passenv =
    http_proxy
    https_proxy
    no_proxy
# -n: pytest-xdist worker count. TESTENV_SPECIFIED_PYTEST_FLAGS is an empty
# hook that derived envs (e.g. c_library) use to inject extra pytest flags.
commands = coverage run -m pytest \
    --benchmark-disable \
    --cov={toxinidir}/cookiecutter_cruft_poetry_tox_pre_commit_ci_cd_instance \
    --cov-config={toxinidir}/pyproject.toml \
    --junitxml={toxworkdir}/junit.{envname}.xml \
    --basetemp={envtmpdir} \
    -n={env:PYTEST_XDIST_PROC_NR:auto} \
    {env:TESTENV_SPECIFIED_PYTEST_FLAGS:} \
    {posargs:tests}
wheel = {[base_configs]wheel}
parallel_show_output = {[base_configs]parallel_show_output}
# Same suite as [testenv], but installed as a wheel so the C-compiled
# extension modules (not the pure-Python fallbacks) are exercised.
[testenv:c_library]
description = {[testenv]description} on C-compiled modules (i.e., NOT pure Python code)
setenv =
    {[testenv]setenv}
    TESTENV_SPECIFIED_PYTEST_FLAGS = -m "not pure_python_only"
deps = {[testenv]deps}
passenv = {[testenv]passenv}
commands = {[testenv]commands}
wheel = true
parallel_show_output = {[testenv]parallel_show_output}
# Performance benchmarks, generated for every interpreter crossed with both
# library types. NOTE: `\{`/`\}` are tox-escaped literal braces so the shell
# sees ${VAR} parameter expansions.
[testenv:py3{8,9,10,11}-benchmark-{pure_python,c_library}]
description = Run performance testing under {basepython}
setenv =
    {[testenv]setenv}
    pure_python: LIB_TYPE=pure_python
    c_library: LIB_TYPE=c_library
deps = {[testenv]deps}
passenv = {[testenv]passenv}
allowlist_externals = bash
commands =
    # Run benchmark in a shell to incorporate dynamic tag in output file names
    bash -c '\
        SYS_PLATFORM=$(python -c "import sys; print(sys.platform.title())"); \
        BENCHMARK_FILE_TAG={env:LIB_TYPE}-$(make strong-version-tag); \
        BENCHMARK_FILE_PREFIX="{toxinidir}/.benchmarks/$\{SYS_PLATFORM\}-C{basepython}/$\{BENCHMARK_FILE_TAG\}"; \
        mkdir -p $\{BENCHMARK_FILE_PREFIX%/*\}; \
        pytest \
            --basetemp={envtmpdir} \
            --benchmark-cprofile=cumtime \
            --benchmark-disable-gc \
            --benchmark-histogram="$\{BENCHMARK_FILE_PREFIX\}" \
            --benchmark-json="$\{BENCHMARK_FILE_PREFIX\}".json \
            --benchmark-min-rounds={env:BENCHMARK_MIN_ROUNDS:10} \
            --benchmark-only \
            --benchmark-save="$\{BENCHMARK_FILE_TAG\}" \
            --benchmark-save-data \
            --benchmark-sort=mean \
            --benchmark-timer=time.process_time \
            --benchmark-verbose \
            --benchmark-warmup=on \
            {posargs:tests}'
wheel =
    pure_python: {[testenv]wheel}
    c_library: {[testenv:c_library]wheel}
parallel_show_output = {[testenv]parallel_show_output}
# Mutation testing via mutmut; renders mutmut's JUnit XML results as a static
# HTML page with xunit-viewer (installed through a nodeenv-backed npm).
[testenv:mutmut]
setenv =
    {[testenv]setenv}
    JUNIT_XML_REPORT_OUTPUT_ROOT={toxworkdir}/junit.{envname}
    JUNIT_XML_REPORT_OUTPUT_PATH={env:JUNIT_XML_REPORT_OUTPUT_ROOT}.xml
    JUNIT_HTML_REPORT_OUTPUT_PATH={env:JUNIT_XML_REPORT_OUTPUT_ROOT}.html
    # To read emoji in the mutmut source:
    LANG=en_US.UTF-8
deps =
    {[testenv]deps}
    nodeenv
passenv = {[testenv]passenv}
allowlist_externals = bash
commands =
    # Run mutmut in a shell to remove escape metacharacters in "{[testenv]commands}"
    # Note: a mutmut exit code of 1 is a fatal error.
    # Other non-zero exit codes correspond to mutant statuses but indicate that
    # mutmut itself was successful and therefore are ignored for this test environment.
    # see: https://github.com/boxed/mutmut/blob/8bd1331429234ba572471d2ab6854b38e880f25d/mutmut/__init__.py#L1258
    bash -c '\
        mutmut run \
            --paths-to-mutate=cookiecutter_cruft_poetry_tox_pre_commit_ci_cd_instance/ \
            --runner="{[testenv]commands}" \
            --tests-dir=tests/ || \
        MUTMUT_EXIT_CODE=$?; \
        if [[ "$\{MUTMUT_EXIT_CODE\}" -eq 1 ]]; \
        then \
            exit "$\{MUTMUT_EXIT_CODE\}"; \
        fi'
    mutmut results
    # Export JUnit XML results to user-friendly static HTML page
    bash -c '\
        mutmut junitxml \
            > {env:JUNIT_XML_REPORT_OUTPUT_PATH}'
    nodeenv --python-virtualenv --jobs=8
    npm install -g --no-package-lock --no-save xunit-viewer
    # Inside of the virtualenv, '-g' installs into the virtualenv,
    # as opposed to installing into the local `node_modules`.
    xunit-viewer \
        --results {env:JUNIT_XML_REPORT_OUTPUT_PATH} \
        --title="Mutation Tests Report" \
        --output {env:JUNIT_HTML_REPORT_OUTPUT_PATH}
    bash -c '\
        echo -e "Test report available at \033[4;36m \
        {env:JUNIT_HTML_REPORT_OUTPUT_PATH} \
        \033[0m"'
wheel = {[testenv]wheel}
# Combine the per-env .coverage.* data files and emit terminal/XML/HTML/JSON
# reports plus a diff-cover report against a base branch.
[testenv:coverage]
description = [Run locally after tests]: Combine coverage data and create reports;
    generates a diff coverage against `{env:DIFF_AGAINST:origin/master}`
    (can be changed by setting `DIFF_AGAINST` env var)
deps =
    coverage[toml]
    diff_cover
skip_install = true
passenv =
    {[testenv]passenv}
    DIFF_AGAINST
setenv = COVERAGE_FILE={toxworkdir}/.coverage
commands =
    coverage combine {toxworkdir}
    coverage report
    coverage xml -o {toxworkdir}/coverage.xml
    coverage html -d {toxworkdir}/htmlcov
    coverage json -o {toxworkdir}/coverage.json --pretty-print
    diff-cover {toxworkdir}/coverage.xml \
        --compare-branch={env:DIFF_AGAINST:origin/master} \
        --diff-range-notation '..'
# Only run after the per-interpreter envs have written their data files.
depends =
    py3{8,9,10,11}
parallel_show_output = {[testenv]parallel_show_output}
# Doctest every notebook (outside .ipynb_checkpoints) with `nbqa xdoctest`,
# fanned out through GNU parallel; the joblog is scanned for non-zero exit
# codes and the first one found becomes the env's exit code.
[testenv:nbqaxdoctest]
description = Run notebook doctests directly via `xdoctest` (more efficient and
    less error-prone than using the built-in `doctest` library or
    running `xdoctest` via the `pytest` plugin). Note: if any test
    fails, the test environment command fails with the FIRST non-zero
    exit code encountered in the output job log.
deps = -r requirements-dev.txt
allowlist_externals = bash
commands =
    bash -c '\
        find {toxinidir}/cookiecutter_cruft_poetry_tox_pre_commit_ci_cd_instance \
            -type d -name .ipynb_checkpoints -prune -false -o \
            -type f -name "*.ipynb" \
        | parallel --joblog {toxworkdir}/{envname}.log -j 0 -k nbqa xdoctest \{\} --colored true; \
        NUM_FAILED_JOBS=$?; \
        echo "*****************$NUM_FAILED_JOBS FAILED JOBS*****************"; \
        cat {toxworkdir}/{envname}.log| awk "\$7 != 0"; \
        FAILURE_EXIT_CODES=$(cat {toxworkdir}/{envname}.log | awk "NR>1 && \$7 != 0 \{print \$7 \}") && \
        exit $\{FAILURE_EXIT_CODES:0:1\}'
    # Run after testenv since auto-generated temp `.py` files interfere with pytest
depends =
    py3{8,9,10,11}
wheel = {[base_configs]wheel}
parallel_show_output = {[base_configs]parallel_show_output}
# Lint/format gate: runs the repository's pre-commit hooks against all files.
[testenv:precommit]
description = Run `pre-commit` hooks to auto-format and lint the codebase
deps =
    bandit # `nbqa-bandit`
    pre-commit
    # Pre-commit hooks that rely on finding their executables in the `.tox/precommit/bin`
    mypy
    nbqa
    # Dependencies that are imported in source code and therefore needed for above pre-commit hook execution
    emoji
    hypothesis
    pytest
    pytest-benchmark
    sphinx
    typeguard
    # PEP 561 compliant stub packages for mypy
    types-emoji
    # Prod dependencies
    -r requirements.txt
skip_install = true
; For shfmt
setenv = GOCACHE={envdir}/go-build
passenv =
    HOMEPATH # needed on Windows
    PROGRAMDATA # needed on Windows
    SKIP # hook ids to skip
    TERM # (e.g. for mypy color output (https://github.com/tox-dev/tox/issues/1441#issuecomment-548063521))
commands = pre-commit run {posargs} -vv --all-files --color always
    python -c 'print("hint: run `make install-pre-commit-hooks` to add checks as pre-commit hook")'
wheel = {[base_configs]wheel}
# Build the sdist/wheel with poetry and validate the long description renders
# on PyPI (twine check).
[testenv:package]
description = Build Python package and validate its long description
deps =
    poetry >= 1.1.2
    mypy >= 0.910
    twine >= 1.12.1
    readme-renderer[md] >= 24.0
skip_install = true
commands = poetry build
    twine check {toxinidir}/dist/*
# Dependency vulnerability scan over the pinned requirements.
[testenv:security]
skip_install = true
deps = safety
commands = safety check --full-report -r {toxinidir}/requirements-all.txt \
    --ignore=51457 # CVE-2022-42969
# Build the HTML docs and run Sphinx's doctest builder; -n (nitpicky) and
# -W (warnings are errors) keep the docs strict.
[testenv:docs]
extras =
    docs
passenv = TERM
# PYTHONHASHSEED=0 makes the Sphinx build deterministic.
setenv =
    PYTHONHASHSEED = 0
commands =
    sphinx-build -n -T -W -b html -d {envtmpdir}/doctrees docs/source docs/_build/html
    sphinx-build -n -T -W -b doctest -d {envtmpdir}/doctrees docs/source docs/_build/html