PR 664 triggered by: pull_request of refs/pull/664/merge branch (workflow run ID: 12439157133; number: 830; attempt: 1) #830
Workflow file for this run
--- | |
name: 🧪 | |
on: # yamllint disable-line rule:truthy | |
merge_group: | |
push: # publishes to TestPyPI on pushes to the main branch | |
branches: # any branch, but not tags | |
- >- | |
** | |
- >- # NOTE: "branches-ignore" cannot be used with "branches" | |
!dependabot/** | |
- >- # NOTE: pre-commit.ci always creates a PR | |
!pre-commit-ci-update-config | |
tags-ignore: | |
- >- | |
** | |
pull_request: | |
paths-ignore: # NOTE: cannot be combined with "paths" | |
- docs/**.rst | |
types: | |
- opened # default | |
- synchronize # default | |
- reopened # default | |
- ready_for_review # used in PRs created from the release workflow | |
workflow_dispatch: | |
inputs: | |
release-version: | |
# github.event_name == 'workflow_dispatch' | |
# && github.event.inputs.release-version | |
description: >- | |
Target PEP440-compliant version to release. | |
Please don't prepend `v`. | |
required: true | |
type: string | |
release-committish: | |
# github.event_name == 'workflow_dispatch' | |
# && github.event.inputs.release-committish | |
default: '' | |
description: >- | |
The commit to be released to PyPI and tagged | |
in Git as `release-version`. Normally, you | |
should keep this empty. | |
type: string | |
YOLO: | |
default: false | |
description: >- | |
Set this flag to disregard the outcome of the | |
test stage. The test results will block the | |
release otherwise. Only use this under | |
extraordinary circumstances to ignore the test | |
failures and cut the release regardless. | |
type: boolean | |
workflow_run: | |
workflows: | |
- ♲ manylinux containers | |
branches: | |
- devel | |
types: | |
- completed | |
concurrency: | |
group: >- | |
${{ | |
github.workflow | |
}}-${{ | |
github.ref_type | |
}}-${{ | |
github.event.pull_request.number || github.sha | |
}} | |
cancel-in-progress: true | |
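# NOTE: The group key above yields one concurrency slot per pull | |
# NOTE: request (keyed by its number) or per commit SHA otherwise, so | |
# NOTE: a newer run of this workflow cancels any in-progress run for | |
# NOTE: the same change. | |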
env: | |
FORCE_COLOR: 1 # Request colored output from CLI tools supporting it | |
MYPY_FORCE_COLOR: 1 # MyPy's color enforcement | |
PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message | |
PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message | |
PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message | |
PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` | |
TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation | |
TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests | |
FORCE_COLOR | |
MYPY_FORCE_COLOR | |
NO_COLOR | |
PY_COLORS | |
PYTEST_THEME | |
PYTEST_THEME_MODE | |
TOX_VERSION: tox < 4.12 | |
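# NOTE: `TOX_VERSION` is a pip requirement specifier; the `Install | |
# NOTE: tox` steps below pass it to `pip install` verbatim. | |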
run-name: >- | |
${{ | |
github.event_name == 'workflow_dispatch' | |
&& format('📦 Releasing v{0}...', github.event.inputs.release-version) | |
|| '' | |
}} | |
${{ | |
github.event.pull_request.number && 'PR' || '' | |
}}${{ | |
!github.event.pull_request.number && 'Commit' || '' | |
}} | |
${{ github.event.pull_request.number || github.sha }} | |
triggered by: ${{ github.event_name }} of ${{ | |
github.ref | |
}} ${{ | |
github.ref_type | |
}} | |
(workflow run ID: ${{ | |
github.run_id | |
}}; number: ${{ | |
github.run_number | |
}}; attempt: ${{ | |
github.run_attempt | |
}}) | |
jobs: | |
lint: | |
uses: ./.github/workflows/reusable-linters.yml | |
pre-setup: | |
name: ⚙️ Pre-set global build settings | |
runs-on: ubuntu-latest | |
timeout-minutes: 1 | |
defaults: | |
run: | |
shell: python | |
outputs: | |
# NOTE: These aren't env vars because the `${{ env }}` context is | |
# NOTE: inaccessible when passing inputs to reusable workflows. | |
dists-artifact-name: python-package-distributions | |
dist-version: >- | |
${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version | |
}} | |
is-untagged-devel: >- | |
${{ steps.untagged-check.outputs.is-untagged-devel || false }} | |
release-requested: >- | |
${{ | |
steps.request-check.outputs.release-requested || false | |
}} | |
is-yolo-mode: >- | |
${{ | |
( | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.YOLO | |
) | |
&& true || false | |
}} | |
profiling-enabled: >- | |
${{ steps.profiling-check.outputs.profiling-enabled || false }} | |
cache-key-files: >- | |
${{ steps.calc-cache-key-files.outputs.files-hash-key }} | |
git-tag: ${{ steps.git-tag.outputs.tag }} | |
sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} | |
wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} | |
changelog-patch-name: ${{ steps.changelog-patch-name.outputs.filename }} | |
changelog-draft-name-md: >- | |
${{ steps.changelog-draft-name.outputs.filename-base }}.md | |
changelog-draft-name-rst: >- | |
${{ steps.changelog-draft-name.outputs.filename-base }}.rst | |
steps: | |
- name: Switch to using Python 3.11 by default | |
uses: actions/[email protected] | |
with: | |
python-version: 3.11 | |
- name: >- | |
Mark the build as untagged '${{ | |
github.event.repository.default_branch | |
}}' branch build | |
id: untagged-check | |
if: >- | |
github.event_name == 'push' && | |
github.ref == format( | |
'refs/heads/{0}', github.event.repository.default_branch | |
) | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
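# Step outputs are `key=value` lines appended to the file that | |
# GitHub Actions exposes through the GITHUB_OUTPUT environment | |
# variable; the same pattern is used throughout this workflow. | |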
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print('is-untagged-devel=true', file=outputs_file) | |
- name: Mark the build as "release request" | |
id: request-check | |
if: github.event_name == 'workflow_dispatch' | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print('release-requested=true', file=outputs_file) | |
- name: Enable profiling of the build | |
id: profiling-check | |
if: github.event_name != 'workflow_dispatch' | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print('profiling-enabled=true', file=outputs_file) | |
- name: Check out src from Git | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
uses: actions/[email protected] | |
with: | |
fetch-depth: >- | |
${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& 1 || 0 | |
}} | |
ref: ${{ github.event.inputs.release-committish }} | |
- name: >- | |
Calculate Python interpreter version hash value | |
for use in the cache key | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
id: calc-cache-key-py | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
from sys import version | |
FILE_APPEND_MODE = 'a' | |
hash = sha512(version.encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'py-hash-key={hash}', file=outputs_file) | |
- name: >- | |
Calculate dependency files' combined hash value | |
for use in the cache key | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
id: calc-cache-key-files | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
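# The hashFiles() expressions below are GitHub Actions expressions; | |
# the runner expands them before this script executes, so Python | |
# only ever sees the resulting literal hash strings. | |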
hashes_combo = sha512('-'.join(( | |
"${{ hashFiles('setup.cfg') }}", | |
"${{ hashFiles('tox.ini')}}", | |
"${{ hashFiles('pyproject.toml') }}", | |
"${{ hashFiles('.pre-commit-config.yaml') }}", | |
"${{ hashFiles('pytest.ini') }}", | |
"${{ hashFiles('requirements-build.*') }}", | |
"${{ hashFiles('docs/requirements.*') }}", | |
)).encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f"files-hash-key={hashes_combo}", file=outputs_file) | |
- name: Set up pip cache | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
uses: actions/[email protected] | |
with: | |
path: >- | |
${{ | |
runner.os == 'Linux' | |
&& '~/.cache/pip' | |
|| '~/Library/Caches/pip' | |
}} | |
key: >- | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
steps.calc-cache-key-files.outputs.files-hash-key }} | |
restore-keys: | | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key | |
}}- | |
${{ runner.os }}-pip- | |
${{ runner.os }}- | |
- name: Drop Git tags from HEAD for non-release requests | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
run: >- | |
git tag --points-at HEAD | |
| | |
xargs git tag --delete | |
shell: bash | |
- name: Set up versioning prerequisites | |
if: >- | |
steps.request-check.outputs.release-requested != 'true' | |
run: >- | |
python -m | |
pip install | |
--user | |
setuptools-scm | |
shell: bash | |
- name: Set the current dist version from Git | |
if: steps.request-check.outputs.release-requested != 'true' | |
id: scm-version | |
run: | | |
from os import environ | |
from pathlib import Path | |
import setuptools_scm | |
FILE_APPEND_MODE = 'a' | |
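# For untagged default-branch builds, the expression below injects | |
# local_scheme="no-local-version" so that the computed version has | |
# no local part (TestPyPI rejects local versions); otherwise it | |
# expands to nothing and the default local scheme applies. | |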
ver = setuptools_scm.get_version( | |
${{ | |
steps.untagged-check.outputs.is-untagged-devel == 'true' | |
&& 'local_scheme="no-local-version"' || '' | |
}} | |
) | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'dist-version={ver}', file=outputs_file) | |
print( | |
f'dist-version-for-filenames={ver.replace("+", "-")}', | |
file=outputs_file, | |
) | |
- name: Set the target Git tag | |
id: git-tag | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print( | |
"tag=v${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version | |
}}", | |
file=outputs_file, | |
) | |
- name: Set the expected dist artifact names | |
id: artifact-name | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print( | |
"sdist=ansible-pylibssh-${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version | |
}}.tar.gz", | |
file=outputs_file, | |
) | |
print( | |
"wheel=ansible_pylibssh-${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version | |
}}-cp3*-cp3*-*.whl", | |
file=outputs_file, | |
) | |
- name: Set the expected changelog patch filename | |
id: changelog-patch-name | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print('filename=0001-Generate-a-change-log-entry-for-v${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version-for-filenames | |
}}.patch', file=outputs_file) | |
- name: Set the expected changelog draft filename | |
id: changelog-draft-name | |
run: | | |
from os import environ | |
from pathlib import Path | |
FILE_APPEND_MODE = 'a' | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print('filename-base=change-notes-v${{ | |
steps.request-check.outputs.release-requested == 'true' | |
&& github.event.inputs.release-version | |
|| steps.scm-version.outputs.dist-version-for-filenames | |
}}', file=outputs_file) | |
build-changelog: | |
name: >- | |
👷📝 ${{ needs.pre-setup.outputs.git-tag }} changelog | |
[mode: ${{ | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& 'test' || '' | |
}}${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 'release' || '' | |
}}${{ | |
( | |
!fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& !fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 'nightly' || '' | |
}}] | |
needs: | |
- pre-setup | |
runs-on: ubuntu-latest | |
env: | |
TOXENV: make-changelog | |
steps: | |
- name: Switch to using Python 3.11 | |
uses: actions/[email protected] | |
with: | |
python-version: 3.11 | |
- name: Grab the source from Git | |
uses: actions/[email protected] | |
with: | |
fetch-depth: 1 # Enough for this job to generate the changelog | |
ref: ${{ github.event.inputs.release-committish }} | |
- name: >- | |
Calculate Python interpreter version hash value | |
for use in the cache key | |
id: calc-cache-key-py | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
from sys import version | |
FILE_APPEND_MODE = 'a' | |
hash = sha512(version.encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'py-hash-key={hash}', file=outputs_file) | |
shell: python | |
- name: Set up pip cache | |
uses: actions/[email protected] | |
with: | |
path: >- | |
${{ | |
runner.os == 'Linux' | |
&& '~/.cache/pip' | |
|| '~/Library/Caches/pip' | |
}} | |
key: >- | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
needs.pre-setup.outputs.cache-key-files }} | |
restore-keys: | | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key | |
}}- | |
${{ runner.os }}-pip- | |
${{ runner.os }}- | |
- name: Install tox | |
run: >- | |
python -m | |
pip install | |
--user | |
'${{ env.TOX_VERSION }}' | |
- name: Pre-populate the tox env | |
run: >- | |
python -m | |
tox | |
--parallel auto | |
--parallel-live | |
--skip-missing-interpreters false | |
--notest | |
- name: Drop Git tags from HEAD for non-tag-create events | |
if: >- | |
!fromJSON(needs.pre-setup.outputs.release-requested) | |
run: >- | |
git tag --points-at HEAD | |
| | |
xargs git tag --delete | |
shell: bash | |
- name: Set up git user as [bot] | |
# Refs: | |
# * https://github.community/t/github-actions-bot-email-address/17204/6 | |
# * https://github.com/actions/checkout/issues/13#issuecomment-724415212 | |
uses: fregante/setup-git-user@v2 | |
- name: Generate changelog draft to a temporary file | |
run: >- | |
2>/dev/null | |
python -m | |
tox | |
--skip-missing-interpreters false | |
--skip-pkg-install | |
-- | |
'${{ needs.pre-setup.outputs.dist-version }}' | |
--draft | |
| | |
tee | |
'${{ needs.pre-setup.outputs.changelog-draft-name-rst }}' | |
shell: bash | |
- name: Sanitize the markdown changelog version | |
run: >- | |
sed | |
-i | |
-e 's/:commit:`\([0-9a-f]\+\)`/${{ | |
'' | |
}}https:\/\/github.com\/ansible\/pylibssh\/commit\/\1/g' | |
-e 's/:gh:`\([-.a-zA-Z0-9]\+\)`/https:\/\/github.com\/\1/g' | |
-e 's/:\(issue\|pr\):`\([0-9]\+\)`/#\2/g' | |
-e 's/:user:`\([-.a-zA-Z0-9]\+\)`/@\1/g' | |
-e 's/|project|/ansible-pylibssh/g' | |
'${{ needs.pre-setup.outputs.changelog-draft-name-rst }}' | |
shell: bash | |
- name: Install pandoc via apt | |
run: sudo apt install -y pandoc | |
- name: >- | |
Convert ${{ needs.pre-setup.outputs.changelog-draft-name-rst }} | |
into ${{ needs.pre-setup.outputs.changelog-draft-name-md }} | |
with a native pandoc run | |
run: >- | |
pandoc | |
--from=rst | |
--to=gfm | |
--output='${{ needs.pre-setup.outputs.changelog-draft-name-md }}' | |
'${{ needs.pre-setup.outputs.changelog-draft-name-rst }}' | |
- name: Render the changelog draft in the GitHub Job Summary | |
run: | | |
echo "# Changelog for ${{ | |
needs.pre-setup.outputs.git-tag | |
}}" >> "${GITHUB_STEP_SUMMARY}" | |
echo >> "${GITHUB_STEP_SUMMARY}" | |
echo >> "${GITHUB_STEP_SUMMARY}" | |
cat '${{ | |
needs.pre-setup.outputs.changelog-draft-name-md | |
}}' >> "${GITHUB_STEP_SUMMARY}" | |
shell: bash | |
- name: Generate changelog update with tox and stage it in Git | |
run: >- | |
python -m | |
tox | |
--parallel auto | |
--parallel-live | |
--skip-missing-interpreters false | |
--skip-pkg-install | |
-- | |
'${{ needs.pre-setup.outputs.dist-version }}' | |
--yes | |
- name: >- | |
Commit the changelog updates for release | |
${{ needs.pre-setup.outputs.git-tag }} in the local Git repo | |
run: >- | |
git commit -m | |
'Generate a change log entry for ${{ | |
needs.pre-setup.outputs.git-tag | |
}}' | |
- name: Log the changelog commit | |
run: git show --color | |
- name: Create a changelog update patch from the last Git commit | |
run: >- | |
git format-patch | |
--output='${{ needs.pre-setup.outputs.changelog-patch-name }}' | |
-1 HEAD | |
- name: Verify that expected patch got created | |
run: ls -1 '${{ needs.pre-setup.outputs.changelog-patch-name }}' | |
- name: Save the package bump patch as a GHA artifact | |
uses: actions/upload-artifact@v3 | |
with: | |
name: changelog | |
path: | | |
${{ needs.pre-setup.outputs.changelog-patch-name }} | |
${{ needs.pre-setup.outputs.changelog-draft-name-md }} | |
${{ needs.pre-setup.outputs.changelog-draft-name-rst }} | |
build-bin-macos: | |
name: >- | |
📦 ${{ needs.pre-setup.outputs.git-tag }} | |
[mode: ${{ | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& 'nightly' || '' | |
}}${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 'release' || '' | |
}}${{ | |
( | |
!fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& !fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 'test' || '' | |
}}] macOS | |
needs: | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
os: | |
- macos-13 | |
uses: ./.github/workflows/reusable-cibuildwheel.yml | |
with: | |
os: ${{ matrix.os }} | |
wheel-tags-to-skip: >- | |
${{ | |
!fromJSON(needs.pre-setup.outputs.release-requested) | |
&& '*_i686 | |
*-macosx_universal2 | |
*-musllinux_* | |
*-win32 | |
*_arm64 | |
pp*' | |
|| '' | |
}} | |
source-tarball-name: >- | |
${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dists-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
# cython-tracing: >- # Cython line tracing for coverage collection | |
# ${{ | |
# ( | |
# github.event_name == 'push' | |
# && contains(github.ref, 'refs/tags/') | |
# ) | |
# && 'false' | |
# || 'true' | |
# }} | |
cython-tracing: >- # Cython line tracing for coverage collection | |
${{ | |
fromJSON(needs.pre-setup.outputs.profiling-enabled) | |
&& 'true' | |
|| 'false' | |
}} | |
build-bin-manylinux-tested-arches: | |
name: >- | |
📦 ${{ needs.pre-setup.outputs.git-tag }} | |
[mode: ${{ | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& 'nightly' || '' | |
}}${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 'release' || '' | |
}}${{ | |
( | |
!fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& !fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 'test' || '' | |
}}] manylinux | |
needs: | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
manylinux-python-target: | |
# NOTE: Must be from this list: | |
# NOTE: $ podman run -it --rm \ | |
# NOTE: quay.io/pypa/manylinux2014_x86_64 \ | |
# NOTE: ls -1 /opt/python | |
- cp38-cp38 | |
- cp39-cp39 | |
- cp310-cp310 | |
- cp311-cp311 | |
- cp312-cp312 | |
manylinux-year-target: | |
- 2014 | |
- _2_24 | |
- _2_28 | |
manylinux-image-target: | |
# NOTE: Keep in sync with `build-manylinux-container-images.yml`. | |
# NOTE: Ordered from "heavy" to "fast". | |
- arch: x86_64 | |
qemu_arch: amd64 | |
include: | |
# NOTE: manylinux2010 only on x86_64 | |
- manylinux-python-target: cp38-cp38 | |
manylinux-image-target: | |
arch: x86_64 | |
qemu_arch: amd64 | |
manylinux-year-target: 2010 | |
- manylinux-python-target: cp39-cp39 | |
manylinux-image-target: | |
arch: x86_64 | |
qemu_arch: amd64 | |
manylinux-year-target: 2010 | |
- manylinux-python-target: cp310-cp310 | |
manylinux-image-target: | |
arch: x86_64 | |
qemu_arch: amd64 | |
manylinux-year-target: 2010 | |
# NOTE: manylinux1 caps out at Python 3.9 | |
- manylinux-python-target: cp38-cp38 | |
manylinux-image-target: | |
arch: x86_64 | |
qemu_arch: amd64 | |
manylinux-year-target: 1 | |
- manylinux-python-target: cp39-cp39 | |
manylinux-image-target: | |
arch: x86_64 | |
qemu_arch: amd64 | |
manylinux-year-target: 1 | |
exclude: | |
# NOTE: cp312-cp312 unavailable before _2_28 | |
- manylinux-python-target: cp312-cp312 | |
manylinux-year-target: 2014 | |
- manylinux-python-target: cp312-cp312 | |
manylinux-year-target: _2_24 | |
uses: ./.github/workflows/reusable-build-wheel.yml | |
with: | |
# qemu: ${{ matrix.qemu }} | |
source-tarball-name: >- | |
${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dists-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
wheel-artifact-name: ${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }} | |
release-requested: >- | |
${{ needs.pre-setup.outputs.release-requested }} | |
manylinux-python-target: ${{ matrix.manylinux-python-target }} | |
manylinux-year-target: ${{ matrix.manylinux-year-target }} | |
manylinux-image-target-arch: >- | |
${{ matrix.manylinux-image-target.arch }} | |
manylinux-image-target-qemu-arch: >- | |
${{ matrix.manylinux-image-target.qemu_arch }} | |
build-bin-manylinux-odd-arches: | |
name: >- | |
📦 ${{ needs.pre-setup.outputs.git-tag }} | |
[mode: ${{ | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& 'nightly' || '' | |
}}${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 'release' || '' | |
}}${{ | |
( | |
!fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& !fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 'test' || '' | |
}}] manylinux | |
needs: | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
manylinux-python-target: | |
# NOTE: Must be from this list: | |
# NOTE: $ podman run -it --rm \ | |
# NOTE: quay.io/pypa/manylinux2014_x86_64 \ | |
# NOTE: ls -1 /opt/python | |
- cp38-cp38 | |
- cp39-cp39 | |
- cp310-cp310 | |
- cp311-cp311 | |
- cp312-cp312 | |
manylinux-year-target: | |
- 2014 | |
- _2_24 | |
- _2_28 | |
manylinux-image-target: | |
# NOTE: Keep in sync with `build-manylinux-container-images.yml`. | |
# NOTE: Ordered from "heavy" to "fast". | |
- arch: aarch64 | |
qemu_arch: arm64 | |
- arch: s390x | |
- arch: ppc64le | |
exclude: | |
# NOTE: cp312-cp312 unavailable before _2_28 | |
- manylinux-python-target: cp312-cp312 | |
manylinux-year-target: 2014 | |
- manylinux-python-target: cp312-cp312 | |
manylinux-year-target: _2_24 | |
uses: ./.github/workflows/reusable-build-wheel.yml | |
with: | |
# qemu: ${{ matrix.qemu }} | |
source-tarball-name: >- | |
${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dists-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
wheel-artifact-name: ${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }} | |
release-requested: >- | |
${{ needs.pre-setup.outputs.release-requested }} | |
manylinux-python-target: ${{ matrix.manylinux-python-target }} | |
manylinux-year-target: ${{ matrix.manylinux-year-target }} | |
manylinux-image-target-arch: >- | |
${{ matrix.manylinux-image-target.arch }} | |
manylinux-image-target-qemu-arch: >- | |
${{ matrix.manylinux-image-target.qemu_arch }} | |
build-src: | |
name: >- | |
👷 an sdist 📦 ${{ needs.pre-setup.outputs.git-tag }} | |
under ${{ matrix.python-version }} | |
[mode: ${{ | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& 'nightly' || '' | |
}}${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 'release' || '' | |
}}${{ | |
( | |
!fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
&& !fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 'test' || '' | |
}}] | |
needs: | |
- build-changelog | |
- pre-setup # transitive, for accessing settings | |
runs-on: ${{ matrix.runner-vm-os }} | |
strategy: | |
matrix: | |
python-version: | |
- >- | |
3.10 | |
- 3.9 | |
- 3.8 | |
runner-vm-os: | |
- ubuntu-22.04 | |
store-sdist-to-artifact: | |
- false | |
include: | |
- python-version: 3.12 | |
runner-vm-os: ubuntu-22.04 | |
store-sdist-to-artifact: true | |
env: | |
PEP517_BUILD_ARGS: --sdist | |
TOXENV: build-dists,metadata-validation | |
steps: | |
- name: Switch to using Python ${{ matrix.python-version }} | |
uses: actions/[email protected] | |
with: | |
python-version: ${{ matrix.python-version }} | |
- name: Grab the source from Git | |
uses: actions/[email protected] | |
with: | |
fetch-depth: >- | |
${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& 1 || 0 | |
}} | |
ref: ${{ github.event.inputs.release-committish }} | |
- name: Fetch the GHA artifact with the version patch | |
uses: actions/download-artifact@v3 | |
with: | |
name: changelog | |
- name: Apply the changelog patch but do not commit it | |
run: git apply '${{ needs.pre-setup.outputs.changelog-patch-name }}' | |
shell: bash | |
- name: Delete the changelog patch file | |
run: rm -fv '${{ needs.pre-setup.outputs.changelog-patch-name }}' | |
shell: bash | |
- name: Pretend that changelog updates never happened | |
run: | | |
git diff --color=always | |
git update-index --assume-unchanged $(git ls-files --modified) | |
shell: bash | |
- name: >- | |
Calculate Python interpreter version hash value | |
for use in the cache key | |
id: calc-cache-key-py | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
from sys import version | |
FILE_APPEND_MODE = 'a' | |
hash = sha512(version.encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'py-hash-key={hash}', file=outputs_file) | |
shell: python | |
- name: Set up pip cache | |
uses: actions/[email protected] | |
with: | |
path: >- | |
${{ | |
runner.os == 'Linux' | |
&& '~/.cache/pip' | |
|| '~/Library/Caches/pip' | |
}} | |
key: >- | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
needs.pre-setup.outputs.cache-key-files }} | |
restore-keys: | | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key | |
}}- | |
${{ runner.os }}-pip- | |
${{ runner.os }}- | |
- name: Install tox | |
run: >- | |
python -m | |
pip install | |
--user | |
'${{ env.TOX_VERSION }}' | |
- name: Pre-populate the tox env | |
run: >- | |
python -m | |
tox | |
--parallel auto | |
--parallel-live | |
--skip-missing-interpreters false | |
--notest | |
- name: Drop Git tags from HEAD for non-tag-create events | |
if: >- | |
!fromJSON(needs.pre-setup.outputs.release-requested) | |
run: >- | |
git tag --points-at HEAD | |
| | |
xargs git tag --delete | |
shell: bash | |
- name: Set up git user as [bot] | |
if: >- | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
|| fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
uses: fregante/setup-git-user@v2 | |
- name: >- | |
Tag the release in the local Git repo | |
as ${{ needs.pre-setup.outputs.git-tag }} | |
for setuptools-scm to set the desired version | |
if: >- | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
run: >- | |
git tag | |
-m '${{ needs.pre-setup.outputs.git-tag }}' | |
'${{ needs.pre-setup.outputs.git-tag }}' | |
-- | |
${{ | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
&& github.event.inputs.release-committish || '' | |
}} | |
- name: Install tomlkit Python distribution package | |
if: >- | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
run: >- | |
python -m pip install --user tomlkit | |
- name: Instruct setuptools-scm not to add a local version part | |
if: >- | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
run: | | |
from pathlib import Path | |
import tomlkit | |
pyproject_toml_path = Path.cwd() / 'pyproject.toml' | |
pyproject_toml_txt = pyproject_toml_path.read_text() | |
pyproject_toml = tomlkit.loads(pyproject_toml_txt) | |
setuptools_scm_section = pyproject_toml['tool']['setuptools_scm'] | |
setuptools_scm_section['local_scheme'] = 'no-local-version' | |
patched_pyproject_toml_txt = tomlkit.dumps(pyproject_toml) | |
pyproject_toml_path.write_text(patched_pyproject_toml_txt) | |
shell: python | |
- name: Pretend that pyproject.toml is unchanged | |
if: >- | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
run: | | |
git diff --color=always | |
git update-index --assume-unchanged pyproject.toml | |
- name: Build sdist and verify metadata | |
run: >- | |
python -m | |
tox | |
--parallel auto | |
--parallel-live | |
--skip-missing-interpreters false | |
--skip-pkg-install | |
- name: Verify that the artifact with expected name got created | |
run: >- | |
ls -1 | |
'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}' | |
- name: Store the source distribution package | |
if: fromJSON(matrix.store-sdist-to-artifact) | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ needs.pre-setup.outputs.dists-artifact-name }} | |
# NOTE: Exact expected file names are specified here | |
# NOTE: as a safety measure — if anything weird ends | |
# NOTE: up being in this dir or not all dists will be | |
# NOTE: produced, this will fail the workflow. | |
path: | | |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
retention-days: >- | |
${{ | |
( | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
) && 7 || 4 | |
}} | |
build-rpms: | |
name: ${{ matrix.target-container.tag }} | |
needs: | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
target-container: | |
- tag: fedora:39 | |
- tag: fedora:40 | |
# No matching package to install: 'python3dist(wheel)' | |
# - tag: centos/centos:stream9 | |
# registry: quay.io | |
- tag: ubi9/ubi:9.0.0 | |
registry: registry.access.redhat.com | |
- tag: ubi9/ubi:9.1 | |
registry: registry.access.redhat.com | |
- tag: ubi9/ubi:9.2 | |
registry: registry.access.redhat.com | |
- tag: ubi9/ubi:9.3 | |
registry: registry.access.redhat.com | |
- tag: ubi9/ubi:9.4 | |
registry: registry.access.redhat.com | |
runs-on: ubuntu-latest | |
container: | |
# NOTE: GHA has poor support for string concatenation, which is why I | |
# NOTE: resorted to this ugly ternary syntax. | |
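# NOTE: A rough, untested alternative using the built-in format() | |
# NOTE: expression function might look something like: | |
# NOTE:   image: >- | |
# NOTE:     ${{ format('{0}{1}{2}', | |
# NOTE:               matrix.target-container.registry, | |
# NOTE:               matrix.target-container.registry && '/' || '', | |
# NOTE:               matrix.target-container.tag) }} | |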
image: >- | |
${{ | |
matrix.target-container.registry | |
&& matrix.target-container.registry | |
|| '' | |
}}${{ | |
matrix.target-container.registry | |
&& '/' | |
|| '' | |
}}${{ | |
matrix.target-container.tag | |
}} | |
continue-on-error: >- # Stub for in-matrix "allowed-failures" | |
${{ | |
contains(matrix.target-container.tag, '--DISABLED--') | |
&& true | |
|| false | |
}} | |
steps: | |
- name: Determine the RPM dist tag of the target container | |
id: distribution-meta | |
run: | | |
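# `%{?dist}` expands to the distro suffix used in RPM release tags, | |
# e.g. `.fc40` on Fedora 40 or `.el9` on RHEL 9 / UBI 9. | |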
dist_tag=$(rpm --eval '%{?dist}') | |
echo "dist-tag=${dist_tag}" >> "${GITHUB_OUTPUT}" | |
- name: Patch CentOS Stream 8 repository URLs | |
if: >- | |
matrix.target-container.tag == 'centos/centos:stream8' | |
run: >- | |
sed -i | |
-e 's/mirrorlist/#mirrorlist/g' | |
-e | |
's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' | |
/etc/yum.repos.d/CentOS-* | |
shell: bash -eEuo pipefail {0} | |
- name: Enable EPEL repository | |
if: contains(matrix.target-container.tag, 'centos') | |
run: dnf install --assumeyes epel-release | |
- name: Install build tooling | |
run: >- | |
dnf install | |
--assumeyes | |
dnf-plugins-core | |
rpm-build | |
${{ | |
!contains(matrix.target-container.tag, 'ubi') | |
&& 'rpmdevtools rpmlint' | |
|| '' | |
}} | |
- name: Create rpmbuild directory structure on a community distro | |
if: >- | |
!contains(matrix.target-container.tag, 'ubi') | |
run: rpmdev-setuptree | |
- name: Create rpmbuild directory structure on RHEL | |
if: contains(matrix.target-container.tag, 'ubi') | |
run: mkdir -pv ~/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS} | |
- name: Retrieve the project source from an sdist inside the GHA artifact | |
uses: re-actors/checkout-python-sdist@release/v1 | |
with: | |
source-tarball-name: ${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
workflow-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
- name: Set an SCM version in the spec | |
run: >- | |
sed -i | |
"s#^\(Version:\s\+\).*#\1${{ needs.pre-setup.outputs.dist-version }}#" | |
packaging/rpm/ansible-pylibssh.spec | |
- name: Download all the dists # because `rpmlint` executes the spec file | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: Lint the RPM spec file | |
if: >- | |
!contains(matrix.target-container.tag, 'ubi') | |
run: rpmlint packaging/rpm/ansible-pylibssh.spec | |
- name: Copy sdist to the sources dir | |
run: >- | |
cp -v | |
'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}' | |
~/rpmbuild/SOURCES/ | |
- name: Install static test dependencies missing from UBI9 | |
if: contains(matrix.target-container.tag, 'ubi9') | |
run: >- | |
rpm | |
-ivh | |
--nodeps | |
https://rpmfind.net/linux/centos-stream/"$( | |
rpm --eval '%{rhel}' | |
)"-stream/CRB/x86_64/os/Packages/python3-pytest-6.2.2-4${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.noarch.rpm | |
https://rpmfind.net/linux/centos-stream/"$( | |
rpm --eval '%{rhel}' | |
)"-stream/CRB/x86_64/os/Packages/python3-wheel-0.36.2-7${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.noarch.rpm | |
- name: Install static test dependencies missing from all UBIs | |
if: contains(matrix.target-container.tag, 'ubi') | |
run: >- | |
rpm | |
-ivh | |
--nodeps | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-pytest-cov-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '4.0.0-2' | |
|| '2.6.0-1' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.noarch.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-pytest-forked-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '1.4.0' | |
|| '1.0.2' | |
}}-1${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.noarch.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-pytest-xdist-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '2.5.0-2' | |
|| '1.24.1-1' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.noarch.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/t/tox-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '3.28.0-1' | |
|| '3.4.0-2' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.noarch.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-execnet-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '1.9.0-3' | |
|| '1.7.1-1' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.noarch.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-coverage-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '6.2-1' | |
|| '4.5.1-9' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.x86_64.rpm | |
https://rpmfind.net/linux/epel/"$( | |
rpm --eval '%{rhel}' | |
)"/Everything/x86_64/Packages/p/python3-apipkg-${{ | |
contains(matrix.target-container.tag, 'ubi9') | |
&& '2.1.1-1' | |
|| '1.5-6' | |
}}${{ steps.distribution-meta.outputs.dist-tag }}.noarch.rpm | |
- name: Install static build requirements | |
run: dnf builddep --assumeyes --spec packaging/rpm/ansible-pylibssh.spec | |
- name: Fetch sources and patches on a community distro | |
if: >- | |
!contains(matrix.target-container.tag, 'ubi') | |
run: >- | |
spectool --all --get-files --sourcedir | |
packaging/rpm/ansible-pylibssh.spec | |
- name: Resolve and install dynamic build deps and build an SRPM on Fedora | |
# Ref: https://github.com/rpm-software-management/rpm/commit/58dcfdd | |
if: contains(matrix.target-container.tag, 'fedora') | |
run: | | |
while : | |
do | |
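# `rpmbuild -br` exits with status 11 after generating a | |
# *.buildreqs.nosrc.rpm that lists unresolved dynamic BuildRequires | |
# (see the rpm commit referenced above); install those and retry. | |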
set +e | |
rpmbuild -br packaging/rpm/ansible-pylibssh.spec | |
rc="$?" | |
[ "${rc}" -eq 0 ] && break | |
[ "${rc}" -ne 11 ] && exit "${rc}" | |
set -e | |
dnf builddep --assumeyes \ | |
$HOME/rpmbuild/SRPMS/python-ansible-pylibssh-${{ | |
needs.pre-setup.outputs.dist-version | |
}}-1${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.buildreqs.nosrc.rpm | |
done | |
- name: Build an SRPM on RHELish | |
if: >- | |
!contains(matrix.target-container.tag, 'fedora') | |
run: >- | |
rpmbuild | |
${{ | |
contains(matrix.target-container.tag, 'ubi') | |
&& '--undefine=_disable_source_fetch' | |
|| '' | |
}} | |
-bs | |
packaging/rpm/ansible-pylibssh.spec | |
- name: Build binary RPMs | |
run: >- | |
rpmbuild | |
--rebuild | |
$HOME/rpmbuild/SRPMS/python-ansible-pylibssh-${{ | |
needs.pre-setup.outputs.dist-version | |
}}-1${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.src.rpm | |
- name: Install the packaged binary RPM on the system | |
run: >- | |
dnf | |
install | |
--assumeyes | |
$HOME/rpmbuild/RPMS/x86_64/python3-ansible-pylibssh-${{ | |
needs.pre-setup.outputs.dist-version | |
}}-1${{ | |
steps.distribution-meta.outputs.dist-tag | |
}}.x86_64.rpm | |
- name: Smoke-test the installed library | |
run: >- | |
python3 -c | |
'from pylibsshext.session import Session; print(Session())' | |
- name: Produce artifact name | |
id: artifact-name | |
run: >- | |
normalized_container_id=$( | |
echo '${{ matrix.target-container.tag }}' | sed 's#[.\/:]#--#g' | |
); | |
echo "artifact-id=${normalized_container_id}" >> "${GITHUB_OUTPUT}" | |
- name: Store RPM and SRPM as artifacts | |
uses: actions/upload-artifact@v3 | |
with: | |
name: ${{ steps.artifact-name.outputs.artifact-id }}--srpm-n-rpm | |
path: | | |
~/rpmbuild/SRPMS/ | |
~/rpmbuild/RPMS/ | |
test-linux: | |
name: 🧪 Test${{ '' }} # nest jobs under the same sidebar category | |
needs: | |
- build-bin-manylinux-tested-arches | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
python-version: | |
- "3.12" | |
- "3.11" | |
- "3.10" | |
- 3.9 | |
- 3.8 | |
runner-vm-os: | |
- ubuntu-22.04 | |
- ubuntu-20.04 | |
dist-type: | |
- binary | |
- source | |
uses: ./.github/workflows/reusable-tests.yml | |
with: | |
python-version: ${{ matrix.python-version }} | |
runner-vm-os: ${{ matrix.runner-vm-os }} | |
dist-type: ${{ matrix.dist-type }} | |
release-requested: >- | |
${{ needs.pre-setup.outputs.release-requested }} | |
yolo: ${{ fromJSON(needs.pre-setup.outputs.is-yolo-mode) }} | |
cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }} | |
source-tarball-name: >- | |
${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dists-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
secrets: | |
codecov-token: ${{ secrets.CODECOV_TOKEN }} | |
test-macos: | |
name: 🧪 Test${{ '' }} # nest jobs under the same sidebar category | |
needs: | |
- build-bin-macos | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
strategy: | |
matrix: | |
python-version: | |
- "3.12" | |
- "3.11" | |
- "3.10" | |
- 3.9 | |
- 3.8 | |
runner-vm-os: | |
- macos-13 | |
dist-type: | |
- binary | |
- source | |
uses: ./.github/workflows/reusable-tests.yml | |
with: | |
python-version: ${{ matrix.python-version }} | |
runner-vm-os: ${{ matrix.runner-vm-os }} | |
dist-type: ${{ matrix.dist-type }} | |
release-requested: >- | |
${{ needs.pre-setup.outputs.release-requested }} | |
yolo: ${{ fromJSON(needs.pre-setup.outputs.is-yolo-mode) }} | |
cache-key-files: ${{ needs.pre-setup.outputs.cache-key-files }} | |
source-tarball-name: >- | |
${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dists-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
secrets: | |
codecov-token: ${{ secrets.CODECOV_TOKEN }} | |
dist-meta: | |
name: Verify 🐍📦 metadata | |
needs: | |
- build-bin-macos | |
- build-bin-manylinux-odd-arches | |
- build-bin-manylinux-tested-arches | |
- build-src | |
- pre-setup # transitive, for accessing settings | |
runs-on: ubuntu-latest | |
env: | |
TOXENV: metadata-validation | |
steps: | |
- name: Switch to using Python 3.11 by default | |
uses: actions/[email protected] | |
with: | |
python-version: 3.11 | |
- name: Retrieve the project source from an sdist inside the GHA artifact | |
uses: re-actors/checkout-python-sdist@release/v1 | |
with: | |
source-tarball-name: ${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
workflow-artifact-name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
- name: >- | |
Calculate Python interpreter version hash value | |
for use in the cache key | |
id: calc-cache-key-py | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
from sys import version | |
FILE_APPEND_MODE = 'a' | |
hash = sha512(version.encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'py-hash-key={hash}', file=outputs_file) | |
shell: python | |
- name: Set up pip cache | |
uses: actions/[email protected] | |
with: | |
path: >- | |
${{ | |
runner.os == 'Linux' | |
&& '~/.cache/pip' | |
|| '~/Library/Caches/pip' | |
}} | |
key: >- | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
needs.pre-setup.outputs.cache-key-files }} | |
restore-keys: | | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key | |
}}- | |
${{ runner.os }}-pip- | |
${{ runner.os }}- | |
- name: Install tox | |
run: >- | |
python -m | |
pip install | |
--user | |
'${{ env.TOX_VERSION }}' | |
- name: Pre-populate tox env | |
run: python -m tox -p auto --parallel-live -vvvv --notest | |
- name: Download all the dists | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: Verify metadata | |
run: python -m tox -p auto --parallel-live -vvvv | |
check: # This job does nothing and is only used for the branch protection | |
if: always() | |
needs: | |
- build-rpms | |
- dist-meta | |
- lint | |
- pre-setup # transitive, for accessing settings | |
- test-linux | |
- test-macos | |
runs-on: ubuntu-latest | |
steps: | |
- name: Decide whether the needed jobs succeeded or failed | |
uses: re-actors/alls-green@release/v1 | |
with: | |
allowed-failures: >- | |
${{ | |
fromJSON(needs.pre-setup.outputs.is-yolo-mode) | |
&& 'build-rpms, lint, test-linux, test-macos' | |
|| '' | |
}} | |
jobs: ${{ toJSON(needs) }} | |
publish-pypi: | |
name: >- | |
📦 | |
Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI | |
needs: | |
- check | |
- pre-setup # transitive, for accessing settings | |
if: >- | |
always() | |
&& needs.check.result == 'success' | |
&& fromJSON(needs.pre-setup.outputs.release-requested) | |
runs-on: ubuntu-latest | |
timeout-minutes: 5 # docker+network are slow sometimes | |
environment: | |
name: pypi | |
url: >- | |
https://pypi.org/project/ansible-pylibssh/${{ | |
needs.pre-setup.outputs.dist-version | |
}} | |
permissions: | |
contents: read # This job doesn't need to `git push` anything | |
id-token: write # PyPI Trusted Publishing (OIDC) | |
steps: | |
- name: Download all the dists | |
uses: actions/download-artifact@v3 | |
with: | |
name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: >- | |
📦 | |
Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI | |
🔏 | |
uses: pypa/gh-action-pypi-publish@release/v1 | |
with: | |
attestations: true | |
publish-testpypi: | |
name: >- | |
📦 | |
Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI | |
needs: | |
- check | |
- pre-setup # transitive, for accessing settings | |
if: >- | |
always() | |
&& needs.check.result == 'success' | |
&& ( | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
|| fromJSON(needs.pre-setup.outputs.release-requested) | |
) | |
runs-on: ubuntu-latest | |
timeout-minutes: 5 # docker+network are slow sometimes | |
environment: | |
name: testpypi | |
url: >- | |
https://test.pypi.org/project/ansible-pylibssh/${{ | |
needs.pre-setup.outputs.dist-version | |
}} | |
permissions: | |
contents: read # This job doesn't need to `git push` anything | |
id-token: write # PyPI Trusted Publishing (OIDC) | |
steps: | |
- name: Download all the dists | |
uses: actions/download-artifact@v3 | |
with: | |
name: >- | |
${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: >- | |
📦 | |
Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI | |
🔏 | |
uses: pypa/gh-action-pypi-publish@release/v1 | |
with: | |
attestations: true | |
repository-url: https://test.pypi.org/legacy/ | |
post-release-repo-update: | |
name: >- | |
Publish post-release Git tag | |
for ${{ needs.pre-setup.outputs.git-tag }} | |
needs: | |
- publish-pypi | |
- pre-setup # transitive, for accessing settings | |
if: >- | |
always() | |
&& needs.publish-pypi.result == 'success' | |
runs-on: ubuntu-latest | |
outputs: | |
pull_request_url: ${{ steps.pr.outputs.pull_request_url }} | |
permissions: | |
contents: write # Mandatory for `git push` to work | |
pull-requests: write | |
steps: | |
- name: Fetch the src snapshot # IMPORTANT: Must be before the tag check | |
uses: actions/checkout@v4 | |
with: | |
fetch-depth: 2 | |
ref: ${{ github.event.inputs.release-committish }} | |
- name: >- | |
Check if the requested tag ${{ needs.pre-setup.outputs.git-tag }} | |
is present and is pointing at the required commit ${{ | |
github.event.inputs.release-committish | |
}} | |
id: existing-remote-tag-check | |
run: | | |
set -eEuo pipefail | |
REMOTE_TAGGED_COMMIT_SHA="$( | |
git ls-remote --tags --refs $(git remote get-url origin) '${{ | |
needs.pre-setup.outputs.git-tag | |
}}' | awk '{print $1}' | |
)" | |
# NOTE: A new changelog commit is created on top of the commit that | |
# NOTE: triggered the release workflow, so if a previous workflow run | |
# NOTE: succeeded, the tag points at that generated commit. Re-running | |
# NOTE: the workflow produces a fresh changelog commit whose diff may | |
# NOTE: be identical but whose metadata (e.g. the timestamp) differs, | |
# NOTE: giving it a different SHA. The tagged commit's parent, however, | |
# NOTE: is immutable, so that is what gets compared to the release | |
# NOTE: committish. | |
if [[ "${REMOTE_TAGGED_COMMIT_SHA}" == '' ]] | |
then | |
LAST_HUMAN_COMMIT_SHA= | |
else | |
LAST_HUMAN_COMMIT_SHA=$(git rev-parse "${REMOTE_TAGGED_COMMIT_SHA}"^) | |
fi | |
RELEASE_REQUEST_COMMIT_SHA=$(git rev-parse '${{ | |
github.event.inputs.release-committish || 'HEAD' | |
}}') | |
if [[ "${LAST_HUMAN_COMMIT_SHA}" == "${RELEASE_REQUEST_COMMIT_SHA}" ]] | |
then | |
echo "already-exists=true" >> "${GITHUB_OUTPUT}" | |
fi | |
- name: Set up git user as [bot] | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
# Refs: | |
# * https://github.community/t/github-actions-bot-email-address/17204/6 | |
# * https://github.com/actions/checkout/issues/13#issuecomment-724415212 | |
uses: fregante/setup-git-user@v2 | |
- name: Fetch the GHA artifact with the version patch | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
uses: actions/download-artifact@v3 | |
with: | |
name: changelog | |
- name: Apply the changelog patch | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
run: git am '${{ needs.pre-setup.outputs.changelog-patch-name }}' | |
shell: bash | |
- name: >- | |
Create a local 'release/${{ | |
needs.pre-setup.outputs.dist-version | |
}}' branch | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
run: >- | |
git checkout -b 'release/${{ | |
needs.pre-setup.outputs.dist-version | |
}}' | |
- name: >- | |
Tag the release in the local Git repo | |
as ${{ needs.pre-setup.outputs.git-tag }} | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
run: >- | |
git tag | |
-m '${{ needs.pre-setup.outputs.git-tag }}' | |
-m 'Published at https://pypi.org/project/ansible-pylibssh/${{ | |
needs.pre-setup.outputs.dist-version | |
}}' | |
-m 'This release has been produced by the following workflow run: ${{ | |
github.server_url | |
}}/${{ | |
github.repository | |
}}/actions/runs/${{ | |
github.run_id | |
}}' | |
'${{ needs.pre-setup.outputs.git-tag }}' | |
- name: Log two last commits | |
run: git log -2 --color --patch | |
- name: Log current Git status | |
run: git status --verbose | |
- name: >- | |
Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding | |
to the just published release back to GitHub | |
if: steps.existing-remote-tag-check.outputs.already-exists != 'true' | |
run: >- | |
git push --atomic origin | |
'release/${{ needs.pre-setup.outputs.dist-version }}' | |
'${{ needs.pre-setup.outputs.git-tag }}' | |
- name: pull-request-action | |
id: pr | |
uses: vsoch/[email protected] | |
env: | |
BRANCH_PREFIX: '' | |
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
PULL_REQUEST_BODY: >- | |
Automated changelog generation with the version | |
${{ needs.pre-setup.outputs.dist-version }}. | |
PULL_REQUEST_BRANCH: ${{ github.event.repository.default_branch }} | |
PULL_REQUEST_DRAFT: true | |
PULL_REQUEST_FROM_BRANCH: >- | |
release/${{ needs.pre-setup.outputs.dist-version }} | |
PULL_REQUEST_TITLE: >- | |
🔖 Release ${{ needs.pre-setup.outputs.git-tag }} | |
- name: Log the pull request details | |
run: | | |
echo "PR number: ${{ steps.pr.outputs.pull_request_number }}" | |
echo "PR URL: ${{ steps.pr.outputs.pull_request_url }}" | |
- name: Instruct the maintainers to trigger CI by undrafting the PR | |
env: | |
GITHUB_TOKEN: ${{ github.token }} | |
run: >- | |
gh pr comment | |
--body 'Please mark the PR as ready for review to trigger PR checks.' | |
--repo '${{ github.repository }}' | |
'${{ steps.pr.outputs.pull_request_number }}' | |
publish-github-release: | |
name: >- | |
Publish a GitHub Release for | |
${{ needs.pre-setup.outputs.git-tag }} | |
needs: | |
- post-release-repo-update | |
- pre-setup # transitive, for accessing settings | |
if: >- | |
always() | |
&& needs.post-release-repo-update.result == 'success' | |
runs-on: ubuntu-latest | |
timeout-minutes: 3 | |
permissions: | |
contents: write | |
discussions: write | |
id-token: write # IMPORTANT: mandatory for Sigstore signing | |
steps: | |
- name: Download all the dists | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: Fetch the GHA artifact with the version patch | |
uses: actions/download-artifact@v3 | |
with: | |
name: changelog | |
- name: Figure out if the current version is a pre-release | |
id: release-maturity-check | |
run: | | |
from os import environ | |
from pathlib import Path | |
release_version = '${{ | |
needs.pre-setup.outputs.dist-version | |
}}' | |
FILE_APPEND_MODE = 'a' | |
is_pre_release = any( | |
hint_char in release_version | |
for hint_char in {'a', 'b', 'd', 'r'} | |
) | |
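# Heuristic: PEP 440 pre-release and dev versions contain one of | |
# these letters (aN, bN, rcN or .devN segments). | |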
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print( | |
f'is-pre-release={is_pre_release !s}'.lower(), | |
file=outputs_file, | |
) | |
shell: python | |
- name: Prepare the release notes file for the GitHub Releases | |
run: | | |
echo '## 📝 Release notes' | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo '📦 PyPI page: https://pypi.org/project/ansible-pylibssh/${{ | |
needs.pre-setup.outputs.dist-version | |
}}' | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo '${{ | |
steps.release-maturity-check.outputs.is-pre-release == 'true' | |
&& format( | |
'🚧 {0} is marked as a pre-release.', | |
needs.pre-setup.outputs.git-tag | |
) | |
|| format( | |
'🌱 {0} is marked as a stable release.', | |
needs.pre-setup.outputs.git-tag | |
) | |
}}' | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo '🔗 This release has been produced by ' \ | |
'the following workflow run: ${{ | |
github.server_url | |
}}/${{ | |
github.repository | |
}}/actions/runs/${{ | |
github.run_id | |
}}' | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
echo | tee -a release-notes.md | |
cat '${{ | |
needs.pre-setup.outputs.changelog-draft-name-md | |
}}' | tee -a release-notes.md | |
shell: bash | |
- name: Sign the dists with Sigstore | |
uses: sigstore/[email protected] | |
with: | |
inputs: >- | |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
- name: >- | |
Publish a GitHub Release for | |
${{ needs.pre-setup.outputs.git-tag }} | |
with Sigstore-signed artifacts | |
uses: ncipollo/release-action@v1 | |
with: | |
allowUpdates: false | |
artifactErrorsFailBuild: false | |
artifacts: | | |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}.sigstore.json | |
dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}.sigstore.json | |
artifactContentType: raw # Because whl and tgz are of different types | |
bodyFile: release-notes.md | |
discussionCategory: Announcements | |
draft: false | |
name: ${{ needs.pre-setup.outputs.git-tag }} | |
omitBodyDuringUpdate: true | |
omitName: false | |
omitNameDuringUpdate: true | |
omitPrereleaseDuringUpdate: true | |
prerelease: ${{ steps.release-maturity-check.outputs.is-pre-release }} | |
removeArtifacts: false | |
replacesArtifacts: false | |
tag: ${{ needs.pre-setup.outputs.git-tag }} | |
token: ${{ secrets.GITHUB_TOKEN }} | |
dumb-pypi: | |
name: Publish nightlies to Dumb PyPI # https://ansible.github.io/pylibssh/ | |
needs: | |
- check | |
- pre-setup # transitive, for accessing settings | |
if: >- | |
always() | |
&& needs.check.result == 'success' | |
&& ( | |
fromJSON(needs.pre-setup.outputs.is-untagged-devel) || | |
fromJSON(needs.pre-setup.outputs.release-requested) | |
) | |
runs-on: ubuntu-latest | |
environment: | |
name: github-pages | |
url: ${{ steps.deployment.outputs.page_url }}/simple/ansible-pylibssh/ | |
permissions: | |
contents: read # This job doesn't need to `git push` anything | |
pages: write # This allows to publish a GitHub Pages site | |
# `id-token` allows GitHub Pages to verify the deployment originates | |
# from an appropriate source through OpenID Connect, according to the | |
# README of the `actions/deploy-pages` action. | |
id-token: write | |
steps: | |
- name: Download the recently published versions from TestPyPI | |
run: >- | |
python -m | |
pip download | |
--index-url https://test.pypi.org/simple/ | |
--dest dist/ | |
--no-deps | |
--pre | |
ansible-pylibssh | |
- name: Download all the dists | |
uses: actions/download-artifact@v3 | |
with: | |
name: ${{ needs.pre-setup.outputs.dists-artifact-name }} | |
path: dist/ | |
- name: Switch to Python 3.11 | |
uses: actions/[email protected] | |
with: | |
python-version: 3.11 | |
- name: >- | |
Calculate Python interpreter version hash value | |
for use in the cache key | |
id: calc-cache-key-py | |
run: | | |
from hashlib import sha512 | |
from os import environ | |
from pathlib import Path | |
from sys import version | |
FILE_APPEND_MODE = 'a' | |
hash = sha512(version.encode()).hexdigest() | |
with Path(environ['GITHUB_OUTPUT']).open( | |
mode=FILE_APPEND_MODE, | |
) as outputs_file: | |
print(f'py-hash-key={hash}', file=outputs_file) | |
shell: python | |
- name: Set up pip cache | |
uses: actions/[email protected] | |
with: | |
path: >- | |
${{ | |
runner.os == 'Linux' | |
&& '~/.cache/pip' | |
|| '~/Library/Caches/pip' | |
}} | |
key: >- | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
needs.pre-setup.outputs.cache-key-files }} | |
restore-keys: | | |
${{ runner.os }}-pip-${{ | |
steps.calc-cache-key-py.outputs.py-hash-key | |
}}- | |
${{ runner.os }}-pip- | |
${{ runner.os }}- | |
- name: Install dumb-pypi dist from PyPI | |
run: python -m pip install dumb-pypi --user | |
- name: Generate a dumb PyPI website | |
run: | | |
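# dumb-pypi renders a static PyPI-style "simple" index from the list | |
# of filenames given via --package-list; the files themselves are | |
# served from the gh-pages location passed as --packages-url. | |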
python -m dumb_pypi.main \ | |
--package-list <(ls dist/) \ | |
--packages-url https://raw.githubusercontent.com/${{ | |
github.repository | |
}}/gh-pages/dist \ | |
--output-dir gh-pages-dumb-pypi | |
shell: bash | |
- name: >- | |
Copy dists from this build and TestPyPI | |
to the generated dumb PyPI website dir | |
run: cp -av dist gh-pages-dumb-pypi/ | |
- name: Upload GitHub Pages artifact | |
uses: actions/upload-pages-artifact@v3 | |
with: | |
path: gh-pages-dumb-pypi | |
- name: Publish the dumb PyPI website to GitHub Pages | |
id: deployment | |
uses: actions/deploy-pages@v4 | |
... | |
# TODO: Test install from sdist | |
# | |
# TODO: Figure out if we can use Py_LIMITED_API / PEP 384: | |
# TODO: * https://docs.python.org/3/c-api/stable.html | |
# TODO: https://github.com/cython/cython/issues/2542 |