build: artifact@4, with required immutability changes
nedbat committed Dec 20, 2023
1 parent 390fa6c commit 9bda95d
Showing 4 changed files with 91 additions and 37 deletions.
18 changes: 11 additions & 7 deletions .github/workflows/coverage.yml
@@ -31,6 +31,8 @@ jobs:
   coverage:
     name: "${{ matrix.python-version }} on ${{ matrix.os }}"
     runs-on: "${{ matrix.os }}-latest"
+    env:
+      MATRIX_ID: "${{ matrix.python-version }}.${{ matrix.os }}"
 
     strategy:
       matrix:
@@ -76,6 +78,7 @@ jobs:
 
       - name: "Install dependencies"
         run: |
+          echo matrix id: $MATRIX_ID
           set -xe
           python -VV
           python -m site
@@ -94,12 +97,12 @@ jobs:
           COVERAGE_RCFILE: "metacov.ini"
         run: |
           python -m coverage combine
-          mv .metacov .metacov.${{ matrix.python-version }}.${{ matrix.os }}
+          mv .metacov .metacov.$MATRIX_ID
 
       - name: "Upload coverage data"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: metacov
+          name: metacov-${{ env.MATRIX_ID }}
           path: .metacov.*
 
   combine:
@@ -131,9 +134,10 @@ jobs:
           python igor.py zip_mods
 
      - name: "Download coverage data"
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
-          name: metacov
+          pattern: metacov-*
+          merge-multiple: true
 
      - name: "Combine and report"
        id: combine
@@ -144,7 +148,7 @@ jobs:
          python igor.py combine_html
 
      - name: "Upload HTML report"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: html_report
          path: htmlcov
@@ -193,7 +197,7 @@ jobs:
 
      - name: "Download coverage HTML report"
        if: ${{ github.ref == 'refs/heads/master' }}
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: html_report
          path: reports_repo/${{ env.report_dir }}
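A note on the coverage.yml changes above: with upload-artifact@v4, artifacts are immutable, so the matrix jobs can no longer all upload into one shared "metacov" artifact. Each job now uploads under a unique name (metacov-${{ env.MATRIX_ID }}), and the combine job selects them all by glob with pattern: metacov-* and merge-multiple: true. A minimal Python sketch of that selection step, using made-up artifact names rather than anything from this commit:

import fnmatch

# Hypothetical artifact names: one per matrix job, plus unrelated artifacts.
uploaded = ["metacov-3.8.ubuntu", "metacov-3.12.windows", "html_report", "dist-sdist"]

# The combine job's `pattern: metacov-*` keeps only the per-matrix coverage data.
selected = [name for name in uploaded if fnmatch.fnmatch(name, "metacov-*")]
print(selected)  # ['metacov-3.8.ubuntu', 'metacov-3.12.windows']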
21 changes: 12 additions & 9 deletions .github/workflows/kit.yml
@@ -49,6 +49,8 @@ jobs:
   wheels:
     name: "${{ matrix.py }} ${{ matrix.os }} ${{ matrix.arch }} wheels"
     runs-on: ${{ matrix.os }}-latest
+    env:
+      MATRIX_ID: "${{ matrix.py }}-${{ matrix.os }}-${{ matrix.arch }}"
     strategy:
       matrix:
         include:
@@ -173,9 +175,9 @@ jobs:
           ls -al wheelhouse/
 
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: dist
+          name: dist-${{ env.MATRIX_ID }}
           path: wheelhouse/*.whl
           retention-days: 7
 
@@ -207,9 +209,9 @@ jobs:
           ls -al dist/
 
       - name: "Upload sdist"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: dist
+          name: dist-sdist
           path: dist/*.tar.gz
           retention-days: 7
 
@@ -245,9 +247,9 @@ jobs:
           ls -al dist/
 
       - name: "Upload wheels"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: dist
+          name: dist-pypy
           path: dist/*.whl
           retention-days: 7
 
@@ -264,9 +266,10 @@
       id-token: write
     steps:
       - name: "Download artifacts"
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
-          name: dist
+          pattern: dist-*
+          merge-multiple: true
 
       - name: "Sign artifacts"
         uses: sigstore/[email protected]
@@ -278,7 +281,7 @@
           ls -alR
 
       - name: "Upload signatures"
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: signatures
           path: |
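The kit.yml changes follow the same pattern for the wheel, sdist, and PyPy artifacts (dist-<matrix id>, dist-sdist, dist-pypy), and the job that signs the artifacts gathers them with pattern: dist-* and merge-multiple: true. As I read the download-artifact@v4 docs, merge-multiple places the files from every matching artifact into a single directory rather than one subdirectory per artifact. A small self-contained simulation of that merge, with invented directory and file names:

import pathlib
import shutil
import tempfile

# Fake three downloaded artifacts, each holding one file.
root = pathlib.Path(tempfile.mkdtemp())
for artifact, filename in [
    ("dist-cp312-ubuntu-x86_64", "coverage-linux.whl"),
    ("dist-cp312-windows-x86_64", "coverage-win.whl"),
    ("dist-sdist", "coverage.tar.gz"),
]:
    artifact_dir = root / artifact
    artifact_dir.mkdir()
    (artifact_dir / filename).write_text("placeholder")

# Merge every dist-* artifact into one dist/ directory, which is roughly what
# merge-multiple achieves for the downstream job.
merged = root / "dist"
merged.mkdir()
for artifact_dir in root.glob("dist-*"):
    for file in artifact_dir.iterdir():
        shutil.copy(file, merged / file.name)

print(sorted(p.name for p in merged.iterdir()))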
3 changes: 2 additions & 1 deletion Makefile
@@ -213,10 +213,11 @@ build_kits: ## Trigger GitHub to build kits
 	python ci/trigger_build_kits.py $(REPO_OWNER)
 
 download_kits: ## Download the built kits from GitHub.
-	python ci/download_gha_artifacts.py $(REPO_OWNER)
+	python ci/download_gha_artifacts.py $(REPO_OWNER) 'dist-*' dist
 
 check_kits: ## Check that dist/* are well-formed.
 	python -m twine check dist/*
+	@echo $$(ls -1 dist | wc -l) distribution kits
 
 tag: ## Make a git tag with the version number.
 	git tag -a -m "Version $$(python setup.py --version)" $$(python setup.py --version)
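With the Makefile change, download_kits now tells the script which artifact names to fetch and where to unpack them. A rough equivalent of running the target directly, assuming the repository slug below stands in for $(REPO_OWNER):

import subprocess

# Same invocation the Makefile target performs after this commit; the slug is
# an example value, not something defined in this diff.
subprocess.run(
    ["python", "ci/download_gha_artifacts.py", "nedbat/coveragepy", "dist-*", "dist"],
    check=True,
)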
86 changes: 66 additions & 20 deletions ci/download_gha_artifacts.py
@@ -3,8 +3,10 @@
 
 """Use the GitHub API to download built artifacts."""
 
+import collections
 import datetime
-import json
+import fnmatch
+import operator
 import os
 import os.path
 import sys
@@ -13,6 +15,7 @@
 
 import requests
 
+
 def download_url(url, filename):
     """Download a file from `url` to `filename`."""
     response = requests.get(url, stream=True)
@@ -23,6 +26,7 @@ def download_url(url, filename):
     else:
         raise RuntimeError(f"Fetching {url} produced: status={response.status_code}")
 
+
 def unpack_zipfile(filename):
     """Unpack a zipfile, using the names in the zip."""
     with open(filename, "rb") as fzip:
@@ -31,8 +35,10 @@ def unpack_zipfile(filename):
             print(f"  extracting {name}")
             z.extract(name)
 
+
 def utc2local(timestring):
-    """Convert a UTC time into local time in a more readable form.
+    """
+    Convert a UTC time into local time in a more readable form.
 
     For example: '20201208T122900Z' to '2020-12-08 07:29:00'.
 
@@ -44,25 +50,65 @@ def utc2local(timestring):
     local = utc + offset
     return local.strftime("%Y-%m-%d %H:%M:%S")
 
-dest = "dist"
-repo_owner = sys.argv[1]
-temp_zip = "artifacts.zip"
 
-os.makedirs(dest, exist_ok=True)
-os.chdir(dest)
+def all_items(url, key):
+    """
+    Get all items from a paginated GitHub URL.
 
-r = requests.get(f"https://api.github.com/repos/{repo_owner}/actions/artifacts")
-if r.status_code == 200:
-    dists = [a for a in r.json()["artifacts"] if a["name"] == "dist"]
-    if not dists:
-        print("No recent dists!")
-    else:
-        latest = max(dists, key=lambda a: a["created_at"])
-        print(f"Artifacts created at {utc2local(latest['created_at'])}")
-        download_url(latest["archive_download_url"], temp_zip)
+    `key` is the key in the top-level returned object that has a list of items.
+
+    """
+    url += ("&" if "?" in url else "?") + "per_page=100"
+    while url:
+        response = requests.get(url)
+        response.raise_for_status()
+        data = response.json()
+        if isinstance(data, dict) and (msg := data.get("message")):
+            raise RuntimeError(f"URL {url!r} failed: {msg}")
+        yield from data.get(key, ())
+        try:
+            url = response.links.get("next").get("url")
+        except AttributeError:
+            url = None
+
+
+def main(owner_repo, artifact_pattern, dest_dir):
+    """
+    Download and unzip the latest artifacts matching a pattern.
+
+    `owner_repo` is a GitHub pair for the repo, like "nedbat/coveragepy".
+    `artifact_pattern` is a filename glob for the artifact name.
+    `dest_dir` is the directory to unpack them into.
+
+    """
+    # Get all artifacts matching the pattern, grouped by name.
+    url = f"https://api.github.com/repos/{owner_repo}/actions/artifacts"
+    artifacts_by_name = collections.defaultdict(list)
+    for artifact in all_items(url, "artifacts"):
+        name = artifact["name"]
+        if not fnmatch.fnmatch(name, artifact_pattern):
+            continue
+        artifacts_by_name[name].append(artifact)
+
+    os.makedirs(dest_dir, exist_ok=True)
+    os.chdir(dest_dir)
+    temp_zip = "artifacts.zip"
+
+    # Download the latest of each name.
+    # I'd like to use created_at, because it seems like the better value to use,
+    # but it is in the wrong time zone, and updated_at is the same but correct.
+    # Bug report here: https://github.com/actions/upload-artifact/issues/488.
+    for name, artifacts in artifacts_by_name.items():
+        artifact = max(artifacts, key=operator.itemgetter("updated_at"))
+        print(
+            f"Downloading {artifact['name']}, "
+            + f"size: {artifact['size_in_bytes']}, "
+            + f"created: {utc2local(artifact['updated_at'])}"
+        )
+        download_url(artifact["archive_download_url"], temp_zip)
         unpack_zipfile(temp_zip)
         os.remove(temp_zip)
-else:
-    print(f"Fetching artifacts returned status {r.status_code}:")
-    print(json.dumps(r.json(), indent=4))
-    sys.exit(1)
+
+
+if __name__ == "__main__":
+    sys.exit(main(*sys.argv[1:]))
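The rewritten script groups every artifact whose name matches the requested pattern and keeps only the newest one per name, comparing updated_at strings (the in-code comment explains why created_at is avoided). A self-contained sketch of that selection logic, on made-up records shaped like the fields main() actually reads:

import collections
import fnmatch
import operator

# Invented artifact records; only the fields the script uses are included.
artifacts = [
    {"name": "dist-sdist", "updated_at": "2023-12-19T14:00:00Z", "size_in_bytes": 990},
    {"name": "dist-sdist", "updated_at": "2023-12-20T09:30:00Z", "size_in_bytes": 1010},
    {"name": "metacov-3.12.ubuntu", "updated_at": "2023-12-20T09:31:00Z", "size_in_bytes": 99},
]

# Group the matching names, as main() does with a defaultdict...
by_name = collections.defaultdict(list)
for artifact in artifacts:
    if fnmatch.fnmatch(artifact["name"], "dist-*"):
        by_name[artifact["name"]].append(artifact)

# ...then keep the newest of each name; ISO-8601 strings sort chronologically,
# so max() on updated_at picks the latest upload.
for name, group in by_name.items():
    latest = max(group, key=operator.itemgetter("updated_at"))
    print(name, latest["updated_at"])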
