
[pre-commit.ci] pre-commit autoupdate #225

Merged: 14 commits, May 16, 2024
68 changes: 10 additions & 58 deletions .pre-commit-config.yaml
@@ -17,12 +17,12 @@ repos:
- --honor-noqa

- repo: https://github.com/Lucas-C/pre-commit-hooks.git
rev: v1.5.4
rev: v1.5.5
hooks:
- id: remove-tabs

- repo: https://github.com/python-jsonschema/check-jsonschema.git
rev: 0.27.3
rev: 0.28.1
hooks:
- id: check-github-actions
- id: check-github-workflows
@@ -67,7 +67,7 @@ repos:
- id: codespell

- repo: https://github.com/adrienverge/yamllint.git
rev: v1.33.0
rev: v1.35.1
hooks:
- id: yamllint
files: \.(yaml|yml)$
@@ -78,36 +78,20 @@ repos:
- --strict

- repo: https://github.com/PyCQA/flake8.git
rev: 6.1.0
rev: 7.0.0
hooks:
- id: flake8
alias: flake8-no-wps
name: flake8 WPS-excluded
args:
- --ignore
# NOTE: WPS326: Found implicit string concatenation
# NOTE: WPS332: Found walrus operator
- >-
D100,
D101,
D103,
D107,
E402,
E501,
additional_dependencies:
- flake8-2020 ~= 1.7.0
- flake8-pytest-style ~= 1.6.0

- repo: https://github.com/PyCQA/flake8.git
# NOTE: This is kept at v4 until WPS starts supporting flake8 v5.
rev: 4.0.1 # enforce-version: 4.0.1
hooks:
- id: flake8
alias: flake8-only-wps
name: flake8 WPS-only
args:
- --ignore
# NOTE: WPS326: Found implicit string concatenation
# NOTE: WPS332: Found walrus operator
- >-
WPS102,
WPS110,
WPS111,
@@ -124,14 +108,14 @@ repos:
WPS440,
WPS441,
WPS453,
- --select
- WPS
additional_dependencies:
- wemake-python-styleguide ~= 0.17.0
- flake8-2020 ~= 1.7.0
- flake8-pytest-style ~= 1.6.0
- wemake-python-styleguide ~= 0.19.0
language_version: python3.11 # flake8-commas doesn't work w/ Python 3.12

- repo: https://github.com/PyCQA/pylint.git
rev: v3.0.3
rev: v3.1.0
hooks:
- id: pylint
args:
@@ -150,36 +134,4 @@ repos:
- --output-format
- colorized

- repo: local
hooks:
- id: enforced-flake8-version
name: Verify that enforced flake8 version stays unchanged
description: >-
This is a sanity check and fixer that makes sure that
the `flake8` version in this file remains matching the
corresponding request in the `# enforce-version` comment.
# Using Python here because a shell test
# does not always work in CIs:
entry: >-
python -c 'import pathlib, re, sys;
pre_commit_config = pathlib.Path(sys.argv[1]);
cfg_txt = pre_commit_config.read_text();
new_cfg_txt = re.sub(
r"(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}"
r"#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)"
r"[ \t\f\v]*",
r"\g<spaces>rev: \g<enforced_version> "
r"# enforce-version: \g<enforced_version>",
cfg_txt,
);
cfg_txt != new_cfg_txt and
pre_commit_config.write_text(new_cfg_txt)
'
pass_filenames: true
language: system
files: >-
^\.pre-commit-config\.ya?ml$
types:
- yaml

...
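
For readability, the dense `python -c` one-liner in the hook removed above amounts to roughly the standalone script sketched here (a hypothetical enforce_flake8_version.py, not part of this PR; like the one-liner, it relies on pre-commit flagging the hook as failed when the file gets modified):

#!/usr/bin/env python
"""Hypothetical standalone form of the removed `enforced-flake8-version` hook."""
import pathlib
import re
import sys

# pre-commit passes the matched config file as the only positional argument.
pre_commit_config = pathlib.Path(sys.argv[1])
cfg_txt = pre_commit_config.read_text()

# Rewrite `rev: X.Y.Z  # enforce-version: A.B.C` so that `rev` always matches
# the version requested in the trailing comment.
new_cfg_txt = re.sub(
    r'(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}'
    r'#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)'
    r'[ \t\f\v]*',
    r'\g<spaces>rev: \g<enforced_version> '
    r'# enforce-version: \g<enforced_version>',
    cfg_txt,
)

# Only touch the file when something changed; pre-commit then fails the hook
# because a tracked file was modified.
if cfg_txt != new_cfg_txt:
    pre_commit_config.write_text(new_cfg_txt)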
87 changes: 44 additions & 43 deletions oidc-exchange.py
@@ -10,7 +10,7 @@
import id # pylint: disable=redefined-builtin
import requests

_GITHUB_STEP_SUMMARY = Path(os.getenv("GITHUB_STEP_SUMMARY"))
_GITHUB_STEP_SUMMARY = Path(os.getenv('GITHUB_STEP_SUMMARY'))

# The top-level error message that gets rendered.
# This message wraps one of the other templates/messages defined below.
@@ -45,7 +45,7 @@
```

Learn more at https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings.
"""
""" # noqa: S105; not a password

# Specialization of the token retrieval failure case, when we know that
# the failure cause is use within a third-party PR.
@@ -59,7 +59,7 @@
To fix this, change your publishing workflow to use an event that
forks of your repository cannot trigger (such as tag or release
creation, or a manually triggered workflow dispatch).
"""
""" # noqa: S105; not a password

# Rendered if the package index refuses the given OIDC token.
_SERVER_REFUSED_TOKEN_EXCHANGE_MESSAGE = """
@@ -71,7 +71,7 @@
also indicate an internal error on GitHub or PyPI's part.

{rendered_claims}
"""
""" # noqa: S105; not a password

_RENDERED_CLAIMS = """
The claims rendered below are **for debugging purposes only**. You should **not**
@@ -97,38 +97,38 @@

This strongly suggests a server configuration or downtime issue; wait
a few minutes and try again.
"""
""" # noqa: S105; not a password

# Rendered if the package index's token response isn't a valid API token payload.
_SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE = """
Token response error: the index gave us an invalid response.

This strongly suggests a server configuration or downtime issue; wait
a few minutes and try again.
"""
""" # noqa: S105; not a password


def die(msg: str) -> NoReturn:
with _GITHUB_STEP_SUMMARY.open("a", encoding="utf-8") as io:
with _GITHUB_STEP_SUMMARY.open('a', encoding='utf-8') as io:
print(_ERROR_SUMMARY_MESSAGE.format(message=msg), file=io)

# HACK: GitHub Actions' annotations don't work across multiple lines naively;
# translating `\n` into `%0A` (i.e., HTML percent-encoding) is known to work.
# See: https://github.com/actions/toolkit/issues/193
msg = msg.replace("\n", "%0A")
print(f"::error::Trusted publishing exchange failure: {msg}", file=sys.stderr)
msg = msg.replace('\n', '%0A')
print(f'::error::Trusted publishing exchange failure: {msg}', file=sys.stderr)
sys.exit(1)


def debug(msg: str):
print(f"::debug::{msg.title()}", file=sys.stderr)
print(f'::debug::{msg.title()}', file=sys.stderr)


def get_normalized_input(name: str) -> str | None:
name = f"INPUT_{name.upper()}"
name = f'INPUT_{name.upper()}'
if val := os.getenv(name):
return val
return os.getenv(name.replace("-", "_"))
return os.getenv(name.replace('-', '_'))


def assert_successful_audience_call(resp: requests.Response, domain: str):
@@ -140,81 +140,81 @@ def assert_successful_audience_call(resp: requests.Response, domain: str):
# This index supports OIDC, but forbids the client from using
# it (either because it's disabled, ratelimited, etc.)
die(
f"audience retrieval failed: repository at {domain} has trusted publishing disabled",
f'audience retrieval failed: repository at {domain} has trusted publishing disabled',
)
case HTTPStatus.NOT_FOUND:
# This index does not support OIDC.
die(
"audience retrieval failed: repository at "
f"{domain} does not indicate trusted publishing support",
'audience retrieval failed: repository at '
f'{domain} does not indicate trusted publishing support',
)
case other:
status = HTTPStatus(other)
# Unknown: the index may or may not support OIDC, but didn't respond with
# something we expect. This can happen if the index is broken, in maintenance mode,
# misconfigured, etc.
die(
"audience retrieval failed: repository at "
f"{domain} responded with unexpected {other}: {status.phrase}",
'audience retrieval failed: repository at '
f'{domain} responded with unexpected {other}: {status.phrase}',
)


def render_claims(token: str) -> str:
_, payload, _ = token.split(".", 2)
_, payload, _ = token.split('.', 2)

# urlsafe_b64decode needs padding; JWT payloads don't contain any.
payload += "=" * (4 - (len(payload) % 4))
payload += '=' * (4 - (len(payload) % 4))
claims = json.loads(base64.urlsafe_b64decode(payload))

def _get(name: str) -> str: # noqa: WPS430
return claims.get(name, "MISSING")
return claims.get(name, 'MISSING')

return _RENDERED_CLAIMS.format(
sub=_get("sub"),
repository=_get("repository"),
repository_owner=_get("repository_owner"),
repository_owner_id=_get("repository_owner_id"),
job_workflow_ref=_get("job_workflow_ref"),
ref=_get("ref"),
sub=_get('sub'),
repository=_get('repository'),
repository_owner=_get('repository_owner'),
repository_owner_id=_get('repository_owner_id'),
job_workflow_ref=_get('job_workflow_ref'),
ref=_get('ref'),
)


def event_is_third_party_pr() -> bool:
# Non-`pull_request` events cannot be from third-party PRs.
if os.getenv("GITHUB_EVENT_NAME") != "pull_request":
if os.getenv('GITHUB_EVENT_NAME') != 'pull_request':
return False

event_path = os.getenv("GITHUB_EVENT_PATH")
event_path = os.getenv('GITHUB_EVENT_PATH')
if not event_path:
# No GITHUB_EVENT_PATH indicates a weird GitHub or runner bug.
debug("unexpected: no GITHUB_EVENT_PATH to check")
debug('unexpected: no GITHUB_EVENT_PATH to check')
return False

try:
event = json.loads(Path(event_path).read_bytes())
except json.JSONDecodeError:
debug("unexpected: GITHUB_EVENT_PATH does not contain valid JSON")
debug('unexpected: GITHUB_EVENT_PATH does not contain valid JSON')
return False

try:
return event["pull_request"]["head"]["repo"]["fork"]
return event['pull_request']['head']['repo']['fork']
except KeyError:
return False


repository_url = get_normalized_input("repository-url")
repository_url = get_normalized_input('repository-url')
repository_domain = urlparse(repository_url).netloc
token_exchange_url = f"https://{repository_domain}/_/oidc/mint-token"
token_exchange_url = f'https://{repository_domain}/_/oidc/mint-token'

# Indices are expected to support `https://{domain}/_/oidc/audience`,
# which tells OIDC exchange clients which audience to use.
audience_url = f"https://{repository_domain}/_/oidc/audience"
audience_resp = requests.get(audience_url)
audience_url = f'https://{repository_domain}/_/oidc/audience'
audience_resp = requests.get(audience_url, timeout=5) # S113 wants a timeout
Review thread on this line:

Member: @woodruffw I've set a timeout of 5 here and in the following invocation to please the linter. Should we set a lower value or split connect/read?

Member: Hmm, that timeout seems fine to me (assuming that's seconds, which I think it is). IMO it could be lower, but I suspect the 99.99% case will fall well inside this (both for normal and outage responses).

Member: Alright, I'll let it be.
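
For reference, the split connect/read variant discussed in this thread would look roughly like the sketch below. The endpoint URL and the timeout values are placeholders, not part of this PR; requests accepts a (connect, read) tuple for its timeout parameter.

# Sketch only: split connect/read timeouts, as floated in the thread above.
import requests

audience_url = 'https://pypi.org/_/oidc/audience'  # placeholder endpoint

audience_resp = requests.get(
    audience_url,
    # Fail fast if the index cannot be reached, but tolerate a slower read.
    timeout=(3.05, 10),
)
audience_resp.raise_for_status()
print(audience_resp.json()['audience'])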

assert_successful_audience_call(audience_resp, repository_domain)

oidc_audience = audience_resp.json()["audience"]
oidc_audience = audience_resp.json()['audience']

debug(f"selected trusted publishing exchange endpoint: {token_exchange_url}")
debug(f'selected trusted publishing exchange endpoint: {token_exchange_url}')

try:
oidc_token = id.detect_credential(audience=oidc_audience)
@@ -229,7 +229,8 @@ def event_is_third_party_pr() -> bool:
# Now we can do the actual token exchange.
mint_token_resp = requests.post(
token_exchange_url,
json={"token": oidc_token},
json={'token': oidc_token},
timeout=5, # S113 wants a timeout
)

try:
@@ -246,9 +247,9 @@ def event_is_third_party_pr() -> bool:
# On failure, the JSON response includes the list of errors that
# occurred during minting.
if not mint_token_resp.ok:
reasons = "\n".join(
f"* `{error['code']}`: {error['description']}"
for error in mint_token_payload["errors"]
reasons = '\n'.join(
f'* `{error["code"]}`: {error["description"]}'
for error in mint_token_payload['errors']
)

rendered_claims = render_claims(oidc_token)
@@ -260,12 +261,12 @@ def event_is_third_party_pr() -> bool:
),
)

pypi_token = mint_token_payload.get("token")
pypi_token = mint_token_payload.get('token')
if pypi_token is None:
die(_SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE)

# Mask the newly minted PyPI token, so that we don't accidentally leak it in logs.
print(f"::add-mask::{pypi_token}", file=sys.stderr)
print(f'::add-mask::{pypi_token}', file=sys.stderr)

# This final print will be captured by the subshell in `twine-upload.sh`.
print(pypi_token)
14 changes: 7 additions & 7 deletions print-hash.py
@@ -4,23 +4,23 @@

packages_dir = pathlib.Path(sys.argv[1]).resolve().absolute()

print("Showing hash values of files to be uploaded:")
print('Showing hash values of files to be uploaded:')

for file_object in packages_dir.iterdir():
sha256 = hashlib.sha256()
md5 = hashlib.md5()
md5 = hashlib.md5() # noqa: S324; only use for reference
blake2_256 = hashlib.blake2b(digest_size=256 // 8)

print(file_object)
print("")
print('')

content = file_object.read_bytes()

sha256.update(content)
md5.update(content)
blake2_256.update(content)

print(f"SHA256: {sha256.hexdigest()}")
print(f"MD5: {md5.hexdigest()}")
print(f"BLAKE2-256: {blake2_256.hexdigest()}")
print("")
print(f'SHA256: {sha256.hexdigest()}')
print(f'MD5: {md5.hexdigest()}')
print(f'BLAKE2-256: {blake2_256.hexdigest()}')
print('')