diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index cfccd360ed..a53f6fba87 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -1,4 +1,3 @@ ---- name: Bug Report description: Create a bug report. labels: diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index a1e739821d..bc022b4122 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -1,4 +1,3 @@ ---- name: Feature Request description: Create a feature request. labels: diff --git a/.github/ISSUE_TEMPLATE/2_documentation.yml b/.github/ISSUE_TEMPLATE/2_documentation.yml index 68ae890de9..f3ce040892 100644 --- a/.github/ISSUE_TEMPLATE/2_documentation.yml +++ b/.github/ISSUE_TEMPLATE/2_documentation.yml @@ -1,4 +1,3 @@ ---- name: Documentation description: Create a documentation related issue. labels: diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index f9c412b177..9b3637f973 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -1,4 +1,3 @@ ---- name: Epic description: A collection of related tickets. labels: @@ -25,23 +24,82 @@ body: options: - label: I added a descriptive title required: true - - label: I searched open reports and couldn't find a duplicate + - label: I searched open issues and couldn't find a duplicate required: true + - type: textarea - id: summary + id: what attributes: - label: Summary + label: What? description: >- - Define the highlevel objectives to be accomplished in this epic. Include the - bigger picture of what is changing and/or the user story for why the - changes are desired/necessary. + What feature or problem will be addressed in this epic? + placeholder: Please describe here. validations: required: true + - type: textarea + id: why attributes: - label: Linked Issues & PRs - description: List all issues related to this epic. + label: Why? + description: >- + Why is the reported issue(s) a problem, or why is the proposed feature needed? + (Research and spike issues can be linked here.) value: | - - [ ] # + - [ ] + placeholder: Please describe here and/or link to relevant supporting issues. + validations: + required: true + + - type: textarea + id: user_impact + attributes: + label: User impact + description: >- + In what specific way(s) will users benefit from this change? (e.g. use cases or performance improvements) + placeholder: Please describe here. validations: required: true + + - type: textarea + id: goals + attributes: + label: Goals + description: >- + What goal(s) should this epic accomplish? + value: | + - [ ] + validations: + required: true + + - type: textarea + id: tasks + attributes: + label: Tasks + description: >- + What needs to be done to implement this change? + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocked_by + attributes: + label: 'This epic is blocked by:' + description: >- + Epics and issues that block this epic. + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocks + attributes: + label: 'This epic blocks:' + description: >- + Epics and issues that are blocked by this epic. 
+ value: | + - [ ] + validations: + required: false diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 07210519aa..508818874b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,15 +1,14 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. -# Please see the documentation for all configuration options: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - version: 2 updates: - - package-ecosystem: "pip" - directory: "/docs/" + - package-ecosystem: pip + directory: /docs/ schedule: - interval: "weekly" + interval: weekly allow: # Allow only production updates for Sphinx - - dependency-name: "sphinx" - dependency-type: "production" + - dependency-name: sphinx + dependency-type: production + - package-ecosystem: github-actions + directory: /.github/workflows + schedule: + interval: weekly diff --git a/.github/labels.yml b/.github/labels.yml index ba799038bb..bcc616d339 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -1,39 +1,9 @@ # Builds -- name: build::review - description: trigger a build for this PR - color: "7B4052" + - name: build::review + description: trigger a build for this PR + color: '#7b4052' # Tags -- name: tag::noarch - description: related to noarch builds - color: "86C579" - aliases: [] - -# Deprecated -- name: 3_In_Progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: 4_Needs_Review - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: effort-high - description: "[deprecated]" - color: "888888" -- name: effort-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: effort-medium - description: "[deprecated]" - color: "888888" -- name: in_progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: knowledge-high - description: "[deprecated]" - color: "888888" -- name: knowledge-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: knowledge-medium - description: "[deprecated]" - color: "888888" + - name: tag::noarch + description: related to noarch builds + color: '#86c579' diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index c10129b56f..d789e536c6 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -24,7 +24,7 @@ jobs: runs-on: ${{ matrix.runner }} steps: - name: Remove build label - uses: actions/github-script@v6 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea with: github-token: ${{ secrets.CANARY_ACTION_TOKEN }} script: | @@ -46,14 +46,14 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@v23.7.0 + uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 773cb76c96..07c7f75b12 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,4 +1,3 @@ ---- name: CLA on: @@ -19,7 +18,7 @@ jobs: runs-on: 
ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v24.2.0 + uses: conda/actions/check-cla@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 18ea421b87..03b32fc111 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,28 +7,28 @@ on: branches: - main paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** jobs: docs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 - name: Setup - run : | + run: | make env-docs - name: Build the docs - run : | + run: | cd docs conda run --name conda-build-docs make html diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 52b1cfee9e..7a114d6d41 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -1,4 +1,3 @@ ---- name: Automate Issues on: @@ -24,12 +23,12 @@ jobs: runs-on: ubuntu-latest steps: # remove [pending::feedback] - - uses: actions-ecosystem/action-remove-labels@v1.3.0 + - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} # add [pending::support], if still open - - uses: actions-ecosystem/action-add-labels@v1.1.3 + - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf if: github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 99ec60667f..f13985fb0a 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -1,4 +1,3 @@ ---- name: Sync Labels on: @@ -20,20 +19,20 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 - id: has_local - uses: andstor/file-existence-action@v3.0.0 + uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@v2.3.3 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@v2.3.3 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index 7fd6b91347..2204b62dda 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -1,4 +1,3 @@ ---- name: Lock on: @@ -18,7 +17,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v5 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 with: # Number of days of inactivity before a closed issue is locked issue-inactive-days: 365 
diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 94143662d3..7d06584c86 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -1,4 +1,3 @@ ---- name: Add to Project on: @@ -14,7 +13,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v1.0.0 + - uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 2464e81e4b..bcda1fea30 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,4 +1,3 @@ ---- name: Stale on: @@ -34,12 +33,12 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v24.2.0 + - uses: conda/actions/read-yaml@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e id: stale with: # Only issues with these labels are checked whether they are stale diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e0cbf84cc3..29f98a129d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,7 +18,7 @@ on: # no payload schedule: # https://crontab.guru/#37_18_*_*_* - - cron: 37 18 * * * + - cron: 37 18 * * * concurrency: # Concurrency group that uses the workflow name and PR number if available @@ -45,13 +45,13 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' - name: Filter Changes - uses: dorny/paths-filter@v3 + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 id: filter with: filters: | @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -110,19 +110,20 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github/condarc run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -142,19 +143,20 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: 
actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -180,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -188,19 +190,20 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-benchmark-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github/condarc run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -226,7 +229,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@v2 + uses: CodSpeedHQ/action@1dbf41f0ae41cebfe61e084e535aebe533409b4d with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed @@ -259,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -268,13 +271,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github\condarc run-post: false # skip post cleanup @@ -283,7 +286,8 @@ jobs: run: choco install visualstudio2017-workload-vctools - name: Conda Install - run: conda install + run: > + conda install --yes --file tests\requirements.txt --file tests\requirements-${{ runner.os }}.txt @@ -305,20 +309,21 @@ jobs: - name: Run Tests # Windows is sensitive to long paths, using `--basetemp=${{ runner.temp }} to # keep the test directories shorter - run: pytest + run: > + pytest --cov=conda_build --basetemp=${{ runner.temp }} -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -357,7 +362,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -365,13 +370,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: 
conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -380,7 +385,8 @@ jobs: run: sudo xcode-select --switch /Applications/Xcode_11.7.app - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -400,19 +406,20 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -434,17 +441,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ github.sha }}-all path: test-results-* - name: Test Summary - uses: test-summary/action@v2 + uses: test-summary/action@032c8a9cec6aaa3c20228112cae6ca10a3b29336 with: paths: test-results-*/test-report.xml @@ -456,7 +463,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Determine Success - uses: re-actors/alls-green@v1.2.2 + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe with: # permit jobs to be skipped if there are no code changes (see changes job) allowed-skips: ${{ toJSON(needs) }} @@ -493,7 +500,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: ref: ${{ github.ref }} clean: true @@ -501,7 +508,7 @@ jobs: # Explicitly use Python 3.12 since each of the OSes has a different default Python - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d with: python-version: '3.12' @@ -527,7 +534,7 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create & Upload - uses: conda/actions/canary-release@v24.2.0 + uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2db1b692b1..da60f66ed2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ exclude: | repos: # generic verification and formatting - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: # standard end of line/end of file cleanup - id: mixed-line-ending @@ -38,7 +38,11 @@ repos: - id: check-merge-conflict # sort requirements files - id: file-contents-sorter - files: ^tests/requirements.*\.txt + files: | + (?x)^( + docs/requirements.txt | + tests/requirements.*\.txt + ) args: [--unique] # Python verification and formatting - repo: 
https://github.com/Lucas-C/pre-commit-hooks @@ -54,13 +58,40 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.4 + rev: v0.3.7 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff args: [--fix] # compatible replacement for black - id: ruff-format + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.13.0 + hooks: + - id: pretty-format-toml + args: [--autofix, --trailing-commas] + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 + hooks: + - id: yamlfmt + # ruamel.yaml doesn't line wrap correctly (?) so set width to 1M to avoid issues + args: [--mapping=2, --offset=2, --sequence=4, --width=1000000, --implicit_start] + exclude: | + (?x)^( + .authors.yml | + conda_build/templates/npm.yaml | + conda_build/templates/setuptools.yaml | + docs/click/meta.yaml | + docs/source/user-guide/tutorials/meta.yaml | + recipe/meta.yaml | + tests/ + ) + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.1 + hooks: + # verify github syntaxes + - id: check-github-workflows + - id: check-dependabot - repo: meta # see https://pre-commit.com/#meta-hooks hooks: diff --git a/.readthedocs.yml b/.readthedocs.yml index abdbda6254..64f8768db5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,13 +1,13 @@ version: 2 build: - os: "ubuntu-22.04" + os: ubuntu-22.04 tools: - python: "3.11" + python: '3.11' python: install: - - requirements: docs/requirements.txt + - requirements: docs/requirements.txt # Build PDF, ePub and zipped HTML formats: diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 6e965c409d..45a1ff845e 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -4,14 +4,15 @@ import sys import time from collections import defaultdict +from io import StringIO + +from conda.cli.common import spec_from_line from setuptools.command.install import install from setuptools.dist import Distribution from setuptools.errors import BaseError, OptionError from . import api from .build import handle_anaconda_upload -from .conda_interface import StringIO, spec_from_line from .config import Config from .deprecations import deprecated from .metadata import MetaData diff --git a/conda_build/build.py b/conda_build/build.py index d0c939d9e8..88461ac941 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -26,23 +26,19 @@ import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version +from conda.auxlib.entity import EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER from conda.base.context import context, reset_context from conda.core.prefix_data import PrefixData from conda.exceptions import CondaError, NoPackagesFoundError, UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory from conda.models.channel import Channel +from conda.models.enums import FileMode, PathType +from conda.models.match_spec import MatchSpec +from conda.utils import url_path from . import __version__ as conda_build_version from . 
import environ, noarch_python, source, tarcheck, utils -from .conda_interface import ( - EntityEncoder, - FileMode, - MatchSpec, - PathType, - TemporaryDirectory, - env_path_backup_var_exists, - prefix_placeholder, - url_path, -) from .config import Config from .create_test import create_all_test_files from .deprecations import deprecated @@ -195,7 +191,7 @@ def have_prefix_files(files, prefix): """ prefix_bytes = prefix.encode(utils.codec) - prefix_placeholder_bytes = prefix_placeholder.encode(utils.codec) + prefix_placeholder_bytes = PREFIX_PLACEHOLDER.encode(utils.codec) searches = {prefix: prefix_bytes} if utils.on_win: # some windows libraries use unix-style path separators @@ -206,7 +202,7 @@ def have_prefix_files(files, prefix): double_backslash_prefix = prefix.replace("\\", "\\\\") double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) searches[double_backslash_prefix] = double_backslash_prefix_bytes - searches[prefix_placeholder] = prefix_placeholder_bytes + searches[PREFIX_PLACEHOLDER] = prefix_placeholder_bytes min_prefix = min(len(k) for k, _ in searches.items()) # mm.find is incredibly slow, so ripgrep is used to pre-filter the list. @@ -1149,13 +1145,13 @@ def get_files_with_prefix(m, replacements, files_in, prefix): prefix[0].upper() + prefix[1:], prefix[0].lower() + prefix[1:], prefix_u, - prefix_placeholder.replace("\\", "'"), - prefix_placeholder.replace("/", "\\"), + PREFIX_PLACEHOLDER.replace("\\", "'"), + PREFIX_PLACEHOLDER.replace("/", "\\"), ] # some python/json files store an escaped version of prefix pfx_variants.extend([pfx.replace("\\", "\\\\") for pfx in pfx_variants]) else: - pfx_variants = (prefix, prefix_placeholder) + pfx_variants = (prefix, PREFIX_PLACEHOLDER) # replacing \ with \\ here is for regex escaping re_test = ( b"(" @@ -2351,8 +2347,6 @@ def create_build_envs(m: MetaData, notest): ) except DependencyNeedsBuildingError as e: # subpackages are not actually missing. We just haven't built them yet. - from .conda_interface import MatchSpec - other_outputs = ( m.other_outputs.values() if hasattr(m, "other_outputs") @@ -2416,8 +2410,6 @@ def build( with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] # this should be a no-op if source is already here if m.needs_source_for_render: @@ -3447,8 +3439,6 @@ def test( env.update(environ.get_dict(m=metadata, prefix=config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" env["CONDA_BUILD"] = "1" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory @@ -3531,8 +3521,6 @@ def test( env = dict(os.environ.copy()) env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.test_run_post: from .utils import get_installed_packages diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index f467b4c2bc..a966677471 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -17,7 +17,6 @@ from conda.common.io import dashlist from .. 
import api, build, source, utils -from ..conda_interface import add_parser_channels, cc_conda_build from ..config import ( get_channel_urls, get_or_merge_config, @@ -27,12 +26,16 @@ from .actions import KeyValueAction from .main_render import get_render_parser +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence - from ..conda_interface import ArgumentParser - def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser = get_render_parser() @@ -70,7 +73,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: action="store_false", help="Don't include the recipe inside the built package.", dest="include_recipe", - default=cc_conda_build.get("include_recipe", "true").lower() == "true", + default=context.conda_build.get("include_recipe", "true").lower() == "true", ) parser.add_argument( "-s", @@ -125,7 +128,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Skip recipes for which there already exists an existing build " "(locally or in the channels)." ), - default=cc_conda_build.get("skip_existing", "false").lower() == "true", + default=context.conda_build.get("skip_existing", "false").lower() == "true", ) parser.add_argument( "--keep-old-work", @@ -145,7 +148,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--quiet", action="store_true", help="do not display progress bar", - default=cc_conda_build.get("quiet", "false").lower() == "true", + default=context.conda_build.get("quiet", "false").lower() == "true", ) parser.add_argument( "--debug", @@ -155,12 +158,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser.add_argument( "--token", help="Token to pass through to anaconda upload", - default=cc_conda_build.get("anaconda_token"), + default=context.conda_build.get("anaconda_token"), ) parser.add_argument( "--user", help="User/organization to upload packages to on anaconda.org or pypi", - default=cc_conda_build.get("user"), + default=context.conda_build.get("user"), ) parser.add_argument( "--label", @@ -185,7 +188,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), type=int, choices=range(1, 23), - default=cc_conda_build.get( + default=context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ) @@ -210,23 +213,23 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--config-file", help="path to .pypirc file to use when uploading to pypi", default=( - abspath(expanduser(expandvars(cc_conda_build.get("pypirc")))) - if cc_conda_build.get("pypirc") - else cc_conda_build.get("pypirc") + abspath(expanduser(expandvars(pypirc))) + if (pypirc := context.conda_build.get("pypirc")) + else None ), ) pypi_grp.add_argument( "--repository", "-r", help="PyPI repository to upload to", - default=cc_conda_build.get("pypi_repository", "pypitest"), + default=context.conda_build.get("pypi_repository", "pypitest"), ) parser.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", dest="activate", - default=cc_conda_build.get("activate", "true").lower() == "true", + default=context.conda_build.get("activate", "true").lower() == "true", ) parser.add_argument( 
"--no-build-id", @@ -237,7 +240,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), dest="set_build_id", # note: inverted - dest stores positive logic - default=cc_conda_build.get("set_build_id", "true").lower() == "true", + default=context.conda_build.get("set_build_id", "true").lower() == "true", ) parser.add_argument( "--build-id-pat", @@ -246,7 +249,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "paths being too long." ), dest="build_id_pat", - default=cc_conda_build.get("build_id_pat", "{n}_{t}"), + default=context.conda_build.get("build_id_pat", "{n}_{t}"), ) parser.add_argument( "--croot", @@ -259,21 +262,22 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--verify", action="store_true", help="run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", - default=cc_conda_build.get("exit_on_verify_error", "false").lower() == "true", + default=context.conda_build.get("exit_on_verify_error", "false").lower() + == "true", ) parser.add_argument( "--output-folder", @@ -281,7 +285,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "folder to dump output package to. Package are moved here if build or test succeeds." " Destination folder must exist prior to using this." ), - default=cc_conda_build.get("output_folder"), + default=context.conda_build.get("output_folder"), ) parser.add_argument( "--no-prefix-length-fallback", @@ -350,7 +354,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is disabled " "by default, but will be enabled by default in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--no-error-overlinking", @@ -361,7 +365,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is currently " "the default behavior, but will change in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--error-overdepending", @@ -372,7 +376,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." 
), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--no-error-overdepending", @@ -383,7 +388,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." ), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--long-test-prefix", @@ -393,7 +399,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Linux and Mac. Prefix length matches the --prefix-length flag. This is on by " "default in conda-build 3.0+" ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--no-long-test-prefix", @@ -403,7 +409,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Do not use a long prefix for the test prefix, as well as the build prefix." " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. " ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--keep-going", @@ -420,16 +426,17 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Path to store the source files (archives, git clones, etc.) during the build." ), default=( - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") - else cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) + else None ), ) parser.add_argument( "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", - default=cc_conda_build.get("copy_test_source_files", "true").lower() == "true", + default=context.conda_build.get("copy_test_source_files", "true").lower() + == "true", help=( "Disables copying the files necessary for testing the package into " "the info/test folder. Passing this argument means it may not be possible " @@ -445,7 +452,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Merge the build and host directories, even when host section or compiler " "jinja2 is present" ), - default=cc_conda_build.get("merge_build_host", "false").lower() == "true", + default=context.conda_build.get("merge_build_host", "false").lower() == "true", ) parser.add_argument( "--stats-file", diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index c222c7173b..d30b725b3d 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -9,10 +9,9 @@ from conda.base.context import context from .. 
import api -from ..conda_interface import ArgumentParser if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) @@ -43,6 +42,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda convert", description=""" diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 7e92163c58..9b680cbf5a 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -8,16 +8,23 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda develop", description=""" diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 404288052b..b1c47c0586 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -11,16 +11,23 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda inspect", description="Tools for inspecting conda packages.", diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 028b6f010e..91d2edcebb 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -9,16 +9,23 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_channels + +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda metapackage", description=""" diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 952ecb7d5b..a5cbb8b443 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -12,13 +12,18 @@ from yaml.parser import ParserError from .. 
import __version__, api -from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence log = logging.getLogger(__name__) @@ -44,7 +49,9 @@ def __call__(self, parser, namespace, values, option_string=None): ) -def get_render_parser(): +def get_render_parser() -> ArgumentParser: + from conda.cli.conda_argparse import ArgumentParser + p = ArgumentParser( prog="conda render", description=""" @@ -139,7 +146,7 @@ def get_render_parser(): "--old-build-string", dest="filename_hashing", action="store_false", - default=cc_conda_build.get("filename_hashing", "true").lower() == "true", + default=context.conda_build.get("filename_hashing", "true").lower() == "true", help=( "Disable hash additions to filenames to distinguish package " "variants from one another. NOTE: any filename collisions are " diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index dfc48ceb85..7013e2ffab 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -12,11 +12,10 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser from ..config import Config if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence thisdir = os.path.dirname(os.path.abspath(__file__)) @@ -24,6 +23,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda skeleton", description=""" diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index c6e31b24af..c5acfbfd06 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -4,14 +4,24 @@ import configparser as _configparser import os as _os +from builtins import input as _input from functools import partial as _partial from importlib import import_module as _import_module +from io import StringIO as _StringIO from conda import __version__ +from conda.auxlib.entity import EntityEncoder as _EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER as _PREFIX_PLACEHOLDER from conda.base.context import context as _context from conda.base.context import determine_target_prefix as _determine_target_prefix from conda.base.context import non_x86_machines as _non_x86_linux_machines from conda.base.context import reset_context as _reset_context +from conda.cli.common import spec_from_line as _spec_from_line +from conda.cli.common import specs_from_args as _specs_from_args +from conda.cli.common import specs_from_url as _specs_from_url +from conda.cli.conda_argparse import ArgumentParser as _ArgumentParser +from conda.common.path import win_path_to_unix as _win_path_to_unix +from conda.common.toposort import _toposort as __toposort from conda.core.package_cache_data import ( ProgressiveFetchExtract as _ProgressiveFetchExtract, ) @@ -21,50 +31,286 @@ from conda.exceptions import LockError as _LockError from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError from conda.exceptions import 
PaddingError as _PaddingError +from conda.exceptions import ResolvePackageNotFound as _ResolvePackageNotFound from conda.exceptions import UnsatisfiableError as _UnsatisfiableError -from conda.exports import ( # noqa: F401 - ArgumentParser, - Channel, - Completer, - CondaSession, - EntityEncoder, - FileMode, - InstalledPackages, - MatchSpec, - NoPackagesFound, - PackageRecord, - PathType, - Resolve, - StringIO, - TemporaryDirectory, - TmpDownload, - Unsatisfiable, - VersionOrder, - _toposort, - add_parser_channels, - add_parser_prefix, - download, - human_bytes, - input, - lchmod, - normalized_version, - prefix_placeholder, - rm_rf, - spec_from_line, - specs_from_args, - specs_from_url, - symlink_conda, - unix_path_to_win, - untracked, - url_path, - walk_prefix, - win_path_to_unix, -) +from conda.exports import Completer as _Completer +from conda.exports import InstalledPackages as _InstalledPackages from conda.exports import get_index as _get_index +from conda.exports import symlink_conda as _symlink_conda +from conda.gateways.connection.download import TmpDownload as _TmpDownload +from conda.gateways.connection.download import download as _download +from conda.gateways.connection.session import CondaSession as _CondaSession +from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory +from conda.gateways.disk.link import lchmod as _lchmod from conda.gateways.disk.read import compute_sum as _compute_sum +from conda.misc import untracked as _untracked +from conda.misc import walk_prefix as _walk_prefix +from conda.models.channel import Channel as _Channel from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url +from conda.models.enums import FileMode as _FileMode +from conda.models.enums import PathType as _PathType +from conda.models.match_spec import MatchSpec as _MatchSpec +from conda.models.records import PackageRecord as _PackageRecord +from conda.models.version import VersionOrder as _VersionOrder +from conda.models.version import normalized_version as _normalized_version +from conda.resolve import Resolve as _Resolve +from conda.utils import human_bytes as _human_bytes +from conda.utils import unix_path_to_win as _unix_path_to_win +from conda.utils import url_path as _url_path from .deprecations import deprecated +from .utils import rm_rf as _rm_rf + +try: + from conda.cli.helpers import add_parser_channels as _add_parser_channels + from conda.cli.helpers import add_parser_prefix as _add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels as _add_parser_channels + from conda.cli.conda_argparse import add_parser_prefix as _add_parser_prefix + +deprecated.constant( + "24.5", + "24.7", + "Completer", + _Completer, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaSession", + _CondaSession, + addendum="Use `conda.gateways.connection.session.CondaSession` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "InstalledPackages", + _InstalledPackages, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "NoPackagesFound", + _ResolvePackageNotFound, + addendum="Use `conda.exceptions.ResolvePackageNotFound` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Unsatisfiable", + _UnsatisfiableError, + addendum="Use `conda.exceptions.UnsatisfiableError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "symlink_conda", + _symlink_conda, + addendum="Unused.", +) + + +deprecated.constant( + "24.5", + "24.7", + "ArgumentParser", 
+ _ArgumentParser, + addendum="Use `conda.cli.conda_argparse.ArgumentParser` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_channels", + _add_parser_channels, + addendum="Use `conda.cli.helpers.add_parser_channels` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_prefix", + _add_parser_prefix, + addendum="Use `conda.cli.helpers.add_parser_prefix` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "Channel", + _Channel, + addendum="Use `conda.models.channel.Channel` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "FileMode", + _FileMode, + addendum="Use `conda.models.enums.FileMode` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PathType", + _PathType, + addendum="Use `conda.models.enums.PathType` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "MatchSpec", + _MatchSpec, + addendum="Use `conda.models.match_spec.MatchSpec` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PackageRecord", + _PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "VersionOrder", + _VersionOrder, + addendum="Use `conda.models.version.VersionOrder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "normalized_version", + _normalized_version, + addendum="Use `conda.models.version.normalized_version` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "EntityEncoder", + _EntityEncoder, + addendum="Use `conda.auxlib.entity.EntityEncoder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Resolve", + _Resolve, + addendum="Use `conda.resolve.Resolve` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TemporaryDirectory", + _TemporaryDirectory, + addendum="Use `conda.gateways.disk.create.TemporaryDirectory` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TmpDownload", + _TmpDownload, + addendum="Use `conda.gateways.connection.download.TmpDownload` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "download", + _download, + addendum="Use `conda.gateways.connection.download.download` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "_toposort", + __toposort, + addendum="Use `conda.common.toposort._toposort` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "human_bytes", + _human_bytes, + addendum="Use `conda.utils.human_bytes` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "lchmod", + _lchmod, + addendum="Use `conda.gateways.disk.link.lchmod` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "prefix_placeholder", + _PREFIX_PLACEHOLDER, + addendum="Use `conda.base.constants.PREFIX_PLACEHOLDER` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "rm_rf", + _rm_rf, + addendum="Use `conda_build.utils.rm_rf` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "spec_from_line", + _spec_from_line, + addendum="Use `conda.cli.common.spec_from_line` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_args", + _specs_from_args, + addendum="Use `conda.cli.common.specs_from_args` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_url", + _specs_from_url, + addendum="Use `conda.cli.common.specs_from_url` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "unix_path_to_win", + _unix_path_to_win, + addendum="Use `conda.utils.unix_path_to_win` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "untracked", + _untracked, + addendum="Use `conda.misc.untracked` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "url_path", 
+ _url_path, + addendum="Use `conda.utils.url_path` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "walk_prefix", + _walk_prefix, + addendum="Use `conda.misc.walk_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "win_path_to_unix", + _win_path_to_unix, + addendum="Use `conda.common.path.win_path_to_unix` instead.", +) deprecated.constant( "24.5", @@ -88,6 +334,21 @@ _import_module, addendum="Use `importlib.import_module` instead.", ) +deprecated.constant( + "24.5", + "24.7", + "StringIO", + _StringIO, + addendum="Use `io.StringIO` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "input", + _input, + addendum="Use `input` instead.", +) + deprecated.constant( "24.5", "24.7", @@ -264,19 +525,29 @@ _partial(_determine_target_prefix, _context), addendum="Use `conda.base.context.context.target_prefix` instead.", ) -cc_conda_build = _context.conda_build if hasattr(_context, "conda_build") else {} +deprecated.constant( + "24.5", + "24.7", + "cc_conda_build", + _context.conda_build, + addendum="Use `conda.base.context.context.conda_build` instead.", +) deprecated.constant( "24.5", "24.7", "get_conda_channel", - Channel.from_value, + _Channel.from_value, addendum="Use `conda.models.channel.Channel.from_value` instead.", ) -# When deactivating envs (e.g. switching from root to build/test) this env var is used, -# except the PR that removed this has been reverted (for now) and Windows doesn't need it. -env_path_backup_var_exists = _os.getenv("CONDA_PATH_BACKUP") +deprecated.constant( + "24.5", + "24.7", + "env_path_backup_var_exists", + _os.getenv("CONDA_PATH_BACKUP"), + addendum="Unused.", +) @deprecated( diff --git a/conda_build/config.py b/conda_build/config.py index 36599289c0..1e1f64de9d 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -17,8 +17,8 @@ from typing import TYPE_CHECKING from conda.base.context import context +from conda.utils import url_path -from .conda_interface import cc_conda_build, url_path from .deprecations import deprecated from .utils import ( get_build_folders, @@ -110,14 +110,16 @@ def _get_default_settings(): Setting("test_run_post", False), Setting( "filename_hashing", - cc_conda_build.get("filename_hashing", filename_hashing_default).lower() + context.conda_build.get( + "filename_hashing", filename_hashing_default + ).lower() == "true", ), Setting("keep_old_work", False), Setting( "_src_cache_root", - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) else _src_cache_root_default, ), Setting("copy_test_source_files", True), @@ -142,30 +144,32 @@ def _get_default_settings(): # cli/main_build.py that this default will switch in conda-build 4.0. 
Setting( "error_overlinking", - cc_conda_build.get("error_overlinking", error_overlinking_default).lower() + context.conda_build.get( + "error_overlinking", error_overlinking_default + ).lower() == "true", ), Setting( "error_overdepending", - cc_conda_build.get( + context.conda_build.get( "error_overdepending", error_overdepending_default ).lower() == "true", ), Setting( "noarch_python_build_age", - cc_conda_build.get( + context.conda_build.get( "noarch_python_build_age", noarch_python_build_age_default ), ), Setting( "enable_static", - cc_conda_build.get("enable_static", enable_static_default).lower() + context.conda_build.get("enable_static", enable_static_default).lower() == "true", ), Setting( "no_rewrite_stdout_env", - cc_conda_build.get( + context.conda_build.get( "no_rewrite_stdout_env", no_rewrite_stdout_env_default ).lower() == "true", @@ -204,11 +208,13 @@ def _get_default_settings(): Setting("verify", True), Setting( "ignore_verify_codes", - cc_conda_build.get("ignore_verify_codes", ignore_verify_codes_default), + context.conda_build.get("ignore_verify_codes", ignore_verify_codes_default), ), Setting( "exit_on_verify_error", - cc_conda_build.get("exit_on_verify_error", exit_on_verify_error_default), + context.conda_build.get( + "exit_on_verify_error", exit_on_verify_error_default + ), ), # Recipes that have no host section, only build, should bypass the build/host line. # This is to make older recipes still work with cross-compiling. True cross-compiling @@ -226,17 +232,17 @@ def _get_default_settings(): Setting("_pip_cache_dir", None), Setting( "zstd_compression_level", - cc_conda_build.get( + context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ), # this can be set to different values (currently only 2 means anything) to use package formats Setting( "conda_pkg_format", - cc_conda_build.get("pkg_format", conda_pkg_format_default), + context.conda_build.get("pkg_format", conda_pkg_format_default), ), Setting("suppress_variables", False), - Setting("build_id_pat", cc_conda_build.get("build_id_pat", "{n}_{t}")), + Setting("build_id_pat", context.conda_build.get("build_id_pat", "{n}_{t}")), ] @@ -453,7 +459,7 @@ def croot(self) -> str: """This is where source caches and work folders live""" if not self._croot: _bld_root_env = os.getenv("CONDA_BLD_PATH") - _bld_root_rc = cc_conda_build.get("root-dir") + _bld_root_rc = context.conda_build.get("root-dir") if _bld_root_env: self._croot = abspath(expanduser(_bld_root_env)) elif _bld_root_rc: diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index 494f0f85f1..f691b5192d 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -6,16 +6,22 @@ import sys import warnings +from argparse import Action from functools import wraps from types import ModuleType from typing import TYPE_CHECKING if TYPE_CHECKING: - from argparse import Action - from typing import Any, Callable + from argparse import ArgumentParser, Namespace + from typing import Any, Callable, ParamSpec, Self, TypeVar from packaging.version import Version + T = TypeVar("T") + P = ParamSpec("P") + + ActionType = TypeVar("ActionType", bound=type[Action]) + from . import __version__ @@ -30,7 +36,7 @@ class DeprecationHandler: _version_tuple: tuple[int, ...] | None _version_object: Version | None - def __init__(self, version: str): + def __init__(self: Self, version: str) -> None: """Factory to create a deprecation handle for the specified version. 
:param version: The version to compare against when checking deprecation statuses. @@ -52,14 +58,13 @@ def _get_version_tuple(version: str) -> tuple[int, ...] | None: except (AttributeError, ValueError): return None - def _version_less_than(self, version: str) -> bool: + def _version_less_than(self: Self, version: str) -> bool: """Test whether own version is less than the given version. :param version: Version string to compare against. """ - if self._version_tuple: - if version_tuple := self._get_version_tuple(version): - return self._version_tuple < version_tuple + if self._version_tuple and (version_tuple := self._get_version_tuple(version)): + return self._version_tuple < version_tuple # If self._version or version could not be represented by a simple # tuple[int, ...], do a more elaborate version parsing and comparison. @@ -68,19 +73,20 @@ def _version_less_than(self, version: str) -> bool: if self._version_object is None: try: - self._version_object = parse(self._version) + self._version_object = parse(self._version) # type: ignore[arg-type] except TypeError: + # TypeError: self._version could not be parsed self._version_object = parse("0.0.0.dev0+placeholder") return self._version_object < parse(version) def __call__( - self, + self: Self, deprecate_in: str, remove_in: str, *, addendum: str | None = None, stack: int = 0, - ) -> Callable[[Callable], Callable]: + ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Deprecation decorator for functions, methods, & classes. :param deprecate_in: Version in which code will be marked as deprecated. @@ -89,12 +95,12 @@ def __call__( :param stack: Optional stacklevel increment. """ - def deprecated_decorator(func: Callable) -> Callable: + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{func.__module__}.{func.__qualname__}", + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}", addendum=addendum, ) @@ -104,7 +110,7 @@ def deprecated_decorator(func: Callable) -> Callable: # alert user that it's time to remove something @wraps(func) - def inner(*args, **kwargs): + def inner(*args: P.args, **kwargs: P.kwargs) -> T: warnings.warn(message, category, stacklevel=2 + stack) return func(*args, **kwargs) @@ -114,7 +120,7 @@ def inner(*args, **kwargs): return deprecated_decorator def argument( - self, + self: Self, deprecate_in: str, remove_in: str, argument: str, @@ -122,7 +128,7 @@ def argument( rename: str | None = None, addendum: str | None = None, stack: int = 0, - ) -> Callable[[Callable], Callable]: + ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Deprecation decorator for keyword arguments. :param deprecate_in: Version in which code will be marked as deprecated. @@ -133,16 +139,16 @@ def argument( :param stack: Optional stacklevel increment. """ - def deprecated_decorator(func: Callable) -> Callable: + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{func.__module__}.{func.__qualname__}({argument})", + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}({argument})", # provide a default addendum if renaming and no addendum is provided - addendum=f"Use '{rename}' instead." - if rename and not addendum - else addendum, + addendum=( + f"Use '{rename}' instead." 
if rename and not addendum else addendum + ), ) # alert developer that it's time to remove something @@ -151,7 +157,7 @@ def deprecated_decorator(func: Callable) -> Callable: # alert user that it's time to remove something @wraps(func) - def inner(*args, **kwargs): + def inner(*args: P.args, **kwargs: P.kwargs) -> T: # only warn about argument deprecations if the argument is used if argument in kwargs: warnings.warn(message, category, stacklevel=2 + stack) @@ -168,22 +174,27 @@ def inner(*args, **kwargs): return deprecated_decorator def action( - self, + self: Self, deprecate_in: str, remove_in: str, - action: type[Action], + action: ActionType, *, addendum: str | None = None, stack: int = 0, - ): - class DeprecationMixin: - def __init__(inner_self, *args, **kwargs): + ) -> ActionType: + """Wraps any argparse.Action to issue a deprecation warning.""" + + class DeprecationMixin(Action): + category: type[Warning] + help: str # override argparse.Action's help type annotation + + def __init__(inner_self: Self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) category, message = self._generate_message( - deprecate_in, - remove_in, - ( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=( # option_string are ordered shortest to longest, # use the longest as it's the most descriptive f"`{inner_self.option_strings[-1]}`" @@ -192,6 +203,7 @@ def __init__(inner_self, *args, **kwargs): else f"`{inner_self.dest}`" ), addendum=addendum, + deprecation_type=FutureWarning, ) # alert developer that it's time to remove something @@ -201,18 +213,26 @@ def __init__(inner_self, *args, **kwargs): inner_self.category = category inner_self.help = message - def __call__(inner_self, parser, namespace, values, option_string=None): + def __call__( + inner_self: Self, + parser: ArgumentParser, + namespace: Namespace, + values: Any, + option_string: str | None = None, + ) -> None: # alert user that it's time to remove something warnings.warn( - inner_self.help, inner_self.category, stacklevel=7 + stack + inner_self.help, + inner_self.category, + stacklevel=7 + stack, ) super().__call__(parser, namespace, values, option_string) - return type(action.__name__, (DeprecationMixin, action), {}) + return type(action.__name__, (DeprecationMixin, action), {}) # type: ignore[return-value] def module( - self, + self: Self, deprecate_in: str, remove_in: str, *, @@ -235,7 +255,7 @@ def module( ) def constant( - self, + self: Self, deprecate_in: str, remove_in: str, constant: str, @@ -257,10 +277,10 @@ def constant( module, fullname = self._get_module(stack) # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{fullname}.{constant}", - addendum, + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{fullname}.{constant}", + addendum=addendum, ) # alert developer that it's time to remove something @@ -280,10 +300,10 @@ def __getattr__(name: str) -> Any: raise AttributeError(f"module '{fullname}' has no attribute '{name}'") - module.__getattr__ = __getattr__ + module.__getattr__ = __getattr__ # type: ignore[method-assign] def topic( - self, + self: Self, deprecate_in: str, remove_in: str, *, @@ -301,10 +321,10 @@ def topic( """ # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - topic, - addendum, + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=topic, + addendum=addendum, ) # alert developer that it's time to remove something @@ -314,7 +334,7 @@ def 
topic( # alert user that it's time to remove something warnings.warn(message, category, stacklevel=2 + stack) - def _get_module(self, stack: int) -> tuple[ModuleType, str]: + def _get_module(self: Self, stack: int) -> tuple[ModuleType, str]: """Detect the module from which we are being called. :param stack: The stacklevel increment. @@ -333,13 +353,15 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: # AttributeError: frame.f_code.co_filename is undefined pass else: - for module in sys.modules.values(): - if not isinstance(module, ModuleType): + # use a copy of sys.modules to avoid RuntimeError during iteration + # see https://github.com/conda/conda/issues/13754 + for loaded in tuple(sys.modules.values()): + if not isinstance(loaded, ModuleType): continue - if not hasattr(module, "__file__"): + if not hasattr(loaded, "__file__"): continue - if module.__file__ == filename: - return (module, module.__name__) + if loaded.__file__ == filename: + return (loaded, loaded.__name__) # If above failed, do an expensive import and costly getmodule call. import inspect @@ -351,18 +373,22 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: raise DeprecatedError("unable to determine the calling module") def _generate_message( - self, + self: Self, deprecate_in: str, remove_in: str, prefix: str, addendum: str | None, + *, + deprecation_type: type[Warning] = DeprecationWarning, ) -> tuple[type[Warning] | None, str]: - """Deprecation decorator for functions, methods, & classes. + """Generate the standardized deprecation message and determine whether the + deprecation is pending, active, or past. :param deprecate_in: Version in which code will be marked as deprecated. :param remove_in: Version in which code is expected to be removed. :param prefix: The message prefix, usually the function name. :param addendum: Additional messaging. Useful to indicate what to do instead. + :param deprecation_type: The warning type to use for active deprecations. :return: The warning category (if applicable) and the message. """ category: type[Warning] | None @@ -370,7 +396,7 @@ def _generate_message( category = PendingDeprecationWarning warning = f"is pending deprecation and will be removed in {remove_in}." elif self._version_less_than(remove_in): - category = DeprecationWarning + category = deprecation_type warning = f"is deprecated and will be removed in {remove_in}." else: category = None diff --git a/conda_build/environ.py b/conda_build/environ.py index 36f6b78171..5a24d83172 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -38,11 +38,12 @@ PaddingError, UnsatisfiableError, ) -from conda.models.channel import prioritize_channels +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.channel import Channel, prioritize_channels from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord from . import utils -from .conda_interface import Channel, PackageRecord, TemporaryDirectory from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list diff --git a/conda_build/index.py b/conda_build/index.py index 28a470f5c4..28f29063aa 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -11,10 +11,10 @@ from conda.base.context import context from conda.core.index import get_index from conda.exceptions import CondaHTTPError +from conda.utils import url_path from conda_index.index import update_index as _update_index from . 
import utils -from .conda_interface import url_path from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7d7c61f8f9..b202a7eb68 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -15,13 +15,11 @@ from conda.api import Solver from conda.base.context import context +from conda.cli.common import specs_from_args from conda.core.index import get_index from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord -from .conda_interface import ( - specs_from_args, -) from .os_utils.ldd import ( get_linkages, get_package_obj_files, diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index cc5c3b24c7..6ec2195eb0 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -10,24 +10,18 @@ import time from functools import partial from io import StringIO, TextIOBase +from typing import TYPE_CHECKING from warnings import warn import jinja2 import yaml - -try: - import tomllib # Python 3.11 -except: - import tomli as tomllib - -from typing import TYPE_CHECKING +from frozendict import deepfreeze from . import _load_setup_py_data from .environ import get_dict as get_environ from .exceptions import CondaBuildException from .render import get_env_dependencies from .utils import ( - HashableDict, apply_pin_expressions, check_call_env, copy_into, @@ -38,6 +32,11 @@ ) from .variants import DEFAULT_COMPILERS +try: + import tomllib # Python 3.11 +except: + import tomli as tomllib + if TYPE_CHECKING: from typing import IO, Any @@ -298,7 +297,7 @@ def pin_compatible( # There are two cases considered here (so far): # 1. Good packages that follow semver style (if not philosophy). For example, 1.2.3 # 2. Evil packages that cram everything alongside a single major version. For example, 9b - key = (m.name(), HashableDict(m.config.variant)) + key = (m.name(), deepfreeze(m.config.variant)) if key in cached_env_dependencies: pins = cached_env_dependencies[key] else: diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 01f3367d03..b05c27d8ae 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -18,15 +18,15 @@ from bs4 import UnicodeDammit from conda.base.context import context from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec +from frozendict import deepfreeze -from . import exceptions, utils, variants -from .conda_interface import MatchSpec +from . 
import exceptions, utils from .config import Config, get_or_merge_config from .features import feature_list from .license_family import ensure_valid_license_family from .utils import ( DEFAULT_SUBDIRS, - HashableDict, ensure_list, expand_globs, find_recipe, @@ -34,9 +34,18 @@ insert_variant_versions, on_win, ) +from .variants import ( + dict_of_lists_to_list_of_dicts, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, + find_used_variables_in_text, + get_default_variant, + get_vars, + list_of_dicts_to_dict_of_lists, +) if TYPE_CHECKING: - from typing import Literal + from typing import Any, Literal try: import yaml @@ -156,7 +165,7 @@ def get_selectors(config: Config) -> dict[str, bool]: if arch == "32": d["x86"] = plat.endswith(("-32", "-64")) - defaults = variants.get_default_variant(config) + defaults = get_default_variant(config) py = config.variant.get("python", defaults["python"]) # there are times when python comes in as a tuple if not hasattr(py, "split"): @@ -268,38 +277,68 @@ def eval_selector(selector_string, namespace, variants_in_place): return eval_selector(next_string, namespace, variants_in_place) -def select_lines(data, namespace, variants_in_place): - lines = [] - - for i, line in enumerate(data.splitlines()): +@lru_cache(maxsize=None) +def _split_line_selector(text: str) -> tuple[tuple[str | None, str], ...]: + lines: list[tuple[str | None, str]] = [] + for line in text.splitlines(): line = line.rstrip() + # skip comment lines, include a blank line as a placeholder + if line.lstrip().startswith("#"): + lines.append((None, "")) + continue + + # include blank lines + if not line: + lines.append((None, "")) + continue + + # user may have quoted entire line to make YAML happy trailing_quote = "" if line and line[-1] in ("'", '"'): trailing_quote = line[-1] - if line.lstrip().startswith("#"): - # Don't bother with comment only lines - continue - m = sel_pat.match(line) - if m: - cond = m.group(3) - try: - if eval_selector(cond, namespace, variants_in_place): - lines.append(m.group(1) + trailing_quote) - except Exception as e: - sys.exit( - """\ -Error: Invalid selector in meta.yaml line %d: -offending line: -%s -exception: -%s -""" - % (i + 1, line, str(e)) - ) + # Checking for "[" and "]" before regex matching every line is a bit faster. + if ( + ("[" in line and "]" in line) + and (match := sel_pat.match(line)) + and (selector := match.group(3)) + ): + # found a selector + lines.append((selector, (match.group(1) + trailing_quote).rstrip())) else: + # no selector found + lines.append((None, line)) + return tuple(lines) + + +def select_lines(text: str, namespace: dict[str, Any], variants_in_place: bool) -> str: + lines = [] + selector_cache: dict[str, bool] = {} + for i, (selector, line) in enumerate(_split_line_selector(text)): + if not selector: + # no selector? 
include line as is lines.append(line) + else: + # include lines with a selector that evaluates to True + try: + if selector_cache[selector]: + lines.append(line) + except KeyError: + # KeyError: cache miss + try: + value = bool(eval_selector(selector, namespace, variants_in_place)) + selector_cache[selector] = value + if value: + lines.append(line) + except Exception as e: + sys.exit( + f"Error: Invalid selector in meta.yaml line {i + 1}:\n" + f"offending line:\n" + f"{line}\n" + f"exception:\n" + f"{e.__class__.__name__}: {e}\n" + ) return "\n".join(lines) + "\n" @@ -815,7 +854,7 @@ def toposort(output_metadata_map): will naturally lead to non-overlapping files in each package and also the correct files being present during the install and test procedures, provided they are run in this order.""" - from .conda_interface import _toposort + from conda.common.toposort import _toposort # We only care about the conda packages built by this recipe. Non-conda # packages get sorted to the end. @@ -889,8 +928,8 @@ def get_output_dicts_from_metadata(metadata, outputs=None): outputs.append(OrderedDict(name=metadata.name())) for out in outputs: if ( - "package:" in metadata.get_recipe_text() - and out.get("name") == metadata.name() + out.get("name") == metadata.name() + and "package:" in metadata.get_recipe_text() ): combine_top_level_metadata_with_output(metadata, out) return outputs @@ -956,15 +995,8 @@ def finalize_outputs_pass( fm = om if not output_d.get("type") or output_d.get("type").startswith("conda"): outputs[ - ( - fm.name(), - HashableDict( - { - k: copy.deepcopy(fm.config.variant[k]) - for k in fm.get_used_vars() - } - ), - ) + fm.name(), + deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}), ] = (output_d, fm) except exceptions.DependencyNeedsBuildingError as e: if not permit_unsatisfiable_variants: @@ -976,15 +1008,13 @@ def finalize_outputs_pass( f"{e.packages}" ) outputs[ - ( - metadata.name(), - HashableDict( - { - k: copy.deepcopy(metadata.config.variant[k]) - for k in metadata.get_used_vars() - } - ), - ) + metadata.name(), + deepfreeze( + { + k: metadata.config.variant[k] + for k in metadata.get_used_vars() + } + ), ] = (output_d, metadata) # in-place modification base_metadata.other_outputs = outputs @@ -992,12 +1022,8 @@ def finalize_outputs_pass( final_outputs = OrderedDict() for k, (out_d, m) in outputs.items(): final_outputs[ - ( - m.name(), - HashableDict( - {k: copy.deepcopy(m.config.variant[k]) for k in m.get_used_vars()} - ), - ) + m.name(), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (out_d, m) return final_outputs @@ -1015,6 +1041,7 @@ def get_updated_output_dict_from_reparsed_metadata(original_dict, new_outputs): return output_d +@lru_cache(maxsize=200) def _filter_recipe_text(text, extract_pattern=None): if extract_pattern: match = re.search(extract_pattern, text, flags=re.MULTILINE | re.DOTALL) @@ -1665,7 +1692,6 @@ def build_id(self): raise RuntimeError( f"Couldn't extract raw recipe text for {self.name()} output" ) - raw_recipe_text = self.extract_package_and_build_text() raw_manual_build_string = re.search(r"\s*string:", raw_recipe_text) # user setting their own build string. Don't modify it. 
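Note: to make the selector-caching rewrite above concrete, a small usage sketch; the recipe fragment and namespace values are made up, but the call matches the new `select_lines(text, namespace, variants_in_place)` signature:

    from conda_build.metadata import select_lines

    text = (
        "requirements:\n"
        "  build:\n"
        "    - m2-patch   # [win]\n"
        "    - patch      # [not win]\n"
    )
    # _split_line_selector caches the (selector, line) split per unique text via
    # lru_cache; select_lines additionally caches each selector's boolean per call,
    # so re-rendering the same recipe skips both the regex match and the eval.
    selected = select_lines(text, {"win": False}, variants_in_place=False)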
if manual_build_string and not ( @@ -2087,8 +2113,11 @@ def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None: return None def get_recipe_text( - self, extract_pattern=None, force_top_level=False, apply_selectors=True - ): + self, + extract_pattern: str | None = None, + force_top_level: bool = False, + apply_selectors: bool = True, + ) -> str: meta_path = self.meta_path if meta_path: recipe_text = read_meta_file(meta_path) @@ -2448,9 +2477,7 @@ def append_parent_metadata(self, out_metadata): def get_reduced_variant_set(self, used_variables): # reduce variable space to limit work we need to do - full_collapsed_variants = variants.list_of_dicts_to_dict_of_lists( - self.config.variants - ) + full_collapsed_variants = list_of_dicts_to_dict_of_lists(self.config.variants) reduced_collapsed_variants = full_collapsed_variants.copy() reduce_keys = set(self.config.variants[0].keys()) - set(used_variables) @@ -2482,7 +2509,7 @@ def get_reduced_variant_set(self, used_variables): # save only one element from this key reduced_collapsed_variants[key] = utils.ensure_list(next(iter(values))) - out = variants.dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) + out = dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) return out def get_output_metadata_set( @@ -2541,17 +2568,15 @@ def get_output_metadata_set( # also refine this collection as each output metadata object is # finalized - see the finalize_outputs_pass function all_output_metadata[ - ( - out_metadata.name(), - HashableDict( - { - k: copy.deepcopy(out_metadata.config.variant[k]) - for k in out_metadata.get_used_vars() - } - ), - ) + out_metadata.name(), + deepfreeze( + { + k: out_metadata.config.variant[k] + for k in out_metadata.get_used_vars() + } + ), ] = (out, out_metadata) - out_metadata_map[HashableDict(out)] = out_metadata + out_metadata_map[deepfreeze(out)] = out_metadata ref_metadata.other_outputs = out_metadata.other_outputs = ( all_output_metadata ) @@ -2578,12 +2603,7 @@ def get_output_metadata_set( ): conda_packages[ m.name(), - HashableDict( - { - k: copy.deepcopy(m.config.variant[k]) - for k in m.get_used_vars() - } - ), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (output_d, m) elif output_d.get("type") == "wheel": if not output_d.get("requirements", {}).get("build") or not any( @@ -2633,21 +2653,14 @@ def get_output_metadata_set( return output_tuples def get_loop_vars(self): - _variants = ( - self.config.input_variants - if hasattr(self.config, "input_variants") - else self.config.variants - ) - return variants.get_vars(_variants, loop_only=True) + return get_vars(getattr(self.config, "input_variants", self.config.variants)) def get_used_loop_vars(self, force_top_level=False, force_global=False): - return { - var - for var in self.get_used_vars( - force_top_level=force_top_level, force_global=force_global - ) - if var in self.get_loop_vars() - } + loop_vars = self.get_loop_vars() + used_vars = self.get_used_vars( + force_top_level=force_top_level, force_global=force_global + ) + return set(loop_vars).intersection(used_vars) def get_rendered_recipe_text( self, permit_undefined_jinja=False, extract_pattern=None @@ -2720,11 +2733,7 @@ def get_used_vars(self, force_top_level=False, force_global=False): global used_vars_cache recipe_dir = self.path - # `HashableDict` does not handle lists of other dictionaries correctly. Also it - # is constructed inplace, taking references to sub-elements of the input dict - # and thus corrupting it. 
Also, this was being called in 3 places in this function - # so caching it is probably a good thing. - hashed_variants = HashableDict(copy.deepcopy(self.config.variant)) + hashed_variants = deepfreeze(self.config.variant) if hasattr(self.config, "used_vars"): used_vars = self.config.used_vars elif ( @@ -2827,7 +2836,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): apply_selectors=False, ) - all_used_selectors = variants.find_used_variables_in_text( + all_used_selectors = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=True ) @@ -2836,7 +2845,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): force_global=force_global, apply_selectors=True, ) - all_used_reqs = variants.find_used_variables_in_text( + all_used_reqs = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=False ) @@ -2847,9 +2856,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): if force_global: used = all_used else: - requirements_used = variants.find_used_variables_in_text( - variant_keys, reqs_text - ) + requirements_used = find_used_variables_in_text(variant_keys, reqs_text) outside_reqs_used = all_used - requirements_used requirements_used = trim_build_only_deps(self, requirements_used) @@ -2862,16 +2869,12 @@ def _get_used_vars_build_scripts(self): buildsh = os.path.join(self.path, "build.sh") if os.path.isfile(buildsh): used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, buildsh - ) + find_used_variables_in_shell_script(self.config.variant, buildsh) ) bldbat = os.path.join(self.path, "bld.bat") if self.config.platform == "win" and os.path.isfile(bldbat): used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, bldbat - ) + find_used_variables_in_batch_script(self.config.variant, bldbat) ) return used_vars @@ -2884,15 +2887,11 @@ def _get_used_vars_output_script(self): script = os.path.join(self.path, this_output["script"]) if os.path.splitext(script)[1] == ".sh": used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, script - ) + find_used_variables_in_shell_script(self.config.variant, script) ) elif os.path.splitext(script)[1] == ".bat": used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, script - ) + find_used_variables_in_batch_script(self.config.variant, script) ) else: log = utils.get_logger(__name__) @@ -2903,7 +2902,7 @@ def _get_used_vars_output_script(self): return used_vars def get_variants_as_dict_of_lists(self): - return variants.list_of_dicts_to_dict_of_lists(self.config.variants) + return list_of_dicts_to_dict_of_lists(self.config.variants) def clean(self): """This ensures that clean is called with the correct build id""" diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 6f15173f29..c07a7adb71 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -9,7 +9,8 @@ from pathlib import Path from typing import TYPE_CHECKING -from ..conda_interface import untracked +from conda.misc import untracked + from ..utils import on_linux, on_mac from .macho import otool from .pyldd import codefile_class, inspect_linkages, machofile diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9b14454c4f..d02cd2bd30 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -13,6 +13,8 @@ from pathlib import Path from subprocess import PIPE, Popen 
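Note: the `HashableDict(...)` to `deepfreeze(...)` swaps in the `metadata.py` hunks above all follow one pattern: variant dicts are used as cache keys, and `frozendict.deepfreeze` yields a hashable copy without mutating its input (the removed comment notes the old class took references into the original dict and mishandled nested lists). A minimal sketch, assuming the `frozendict >=2.4.2` requirement added elsewhere in this diff; the variant values are made up:

    from frozendict import deepfreeze

    variant = {"python": "3.11", "zip_keys": [["python", "numpy"]]}
    key = ("mypkg", deepfreeze(variant))  # dicts become frozendicts, lists become tuples
    cache = {key: "finalized output"}     # hashable, so usable as a dict key; `variant` is untouched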
+from conda.models.version import VersionOrder + from ..utils import on_mac, on_win, rec_glob from .external import find_executable @@ -963,7 +965,6 @@ def get_static_lib_exports_dumpbin(filename): results.append((result, version)) except: pass - from ..conda_interface import VersionOrder results = sorted(results, key=lambda x: VersionOrder(x[1])) dumpbin_exe = results[-1][0] diff --git a/conda_build/post.py b/conda_build/post.py index eea8a584b6..30a4057a30 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -35,15 +35,13 @@ from typing import TYPE_CHECKING from conda.core.prefix_data import PrefixData +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.link import lchmod from conda.gateways.disk.read import compute_sum +from conda.misc import walk_prefix from conda.models.records import PrefixRecord from . import utils -from .conda_interface import ( - TemporaryDirectory, - lchmod, - walk_prefix, -) from .exceptions import OverDependingError, OverLinkingError, RunPathError from .inspect_pkg import which_package from .os_utils import external, macho diff --git a/conda_build/render.py b/conda_build/render.py index be17eaa461..78a9ed643e 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -10,7 +10,6 @@ import subprocess import sys import tarfile -import tempfile from collections import OrderedDict, defaultdict from contextlib import contextmanager from functools import lru_cache @@ -27,11 +26,14 @@ import yaml from conda.base.context import context +from conda.cli.common import specs_from_url from conda.core.package_cache_data import ProgressiveFetchExtract from conda.exceptions import UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder from . import environ, exceptions, source, utils -from .conda_interface import PackageRecord, TemporaryDirectory, specs_from_url from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output @@ -806,8 +808,6 @@ def distribute_variants( # which python version we prefer. `python_age` can use used to tweak which # python gets used here. if metadata.noarch or metadata.noarch_python: - from .conda_interface import VersionOrder - age = int( metadata.get_value( "build/noarch_python_build_age", metadata.config.noarch_python_build_age @@ -943,7 +943,7 @@ def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]: yield recipe elif recipe.suffixes in [[".tar"], [".tar", ".gz"], [".tgz"], [".tar", ".bz2"]]: # extract the recipe to a temporary directory - with tempfile.TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: + with TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: tar.extractall(path=tmp) yield Path(tmp) elif recipe.suffix == ".yaml": diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 507086e4fe..4d65ef7cb1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -21,15 +21,12 @@ import requests from conda.core.index import get_index from conda.exceptions import CondaError, CondaHTTPError +from conda.gateways.connection.download import TmpDownload, download +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.match_spec import MatchSpec +from conda.resolve import Resolve from .. 
import environ
-from ..conda_interface import (
-    MatchSpec,
-    Resolve,
-    TemporaryDirectory,
-    TmpDownload,
-    download,
-)
 from ..config import Config, get_or_merge_config
 from ..utils import check_call_env, on_linux, on_win
 from ..variants import get_default_variant
@@ -355,19 +352,24 @@ def install_perl_get_core_modules(version):
             "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); "
             'print join "\n", @modules;',
         ]
-        all_core_modules = (
-            subprocess.check_output(args, shell=False)
-            .decode("utf-8")
-            .replace("\r\n", "\n")
-            .split("\n")
-        )
+        try:
+            all_core_modules = (
+                subprocess.check_output(args, shell=False)
+                .decode("utf-8")
+                .replace("\r\n", "\n")
+                .split("\n")
+            )
+        except Exception as e:
+            print(
+                f"Failed to query perl={version} for core modules list, ran:\n"
+                f"{' '.join(args)}"
+            )
+            # Python 3 exceptions have no `.message`; print the exception and bail out
+            print(e)
+            return []
         return all_core_modules
     except Exception as e:
-        print(
-            "Failed to query perl={} for core modules list, attempted command was:\n{}".format(
-                version, " ".join(args)
-            )
-        )
+        print(f"Failed to query perl={version} for core modules list.")
         print(e.message)
         return []
diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py
index 7140c9a89f..fbd959dba2 100755
--- a/conda_build/skeletons/cran.py
+++ b/conda_build/skeletons/cran.py
@@ -41,10 +41,11 @@
 
 from typing import TYPE_CHECKING
 
+from conda.base.context import context
 from conda.common.io import dashlist
+from conda.gateways.disk.create import TemporaryDirectory
 
 from .. import source
-from ..conda_interface import TemporaryDirectory, cc_conda_build
 from ..config import get_or_merge_config
 from ..license_family import allowed_license_families, guess_license_family
 from ..metadata import MetaData
@@ -454,7 +455,7 @@ def add_parser(repos):
     cran.add_argument(
         "-m",
         "--variant-config-files",
-        default=cc_conda_build.get("skeleton_config_yaml", None),
+        default=context.conda_build.get("skeleton_config_yaml", None),
         help="""Variant config file to add. These yaml files can contain keys such as `cran_mirror`.
Only one can be provided here.""", ) diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 8dc6719f63..7df95a9ad5 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -12,6 +12,7 @@ import subprocess import sys from collections import OrderedDict, defaultdict +from io import StringIO from os import chdir, getcwd, listdir, makedirs from os.path import abspath, exists, isdir, isfile, join from shutil import copy2 @@ -22,17 +23,13 @@ import requests import yaml from conda.base.context import context +from conda.cli.common import spec_from_line +from conda.gateways.connection.download import download from conda.gateways.disk.read import compute_sum +from conda.models.version import normalized_version +from conda.utils import human_bytes from requests.packages.urllib3.util.url import parse_url -from ..conda_interface import ( - StringIO, - download, - human_bytes, - input, - normalized_version, - spec_from_line, -) from ..config import Config from ..environ import create_env from ..license_family import allowed_license_families, guess_license_family diff --git a/conda_build/source.py b/conda_build/source.py index 984fb239e8..c7b3d1921b 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -16,9 +16,11 @@ from urllib.parse import urljoin from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum +from conda.utils import url_path -from .conda_interface import TemporaryDirectory, download, url_path from .exceptions import MissingDependency from .os_utils import external from .utils import ( diff --git a/conda_build/utils.py b/conda_build/utils.py index 05b0d827ff..92de8b24a1 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -23,6 +23,7 @@ from collections import OrderedDict, defaultdict from functools import lru_cache from glob import glob +from io import StringIO from itertools import filterfalse from json.decoder import JSONDecodeError from locale import getpreferredencoding @@ -54,22 +55,18 @@ KNOWN_SUBDIRS, ) from conda.base.context import context +from conda.common.path import win_path_to_unix from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda.models.channel import Channel from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder +from conda.utils import unix_path_to_win -from .conda_interface import ( - PackageRecord, - StringIO, - TemporaryDirectory, - VersionOrder, - cc_conda_build, - download, - unix_path_to_win, - win_path_to_unix, -) -from .conda_interface import rm_rf as _rm_rf +from .deprecations import deprecated from .exceptions import BuildLockError if TYPE_CHECKING: @@ -1407,6 +1404,7 @@ def get_installed_packages(path): return installed +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def _convert_lists_to_sets(_dict): for k, v in _dict.items(): if hasattr(v, "keys"): @@ -1419,6 +1417,7 @@ def _convert_lists_to_sets(_dict): return _dict +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") class HashableDict(dict): """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" @@ -1430,6 +1429,7 @@ def 
__hash__(self): return hash(json.dumps(self, sort_keys=True)) +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def represent_hashabledict(dumper, data): value = [] @@ -1617,8 +1617,13 @@ def filter_info_files(files_list, prefix): ) -def rm_rf(path, config=None): - return _rm_rf(path) +@deprecated.argument("24.5", "24.7", "config") +def rm_rf(path): + from conda.core.prefix_data import delete_prefix_from_linked_data + from conda.gateways.disk.delete import rm_rf as rm_rf + + rm_rf(path) + delete_prefix_from_linked_data(path) # https://stackoverflow.com/a/31459386/1170370 @@ -1676,10 +1681,8 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None - if cc_conda_build.get("log_config_file"): - config_file = abspath( - expanduser(expandvars(cc_conda_build.get("log_config_file"))) - ) + if log_config_file := context.conda_build.get("log_config_file"): + config_file = abspath(expanduser(expandvars(log_config_file))) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. if config_file: @@ -1780,22 +1783,24 @@ def merge_dicts_of_lists( return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} -def prefix_files(prefix): +def prefix_files(prefix: str | os.PathLike | Path) -> set[str]: """ Returns a set of all files in prefix. """ - res = set() - prefix_rep = prefix + os.path.sep - for root, dirs, files in walk(prefix): - for fn in files: - # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, "", 1)) - for dn in dirs: - path = join(root, dn) - if islink(path): - res.add(path.replace(prefix_rep, "", 1)) - res.update(expand_globs((path,), prefix)) - return res + prefix = f"{os.path.abspath(prefix)}{os.path.sep}" + prefix_files: set[str] = set() + for root, directories, files in walk(prefix): + # this is effectively os.path.relpath, just hacked to be faster + relroot = root[len(prefix) :].lstrip(os.path.sep) + # add all files + prefix_files.update(join(relroot, file) for file in files) + # add all symlink directories (they are "files") + prefix_files.update( + join(relroot, directory) + for directory in directories + if islink(join(root, directory)) + ) + return prefix_files def mmap_mmap( diff --git a/conda_build/variants.py b/conda_build/variants.py index c5bbe9a41e..1e2b1adc0c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -3,6 +3,8 @@ """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" +from __future__ import annotations + import os.path import re import sys @@ -10,14 +12,19 @@ from copy import copy from functools import lru_cache from itertools import product +from pathlib import Path +from typing import TYPE_CHECKING import yaml from conda.base.context import context -from .conda_interface import cc_conda_build +from .deprecations import deprecated from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version +if TYPE_CHECKING: + from typing import Any, Iterable + DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", "numpy": { @@ -224,8 +231,8 @@ def find_config_files(metadata_or_path, config): if not files and not config.ignore_system_variants: # user config - if cc_conda_build.get("config_file"): - cfg = resolve(cc_conda_build["config_file"]) + if config_file := 
context.conda_build.get("config_file"): + cfg = resolve(config_file) else: cfg = resolve(os.path.join("~", "conda_build_config.yaml")) if os.path.isfile(cfg): @@ -694,21 +701,22 @@ def get_package_variants(recipedir_or_metadata, config=None, variants=None): return filter_combined_spec_to_used_keys(combined_spec, specs=specs) -def get_vars(variants, loop_only=False): +@deprecated.argument("24.5", "24.7", "loop_only") +def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]: """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" - special_keys = {"pin_run_as_build", "zip_keys", "ignore_version"} - special_keys.update(set(ensure_list(variants[0].get("extend_keys")))) - loop_vars = [ - k - for k in variants[0] - if k not in special_keys - and ( - not loop_only - or any(variant[k] != variants[0][k] for variant in variants[1:]) - ) - ] - return loop_vars + first, *others = variants + special_keys = { + "pin_run_as_build", + "zip_keys", + "ignore_version", + *ensure_list(first.get("extend_keys")), + } + return { + var + for var in set(first) - special_keys + if any(first[var] != other[var] for other in others) + } @lru_cache(maxsize=None) @@ -758,23 +766,39 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): return used_variables -def find_used_variables_in_shell_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"(^[^$]*?\$\{?\s*%s\s*[\s|\}])" % v - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_shell_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"(^[^$]*?\$\{{?\s*{re.escape(variant)}\s*[\s|\}}])", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } -def find_used_variables_in_batch_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"\%" + v + r"\%" - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_batch_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"\%{re.escape(variant)}\%", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } diff --git a/docs/requirements.txt b/docs/requirements.txt index 37666a374b..993e9ea9e4 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,11 +1,12 @@ +Pillow==10.0.1 +PyYAML==6.0.1 +Sphinx==7.2.6 conda-sphinx-theme==0.2.1 linkify-it-py==2.0.2 myst-parser==2.0.0 -Pillow==10.0.1 -PyYAML==6.0.1 +pylint==2.17.5 requests==2.31.0 ruamel.yaml==0.17.32 -Sphinx==7.2.6 sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 sphinx-sitemap==2.5.1 @@ -18,4 +19,3 @@ sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 -pylint==2.17.5 diff --git a/news/5237-select_lines-caching b/news/5237-select_lines-caching new file mode 100644 index 0000000000..434a832350 --- /dev/null +++ b/news/5237-select_lines-caching @@ 
-0,0 +1,19 @@ +### Enhancements + +* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5276-deprecating-conda_interface b/news/5276-deprecating-conda_interface new file mode 100644 index 0000000000..701b9a53f1 --- /dev/null +++ b/news/5276-deprecating-conda_interface @@ -0,0 +1,56 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) +* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) +* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) +* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) +* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276) +* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) +* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) +* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) +* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276) +* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) +* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) +* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) +* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276) +* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) +* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) +* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) +* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) +* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) +* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) +* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. 
(#5276)
+* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276)
+* Deprecate `conda_build.conda_interface.TemporaryDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276)
+* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276)
+* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276)
+* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276)
+* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276)
+* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276)
+* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276)
+* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276)
+* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276)
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/news/5280-deprecate-get_vars-loop_only b/news/5280-deprecate-get_vars-loop_only
new file mode 100644
index 0000000000..e18d5cfe8c
--- /dev/null
+++ b/news/5280-deprecate-get_vars-loop_only
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Deprecate `conda_build.variants.get_vars(loop_only)`. Unused. (#5280)
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/news/5284-deprecate-HashableDict b/news/5284-deprecate-HashableDict
new file mode 100644
index 0000000000..c411443395
--- /dev/null
+++ b/news/5284-deprecate-HashableDict
@@ -0,0 +1,21 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284)
+* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284)
+* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead.
(#5284) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 02047ffadc..334c119996 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,16 +1,12 @@ [build-system] +build-backend = "hatchling.build" requires = [ "hatchling >=1.12.2", "hatch-vcs >=0.2.0", ] -build-backend = "hatchling.build" [project] -name = "conda-build" -description="tools for building conda packages" -readme = "README.md" authors = [{name = "Anaconda, Inc.", email = "conda@continuum.io"}] -license = {file = "LICENSE"} classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -24,9 +20,8 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Implementation :: PyPy", ] -requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", @@ -34,6 +29,7 @@ dependencies = [ "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", + "frozendict >=2.4.2", "jinja2", "jsonschema >=4.19", "libarchive-c", @@ -47,34 +43,46 @@ dependencies = [ "tomli ; python_version<'3.11'", "tqdm", ] +description = "tools for building conda packages" dynamic = ["version"] +license = {file = "LICENSE"} +name = "conda-build" +readme = "README.md" +requires-python = ">=3.8" -[project.urls] -documentation = "https://docs.conda.io/projects/conda-build/en/stable/" -repository = "https://github.com/conda/conda-build" -changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +[project.entry-points.conda] +conda-build = "conda_build.plugin" + +[project.entry-points."distutils.commands"] +bdist_conda = "conda_build.bdist_conda:bdist_conda" [project.scripts] conda-build = "conda_build.cli.main_build:execute" conda-convert = "conda_build.cli.main_convert:execute" +conda-debug = "conda_build.cli.main_debug:execute" conda-develop = "conda_build.cli.main_develop:execute" conda-inspect = "conda_build.cli.main_inspect:execute" conda-metapackage = "conda_build.cli.main_metapackage:execute" conda-render = "conda_build.cli.main_render:execute" conda-skeleton = "conda_build.cli.main_skeleton:execute" -conda-debug = "conda_build.cli.main_debug:execute" - -[project.entry-points."distutils.commands"] -bdist_conda = "conda_build.bdist_conda:bdist_conda" -[project.entry-points.conda] -conda-build = "conda_build.plugin" +[project.urls] +changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +documentation = "https://docs.conda.io/projects/conda-build/en/stable/" +repository = "https://github.com/conda/conda-build" -[tool.hatch.version] -source = "vcs" +[tool.coverage.report] +exclude_lines = [ + "if TYPE_CHECKING:", # ignoring type checking imports +] +omit = ["conda_build/skeletons/_example_skeleton.py"] +show_missing = true +skip_covered = true +sort = "Miss" -[tool.hatch.version.raw-options] -local_scheme = "dirty-tag" +[tool.coverage.run] +# store relative paths in coverage information +relative_files = true [tool.hatch.build] include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] @@ -82,44 +90,13 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] [tool.hatch.build.hooks.vcs] version-file = "conda_build/_version.py" -[tool.coverage.run] -# store relative paths in coverage information -relative_files = true - -[tool.coverage.report] -show_missing = true 
-sort = "Miss" -skip_covered = true -omit = ["conda_build/skeletons/_example_skeleton.py"] -exclude_lines = [ - "if TYPE_CHECKING:", # ignoring type checking imports -] - -[tool.ruff] -target-version = "py38" +[tool.hatch.version] +source = "vcs" -[tool.ruff.lint] -# E, W = pycodestyle errors and warnings -# F = pyflakes -# I = isort -# UP = pyupgrade -# ISC = flake8-implicit-str-concat -# TCH = flake8-type-checking -# T10 = flake8-debugger -# FA = flake8-future-annotations -# see also https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "UP", "ISC", "TCH", "T10", "FA"] -# E402 module level import not at top of file -# E722 do not use bare 'except' -# E731 do not assign a lambda expression, use a def -ignore = ["E402", "E722", "E731"] -pycodestyle = {max-line-length = 120} -flake8-type-checking = {exempt-modules = [], strict = true} +[tool.hatch.version.raw-options] +local_scheme = "dirty-tag" [tool.pytest.ini_options] -minversion = 3.0 -testpaths = ["tests"] -norecursedirs = ["tests/test-recipes/*"] addopts = [ "--color=yes", # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) @@ -143,13 +120,6 @@ doctest_optionflags = [ "ALLOW_UNICODE", "ELLIPSIS", ] -markers = [ - "serial: execute test serially (to avoid race conditions)", - "slow: execute the slow tests if active", - "sanity: execute the sanity tests", - "no_default_testing_config: used internally to disable monkeypatching for testing_config", - "benchmark: execute the benchmark tests", -] filterwarnings = [ # elevate conda's deprecated warning to an error "error::PendingDeprecationWarning:conda", @@ -160,3 +130,37 @@ filterwarnings = [ # ignore numpy.distutils error 'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data', ] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "benchmark: execute the benchmark tests", +] +minversion = 3.0 +norecursedirs = ["tests/test-recipes/*"] +testpaths = ["tests"] + +[tool.ruff] +target-version = "py38" + +[tool.ruff.lint] +flake8-type-checking = {exempt-modules = [], strict = true} +ignore = [ + "E402", # module level import not at top of file + "E722", # do not use bare 'except' + "E731", # do not assign a lambda expression, use a def +] +pycodestyle = {max-line-length = 120} +# see https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "FA", # flake8-future-annotations + "I", # isort + "ISC", # flake8-implicit-str-concat + "T10", # flake8-debugger + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle warnings +] diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml index 42847d7ead..3959a519bd 100644 --- a/recipe/conda_build_config.yaml +++ b/recipe/conda_build_config.yaml @@ -1,6 +1,6 @@ python: - - "3.8" - - "3.9" - - "3.10" - - "3.11" - - "3.12" + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - '3.12' diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 0614caa12c..d1b6440118 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -34,6 +34,7 @@ requirements: - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock + - frozendict >=2.4.2 - jinja2 - jsonschema >=4.19 - m2-patch >=2.6 # [win] diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 0e98968d2f..9f4ce1cbb0 100644 --- a/tests/cli/test_main_build.py 
+++ b/tests/cli/test_main_build.py @@ -12,7 +12,6 @@ from conda_build import api from conda_build.cli import main_build, main_render -from conda_build.conda_interface import TemporaryDirectory from conda_build.config import ( Config, zstd_compression_level_default, @@ -265,20 +264,19 @@ def test_purge(testing_workdir, testing_metadata): @pytest.mark.serial -def test_purge_all(testing_workdir, testing_metadata): +def test_purge_all( + testing_workdir: str, testing_metadata: MetaData, tmp_path: Path +) -> None: """ purge-all clears out build folders as well as build packages in the osx-64 folders and such """ api.output_yaml(testing_metadata, "meta.yaml") - with TemporaryDirectory() as tmpdir: - testing_metadata.config.croot = tmpdir - outputs = api.build( - testing_workdir, config=testing_metadata.config, notest=True - ) - args = ["purge-all", "--croot", tmpdir] - main_build.execute(args) - assert not get_build_folders(testing_metadata.config.croot) - assert not any(os.path.isfile(fn) for fn in outputs) + testing_metadata.config.croot = str(tmp_path) + outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) + args = ["purge-all", f"--croot={tmp_path}"] + main_build.execute(args) + assert not get_build_folders(testing_metadata.config.croot) + assert not any(os.path.isfile(fn) for fn in outputs) @pytest.mark.serial diff --git a/tests/cli/test_main_convert.py b/tests/cli/test_main_convert.py index 0be658b9d3..9ff65849d9 100644 --- a/tests/cli/test_main_convert.py +++ b/tests/cli/test_main_convert.py @@ -3,9 +3,9 @@ import os import pytest +from conda.gateways.connection.download import download from conda_build.cli import main_convert -from conda_build.conda_interface import download from conda_build.tarcheck import TarCheck from conda_build.utils import on_win diff --git a/tests/cli/test_main_develop.py b/tests/cli/test_main_develop.py index ede3758cfb..c0c3cdca3d 100644 --- a/tests/cli/test_main_develop.py +++ b/tests/cli/test_main_develop.py @@ -3,8 +3,9 @@ import os import sys +from conda.gateways.connection.download import download + from conda_build.cli import main_develop -from conda_build.conda_interface import download from conda_build.utils import get_site_packages, tar_xf diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 46ba3d75aa..ef5fdf077d 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations import os @@ -13,7 +12,6 @@ from conda_build import api from conda_build.cli import main_render -from conda_build.conda_interface import TemporaryDirectory from ..utils import metadata_dir @@ -21,35 +19,34 @@ from pathlib import Path -def test_render_add_channel(): +def test_render_add_channel(tmp_path: Path) -> None: """This recipe requires the conda_build_test_requirement package, which is only on the conda_build_test channel. 
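Note: the test rewrites above and below both drop the hand-rolled `TemporaryDirectory()` context manager in favour of pytest's built-in `tmp_path` fixture, which is created and cleaned up per test. A generic sketch of that pattern; the test body is hypothetical, not taken from this suite:

    from pathlib import Path

    def test_writes_output(tmp_path: Path) -> None:
        # pytest hands each test a unique, auto-cleaned directory; no with-block needed
        rendered = tmp_path / "out.yaml"
        rendered.write_text("package:\n  name: example\n")
        assert rendered.read_text().startswith("package:")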
This verifies that the -c argument works for rendering.""" - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, "out.yaml") - args = [ - "-c", - "conda_build_test", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - rendered_filename, - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta["requirements"]["build"] - if "conda_build_test_requirement" in pkg - ][0] - required_package_details = required_package_string.split(" ") - assert len(required_package_details) > 1, ( - "Expected version number on successful " - f"rendering, but got only {required_package_details}" - ) - assert ( - required_package_details[1] == "1.0" - ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" + rendered_filename = os.path.join(tmp_path, "out.yaml") + args = [ + "-c", + "conda_build_test", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + rendered_filename, + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta["requirements"]["build"] + if "conda_build_test_requirement" in pkg + ][0] + required_package_details = required_package_string.split(" ") + assert len(required_package_details) > 1, ( + "Expected version number on successful " + f"rendering, but got only {required_package_details}" + ) + assert ( + required_package_details[1] == "1.0" + ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None: diff --git a/tests/requirements.txt b/tests/requirements.txt index 0219e4a0fc..acb3317206 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -5,6 +5,7 @@ conda-index >=0.4.0 conda-libmamba-solver # ensure we use libmamba conda-package-handling >=1.3 filelock +frozendict >=2.4.2 jinja2 jsonschema >=4.19 menuinst >=2 diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 5932bf4f1a..8871fcedf7 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -30,10 +30,10 @@ from conda.base.context import context, reset_context from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaError, CondaMultiError, LinkError +from conda.utils import url_path from conda_index.api import update_index from conda_build import __version__, api, exceptions -from conda_build.conda_interface import url_path from conda_build.config import Config from conda_build.exceptions import ( CondaBuildException, diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index 7da9ede2d3..c0e46b7bf3 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -7,9 +7,9 @@ import tarfile import pytest +from conda.gateways.connection.download import download from conda_build import api -from conda_build.conda_interface import download from conda_build.utils import on_win, package_has_file from .utils import assert_package_consistency, metadata_dir diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 7849daa01c..60a381ebf1 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -15,7 +15,6 @@ from conda.common.compat import on_win from conda_build import api, render -from 
conda_build.conda_interface import cc_conda_build from conda_build.variants import validate_spec from .utils import metadata_dir, variants_dir @@ -213,7 +212,7 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config): assert len(build_ids) == 1 -def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): +def test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it python_versions = ["2.6", "3.4", "3.11"] @@ -221,27 +220,25 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) - cc_conda_build_backup = cc_conda_build.copy() - # hacky equivalent of changing condarc - # careful, this is global and affects other tests! make sure to clear it! - cc_conda_build.update( - {"config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml"} + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={ + "config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml", + **context.conda_build, + }, ) os.environ["TEST_WORKDIR"] = testing_workdir - try: - m = api.render( - os.path.join(variants_dir, "19_used_variables"), - bypass_env_check=True, - finalize=False, - )[0][0] - # this one should have gotten clobbered by the values in the recipe - assert m.config.variant["python"] not in python_versions - # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants["bzip2"]) == 2 - finally: - cc_conda_build.clear() - cc_conda_build.update(cc_conda_build_backup) + m = api.render( + os.path.join(variants_dir, "19_used_variables"), + bypass_env_check=True, + finalize=False, + )[0][0] + # this one should have gotten clobbered by the values in the recipe + assert m.config.variant["python"] not in python_versions + # this confirms that we loaded the config file correctly + assert len(m.config.squished_variants["bzip2"]) == 2 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index a4ff2d1ea7..35383913fb 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -1,268 +1,200 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import sys from argparse import ArgumentParser, _StoreTrueAction +from contextlib import nullcontext +from typing import TYPE_CHECKING import pytest from conda_build.deprecations import DeprecatedError, DeprecationHandler - -@pytest.fixture(scope="module") -def deprecated_v1() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=1.0`.""" - return DeprecationHandler("1.0") - - -@pytest.fixture(scope="module") -def deprecated_v2() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=2.0`.""" - return DeprecationHandler("2.0") - - -@pytest.fixture(scope="module") -def deprecated_v3() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=3.0`.""" - return DeprecationHandler("3.0") - - -def test_function_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation function displays associated warning.""" - - @deprecated_v1("2.0", "3.0") - def foo(): - return True - - with 
pytest.deprecated_call(match="pending deprecation"): - assert foo() - - -def test_function_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated function displays associated warning.""" - - @deprecated_v2("2.0", "3.0") - def foo(): - return True - - with pytest.deprecated_call(match="deprecated"): - assert foo() - - -def test_function_remove(deprecated_v3: DeprecationHandler): - """A function existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - @deprecated_v3("2.0", "3.0") +if TYPE_CHECKING: + from packaging.version import Version + + from conda_build.deprecations import DevDeprecationType, UserDeprecationType + +PENDING = pytest.param( + DeprecationHandler("1.0"), # deprecated + PendingDeprecationWarning, # warning + "pending deprecation", # message + id="pending", +) +FUTURE = pytest.param( + DeprecationHandler("2.0"), # deprecated + FutureWarning, # warning + "deprecated", # message + id="future", +) +DEPRECATED = pytest.param( + DeprecationHandler("2.0"), # deprecated + DeprecationWarning, # warning + "deprecated", # message + id="deprecated", +) +REMOVE = pytest.param( + DeprecationHandler("3.0"), # deprecated + None, # warning + None, # message + id="remove", +) + +parametrize_user = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, FUTURE, REMOVE], +) +parametrize_dev = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, DEPRECATED, REMOVE], +) + + +@parametrize_dev +def test_function( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated function displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + @deprecated("2.0", "3.0") def foo(): return True - -def test_method_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation method displays associated warning.""" - - class Bar: - @deprecated_v1("2.0", "3.0") - def foo(self): - return True - - with pytest.deprecated_call(match="pending deprecation"): - assert Bar().foo() + with pytest.warns(warning, match=message): + assert foo() -def test_method_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated method displays associated warning.""" - - class Bar: - @deprecated_v2("2.0", "3.0") - def foo(self): - return True - - with pytest.deprecated_call(match="deprecated"): - assert Bar().foo() - - -def test_method_remove(deprecated_v3: DeprecationHandler): - """A method existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): +@parametrize_dev +def test_method( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated method displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): class Bar: - @deprecated_v3("2.0", "3.0") + @deprecated("2.0", "3.0") def foo(self): return True + with pytest.warns(warning, match=message): + assert Bar().foo() -def test_class_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation class displays associated warning.""" - - @deprecated_v1("2.0", "3.0") - class Foo: - pass - with pytest.deprecated_call(match="pending deprecation"): - assert Foo() +@parametrize_dev +def test_class( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated class displays associated warning (or 
error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): - -def test_class_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated class displays associated warning.""" - - @deprecated_v2("2.0", "3.0") - class Foo: - pass - - with pytest.deprecated_call(match="deprecated"): - assert Foo() - - -def test_class_remove(deprecated_v3: DeprecationHandler): - """A class existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - @deprecated_v3("2.0", "3.0") + @deprecated("2.0", "3.0") class Foo: pass + with pytest.warns(warning, match=message): + assert Foo() -def test_arguments_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation argument displays associated warning.""" - - @deprecated_v1.argument("2.0", "3.0", "three") - def foo(one, two): - return True - - # too many arguments, can only deprecate keyword arguments - with pytest.raises(TypeError): - assert foo(1, 2, 3) - - # alerting user to pending deprecation - with pytest.deprecated_call(match="pending deprecation"): - assert foo(1, 2, three=3) - # normal usage not needing deprecation - assert foo(1, 2) +@parametrize_dev +def test_arguments( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argument displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): - -def test_arguments_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated argument displays associated warning.""" - - @deprecated_v2.argument("2.0", "3.0", "three") - def foo(one, two): - return True - - # too many arguments, can only deprecate keyword arguments - with pytest.raises(TypeError): - assert foo(1, 2, 3) - - # alerting user to pending deprecation - with pytest.deprecated_call(match="deprecated"): - assert foo(1, 2, three=3) - - # normal usage not needing deprecation - assert foo(1, 2) - - -def test_arguments_remove(deprecated_v3: DeprecationHandler): - """An argument existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - @deprecated_v3.argument("2.0", "3.0", "three") + @deprecated.argument("2.0", "3.0", "three") def foo(one, two): return True - -def test_action_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation argparse.Action displays associated warning.""" - parser = ArgumentParser() - parser.add_argument( - "--foo", action=deprecated_v1.action("2.0", "3.0", _StoreTrueAction) - ) - - with pytest.deprecated_call(match="pending deprecation"): - parser.parse_args(["--foo"]) - - -def test_action_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated argparse.Action displays associated warning.""" - parser = ArgumentParser() - parser.add_argument( - "--foo", action=deprecated_v2.action("2.0", "3.0", _StoreTrueAction) - ) - - with pytest.deprecated_call(match="deprecated"): - parser.parse_args(["--foo"]) - - -def test_action_remove(deprecated_v3: DeprecationHandler): - """An argparse.Action existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - ArgumentParser().add_argument( - "--foo", action=deprecated_v3.action("2.0", "3.0", _StoreTrueAction) + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.warns(warning, match=message): + assert foo(1, 2, three=3) + + # normal usage not 
needing deprecation + assert foo(1, 2) + + +@parametrize_user +def test_action( + deprecated: DeprecationHandler, + warning: UserDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argparse.Action displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + parser = ArgumentParser() + parser.add_argument( + "--foo", + action=deprecated.action("2.0", "3.0", _StoreTrueAction), ) - -def test_module_pending(deprecated_v1: DeprecationHandler): - """Importing a pending deprecation module displays associated warning.""" - with pytest.deprecated_call(match="pending deprecation"): - deprecated_v1.module("2.0", "3.0") - - -def test_module_deprecated(deprecated_v2: DeprecationHandler): - """Importing a deprecated module displays associated warning.""" - with pytest.deprecated_call(match="deprecated"): - deprecated_v2.module("2.0", "3.0") - - -def test_module_remove(deprecated_v3: DeprecationHandler): - """A module existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.module("2.0", "3.0") - - -def test_constant_pending(deprecated_v1: DeprecationHandler): - """Using a pending deprecation constant displays associated warning.""" - deprecated_v1.constant("2.0", "3.0", "SOME_CONSTANT", 42) - module = sys.modules[__name__] - - with pytest.deprecated_call(match="pending deprecation"): - module.SOME_CONSTANT - - -def test_constant_deprecated(deprecated_v2: DeprecationHandler): - """Using a deprecated constant displays associated warning.""" - deprecated_v2.constant("2.0", "3.0", "SOME_CONSTANT", 42) - module = sys.modules[__name__] - - with pytest.deprecated_call(match="deprecated"): - module.SOME_CONSTANT - - -def test_constant_remove(deprecated_v3: DeprecationHandler): - """A constant existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.constant("2.0", "3.0", "SOME_CONSTANT", 42) - - -def test_topic_pending(deprecated_v1: DeprecationHandler): - """Reaching a pending deprecation topic displays associated warning.""" - with pytest.deprecated_call(match="pending deprecation"): - deprecated_v1.topic("2.0", "3.0", topic="Some special topic") - - -def test_topic_deprecated(deprecated_v2: DeprecationHandler): - """Reaching a deprecated topic displays associated warning.""" - with pytest.deprecated_call(match="deprecated"): - deprecated_v2.topic("2.0", "3.0", topic="Some special topic") - - -def test_topic_remove(deprecated_v3: DeprecationHandler): - """A topic reached past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.topic("2.0", "3.0", topic="Some special topic") - - -def test_version_fallback(): - """Test that conda_build can run even if deprecations can't parse the version.""" - deprecated = DeprecationHandler(None) # type: ignore + with pytest.warns(warning, match=message): + parser.parse_args(["--foo"]) + + +@parametrize_dev +def test_module( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Importing a deprecated module displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.module("2.0", "3.0") + + +@parametrize_dev +def test_constant( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Using a deprecated constant displays associated warning 
(or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + deprecated.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + with pytest.warns(warning, match=message): + module.SOME_CONSTANT + + +@parametrize_dev +def test_topic( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Reaching a deprecated topic displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.topic("2.0", "3.0", topic="Some special topic") + + +def test_version_fallback() -> None: + """Test that conda can run even if deprecations can't parse the version.""" + deprecated = DeprecationHandler(None) # type: ignore[arg-type] assert deprecated._version_less_than("0") assert deprecated._version_tuple is None - version = deprecated._version_object # type: ignore + version: Version = deprecated._version_object # type: ignore[assignment] assert version.major == version.minor == version.micro == 0 diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 18ae32f7ab..f19ea31997 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -5,9 +5,9 @@ from typing import TYPE_CHECKING import pytest +from frozendict import deepfreeze from conda_build import jinja_context -from conda_build.utils import HashableDict if TYPE_CHECKING: from pathlib import Path @@ -99,7 +99,7 @@ def test_pin_subpackage_exact(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True) assert len(pin.split()) == 3 @@ -111,7 +111,7 @@ def test_pin_subpackage_expression(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name) assert len(pin.split()) == 2 diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 0f6da9b089..1b9fc34258 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -57,14 +57,14 @@ def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): def test_select_lines(): lines = "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test ' ", ' " test " ', - "", - "# comment line", + "", # preserve newline + "# comment line", # preserve comment line (but not the comment) "test [abc]", " 'quoted # [abc] '", ' "quoted # [abc] yes "', @@ -74,19 +74,20 @@ def test_select_lines(): "test {{ JINJA_VAR[:2] }} # stuff yes [abc]", "test {{ JINJA_VAR[:2] }} # [abc] stuff yes", '{{ environ["test"] }} # [abc]', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": True}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test", " 'quoted'", ' "quoted"', @@ -96,20 +97,21 @@ def 
test_select_lines(): "test {{ JINJA_VAR[:2] }}", "test {{ JINJA_VAR[:2] }}", '{{ environ["test"] }}', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": False}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test {{ JINJA_VAR[:2] }}", - "", # trailing newline + "", # preserve trailing newline ) ) diff --git a/tests/test_misc.py b/tests/test_misc.py index bcdafcb196..4a5bb0d95c 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -4,9 +4,10 @@ from pathlib import Path import pytest +from conda.auxlib.entity import EntityEncoder +from conda.models.enums import PathType from conda_build._link import pyc_f -from conda_build.conda_interface import EntityEncoder, PathType @pytest.mark.parametrize( diff --git a/tests/test_source.py b/tests/test_source.py index 711407d153..1cae2f9997 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,10 +5,10 @@ import tarfile import pytest +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator diff --git a/tests/test_utils.py b/tests/test_utils.py index d245e65796..70a2981203 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -204,8 +204,11 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): handlers: [console] """ ) - cc_conda_build = mocker.patch.object(utils, "cc_conda_build") - cc_conda_build.get.return_value = test_file + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={"log_config_file": test_file}, + ) log = utils.get_logger(__name__) # default log level is INFO, but our config file should set level to DEBUG log.warn("test message") @@ -433,3 +436,25 @@ def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: fp.write("test") assert utils.is_conda_pkg(value) == expected + + +def test_prefix_files(tmp_path: Path): + # all files within the prefix are found + (prefix := tmp_path / "prefix1").mkdir() + (file1 := prefix / "file1").touch() + (dirA := prefix / "dirA").mkdir() + (file2 := dirA / "file2").touch() + (dirB := prefix / "dirB").mkdir() + (file3 := dirB / "file3").touch() + + # files outside of the prefix are not found + (prefix2 := tmp_path / "prefix2").mkdir() + (prefix2 / "file4").touch() + (dirC := prefix2 / "dirC").mkdir() + (dirC / "file5").touch() + + # even if they are symlinked + (link1 := prefix / "dirC").symlink_to(dirC) + + paths = {str(path.relative_to(prefix)) for path in (file1, file2, file3, link1)} + assert paths == utils.prefix_files(str(prefix)) diff --git a/tests/test_variants.py b/tests/test_variants.py index 50e9cea4f2..e853f172fd 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -17,7 +17,10 @@ combine_specs, dict_of_lists_to_list_of_dicts, filter_combined_spec_to_used_keys, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, get_package_variants, + get_vars, validate_spec, ) @@ -700,3 +703,39 @@ def test_zip_key_filtering( } assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected + + +def test_get_vars(): + variants = [ + { + "python": "3.12", + "nodejs": "20", + 
"zip_keys": [], # ignored + }, + {"python": "3.12", "nodejs": "18"}, + {"python": "3.12", "nodejs": "20"}, + ] + + assert get_vars(variants) == {"nodejs"} + + +def test_find_used_variables_in_shell_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"${variants[0]}\n" + f"${{{variants[1]}}}\n" + f"${{{{{variants[2]}}}}}\n" + f"$${variants[3]}\n" + ) + assert find_used_variables_in_shell_script(variants, script) == {"FOO", "BAR"} + + +def test_find_used_variables_in_batch_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"%{variants[0]}%\n" + f"%%{variants[1]}%%\n" + f"${variants[2]}\n" + f"${{{variants[3]}}}\n" + ) + assert find_used_variables_in_batch_script(variants, script) == {"FOO", "BAR"} diff --git a/tests/utils.py b/tests/utils.py index 125cda7c91..b4ed64912b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,10 +8,9 @@ from pathlib import Path from typing import TYPE_CHECKING -from conda.base.context import context, reset_context +from conda.base.context import reset_context from conda.common.compat import on_mac -from conda_build.conda_interface import cc_conda_build from conda_build.metadata import MetaData if TYPE_CHECKING: @@ -153,7 +152,3 @@ def get_noarch_python_meta(meta): def reset_config(search_path=None): reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, "conda_build") else {} - )