diff --git a/.authors.yml b/.authors.yml index 89d9a7ae00..db03794b80 100644 --- a/.authors.yml +++ b/.authors.yml @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 85 + num_commits: 88 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 178 + num_commits: 204 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1240,7 +1240,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 64 + num_commits: 71 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1251,7 +1251,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 23 + num_commits: 27 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1262,7 +1262,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 96 + num_commits: 53 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1310,7 +1310,7 @@ - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 4 + num_commits: 19 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1327,7 +1327,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 10 + num_commits: 13 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1439,3 +1439,18 @@ num_commits: 1 first_commit: 2024-02-06 11:43:45 github: finnagin +- name: Justin Wood (Callek) + email: callek@gmail.com + num_commits: 1 + first_commit: 2024-04-29 16:21:41 + github: Callek +- name: Tobias Fischer + email: info@tobiasfischer.info + num_commits: 1 + first_commit: 2024-03-25 13:13:16 + github: Tobias-Fischer +- name: Yannik Tausch + email: dev@ytausch.de + num_commits: 5 + first_commit: 2024-03-19 18:09:38 + github: ytausch diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh index 73ea60380c..766bcb9f29 100644 --- a/.devcontainer/post_create.sh +++ b/.devcontainer/post_create.sh @@ -24,4 +24,4 @@ echo "Installing dev dependencies" --file "$SRC_CONDA_BUILD/tests/requirements.txt" \ --file "$SRC_CONDA_BUILD/tests/requirements-Linux.txt" \ --file "$SRC_CONDA_BUILD/tests/requirements-ci.txt" \ - "conda>=23.5.0" + "conda>=23.7.0" diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index cfccd360ed..a53f6fba87 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -1,4 +1,3 @@ ---- name: Bug Report description: Create a bug report. labels: diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index a1e739821d..bc022b4122 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -1,4 +1,3 @@ ---- name: Feature Request description: Create a feature request. labels: diff --git a/.github/ISSUE_TEMPLATE/2_documentation.yml b/.github/ISSUE_TEMPLATE/2_documentation.yml index 68ae890de9..f3ce040892 100644 --- a/.github/ISSUE_TEMPLATE/2_documentation.yml +++ b/.github/ISSUE_TEMPLATE/2_documentation.yml @@ -1,4 +1,3 @@ ---- name: Documentation description: Create a documentation related issue. 
labels: diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index eca723f4b6..9b3637f973 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -1,4 +1,3 @@ ---- name: Epic description: A collection of related tickets. labels: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 07210519aa..508818874b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,15 +1,14 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. -# Please see the documentation for all configuration options: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - version: 2 updates: - - package-ecosystem: "pip" - directory: "/docs/" + - package-ecosystem: pip + directory: /docs/ schedule: - interval: "weekly" + interval: weekly allow: # Allow only production updates for Sphinx - - dependency-name: "sphinx" - dependency-type: "production" + - dependency-name: sphinx + dependency-type: production + - package-ecosystem: github-actions + directory: /.github/workflows + schedule: + interval: weekly diff --git a/.github/labels.yml b/.github/labels.yml index ba799038bb..bcc616d339 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -1,39 +1,9 @@ # Builds -- name: build::review - description: trigger a build for this PR - color: "7B4052" + - name: build::review + description: trigger a build for this PR + color: '#7b4052' # Tags -- name: tag::noarch - description: related to noarch builds - color: "86C579" - aliases: [] - -# Deprecated -- name: 3_In_Progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: 4_Needs_Review - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: effort-high - description: "[deprecated]" - color: "888888" -- name: effort-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: effort-medium - description: "[deprecated]" - color: "888888" -- name: in_progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: knowledge-high - description: "[deprecated]" - color: "888888" -- name: knowledge-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: knowledge-medium - description: "[deprecated]" - color: "888888" + - name: tag::noarch + description: related to noarch builds + color: '#86c579' diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index c10129b56f..488b188025 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -17,14 +17,16 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-14 + subdir: osx-arm64 + - runner: macos-13 subdir: osx-64 - runner: windows-latest subdir: win-64 runs-on: ${{ matrix.runner }} steps: - name: Remove build label - uses: actions/github-script@v6 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea with: github-token: ${{ secrets.CANARY_ACTION_TOKEN }} script: | @@ -46,14 +48,14 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - name: Create and upload review build - 
uses: conda/actions/canary-release@v23.7.0 + uses: conda/actions/canary-release@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 0f793ef038..66df3b0f38 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,4 +1,3 @@ ---- name: CLA on: @@ -19,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/check-cla@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 18ea421b87..ef62e267f0 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,28 +7,28 @@ on: branches: - main paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** jobs: docs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 - name: Setup - run : | + run: | make env-docs - name: Build the docs - run : | + run: | cd docs conda run --name conda-build-docs make html diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 8391b0ee68..634bf13e4f 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -1,4 +1,3 @@ ---- name: Automate Issues on: @@ -24,12 +23,12 @@ jobs: runs-on: ubuntu-latest steps: # remove [pending::feedback] - - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 + - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 # v1.3.0 with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} # add [pending::support], if still open - - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf + - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf # v1.1.3 if: github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 45cb754d33..0189478992 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -1,4 +1,3 @@ ---- name: Sync Labels on: @@ -20,20 +19,20 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - id: has_local - uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 + uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 # v3.0.0 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a # v2.3.3 if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ 
github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a # v2.3.3 if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index c4294bea31..0b63dec318 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -1,4 +1,3 @@ ---- name: Lock on: @@ -18,7 +17,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1 with: # Number of days of inactivity before a closed issue is locked issue-inactive-days: 365 diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 35a4fcfec0..297ac2263a 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -1,4 +1,3 @@ ---- name: Add to Project on: @@ -14,7 +13,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@2e5cc851ca7162e9eb510e6da6a5c64022e606a7 + - uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 # v1.0.1 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index cfc9528718..bde3340fbc 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,4 +1,3 @@ ---- name: Stale on: @@ -34,12 +33,12 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + - uses: conda/actions/read-yaml@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 id: stale with: # Only issues with these labels are checked whether they are stale diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ee71e1a826..37fb169862 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,7 +18,7 @@ on: # no payload schedule: # https://crontab.guru/#37_18_*_*_* - - cron: 37 18 * * * + - cron: 37 18 * * * concurrency: # Concurrency group that uses the workflow name and PR number if available @@ -45,13 +45,13 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' - name: Filter Changes - uses: dorny/paths-filter@v3 + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 id: filter with: filters: | @@ -83,10 +83,10 @@ jobs: include: # minimum Python/conda combo - python-version: '3.8' - conda-version: 23.5.0 + conda-version: 23.7.0 test-type: serial - python-version: '3.8' - conda-version: 23.5.0 + conda-version: 23.7.0 test-type: parallel # maximum Python/conda combo - python-version: '3.12' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 @@ -110,19 +110,20 @@ 
jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -142,19 +143,20 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -180,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 @@ -188,19 +190,20 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-benchmark-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -226,7 +229,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@v2 + uses: CodSpeedHQ/action@f11c406b8c87cda176ff341ed4925bc98086f6d1 with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed @@ -259,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 @@ -268,13 +271,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github\condarc run-post: false # skip post cleanup @@ -283,7 +286,8 @@ jobs: run: choco install visualstudio2017-workload-vctools - name: Conda Install - run: conda install + run: > + conda install --yes --file tests\requirements.txt --file tests\requirements-${{ runner.os }}.txt @@ -305,20 +309,21 @@ jobs: - name: Run Tests # Windows is sensitive to long paths, using 
`--basetemp=${{ runner.temp }} to # keep the test directories shorter - run: pytest + run: > + pytest --cov=conda_build --basetemp=${{ runner.temp }} -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -332,6 +337,10 @@ jobs: needs: changes if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + # Old macOS needed for old SDK (see xcode step below) + # This is needed for some MACOSX_DEPLOYMENT_TARGET tests + # We could also install SDKs from a external provider in the future + # if we want to update this runner to a non-deprecated version runs-on: macos-11 defaults: run: @@ -357,7 +366,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 @@ -365,13 +374,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -380,7 +389,8 @@ jobs: run: sudo xcode-select --switch /Applications/Xcode_11.7.app - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -400,19 +410,20 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -434,17 +445,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ github.sha }}-all path: test-results-* - name: Test Summary - uses: test-summary/action@v2 + uses: test-summary/action@032c8a9cec6aaa3c20228112cae6ca10a3b29336 with: paths: test-results-*/test-report.xml @@ -456,7 +467,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Determine Success - uses: re-actors/alls-green@v1.2.2 + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe with: # permit jobs to be skipped if there are no 
code changes (see changes job) allowed-skips: ${{ toJSON(needs) }} @@ -482,9 +493,9 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-13 subdir: osx-64 - - runner: macos-14 + - runner: macos-14 # FUTURE: Use -latest subdir: osx-arm64 - runner: windows-latest subdir: win-64 @@ -493,7 +504,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: ref: ${{ github.ref }} clean: true @@ -501,7 +512,7 @@ jobs: # Explicitly use Python 3.12 since each of the OSes has a different default Python - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d with: python-version: '3.12' @@ -527,7 +538,7 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create & Upload - uses: conda/actions/canary-release@v24.2.0 + uses: conda/actions/canary-release@976289d0cfd85139701b26ddd133abdd025a7b5f # v24.5.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/upload.yml b/.github/workflows/upload.yml new file mode 100644 index 0000000000..475131a0a1 --- /dev/null +++ b/.github/workflows/upload.yml @@ -0,0 +1,55 @@ +name: Upload release + +on: + # https://docs.github.com/en/webhooks/webhook-events-and-payloads#release + release: + types: [published] + +concurrency: + # Concurrency group that uses the workflow name and PR number if available + # or commit SHA as a fallback. If a new build is triggered under that + # concurrency group while a previous build is running it will be canceled. + # Repeated pushes to a PR will cancel all previous builds, while multiple + # merges to main will not cancel. 
+  group: ${{ github.workflow }}-${{ github.ref_name || github.sha }}
+  cancel-in-progress: true
+
+permissions:
+  contents: write
+
+jobs:
+  # create source archive and upload it to the published release
+  # URL to the archive: https://github.com/conda/<repository>/releases/download/<tag>/<repository>-<tag>.tar.gz
+  upload:
+    if: '!github.event.repository.fork'
+    runs-on: ubuntu-latest
+    env:
+      ARCHIVE_NAME: ${{ github.event.repository.name }}-${{ github.ref_name }}
+    steps:
+      - name: Checkout Source
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+      - name: Create Release Directory
+        run: mkdir -p release
+
+      - name: Archive Source
+        run: >
+          git archive
+          --prefix="${{ env.ARCHIVE_NAME }}/"
+          --output="release/${{ env.ARCHIVE_NAME }}.tar.gz"
+          HEAD
+
+      - name: Compute Checksum
+        run: >
+          sha256sum "release/${{ env.ARCHIVE_NAME }}.tar.gz"
+          | awk '{print $1}'
+          > "release/${{ env.ARCHIVE_NAME }}.tar.gz.sha256sum"
+
+      - name: Upload Archive
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: >
+          gh release upload
+          --clobber "${{ github.ref_name }}"
+          --repo "${{ github.repository }}"
+          release/*
diff --git a/.mailmap b/.mailmap
index 02df1bf754..4644f183c2 100644
--- a/.mailmap
+++ b/.mailmap
@@ -141,6 +141,7 @@ Joseph Hunkeler
 Juan Lasheras jlas
 Julian Rüth
 Julien Schueller
+Justin Wood (Callek) <callek@gmail.com>
 Jürgen Gmach Jürgen Gmach
 Jędrzej Nowak Jedrzej Nowak
 Kai Tietz Kai Tietz <47363620+katietz@users.noreply.github.com>
@@ -259,6 +260,7 @@ Thomas A Caswell
 Thomas A Caswell
 Thomas Holder
 Thomas Kluyver
 Tim Snyder
+Tobias Fischer <info@tobiasfischer.info>
 Todd Tomashek tomashek
 Todd Tomashek todd.m.tomashek
 Tom Davidson
@@ -278,6 +280,7 @@ Wim Glenn wim glenn
 Wolf Vollprecht
 Wolfgang Ulmer
 Yann
+Yannik Tausch <dev@ytausch.de>
 Yoav Ram
 Yu Feng
 Zane Dufour zdog234
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 40c5acfabb..3d9674cf43 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -38,7 +38,11 @@ repos:
       - id: check-merge-conflict
       # sort requirements files
       - id: file-contents-sorter
-        files: ^tests/requirements.*\.txt
+        files: |
+          (?x)^(
+            docs/requirements.txt |
+            tests/requirements.*\.txt
+          )
         args: [--unique]
   # Python verification and formatting
   - repo: https://github.com/Lucas-C/pre-commit-hooks
@@ -54,13 +58,40 @@ repos:
       # auto format Python codes within docstrings
       - id: blacken-docs
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.5
+    rev: v0.4.9
     hooks:
       # lint & attempt to correct failures (e.g. pyupgrade)
       - id: ruff
        args: [--fix]
      # compatible replacement for black
      - id: ruff-format
+  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
+    rev: v2.13.0
+    hooks:
+      - id: pretty-format-toml
+        args: [--autofix, --trailing-commas]
+  - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt
+    rev: 0.2.3
+    hooks:
+      - id: yamlfmt
+        # ruamel.yaml doesn't line wrap correctly (?)
so set width to 1M to avoid issues + args: [--mapping=2, --offset=2, --sequence=4, --width=1000000, --implicit_start] + exclude: | + (?x)^( + .authors.yml | + conda_build/templates/npm.yaml | + conda_build/templates/setuptools.yaml | + docs/click/meta.yaml | + docs/source/user-guide/tutorials/meta.yaml | + recipe/meta.yaml | + tests/ + ) + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.5 + hooks: + # verify github syntaxes + - id: check-github-workflows + - id: check-dependabot - repo: meta # see https://pre-commit.com/#meta-hooks hooks: diff --git a/.readthedocs.yml b/.readthedocs.yml index abdbda6254..64f8768db5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,13 +1,13 @@ version: 2 build: - os: "ubuntu-22.04" + os: ubuntu-22.04 tools: - python: "3.11" + python: '3.11' python: install: - - requirements: docs/requirements.txt + - requirements: docs/requirements.txt # Build PDF, ePub and zipped HTML formats: diff --git a/AUTHORS.md b/AUTHORS.md index 969994f016..73bac74a05 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -120,6 +120,7 @@ Authors are sorted alphabetically. * Juan Lasheras * Julian Rüth * Julien Schueller +* Justin Wood (Callek) * Jürgen Gmach * Jędrzej Nowak * Kai Tietz @@ -217,6 +218,7 @@ Authors are sorted alphabetically. * Thomas Holder * Thomas Kluyver * Tim Snyder +* Tobias Fischer * Todd Tomashek * Tom Davidson * Tom Pollard @@ -235,6 +237,7 @@ Authors are sorted alphabetically. * Wolf Vollprecht * Wolfgang Ulmer * Yann +* Yannik Tausch * Yoav Ram * Yu Feng * Zane Dufour diff --git a/CHANGELOG.md b/CHANGELOG.md index 42d745f874..8c2a863ce4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,156 @@ [//]: # (current developments) +## 24.5.1 (2024-05-23) + +### Bug fixes + +* Fix issue with modifying a `frozendict` when specifying `outputs/files` in `meta.yaml`. (#5342 via #5345) +* Fix excessive memory use in `inspect_linkages_lief`. (#5267 via #5348) + +### Deprecations + +* Mark `conda_build.metadata.toposort` as deprecated. Use `conda_build.metadata.toposort_outputs` instead. (#5342 via #5345) +* Mark `conda_build.metadata.check_circular_dependencies` as deprecated. Use `conda_build.metadata._check_circular_dependencies` instead. (#5342 via #5345) + +### Contributors + +* @beeankha +* @kenodegard +* @mbargull + + + +## 24.5.0 (2024-05-06) + +### Enhancements + +* Only fetch `lfs` files for specific `git_ref`. (#5202) +* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. (#5237) +* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238) +* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. (#5252) +* Require `conda >=23.7.0`. (#5271) + +### Bug fixes + +* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271) + +### Deprecations + +* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) +* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222) +* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) +* Deprecate `conda_build.conda_interface.envs_dirs` constant. 
Use `conda.base.context.context.envs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222) +* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) +* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) +* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) +* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251) +* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) +* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) +* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) +* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251) +* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) +* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) +* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) +* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) +* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251) +* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) +* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. (#5251) +* Deprecate `conda_build.config.Config.override_channels`. Defer to `conda.base.context.context.channels` instead. (#5271, #5324) +* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_channels`. 
Use `conda.cli.helpers.add_parser_channels` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) +* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) +* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) +* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) +* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276) +* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) +* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) +* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) +* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276) +* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) +* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) +* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) +* Deprecate `conda_build.conda_interface.normalized_version`. Use `conda.models.version.normalized_version` instead. (#5276) +* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) +* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) +* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) +* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) +* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) +* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) +* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276) +* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276) +* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276) +* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276) +* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276) +* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. 
(#5276) +* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276) +* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276) +* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. (#5276) +* Deprecate `conda_build.variants.get_vars(loop_only)`. (#5280) +* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.config.noarch_python_build_age_default`. (#5298) +* Postpone `conda_build.index.channel_data` deprecation. (#5299) +* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299) +* Remove `conda_build.bdist_conda`. (#5299) +* Remove `conda_build.build.have_prefix_files`. (#5299) +* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. (#5299) +* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299) +* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299) +* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299) +* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299) +* Remove `conda_build.environ._load_all_json`. (#5299) +* Remove `conda_build.environ._load_json`. (#5299) +* Remove `conda_build.environ.cached_actions`. (#5299) +* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299) +* Remove `conda_build.environ.InvalidEnvironment`. (#5299) +* Remove `conda_build.environ.LINK_ACTION`. (#5299) +* Remove `conda_build.environ.PREFIX_ACTION`. (#5299) +* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299) +* Remove `conda_build.index.DummyExecutor`. (#5299) +* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299) +* Remove `conda_build.index.LOCKFILE_NAME`. (#5299) +* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299) + +### Other + +* Enable CodSpeed benchmarks for select tests. (#5233) + +### Contributors + +* @beeankha +* @conda-bot +* @jaimergp +* @Callek made their first contribution in https://github.com/conda/conda-build/pull/5252 +* @kenodegard +* @mbargull +* @Tobias-Fischer made their first contribution in https://github.com/conda/conda-build/pull/5202 +* @ytausch made their first contribution in https://github.com/conda/conda-build/pull/5214 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + ## 24.3.0 (2024-03-15) ### Enhancements diff --git a/RELEASE.md b/RELEASE.md index d45614facc..fed9bd3a81 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -14,18 +14,18 @@ # Release Process -> **Note:** +> [!NOTE] > Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. ## 1. Open the release issue and cut a release branch. 
(do this ~1 week prior to release) -> **Note:** +> [!NOTE] > The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access.
 <details>
-<summary>GitHub Issue Template</summary>
+<summary>Release Template</summary>
```markdown
### Summary

@@ -45,7 +45,8 @@ Placeholder for `{{ repo.name }} YY.M.x` release.
 [conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock
 [ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/

-#### The week before release week
+<details>
+<summary><h4>The week before release week</h4></summary>
 - [ ] Create release branch (named `YY.M.x`)
 - [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`)
@@ -53,10 +54,14 @@ Placeholder for `{{ repo.name }} YY.M.x` release.
 - [ ] Test release candidates

-#### Release week
+</details>
+
+<details>
+<summary><h4>Release week</h4></summary>
 - [ ] Create release PR (see [release process][process])
 - [ ] [Publish release][releases]
+- [ ] Merge `YY.M.x` back into `main`
 - [ ] Activate the `YY.M.x` branch on [ReadTheDocs][ReadTheDocs]
 - [ ] Feedstocks
   - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main]
@@ -72,22 +77,56 @@ Placeholder for `{{ repo.name }} YY.M.x` release.
   - [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse)
     - Summary
   - [ ] [Twitter](https://twitter.com/condaproject)
+
+</details>
 ```

 </details>

-> **Note:**
+If a patch release is necessary, reopen the original release issue and append the following template to the release issue summary.
+
+<details>
+<summary>Patch Release Template</summary>
+
+```markdown
+<details>
+<summary><h4>Patch YY.M.N</h4></summary>
+
+- [ ]
+- [ ] Create release PR (see [release process][process])
+- [ ] [Publish release][releases]
+- [ ] Merge `YY.M.x` back into `main`
+- [ ] Feedstocks
+  - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main]
+  - [ ] Bump version & update dependencies/tests in [conda-forge feedstock][conda-forge]
+- [ ] Hand off to the Anaconda packaging team
+
+</details>
+```
+
+</details>
+
+> [!NOTE]
 > The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label.

## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release)

Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue).

-## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date.
+## 3. Manually test canary build(s).
+
+### Canary Builds for Manual Testing
+
+Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing.
+
+> [!NOTE]
+> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label.
+
+## 4. Ensure `rever.xsh` and `news/TEMPLATE` are up to date.

These are synced from [`conda/infrastructure`][infrastructure].
 <details>
-<summary><h2>4. Run rever. (ideally done on the Monday of release week)</h2></summary>
+<summary><h2>5. Run rever. (ideally done on the Monday of release week)</h2></summary>
Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc.

@@ -119,9 +158,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
    (rever) $ git checkout -b changelog-YY.M.[$patch_number]
    ```

-2. Run `rever --activities authors`:
+2. Run `rever --activities authors <VERSION>`:

-   > **Note:**
+   > **Note:**
   > Include `--force` when re-running any rever commands for the same `<VERSION>`, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version).

   ```bash
@@ -166,7 +205,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
      (rever) $ git commit -m "Update .authors.yml"
      ```

-   - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running:
+   - Rerun `rever --activities authors --force <VERSION>` and finally check that your `.mailmap` is correct by running:

      ```bash
      git shortlog -se
@@ -194,7 +233,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
   - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch:

     ```bash
-    (rever) $ git cherry -v main
+    (rever) $ git cherry -v
     + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
     + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
     ```

@@ -202,7 +241,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
 4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary.

-   > **Note:**
+   > **Note:**
   > We've found it useful to name news snippets with the following format: `-`.
   >
   > We've also found that we like to include the PR #s inline with the text itself, e.g.:
@@ -213,7 +252,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
   > * Add `win-arm64` as a known platform (subdir). (#11778)
   > ```

-   - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release.
+   - You can utilize [GitHub's compare view][compare] to review what changes are to be included in this release. Make sure you compare the current release branch against the previous one (e.g., `24.5.x` would be compared against `24.3.x`)

   - Add a new news snippet for any PRs of importance that are missing.

@@ -227,7 +266,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
   - After completing this, you will have at most three commits on your release branch:

     ```bash
-    (rever) $ git cherry -v main
+    (rever) $ git cherry -v
     + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml
     + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap
     + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news
     ```

5.
Run `rever --activities changelog`: - > **Note:** + > **Note:** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. ```bash @@ -254,7 +293,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most three commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -269,7 +308,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most five commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -291,7 +330,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - After completing this, you will have at most six commits on your release branch: ```bash - (rever) $ git cherry -v main + (rever) $ git cherry -v + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news @@ -325,7 +364,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note:** + > **Note:** > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | @@ -336,22 +375,13 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 5. Wait for review and approval of release PR. - -## 6. Manually test canary build(s). - -### Canary Builds for Manual Testing - -Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. - -> **Note:** -> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. +## 6. Wait for review and approval of release PR. ## 7. Merge release PR and publish release. To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. -> **Note:** +> [!NOTE] > Release notes can be drafted and saved ahead of time. ## 8. Merge/cherry pick the release branch over to the `main` branch. @@ -367,19 +397,19 @@ To publish the release, go to the project's release page (e.g., https://github.c 4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". -> **Note:** +> [!NOTE] > Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches. 5. Review and merge the pull request the same as any code change pull request. -> **Note:** +> [!NOTE] > The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action. ## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`. -> **Note:** +> [!NOTE] > Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): > - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax) > - Add any changes via commits to that new branch @@ -392,7 +422,7 @@ To publish the release, go to the project's release page (e.g., https://github.c ## 10. Hand off to Anaconda's packaging team. -> **Note:** +> [!NOTE] > This step should NOT be done past Thursday morning EST; please start the process on a Monday, Tuesday, or Wednesday instead in order to avoid any potential debugging sessions over evenings or weekends.
diff --git a/conda_build/_link.py b/conda_build/_link.py index af841c0275..e8984fcd37 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -26,7 +26,7 @@ SITE_PACKAGES = "Lib/site-packages" else: BIN_DIR = join(PREFIX, "bin") - SITE_PACKAGES = "lib/python%s/site-packages" % sys.version[:3] + SITE_PACKAGES = f"lib/python{sys.version[:3]}/site-packages" # the list of these files is going to be store in info/_files FILES = [] @@ -110,20 +110,20 @@ def create_script(fn): dst = join(BIN_DIR, fn) if sys.platform == "win32": shutil.copy2(src, dst + "-script.py") - FILES.append("Scripts/%s-script.py" % fn) + FILES.append(f"Scripts/{fn}-script.py") shutil.copy2( join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" ) - FILES.append("Scripts/%s.exe" % fn) + FILES.append(f"Scripts/{fn}.exe") else: with open(src) as fi: data = fi.read() with open(dst, "w") as fo: - shebang = replace_long_shebang("#!%s\n" % normpath(sys.executable)) + shebang = replace_long_shebang(f"#!{normpath(sys.executable)}\n") fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append("bin/%s" % fn) + FILES.append(f"bin/{fn}") def create_scripts(files): @@ -140,9 +140,9 @@ def main(): link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, "conda-meta", "%s.files" % DATA["dist"]), "w") as fo: + with open(join(PREFIX, "conda-meta", "{}.files".format(DATA["dist"])), "w") as fo: for f in FILES: - fo.write("%s\n" % f) + fo.write(f"{f}\n") if __name__ == "__main__": diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py index 9180c404fc..b2d8d0731b 100644 --- a/conda_build/_load_setup_py_data.py +++ b/conda_build/_load_setup_py_data.py @@ -111,7 +111,7 @@ def setup(**kw): exec(code, ns, ns) else: if not permit_undefined_jinja: - raise TypeError(f"{setup_file} is not a file that can be read") + raise TypeError("%s is not a file that can be read" % setup_file) # noqa: UP031 sys.modules["versioneer"] = versioneer diff --git a/conda_build/api.py b/conda_build/api.py index 2d4e3ef567..eaea8f50b8 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -17,11 +17,12 @@ import sys from os.path import dirname, expanduser, join from pathlib import Path +from typing import TYPE_CHECKING, Iterable # make the Config class available in the api namespace from .config import DEFAULT_PREFIX_LENGTH as _prefix_length from .config import Config, get_channel_urls, get_or_merge_config -from .deprecations import deprecated +from .metadata import MetaData, MetaDataTuple from .utils import ( CONDA_PACKAGE_EXTENSIONS, LoggingContext, @@ -32,26 +33,26 @@ on_win, ) +if TYPE_CHECKING: + from typing import Any, Literal + + StatsDict = dict[str, Any] + def render( - recipe_path, - config=None, - variants=None, - permit_unsatisfiable_variants=True, - finalize=True, - bypass_env_check=False, + recipe_path: str | os.PathLike | Path, + config: Config | None = None, + variants: dict[str, Any] | None = None, + permit_unsatisfiable_variants: bool = True, + finalize: bool = True, + bypass_env_check: bool = False, **kwargs, -): +) -> list[MetaDataTuple]: """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2 templates evaluated. 
- Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" - from collections import OrderedDict - - from conda.exceptions import NoPackagesFoundError - - from .exceptions import DependencyNeedsBuildingError - from .render import finalize_metadata, render_recipe + Returns a list of (metadata, need_download, need_reparse in env) tuples""" + from .render import render_metadata_tuples, render_recipe config = get_or_merge_config(config, **kwargs) @@ -63,53 +64,20 @@ def render( variants=variants, permit_unsatisfiable_variants=permit_unsatisfiable_variants, ) - output_metas = OrderedDict() - for meta, download, render_in_env in metadata_tuples: - if not meta.skip() or not config.trim_skip: - for od, om in meta.get_output_metadata_set( - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - permit_undefined_jinja=not finalize, - bypass_env_check=bypass_env_check, - ): - if not om.skip() or not config.trim_skip: - if "type" not in od or od["type"] == "conda": - if finalize and not om.final: - try: - om = finalize_metadata( - om, - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - ) - except (DependencyNeedsBuildingError, NoPackagesFoundError): - if not permit_unsatisfiable_variants: - raise - - # remove outputs section from output objects for simplicity - if not om.path and (outputs := om.get_section("outputs")): - om.parent_outputs = outputs - del om.meta["outputs"] - - output_metas[ - om.dist(), - om.config.variant.get("target_platform"), - tuple( - (var, om.config.variant[var]) - for var in om.get_used_vars() - ), - ] = (om, download, render_in_env) - else: - output_metas[ - f"{om.type}: {om.name()}", - om.config.variant.get("target_platform"), - tuple( - (var, om.config.variant[var]) - for var in om.get_used_vars() - ), - ] = (om, download, render_in_env) - - return list(output_metas.values()) - - -def output_yaml(metadata, file_path=None, suppress_outputs=False): + return render_metadata_tuples( + metadata_tuples, + config=config, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + finalize=finalize, + bypass_env_check=bypass_env_check, + ) + + +def output_yaml( + metadata: MetaData, + file_path: str | os.PathLike | Path | None = None, + suppress_outputs: bool = False, +) -> str: """Save a rendered recipe in its final form to the path given by file_path""" from .render import output_yaml @@ -117,12 +85,16 @@ def output_yaml(metadata, file_path=None, suppress_outputs=False): def get_output_file_paths( - recipe_path_or_metadata, - no_download_source=False, - config=None, - variants=None, + recipe_path_or_metadata: str + | os.PathLike + | Path + | MetaData + | Iterable[MetaDataTuple], + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, **kwargs, -): +) -> list[str]: """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, @@ -132,22 +104,9 @@ def get_output_file_paths( config = get_or_merge_config(config, **kwargs) - if hasattr(recipe_path_or_metadata, "__iter__") and not isinstance( - recipe_path_or_metadata, str - ): - list_of_metas = [ - hasattr(item[0], "config") - for item in recipe_path_or_metadata - if len(item) == 3 - ] - - if list_of_metas and all(list_of_metas): - metadata = recipe_path_or_metadata - else: - raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}") - elif isinstance(recipe_path_or_metadata, (str, Path)): + if isinstance(recipe_path_or_metadata, 
(str, Path)): # first, render the parent recipe (potentially multiple outputs, depending on variants). - metadata = render( + metadata_tuples = render( recipe_path_or_metadata, no_download_source=no_download_source, variants=variants, @@ -155,44 +114,43 @@ def get_output_file_paths( finalize=True, **kwargs, ) + + elif isinstance(recipe_path_or_metadata, MetaData): + metadata_tuples = [MetaDataTuple(recipe_path_or_metadata, False, False)] + + elif isinstance(recipe_path_or_metadata, Iterable) and all( + isinstance(recipe, MetaDataTuple) + and isinstance(recipe.metadata, MetaData) + and isinstance(recipe.need_download, bool) + and isinstance(recipe.need_reparse, bool) + for recipe in recipe_path_or_metadata + ): + metadata_tuples = recipe_path_or_metadata + else: - assert hasattr( - recipe_path_or_metadata, "config" - ), f"Expecting metadata object - got {recipe_path_or_metadata}" - metadata = [(recipe_path_or_metadata, None, None)] - # Next, loop over outputs that each metadata defines + raise ValueError( + f"Unknown input type: {type(recipe_path_or_metadata)}; expecting " + "PathLike object, MetaData object, or a list of tuples containing " + "(MetaData, bool, bool)." + ) + + # Next, loop over outputs that each metadata defines outs = [] - for m, _, _ in metadata: - if m.skip(): - outs.append(get_skip_message(m)) + for metadata, _, _ in metadata_tuples: + if metadata.skip(): + outs.append(get_skip_message(metadata)) else: - outs.append(bldpkg_path(m)) - return sorted(list(set(outs))) + outs.append(bldpkg_path(metadata)) + return sorted(set(outs)) -@deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.") -def get_output_file_path( - recipe_path_or_metadata, - no_download_source=False, - config=None, - variants=None, +def check( + recipe_path: str | os.PathLike | Path, + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, **kwargs, -): - """Get output file paths for any packages that would be created by a recipe - - Both split packages (recipes with more than one output) and build matrices, - created with variants, contribute to the list of file paths here. - """ - return get_output_file_paths( - recipe_path_or_metadata, - no_download_source=no_download_source, - config=config, - variants=variants, - **kwargs, - ) - - -def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs): +) -> bool: """Check validity of input recipe path Verifies that recipe can be completely rendered, and that fields of the rendered recipe are @@ -209,16 +167,16 @@ def check(recipe_path, no_download_source=False, config=None, variants=None, **k def build( - recipe_paths_or_metadata, - post=None, - need_source_download=True, - build_only=False, - notest=False, - config=None, - variants=None, - stats=None, + recipe_paths_or_metadata: str | os.PathLike | Path | MetaData, + post: bool | None = None, + need_source_download: bool = True, + build_only: bool = False, + notest: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + stats: StatsDict | None = None, **kwargs, -): +) -> list[str]: """Run the build step. If recipe paths are provided, renders recipe before building. @@ -230,16 +188,15 @@ def build( "other arguments (config) by keyword." 
) - recipes = [] + recipes: list[str | MetaData] = [] for recipe in ensure_list(recipe_paths_or_metadata): - if isinstance(recipe, str): + if isinstance(recipe, (str, os.PathLike, Path)): for recipe in expand_globs(recipe, os.getcwd()): try: - recipe = find_recipe(recipe) + recipes.append(find_recipe(recipe)) except OSError: continue - recipes.append(recipe) - elif hasattr(recipe, "config"): + elif isinstance(recipe, MetaData): recipes.append(recipe) else: raise ValueError(f"Recipe passed was unrecognized object: {recipe}") @@ -263,12 +220,12 @@ def build( def test( - recipedir_or_package_or_metadata, - move_broken=True, - config=None, - stats=None, + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + move_broken: bool = True, + config: Config | None = None, + stats: StatsDict | None = None, **kwargs, -): +) -> bool: """Run tests on either packages (.tar.bz2 or extracted) or recipe folders For a recipe folder, it renders the recipe enough to know what package to download, and obtains @@ -282,24 +239,22 @@ def test( # if people don't pass in an object to capture stats in, they won't get them returned. # We'll still track them, though. - if not stats: - stats = {} + stats = stats or {} with config: # This will create a new local build folder if and only if config # doesn't already have one. What this means is that if we're # running a test immediately after build, we use the one that the # build already provided - test_result = test( + return test( recipedir_or_package_or_metadata, config=config, move_broken=move_broken, stats=stats, ) - return test_result -def list_skeletons(): +def list_skeletons() -> list[str]: """List available skeletons for generating conda recipes from external sources. The returned list is generally the names of supported repositories (pypi, cran, etc.) @@ -315,8 +270,14 @@ def list_skeletons(): def skeletonize( - packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs -): + packages: str | Iterable[str], + repo: Literal["cpan", "cran", "luarocks", "pypi", "rpm"], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + config: Config | None = None, + **kwargs, +) -> None: """Generate a conda recipe from an external repo. Translates metadata from external sources into expected conda recipe format.""" @@ -355,7 +316,7 @@ def skeletonize( if arg in kwargs: del kwargs[arg] with config: - skeleton_return = module.skeletonize( + module.skeletonize( packages, output_dir=output_dir, version=version, @@ -363,42 +324,42 @@ def skeletonize( config=config, **kwargs, ) - return skeleton_return def develop( - recipe_dir, - prefix=sys.prefix, - no_pth_file=False, - build_ext=False, - clean=False, - uninstall=False, -): + recipe_dir: str | Iterable[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: """Install a Python package in 'development mode'. 
This works by creating a conda.pth file in site-packages.""" from .develop import execute recipe_dir = ensure_list(recipe_dir) - return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) + execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) def convert( - package_file, - output_dir=".", - show_imports=False, - platforms=None, - force=False, - dependencies=None, - verbose=False, - quiet=True, - dry_run=False, -): + package_file: str, + output_dir: str = ".", + show_imports: bool = False, + platforms: str | Iterable[str] | None = None, + force: bool = False, + dependencies: str | Iterable[str] | None = None, + verbose: bool = False, + quiet: bool = True, + dry_run: bool = False, +) -> None: """Convert changes a package from one platform to another. It applies only to things that are portable, such as pure python, or header-only C/C++ libraries.""" from .convert import conda_convert platforms = ensure_list(platforms) + dependencies = ensure_list(dependencies) if package_file.endswith("tar.bz2"): return conda_convert( package_file, @@ -416,10 +377,10 @@ def convert( "Conversion from wheel packages is not implemented yet, stay tuned." ) else: - raise RuntimeError("cannot convert: %s" % package_file) + raise RuntimeError(f"cannot convert: {package_file}") -def test_installable(channel="defaults"): +def test_installable(channel: str = "defaults") -> bool: """Check to make sure that packages in channel are installable. This is a consistency check for the channel.""" from .inspect_pkg import test_installable @@ -428,14 +389,14 @@ def test_installable(channel="defaults"): def inspect_linkages( - packages, - prefix=sys.prefix, - untracked=False, - all_packages=False, - show_files=False, - groupby="package", - sysroot="", -): + packages: str | Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package", "dependency"] = "package", + sysroot: str = "", +) -> str: from .inspect_pkg import inspect_linkages packages = ensure_list(packages) @@ -575,7 +536,7 @@ def debug( config.channel_urls = get_channel_urls(kwargs) - metadata_tuples: list[tuple[MetaData, bool, bool]] = [] + metadata_tuples: list[MetaDataTuple] = [] best_link_source_method = "skip" if isinstance(recipe_or_package_path_or_metadata_tuples, str): @@ -583,7 +544,7 @@ def debug( for metadata_conda_debug in metadatas_conda_debug: best_link_source_method = "symlink" metadata = MetaData(metadata_conda_debug, config, {}) - metadata_tuples.append((metadata, False, True)) + metadata_tuples.append(MetaDataTuple(metadata, False, True)) else: ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] if not ext or not any(ext in _ for _ in CONDA_PACKAGE_EXTENSIONS): diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py deleted file mode 100644 index 6e965c409d..0000000000 --- a/conda_build/bdist_conda.py +++ /dev/null @@ -1,297 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import configparser -import sys -import time -from collections import defaultdict - -from setuptools.command.install import install -from setuptools.dist import Distribution -from setuptools.errors import BaseError, OptionError - -from . 
import api -from .build import handle_anaconda_upload -from .conda_interface import StringIO, spec_from_line -from .config import Config -from .deprecations import deprecated -from .metadata import MetaData -from .skeletons import pypi - -deprecated.module("24.3", "24.5") - - -class GetoptError(BaseError): - """The option table provided to 'fancy_getopt()' is bogus.""" - - -class CondaDistribution(Distribution): - """ - Distribution subclass that supports bdist_conda options - - This class is required if you want to pass any bdist_conda specific - options to setup(). To use, set distclass=CondaDistribution in setup(). - - Options that can be passed to setup() (must include - distclass=CondaDistribution): - - - conda_buildnum: The build number. Defaults to 0. Can be overridden on - the command line with the --buildnum flag. - - - conda_buildstr: The build string. Default is generated automatically - from the Python version, NumPy version if relevant, and the build - number, like py34_0. - - - conda_import_tests: Whether to automatically run import tests. The - default is True, which runs import tests for the all the modules in - "packages". Also allowed are False, which runs no tests, or a list of - module names to be tested on import. - - - conda_command_tests: Command line tests to run. Default is True, which - runs ``command --help`` for each ``command`` in the console_scripts and - gui_scripts entry_points. Also allowed are False, which doesn't run any - command tests, or a list of command tests to run. - - - conda_binary_relocation: Whether binary files should be made relocatable - (using install_name_tool on OS X or patchelf on Linux). The default is - True. See the "making packages relocatable" section in the conda build - documentation for more information on this. - - - conda_preserve_egg_dir: Whether to preserve the egg directory as - installed by setuptools. The default is True if the package depends on - setuptools or has a setuptools entry_points other than console_scripts - and gui_scripts. - - Command line options: - - --buildnum: Set the build number. Defaults to the conda_buildnum passed to - setup(), or 0. Overrides any conda_buildnum passed to setup(). - - """ - - # Unfortunately, there's no way to warn the users that they need to use - # distclass=CondaDistribution when they try to use a conda option to - # setup(). Distribution.__init__ will just print a warning when it sees an - # attr it doesn't recognize, and then it is discarded. - - # attr: default - conda_attrs = { - "conda_buildnum": 0, - "conda_buildstr": None, - "conda_import_tests": True, - "conda_command_tests": True, - "conda_binary_relocation": True, - "conda_preserve_egg_dir": None, - "conda_features": None, - "conda_track_features": None, - } - - def __init__(self, attrs=None): - given_attrs = {} - # We need to remove the attrs so that Distribution.__init__ doesn't - # warn about them. 
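For reference, the interface this deletion retires was consumed from a project's setup.py roughly as sketched below, following the `CondaDistribution` docstring above; the package name and option values are hypothetical, and the exact command registration varied across setuptools versions.

    # Sketch of the retired bdist_conda hook (names and values hypothetical).
    from setuptools import setup
    from conda_build.bdist_conda import CondaDistribution

    setup(
        name="mypkg",
        version="1.0",
        distclass=CondaDistribution,  # required to pass the conda_* options below
        conda_buildnum=1,             # overridable on the command line via --buildnum
        conda_import_tests=True,      # import-test all the modules in "packages"
    )
    # invoked roughly as: python setup.py bdist_conda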
- if attrs: - for attr in self.conda_attrs: - if attr in attrs: - given_attrs[attr] = attrs.pop(attr) - - super().__init__(attrs) - - for attr in self.conda_attrs: - setattr(self.metadata, attr, given_attrs.get(attr, self.conda_attrs[attr])) - - -class bdist_conda(install): - description = "create a conda package" - config = Config( - build_id="bdist_conda" + "_" + str(int(time.time() * 1000)), build_is_host=True - ) - - def initialize_options(self): - super().initialize_options() - self.buildnum = None - self.anaconda_upload = False - - def finalize_options(self): - opt_dict = self.distribution.get_option_dict("install") - if self.prefix: - raise OptionError("--prefix is not allowed") - opt_dict["prefix"] = ("bdist_conda", self.config.host_prefix) - super().finalize_options() - - def run(self): - # Make sure the metadata has the conda attributes, even if the - # distclass isn't CondaDistribution. We primarily do this to simplify - # the code below. - - metadata = self.distribution.metadata - - for attr in CondaDistribution.conda_attrs: - if not hasattr(metadata, attr): - setattr(metadata, attr, CondaDistribution.conda_attrs[attr]) - - # The command line takes precedence - if self.buildnum is not None: - metadata.conda_buildnum = self.buildnum - - d = defaultdict(dict) - # PyPI allows uppercase letters but conda does not, so we fix the - # name here. - d["package"]["name"] = metadata.name.lower() - d["package"]["version"] = metadata.version - d["build"]["number"] = metadata.conda_buildnum - - # MetaData does the auto stuff if the build string is None - d["build"]["string"] = metadata.conda_buildstr - - d["build"]["binary_relocation"] = metadata.conda_binary_relocation - d["build"]["preserve_egg_dir"] = metadata.conda_preserve_egg_dir - d["build"]["features"] = metadata.conda_features - d["build"]["track_features"] = metadata.conda_track_features - - # XXX: I'm not really sure if it is correct to combine requires - # and install_requires - d["requirements"]["run"] = d["requirements"]["build"] = [ - spec_from_line(i) - for i in (metadata.requires or []) - + (getattr(self.distribution, "install_requires", []) or []) - ] + ["python"] - if hasattr(self.distribution, "tests_require"): - # A lot of packages use extras_require['test'], but - # tests_require is the one that is officially supported by - # setuptools. - d["test"]["requires"] = [ - spec_from_line(i) for i in self.distribution.tests_require or [] - ] - - d["about"]["home"] = metadata.url - # Don't worry about classifiers. This isn't skeleton pypi. We - # don't need to make this work with random stuff in the wild. If - # someone writes their setup.py wrong and this doesn't work, it's - # their fault. 
- d["about"]["license"] = metadata.license - d["about"]["summary"] = metadata.description - - # This is similar logic from conda skeleton pypi - entry_points = getattr(self.distribution, "entry_points", []) - if entry_points: - if isinstance(entry_points, str): - # makes sure it is left-shifted - newstr = "\n".join(x.strip() for x in entry_points.splitlines()) - c = configparser.ConfigParser() - entry_points = {} - try: - c.read_file(StringIO(newstr)) - except Exception as err: - # This seems to be the best error here - raise GetoptError( - "ERROR: entry-points not understood: " - + str(err) - + "\nThe string was" - + newstr - ) - else: - for section in c.sections(): - if section in ["console_scripts", "gui_scripts"]: - value = [ - f"{option}={c.get(section, option)}" - for option in c.options(section) - ] - entry_points[section] = value - else: - # Make sure setuptools is added as a dependency below - entry_points[section] = None - - if not isinstance(entry_points, dict): - raise GetoptError( - "ERROR: Could not add entry points. They were:\n" + entry_points - ) - else: - rs = entry_points.get("scripts", []) - cs = entry_points.get("console_scripts", []) - gs = entry_points.get("gui_scripts", []) - # We have *other* kinds of entry-points so we need - # setuptools at run-time - if not rs and not cs and not gs and len(entry_points) > 1: - d["requirements"]["run"].append("setuptools") - d["requirements"]["build"].append("setuptools") - entry_list = rs + cs + gs - if gs and self.config.platform == "osx": - d["build"]["osx_is_app"] = True - if len(cs + gs) != 0: - d["build"]["entry_points"] = entry_list - if metadata.conda_command_tests is True: - d["test"]["commands"] = list( - map(str, pypi.make_entry_tests(entry_list)) - ) - - if "setuptools" in d["requirements"]["run"]: - d["build"]["preserve_egg_dir"] = True - - if metadata.conda_import_tests: - if metadata.conda_import_tests is True: - d["test"]["imports"] = (self.distribution.packages or []) + ( - self.distribution.py_modules or [] - ) - else: - d["test"]["imports"] = metadata.conda_import_tests - - if metadata.conda_command_tests and not isinstance( - metadata.conda_command_tests, bool - ): - d["test"]["commands"] = list(map(str, metadata.conda_command_tests)) - - d = dict(d) - self.config.keep_old_work = True - m = MetaData.fromdict(d, config=self.config) - # Shouldn't fail, but do you really trust the code above? - m.check_fields() - m.config.set_build_id = False - m.config.variant["python"] = ".".join( - (str(sys.version_info.major), str(sys.version_info.minor)) - ) - api.build(m, build_only=True, notest=True) - self.config = m.config - # prevent changes in the build ID from here, so that we're working in the same prefix - # Do the install - super().run() - output = api.build(m, post=True, notest=True)[0] - api.test(output, config=m.config) - m.config.clean() - if self.anaconda_upload: - - class args: - anaconda_upload = self.anaconda_upload - - handle_anaconda_upload(output, args) - else: - no_upload_message = ( - """\ -# If you want to upload this package to anaconda.org later, type: -# -# $ anaconda upload %s -""" - % output - ) - print(no_upload_message) - - -# Distutils looks for user_options on the class (not instance). It also -# requires that it is an instance of list. So we do this here because we want -# to keep the options from the superclass (and because I don't feel like -# making a metaclass just to make this work). 
- -bdist_conda.user_options.extend( - [ - ( - "buildnum=", - None, - """The build number of - the conda package. Defaults to 0, or the conda_buildnum specified in the - setup() function. The command line flag overrides the option to - setup().""", - ), - ("anaconda-upload", None, ("""Upload the finished package to anaconda.org""")), - ] -) - -bdist_conda.boolean_options.extend(["anaconda-upload"]) diff --git a/conda_build/build.py b/conda_build/build.py index d0c939d9e8..42fe0d5c21 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -21,32 +21,34 @@ from collections import OrderedDict, deque from os.path import dirname, isdir, isfile, islink, join from pathlib import Path +from typing import TYPE_CHECKING import conda_package_handling.api import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version +from conda.auxlib.entity import EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER from conda.base.context import context, reset_context from conda.core.prefix_data import PrefixData from conda.exceptions import CondaError, NoPackagesFoundError, UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory from conda.models.channel import Channel +from conda.models.enums import FileMode, PathType +from conda.models.match_spec import MatchSpec +from conda.utils import url_path from . import __version__ as conda_build_version from . import environ, noarch_python, source, tarcheck, utils -from .conda_interface import ( - EntityEncoder, - FileMode, - MatchSpec, - PathType, - TemporaryDirectory, - env_path_backup_var_exists, - prefix_placeholder, - url_path, -) from .config import Config from .create_test import create_all_test_files from .deprecations import deprecated -from .exceptions import CondaBuildException, DependencyNeedsBuildingError +from .exceptions import ( + BuildScriptException, + CondaBuildException, + CondaBuildUserError, + DependencyNeedsBuildingError, +) from .index import _delegated_update_index, get_build_index from .metadata import FIELDS, MetaData from .os_utils import external @@ -63,6 +65,7 @@ execute_download_actions, expand_outputs, output_yaml, + render_metadata_tuples, render_recipe, reparse, try_download, @@ -89,6 +92,9 @@ if on_win: from . import windows +if TYPE_CHECKING: + from typing import Any, Iterable + if "bsd" in sys.platform: shell_path = "/bin/sh" elif utils.on_win: @@ -184,121 +190,6 @@ def prefix_replacement_excluded(path): return False -@deprecated("24.3", "24.5") -def have_prefix_files(files, prefix): - """ - Yields files that contain the current prefix in them, and modifies them - to replace the prefix with a placeholder. 
- - :param files: Filenames to check for instances of prefix - :type files: list of tuples containing strings (prefix, mode, filename) - """ - - prefix_bytes = prefix.encode(utils.codec) - prefix_placeholder_bytes = prefix_placeholder.encode(utils.codec) - searches = {prefix: prefix_bytes} - if utils.on_win: - # some windows libraries use unix-style path separators - forward_slash_prefix = prefix.replace("\\", "/") - forward_slash_prefix_bytes = forward_slash_prefix.encode(utils.codec) - searches[forward_slash_prefix] = forward_slash_prefix_bytes - # some windows libraries have double backslashes as escaping - double_backslash_prefix = prefix.replace("\\", "\\\\") - double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) - searches[double_backslash_prefix] = double_backslash_prefix_bytes - searches[prefix_placeholder] = prefix_placeholder_bytes - min_prefix = min(len(k) for k, _ in searches.items()) - - # mm.find is incredibly slow, so ripgrep is used to pre-filter the list. - # Really, ripgrep could be used on its own with a bit more work though. - rg_matches = [] - prefix_len = len(prefix) + 1 - rg = external.find_executable("rg") - if rg: - for rep_prefix, _ in searches.items(): - try: - args = [ - rg, - "--unrestricted", - "--no-heading", - "--with-filename", - "--files-with-matches", - "--fixed-strings", - "--text", - rep_prefix, - prefix, - ] - matches = subprocess.check_output(args) - rg_matches.extend( - matches.decode("utf-8").replace("\r\n", "\n").splitlines() - ) - except subprocess.CalledProcessError: - continue - # HACK: this is basically os.path.relpath, just simpler and faster - # NOTE: path normalization needs to be in sync with create_info_files - if utils.on_win: - rg_matches = [ - rg_match.replace("\\", "/")[prefix_len:] for rg_match in rg_matches - ] - else: - rg_matches = [rg_match[prefix_len:] for rg_match in rg_matches] - else: - print( - "WARNING: Detecting which files contain PREFIX is slow, installing ripgrep makes it faster." - " 'conda install ripgrep'" - ) - - for f in files: - if os.path.isabs(f): - f = f[prefix_len:] - if rg_matches and f not in rg_matches: - continue - path = os.path.join(prefix, f) - if prefix_replacement_excluded(path): - continue - - # dont try to mmap an empty file, and no point checking files that are smaller - # than the smallest prefix. - if os.stat(path).st_size < min_prefix: - continue - - try: - fi = open(path, "rb+") - except OSError: - log = utils.get_logger(__name__) - log.warn("failed to open %s for detecting prefix. Skipping it." % f) - continue - try: - mm = utils.mmap_mmap( - fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE - ) - except OSError: - mm = fi.read() - - mode = "binary" if mm.find(b"\x00") != -1 else "text" - if mode == "text": - # TODO :: Ask why we do not do this on Windows too?! - if not utils.on_win and mm.find(prefix_bytes) != -1: - # Use the placeholder for maximal backwards compatibility, and - # to minimize the occurrences of usernames appearing in built - # packages. 
- data = mm[:] - mm.close() - fi.close() - rewrite_file_with_new_prefix( - path, data, prefix_bytes, prefix_placeholder_bytes - ) - fi = open(path, "rb+") - mm = utils.mmap_mmap( - fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE - ) - for rep_prefix, rep_prefix_bytes in searches.items(): - if mm.find(rep_prefix_bytes) != -1: - yield (rep_prefix, mode, f) - mm.close() - fi.close() - - # It may be that when using the list form of passing args to subprocess # what matters is the number of arguments rather than the accumulated # string length. In that case, len(l[i]) should become 1, and we should @@ -885,12 +776,12 @@ def copy_recipe(m): yaml.dump(m.config.variant, f) -def copy_readme(m): +def copy_readme(m: MetaData): readme = m.get_value("about/readme") if readme: src = join(m.config.work_dir, readme) if not isfile(src): - sys.exit("Error: no readme file: %s" % readme) + raise CondaBuildUserError(f"`about/readme` file ({readme}) doesn't exist") dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: @@ -1035,7 +926,7 @@ def copy_test_source_files(m, destination): ) except OSError as e: log = utils.get_logger(__name__) - log.warn( + log.warning( f"Failed to copy {f} into test files. Error was: {str(e)}" ) for ext in ".pyc", ".pyo": @@ -1149,13 +1040,13 @@ def get_files_with_prefix(m, replacements, files_in, prefix): prefix[0].upper() + prefix[1:], prefix[0].lower() + prefix[1:], prefix_u, - prefix_placeholder.replace("\\", "'"), - prefix_placeholder.replace("/", "\\"), + PREFIX_PLACEHOLDER.replace("\\", "'"), + PREFIX_PLACEHOLDER.replace("/", "\\"), ] # some python/json files store an escaped version of prefix pfx_variants.extend([pfx.replace("\\", "\\\\") for pfx in pfx_variants]) else: - pfx_variants = (prefix, prefix_placeholder) + pfx_variants = (prefix, PREFIX_PLACEHOLDER) # replacing \ with \\ here is for regex escaping re_test = ( b"(" @@ -1303,7 +1194,7 @@ def record_prefix_files(m, files_with_prefix): if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: - ignored_because = " (not in build/%s_has_prefix_files)" % (mode) + ignored_because = f" (not in build/{mode}_has_prefix_files)" print( "{fn} ({mode}): {action}{reason}".format( @@ -1320,10 +1211,10 @@ def record_prefix_files(m, files_with_prefix): # make sure we found all of the files expected errstr = "" for f in text_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f + errstr += f"Did not detect hard-coded path in {f} from has_prefix_files\n" for f in binary_has_prefix_files: errstr += ( - "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + f"Did not detect hard-coded path in {f} from binary_has_prefix_files\n" ) if errstr: raise RuntimeError(errstr) @@ -1392,7 +1283,7 @@ def write_about_json(m): with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value("about/%s" % key) + value = m.get_value(f"about/{key}") if value: d[key] = value if default is list: @@ -1448,7 +1339,7 @@ def write_info_json(m: MetaData): "# $ conda create --name --file " ) for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): - fo.write("%s\n" % "=".join(dist.split())) + fo.write("{}\n".format("=".join(dist.split()))) mode_dict = {"mode": "w", "encoding": "utf-8"} with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: @@ -1471,10 
+1362,10 @@ def get_entry_point_script_names(entry_point_scripts): for entry_point in entry_point_scripts: cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: - scripts.append("Scripts\\%s-script.py" % cmd) - scripts.append("Scripts\\%s.exe" % cmd) + scripts.append(f"Scripts\\{cmd}-script.py") + scripts.append(f"Scripts\\{cmd}.exe") else: - scripts.append("bin/%s" % cmd) + scripts.append(f"bin/{cmd}") return scripts @@ -1636,7 +1527,7 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn("file %s is a symlink with no target" % path, UserWarning) + warnings.warn(f"file {path} is a symlink with no target", UserWarning) return 0 return 0 @@ -1803,7 +1694,14 @@ def post_process_files(m: MetaData, initial_prefix_files): return new_files -def bundle_conda(output, metadata: MetaData, env, stats, **kw): +def bundle_conda( + output, + metadata: MetaData, + env, + stats, + new_prefix_files: set[str] = set(), + **kw, +): log = utils.get_logger(__name__) log.info("Packaging %s", metadata.dist()) get_all_replacements(metadata.config) @@ -1855,13 +1753,16 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): output["script"], args[0], ) - if "system32" in args[0] and "bash" in args[0]: - print( - "ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" - " use MSYS2 packages. Add `m2-base` and more (depending on what your" - " script needs) to `requirements/build` instead." + if ( + # WSL bash is always the same path, it is an alias to the default + # distribution as configured by the user + on_win and Path("C:\\Windows\\System32\\bash.exe").samefile(args[0]) + ): + raise CondaBuildUserError( + "WSL bash.exe is not supported. Please use MSYS2 packages. Add " + "`m2-base` and more (depending on what your script needs) to " + "`requirements/build` instead." ) - sys.exit(1) else: args = interpreter.split(" ") @@ -1880,8 +1781,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): var = var.split("=", 1)[0] elif var not in os.environ: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." - % var, + f"The environment variable '{var}' specified in script_env is undefined.", UserWarning, ) val = "" @@ -1898,12 +1798,15 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): _write_activation_text(dest_file, metadata) bundle_stats = {} - utils.check_call_env( - [*args, dest_file], - cwd=metadata.config.work_dir, - env=env_output, - stats=bundle_stats, - ) + try: + utils.check_call_env( + [*args, dest_file], + cwd=metadata.config.work_dir, + env=env_output, + stats=bundle_stats, + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc log_stats(bundle_stats, f"bundling {metadata.name()}") if stats is not None: stats[stats_key(metadata, f"bundle_{metadata.name()}")] = bundle_stats @@ -1911,10 +1814,26 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): if files: # Files is specified by the output # we exclude the list of files that we want to keep, so post-process picks them up as "new" - keep_files = { - os.path.normpath(pth) - for pth in utils.expand_globs(files, metadata.config.host_prefix) - } + if isinstance(files, dict): + # When file matching with include/exclude lists, only + # new_prefix_files are considered. 
Files in the PREFIX from other + # recipes (dependencies) are ignored + include = files.get("include") or [] + exclude = files.get("exclude") or [] + exclude_files = { + os.path.normpath(pth) + for pth in utils.expand_globs(exclude, metadata.config.host_prefix) + } + keep_files = { + os.path.normpath(pth) + for pth in utils.expand_globs(include, metadata.config.host_prefix) + } + keep_files = new_prefix_files.intersection(keep_files) - exclude_files + else: + keep_files = { + os.path.normpath(pth) + for pth in utils.expand_globs(files, metadata.config.host_prefix) + } pfx_files = set(utils.prefix_files(metadata.config.host_prefix)) initial_files = { item @@ -1925,7 +1844,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): } elif not output.get("script"): if not metadata.always_include_files(): - log.warn( + log.warning( "No files or script found for output {}".format(output.get("name")) ) build_deps = metadata.get_value("requirements/build") @@ -1963,7 +1882,9 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): initial_files.remove(f) has_matches = True if not has_matches: - log.warn("Glob %s from always_include_files does not match any files", pat) + log.warning( + "Glob %s from always_include_files does not match any files", pat + ) files = post_process_files(metadata, initial_files) if output.get("name") and output.get("name") != "conda": @@ -2022,7 +1943,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): from conda_verify.verify import Verify except ImportError: Verify = None - log.warn( + log.warning( "Importing conda-verify failed. Please be sure to test your packages. " "conda install conda-verify to make this message go away." ) @@ -2039,7 +1960,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): exit_on_error=metadata.config.exit_on_verify_error, ) except KeyError as e: - log.warn( + log.warning( "Package doesn't have necessary files. It might be too old to inspect." f"Legacy noarch packages are known to fail. Full message was {e}" ) @@ -2089,7 +2010,13 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): return final_outputs -def bundle_wheel(output, metadata: MetaData, env, stats): +def bundle_wheel( + output, + metadata: MetaData, + env, + stats, + new_prefix_files: set[str] = set(), +): ext = ".bat" if utils.on_win else ".sh" with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir): dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext) @@ -2266,7 +2193,7 @@ def _write_activation_text(script_path, m): _write_sh_activation_text(fh, m) else: log = utils.get_logger(__name__) - log.warn( + log.warning( f"not adding activation to {script_path} - I don't know how to do so for " "this file type" ) @@ -2351,8 +2278,6 @@ def create_build_envs(m: MetaData, notest): ) except DependencyNeedsBuildingError as e: # subpackages are not actually missing. We just haven't built them yet. 
- from .conda_interface import MatchSpec - other_outputs = ( m.other_outputs.values() if hasattr(m, "other_outputs") @@ -2416,8 +2341,6 @@ def build( with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] # this should be a no-op if source is already here if m.needs_source_for_render: @@ -2488,7 +2411,7 @@ def build( ): specs.append(vcs_source) - log.warn( + log.warning( "Your recipe depends on %s at build time (for templates), " "but you have not listed it as a build dependency. Doing " "so for this build.", @@ -2546,6 +2469,24 @@ def build( # Write out metadata for `conda debug`, making it obvious that this is what it is, must be done # after try_download() output_yaml(m, os.path.join(m.config.work_dir, "metadata_conda_debug.yaml")) + if m.config.verbose: + m_copy = m.copy() + for om, _, _ in render_metadata_tuples( + [(m_copy, False, False)], m_copy.config + ): + print( + "", + "Rendered as:", + "```yaml", + output_yaml(om).rstrip(), + "```", + "", + sep="\n", + ) + # Each iteration returns the whole meta yaml, and then we are supposed to remove + # the outputs we don't want. Instead we just take the first and print it fully + break + del m_copy # get_dir here might be just work, or it might be one level deeper, # dependening on the source. @@ -2580,9 +2521,12 @@ def build( with codecs.getwriter("utf-8")(open(build_file, "wb")) as bf: bf.write(script) - windows.build( - m, build_file, stats=build_stats, provision_only=provision_only - ) + try: + windows.build( + m, build_file, stats=build_stats, provision_only=provision_only + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc else: build_file = join(m.path, "build.sh") if isfile(build_file) and script: @@ -2624,13 +2568,16 @@ def build( del env["CONDA_BUILD"] # this should raise if any problems occur while building - utils.check_call_env( - cmd, - env=env, - rewrite_stdout_env=rewrite_env, - cwd=src_dir, - stats=build_stats, - ) + try: + utils.check_call_env( + cmd, + env=env, + rewrite_stdout_env=rewrite_env, + cwd=src_dir, + stats=build_stats, + ) + except subprocess.CalledProcessError as exc: + raise BuildScriptException(str(exc), caused_by=exc) from exc utils.remove_pycache_from_scripts(m.config.host_prefix) if build_stats and not provision_only: log_stats(build_stats, f"building {m.name()}") @@ -2813,8 +2760,8 @@ def build( # This is wrong, files has not been expanded at this time and could contain # wildcards. Also well, I just do not understand this, because when this # does contain wildcards, the files in to_remove will slip back in. - if "files" in output_d: - output_d["files"] = set(output_d["files"]) - to_remove + if (files := output_d.get("files")) and not isinstance(files, dict): + output_d["files"] = set(files) - to_remove # copies the backed-up new prefix files into the newly created host env for f in new_prefix_files: @@ -2829,7 +2776,9 @@ def build( with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) pkg_type = "conda" if not hasattr(m, "type") else m.type - newly_built_packages = bundlers[pkg_type](output_d, m, env, stats) + newly_built_packages = bundlers[pkg_type]( + output_d, m, env, stats, new_prefix_files + ) # warn about overlapping files. 
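Several hunks above wrap the `subprocess.CalledProcessError` from build and bundle scripts in `BuildScriptException`, raised `from` the original error. A sketch of what a caller can now catch; the recipe path is hypothetical:

    from conda_build import api
    from conda_build.exceptions import BuildScriptException

    try:
        api.build("./recipe")
    except BuildScriptException as exc:
        # raised "from" the CalledProcessError, so the cause survives
        print("build script failed:", exc.__cause__)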
if "checksums" in output_d: for file, csum in output_d["checksums"].items(): @@ -2944,7 +2893,7 @@ def _construct_metadata_for_test_from_recipe(recipe_dir, config): render_recipe(recipe_dir, config=config, reset_build_id=False) )[0][1] log = utils.get_logger(__name__) - log.warn( + log.warning( "Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust " "your code to pass your desired conda package to test instead." ) @@ -2994,7 +2943,7 @@ def _construct_metadata_for_test_from_package(package, config): is_channel = True if not is_channel: - log.warn( + log.warning( "Copying package to conda-build croot. No packages otherwise alongside yours will" " be available unless you specify -c local. To avoid this warning, your package " "must reside in a channel structure with platform-subfolders. See more info on " @@ -3248,7 +3197,7 @@ def _write_test_run_script( tf.write(f'call "{shell_file}"\n') tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") else: - log.warn( + log.warning( "Found sh test file on windows. Ignoring this for now (PRs welcome)" ) elif os.path.splitext(shell_file)[1] == ".sh": @@ -3330,12 +3279,12 @@ def write_test_scripts( def test( - recipedir_or_package_or_metadata, - config, - stats, - move_broken=True, - provision_only=False, -): + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + config: Config, + stats: dict, + move_broken: bool = True, + provision_only: bool = False, +) -> bool: """ Execute any test scripts for the given package. @@ -3415,9 +3364,9 @@ def test( os.path.dirname(prefix), "_".join( ( - "%s_prefix_moved" % name, + f"{name}_prefix_moved", metadata.dist(), - getattr(metadata.config, "%s_subdir" % name), + getattr(metadata.config, f"{name}_subdir"), ) ), ) @@ -3433,7 +3382,7 @@ def test( # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(config.work_dir, dest, "work") else: - log.warn( + log.warning( "Not moving work directory after build. Your package may depend on files " "in the work directory that are not included with your package" ) @@ -3447,8 +3396,6 @@ def test( env.update(environ.get_dict(m=metadata, prefix=config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" env["CONDA_BUILD"] = "1" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory @@ -3499,7 +3446,7 @@ def test( CondaError, AssertionError, ) as exc: - log.warn( + log.warning( "failed to get package records, retrying. exception was: %s", str(exc) ) tests_failed( @@ -3531,8 +3478,6 @@ def test( env = dict(os.environ.copy()) env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.test_run_post: from .utils import get_installed_packages @@ -3608,7 +3553,12 @@ def test( return True -def tests_failed(package_or_metadata, move_broken, broken_dir, config): +def tests_failed( + package_or_metadata: str | os.PathLike | Path | MetaData, + move_broken: bool, + broken_dir: str | os.PathLike | Path, + config: Config, +) -> None: """ Causes conda to exit if any of the given package's tests failed. 
@@ -3628,7 +3578,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): log = utils.get_logger(__name__) try: shutil.move(pkg, dest) - log.warn( + log.warning( f"Tests failed for {os.path.basename(pkg)} - moving package to {broken_dir}" ) except OSError: @@ -3636,9 +3586,14 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): _delegated_update_index( os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1 ) - sys.exit("TESTS FAILED: " + os.path.basename(pkg)) + raise CondaBuildUserError("TESTS FAILED: " + os.path.basename(pkg)) +@deprecated( + "24.7", + "24.9", + addendum="`patchelf` is an explicit conda-build dependency on Linux so it will always be installed.", +) def check_external(): if on_linux: patchelf = external.find_executable("patchelf") @@ -3653,8 +3608,14 @@ def check_external(): def build_tree( - recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None -): + recipe_list: Iterable[str | MetaData], + config: Config, + stats: dict, + build_only: bool = False, + post: bool | None = None, + notest: bool = False, + variants: dict[str, Any] | None = None, +) -> list[str]: to_build_recursive = [] recipe_list = deque(recipe_list) @@ -3736,8 +3697,7 @@ def build_tree( reset_build_id=not cfg.dirty, bypass_env_check=True, ) - # restrict to building only one variant for bdist_conda. The way it splits the build - # job breaks variants horribly. + if post in (True, False): metadata_tuples = metadata_tuples[:1] @@ -3788,7 +3748,7 @@ def build_tree( # downstreams can be a dict, for adding capability for worker labels if hasattr(downstreams, "keys"): downstreams = list(downstreams.keys()) - log.warn( + log.warning( "Dictionary keys for downstreams are being " "ignored right now. Coming soon..." ) @@ -3827,7 +3787,7 @@ def build_tree( UnsatisfiableError, DependencyNeedsBuildingError, ) as e: - log.warn( + log.warning( f"Skipping downstream test for spec {dep}; was " f"unsatisfiable. Error was {e}" ) @@ -4022,7 +3982,10 @@ def build_tree( return list(built_packages.keys()) -def handle_anaconda_upload(paths, config): +def handle_anaconda_upload( + paths: Iterable[str | os.PathLike | Path], + config: Config, +) -> None: from .os_utils.external import find_executable paths = utils.ensure_list(paths) @@ -4056,7 +4019,7 @@ def handle_anaconda_upload(paths, config): "# To have conda build upload to anaconda.org automatically, use\n" f"# {prompter}conda config --set anaconda_upload yes\n" ) - no_upload_message += f"anaconda upload{joiner}" + joiner.join(paths) + no_upload_message += f"anaconda upload{joiner}" + joiner.join(map(str, paths)) if not upload: print(no_upload_message) @@ -4064,7 +4027,7 @@ def handle_anaconda_upload(paths, config): if not anaconda: print(no_upload_message) - sys.exit( + raise CondaBuildUserError( "Error: cannot locate anaconda command (required for upload)\n" "# Try:\n" f"# {prompter}conda install anaconda-client" @@ -4121,11 +4084,11 @@ def handle_pypi_upload(wheels, config): try: utils.check_call_env(args + [f]) except: - utils.get_logger(__name__).warn( + utils.get_logger(__name__).warning( "wheel upload failed - is twine installed?" " Is this package registered?" ) - utils.get_logger(__name__).warn(f"Wheel file left in {f}") + utils.get_logger(__name__).warning(f"Wheel file left in {f}") else: print(f"anaconda_upload is not set. 
Not uploading wheels: {wheels}") diff --git a/conda_build/cli-32.exe b/conda_build/cli-32.exe index b17d9c7b23..eaf5188c31 100755 Binary files a/conda_build/cli-32.exe and b/conda_build/cli-32.exe differ diff --git a/conda_build/cli-64.exe b/conda_build/cli-64.exe index 7b7f9c67d2..0251e7a4bc 100755 Binary files a/conda_build/cli-64.exe and b/conda_build/cli-64.exe differ diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index bdcaaa25d6..13e129910d 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -17,7 +17,6 @@ from conda.common.io import dashlist from .. import api, build, source, utils -from ..conda_interface import add_parser_channels, cc_conda_build from ..config import ( get_channel_urls, get_or_merge_config, @@ -27,12 +26,16 @@ from .actions import KeyValueAction from .main_render import get_render_parser +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence - from ..conda_interface import ArgumentParser - def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser = get_render_parser() @@ -70,7 +73,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: action="store_false", help="Don't include the recipe inside the built package.", dest="include_recipe", - default=cc_conda_build.get("include_recipe", "true").lower() == "true", + default=context.conda_build.get("include_recipe", "true").lower() == "true", ) parser.add_argument( "-s", @@ -125,7 +128,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Skip recipes for which there already exists an existing build " "(locally or in the channels)." 
), - default=cc_conda_build.get("skip_existing", "false").lower() == "true", + default=context.conda_build.get("skip_existing", "false").lower() == "true", ) parser.add_argument( "--keep-old-work", @@ -145,7 +148,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--quiet", action="store_true", help="do not display progress bar", - default=cc_conda_build.get("quiet", "false").lower() == "true", + default=context.conda_build.get("quiet", "false").lower() == "true", ) parser.add_argument( "--debug", @@ -155,12 +158,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser.add_argument( "--token", help="Token to pass through to anaconda upload", - default=cc_conda_build.get("anaconda_token"), + default=context.conda_build.get("anaconda_token"), ) parser.add_argument( "--user", help="User/organization to upload packages to on anaconda.org or pypi", - default=cc_conda_build.get("user"), + default=context.conda_build.get("user"), ) parser.add_argument( "--label", @@ -185,7 +188,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), type=int, choices=range(1, 23), - default=cc_conda_build.get( + default=context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ) @@ -210,23 +213,23 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--config-file", help="path to .pypirc file to use when uploading to pypi", default=( - abspath(expanduser(expandvars(cc_conda_build.get("pypirc")))) - if cc_conda_build.get("pypirc") - else cc_conda_build.get("pypirc") + abspath(expanduser(expandvars(pypirc))) + if (pypirc := context.conda_build.get("pypirc")) + else None ), ) pypi_grp.add_argument( "--repository", "-r", help="PyPI repository to upload to", - default=cc_conda_build.get("pypi_repository", "pypitest"), + default=context.conda_build.get("pypi_repository", "pypitest"), ) parser.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", dest="activate", - default=cc_conda_build.get("activate", "true").lower() == "true", + default=context.conda_build.get("activate", "true").lower() == "true", ) parser.add_argument( "--no-build-id", @@ -237,7 +240,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), dest="set_build_id", # note: inverted - dest stores positive logic - default=cc_conda_build.get("set_build_id", "true").lower() == "true", + default=context.conda_build.get("set_build_id", "true").lower() == "true", ) parser.add_argument( "--build-id-pat", @@ -246,7 +249,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "paths being too long." 
), dest="build_id_pat", - default=cc_conda_build.get("build_id_pat", "{n}_{t}"), + default=context.conda_build.get("build_id_pat", "{n}_{t}"), ) parser.add_argument( "--croot", @@ -259,21 +262,22 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--verify", action="store_true", help="run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", - default=cc_conda_build.get("exit_on_verify_error", "false").lower() == "true", + default=context.conda_build.get("exit_on_verify_error", "false").lower() + == "true", ) parser.add_argument( "--output-folder", @@ -281,7 +285,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "folder to dump output package to. Package are moved here if build or test succeeds." " Destination folder must exist prior to using this." ), - default=cc_conda_build.get("output_folder"), + default=context.conda_build.get("output_folder"), ) parser.add_argument( "--no-prefix-length-fallback", @@ -350,7 +354,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is disabled " "by default, but will be enabled by default in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--no-error-overlinking", @@ -361,7 +365,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is currently " "the default behavior, but will change in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--error-overdepending", @@ -372,7 +376,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." ), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--no-error-overdepending", @@ -383,7 +388,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." ), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--long-test-prefix", @@ -393,7 +399,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Linux and Mac. Prefix length matches the --prefix-length flag. 
This is on by " "default in conda-build 3.0+" ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--no-long-test-prefix", @@ -403,7 +409,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Do not use a long prefix for the test prefix, as well as the build prefix." " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. " ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--keep-going", @@ -420,16 +426,17 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Path to store the source files (archives, git clones, etc.) during the build." ), default=( - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") - else cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) + else None ), ) parser.add_argument( "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", - default=cc_conda_build.get("copy_test_source_files", "true").lower() == "true", + default=context.conda_build.get("copy_test_source_files", "true").lower() + == "true", help=( "Disables copying the files necessary for testing the package into " "the info/test folder. Passing this argument means it may not be possible " @@ -445,7 +452,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Merge the build and host directories, even when host section or compiler " "jinja2 is present" ), - default=cc_conda_build.get("merge_build_host", "false").lower() == "true", + default=context.conda_build.get("merge_build_host", "false").lower() == "true", ) parser.add_argument( "--stats-file", @@ -525,13 +532,13 @@ def check_action(recipe, config): def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + config = get_or_merge_config(None, **parsed.__dict__) - build.check_external() # change globals in build module, see comment there as well config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = parsed.override_channels config.verbose = not parsed.quiet or parsed.debug if "purge" in parsed.recipe: diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index ce92a71ddc..d30b725b3d 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -6,11 +6,12 @@ from os.path import abspath, expanduser from typing import TYPE_CHECKING +from conda.base.context import context + from .. 
import api -from ..conda_interface import ArgumentParser if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) @@ -41,6 +42,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda convert", description=""" @@ -125,6 +128,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + files = parsed.files del parsed.__dict__["files"] diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 59689bfa05..731f964217 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -6,6 +6,8 @@ import sys from typing import TYPE_CHECKING +from conda.base.context import context + from .. import api from ..utils import on_win from . import validators as valid @@ -94,6 +96,7 @@ def get_parser() -> ArgumentParser: def execute(args: Sequence[str] | None = None) -> int: parser = get_parser() parsed = parser.parse_args(args) + context.__init__(argparse_args=parsed) try: activation_string = api.debug( diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index cb67c40696..9b680cbf5a 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -5,19 +5,26 @@ import logging from typing import TYPE_CHECKING -from conda.base.context import context, determine_target_prefix +from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda develop", description=""" @@ -81,10 +88,11 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) - prefix = determine_target_prefix(context, parsed) + context.__init__(argparse_args=parsed) + api.develop( parsed.source, - prefix=prefix, + prefix=context.target_prefix, no_pth_file=parsed.no_pth_file, build_ext=parsed.build_ext, clean=parsed.clean, diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index eefbcf97da..b1c47c0586 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -8,19 +8,26 @@ from pprint import pprint from typing import TYPE_CHECKING -from conda.base.context import context, determine_target_prefix +from conda.base.context import context from .. 
import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda inspect", description="Tools for inspecting conda packages.", @@ -189,6 +196,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) if not parsed.subcommand: parser.print_help() @@ -199,7 +207,7 @@ def execute(args: Sequence[str] | None = None) -> int: print( api.inspect_linkages( parsed.packages, - prefix=determine_target_prefix(context, parsed), + prefix=context.target_prefix, untracked=parsed.untracked, all_packages=parsed.all, show_files=parsed.show_files, @@ -211,7 +219,7 @@ def execute(args: Sequence[str] | None = None) -> int: print( api.inspect_objects( parsed.packages, - prefix=determine_target_prefix(context, parsed), + prefix=context.target_prefix, groupby=parsed.groupby, ) ) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index a11c581702..91d2edcebb 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -9,16 +9,23 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_channels + +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda metapackage", description=""" @@ -114,8 +121,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: - _, args = parse_args(args) - channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or () - api.create_metapackage(channel_urls=channel_urls, **args.__dict__) + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + api.create_metapackage( + channel_urls=context.channels, + **parsed.__dict__, + ) return 0 diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 933528b114..6e6f2bfa41 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -8,16 +8,22 @@ from typing import TYPE_CHECKING import yaml +from conda.base.context import context from yaml.parser import ParserError from .. 
import __version__, api -from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence log = logging.getLogger(__name__) @@ -43,7 +49,9 @@ def __call__(self, parser, namespace, values, option_string=None): ) -def get_render_parser(): +def get_render_parser() -> ArgumentParser: + from conda.cli.conda_argparse import ArgumentParser + p = ArgumentParser( prog="conda render", description=""" @@ -58,7 +66,7 @@ def get_render_parser(): "--version", action="version", help="Show the conda-build version number and exit.", - version="conda-build %s" % __version__, + version=f"conda-build {__version__}", ) p.add_argument( "-n", @@ -138,7 +146,7 @@ def get_render_parser(): "--old-build-string", dest="filename_hashing", action="store_false", - default=cc_conda_build.get("filename_hashing", "true").lower() == "true", + default=context.conda_build.get("filename_hashing", "true").lower() == "true", help=( "Disable hash additions to filenames to distinguish package " "variants from one another. NOTE: any filename collisions are " @@ -194,6 +202,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) config = get_or_merge_config(None, **parsed.__dict__) @@ -205,8 +214,6 @@ def execute(args: Sequence[str] | None = None) -> int: config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = parsed.override_channels - if parsed.output: config.verbose = False config.debug = False diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 1a87487e26..7013e2ffab 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -9,12 +9,13 @@ from importlib import import_module from typing import TYPE_CHECKING +from conda.base.context import context + from .. 
import api -from ..conda_interface import ArgumentParser from ..config import Config if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence thisdir = os.path.dirname(os.path.abspath(__file__)) @@ -22,6 +23,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda skeleton", description=""" @@ -51,6 +54,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + config = Config(**parsed.__dict__) if not parsed.repo: diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py deleted file mode 100644 index c6e31b24af..0000000000 --- a/conda_build/conda_interface.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from __future__ import annotations - -import configparser as _configparser -import os as _os -from functools import partial as _partial -from importlib import import_module as _import_module - -from conda import __version__ -from conda.base.context import context as _context -from conda.base.context import determine_target_prefix as _determine_target_prefix -from conda.base.context import non_x86_machines as _non_x86_linux_machines -from conda.base.context import reset_context as _reset_context -from conda.core.package_cache_data import ( - ProgressiveFetchExtract as _ProgressiveFetchExtract, -) -from conda.exceptions import CondaError as _CondaError -from conda.exceptions import CondaHTTPError as _CondaHTTPError -from conda.exceptions import LinkError as _LinkError -from conda.exceptions import LockError as _LockError -from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError -from conda.exceptions import PaddingError as _PaddingError -from conda.exceptions import UnsatisfiableError as _UnsatisfiableError -from conda.exports import ( # noqa: F401 - ArgumentParser, - Channel, - Completer, - CondaSession, - EntityEncoder, - FileMode, - InstalledPackages, - MatchSpec, - NoPackagesFound, - PackageRecord, - PathType, - Resolve, - StringIO, - TemporaryDirectory, - TmpDownload, - Unsatisfiable, - VersionOrder, - _toposort, - add_parser_channels, - add_parser_prefix, - download, - human_bytes, - input, - lchmod, - normalized_version, - prefix_placeholder, - rm_rf, - spec_from_line, - specs_from_args, - specs_from_url, - symlink_conda, - unix_path_to_win, - untracked, - url_path, - walk_prefix, - win_path_to_unix, -) -from conda.exports import get_index as _get_index -from conda.gateways.disk.read import compute_sum as _compute_sum -from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url - -from .deprecations import deprecated - -deprecated.constant( - "24.5", - "24.7", - "configparser", - _configparser, - addendum="Use `configparser` instead.", -) -deprecated.constant("24.5", "24.7", "os", _os, addendum="Use `os` instead.") -deprecated.constant( - "24.5", - "24.7", - "partial", - _partial, - addendum="Use `functools.partial` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "import_module", - _import_module, - addendum="Use `importlib.import_module` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "context", - _context, - addendum="Use `conda.base.context.context` instead.", -) -deprecated.constant( - 
"24.5", - "24.7", - "determine_target_prefix", - _determine_target_prefix, - addendum="Use `conda.base.context.determine_target_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "non_x86_linux_machines", - _non_x86_linux_machines, - addendum="Use `conda.base.context.non_x86_machines` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "ProgressiveFetchExtract", - _ProgressiveFetchExtract, - addendum="Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "CondaError", - _CondaError, - addendum="Use `conda.exceptions.CondaError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "CondaHTTPError", - _CondaHTTPError, - addendum="Use `conda.exceptions.CondaHTTPError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "LinkError", - _LinkError, - addendum="Use `conda.exceptions.LinkError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "LockError", - _LockError, - addendum="Use `conda.exceptions.LockError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "NoPackagesFoundError", - _NoPackagesFoundError, - addendum="Use `conda.exceptions.NoPackagesFoundError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "PaddingError", - _PaddingError, - addendum="Use `conda.exceptions.PaddingError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "UnsatisfiableError", - _UnsatisfiableError, - addendum="Use `conda.exceptions.UnsatisfiableError` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "get_conda_build_local_url", - _get_conda_build_local_url, - addendum="Use `conda.models.channel.get_conda_build_local_url` instead.", -) -deprecated.constant( - "24.1.0", - "24.5.0", - "get_index", - _get_index, - addendum="Use `conda.core.index.get_index` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "reset_context", - _reset_context, - addendum="Use `conda.base.context.reset_context` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "binstar_upload", - _context.binstar_upload, - addendum="Use `conda.base.context.context.binstar_upload` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "default_python", - _context.default_python, - addendum="Use `conda.base.context.context.default_python` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "envs_dirs", - _context.envs_dirs, - addendum="Use `conda.base.context.context.envs_dirs` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "pkgs_dirs", - list(_context.pkgs_dirs), - addendum="Use `conda.base.context.context.pkgs_dirs` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "cc_platform", - _context.platform, - addendum="Use `conda.base.context.context.platform` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "root_dir", - _context.root_prefix, - addendum="Use `conda.base.context.context.root_prefix` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "root_writable", - _context.root_writable, - addendum="Use `conda.base.context.context.root_writable` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "subdir", - _context.subdir, - addendum="Use `conda.base.context.context.subdir` instead.", -) -deprecated.constant( - "24.5", - "24.7", - "create_default_packages", - _context.create_default_packages, - addendum="Use `conda.base.context.context.create_default_packages` instead.", -) - -deprecated.constant( - "24.5", - "24.7", - "get_rc_urls", - lambda: list(_context.channels), - addendum="Use `conda.base.context.context.channels` instead.", -) 
-deprecated.constant( - "24.5", - "24.7", - "get_prefix", - _partial(_determine_target_prefix, _context), - addendum="Use `conda.base.context.context.target_prefix` instead.", -) -cc_conda_build = _context.conda_build if hasattr(_context, "conda_build") else {} - -deprecated.constant( - "24.5", - "24.7", - "get_conda_channel", - Channel.from_value, - addendum="Use `conda.models.channel.Channel.from_value` instead.", -) - -# When deactivating envs (e.g. switching from root to build/test) this env var is used, -# except the PR that removed this has been reverted (for now) and Windows doesn't need it. -env_path_backup_var_exists = _os.getenv("CONDA_PATH_BACKUP") - - -@deprecated( - "24.3", - "24.5", - addendum="Handled by `conda.gateways.connection.session.CondaSession`.", -) -def handle_proxy_407(x, y): - pass - - -deprecated.constant( - "24.3", - "24.5", - "hashsum_file", - _compute_sum, - addendum="Use `conda.gateways.disk.read.compute_sum` instead.", -) - - -@deprecated( - "24.3", - "24.5", - addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.", -) -def md5_file(path: str | _os.PathLike) -> str: - return _compute_sum(path, "md5") - - -deprecated.constant( - "24.5", - "24.7", - "CONDA_VERSION", - __version__, - addendum="Use `conda.__version__` instead.", -) - - -@deprecated( - "24.3", - "24.5", - addendum="Use `conda_build.environ.get_version_from_git_tag` instead.", -) -def get_version_from_git_tag(tag): - from .environ import get_version_from_git_tag - - return get_version_from_git_tag(tag) diff --git a/conda_build/config.py b/conda_build/config.py index f6944eaee6..d782600f32 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -18,8 +18,8 @@ from typing import TYPE_CHECKING from conda.base.context import context +from conda.utils import url_path -from .conda_interface import cc_conda_build, url_path from .utils import ( get_build_folders, get_conda_operation_locks, @@ -31,6 +31,7 @@ if TYPE_CHECKING: from pathlib import Path + from typing import Any invocation_time = "" @@ -53,7 +54,6 @@ def set_invocation_time(): _src_cache_root_default = None error_overlinking_default = "false" error_overdepending_default = "false" -noarch_python_build_age_default = 0 enable_static_default = "false" no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] @@ -89,7 +89,6 @@ def _get_default_settings(): Setting("dirty", False), Setting("include_recipe", True), Setting("no_download_source", False), - Setting("override_channels", False), Setting("skip_existing", False), Setting("token", None), Setting("user", None), @@ -111,14 +110,16 @@ def _get_default_settings(): Setting("test_run_post", False), Setting( "filename_hashing", - cc_conda_build.get("filename_hashing", filename_hashing_default).lower() + context.conda_build.get( + "filename_hashing", filename_hashing_default + ).lower() == "true", ), Setting("keep_old_work", False), Setting( "_src_cache_root", - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) else _src_cache_root_default, ), Setting("copy_test_source_files", True), @@ -143,30 +144,26 @@ def _get_default_settings(): # cli/main_build.py that this default will switch in conda-build 4.0. 
Setting( "error_overlinking", - cc_conda_build.get("error_overlinking", error_overlinking_default).lower() + context.conda_build.get( + "error_overlinking", error_overlinking_default + ).lower() == "true", ), Setting( "error_overdepending", - cc_conda_build.get( + context.conda_build.get( "error_overdepending", error_overdepending_default ).lower() == "true", ), - Setting( - "noarch_python_build_age", - cc_conda_build.get( - "noarch_python_build_age", noarch_python_build_age_default - ), - ), Setting( "enable_static", - cc_conda_build.get("enable_static", enable_static_default).lower() + context.conda_build.get("enable_static", enable_static_default).lower() == "true", ), Setting( "no_rewrite_stdout_env", - cc_conda_build.get( + context.conda_build.get( "no_rewrite_stdout_env", no_rewrite_stdout_env_default ).lower() == "true", @@ -205,11 +202,13 @@ def _get_default_settings(): Setting("verify", True), Setting( "ignore_verify_codes", - cc_conda_build.get("ignore_verify_codes", ignore_verify_codes_default), + context.conda_build.get("ignore_verify_codes", ignore_verify_codes_default), ), Setting( "exit_on_verify_error", - cc_conda_build.get("exit_on_verify_error", exit_on_verify_error_default), + context.conda_build.get( + "exit_on_verify_error", exit_on_verify_error_default + ), ), # Recipes that have no host section, only build, should bypass the build/host line. # This is to make older recipes still work with cross-compiling. True cross-compiling @@ -227,17 +226,17 @@ def _get_default_settings(): Setting("_pip_cache_dir", None), Setting( "zstd_compression_level", - cc_conda_build.get( + context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ), # this can be set to different values (currently only 2 means anything) to use package formats Setting( "conda_pkg_format", - cc_conda_build.get("pkg_format", conda_pkg_format_default), + context.conda_build.get("pkg_format", conda_pkg_format_default), ), Setting("suppress_variables", False), - Setting("build_id_pat", cc_conda_build.get("build_id_pat", "{n}_{t}")), + Setting("build_id_pat", context.conda_build.get("build_id_pat", "{n}_{t}")), ] @@ -291,6 +290,10 @@ def set_lang(variant, lang): for lang in ("perl", "lua", "python", "numpy", "r_base"): set_lang(self.variant, lang) + # --override-channels is a valid CLI argument but we no longer wish to set it here + # use conda.base.context.context.override_channels instead + kwargs.pop("override_channels", None) + self._build_id = kwargs.pop("build_id", getattr(self, "_build_id", "")) source_cache = kwargs.pop("cache_dir", None) croot = kwargs.pop("croot", None) @@ -322,7 +325,7 @@ def arch(self): @arch.setter def arch(self, value): log = get_logger(__name__) - log.warn( + log.warning( "Setting build arch. This is only useful when pretending to be on another " "arch, such as for rendering necessary dependencies on a non-native arch. " "I trust that you know what you're doing." @@ -338,7 +341,7 @@ def platform(self): @platform.setter def platform(self, value): log = get_logger(__name__) - log.warn( + log.warning( "Setting build platform. 
This is only useful when "
             "pretending to be on another platform, such as "
             "for rendering necessary dependencies on a non-native "
@@ -450,7 +453,7 @@ def croot(self) -> str:
         """This is where source caches and work folders live"""
         if not self._croot:
             _bld_root_env = os.getenv("CONDA_BLD_PATH")
-            _bld_root_rc = cc_conda_build.get("root-dir")
+            _bld_root_rc = context.conda_build.get("root-dir")
             if _bld_root_env:
                 self._croot = abspath(expanduser(_bld_root_env))
             elif _bld_root_rc:
@@ -816,7 +819,7 @@ def clean_pkgs(self):
         for folder in self.bldpkgs_dirs:
             rm_rf(folder)

-    def copy(self):
+    def copy(self) -> Config:
         new = copy.copy(self)
         # Use pickle.loads(pickle.dumps(...)) as a faster copy.deepcopy alternative.
         new.variant = pickle.loads(pickle.dumps(self.variant, -1))
@@ -843,7 +846,11 @@ def __exit__(self, e_type, e_value, traceback):
         self.clean(remove_folders=False)


-def _get_or_merge_config(config, variant=None, **kwargs):
+def _get_or_merge_config(
+    config: Config | None,
+    variant: dict[str, Any] | None = None,
+    **kwargs,
+) -> Config:
     # This function should only ever be called via get_or_merge_config.
     # It only exists for us to monkeypatch a default config when running tests.
     if not config:
@@ -859,7 +866,11 @@ def _get_or_merge_config(config, variant=None, **kwargs):
     return config


-def get_or_merge_config(config, variant=None, **kwargs):
+def get_or_merge_config(
+    config: Config | None,
+    variant: dict[str, Any] | None = None,
+    **kwargs,
+) -> Config:
     """Always returns a new object - never changes the config that might be passed in."""
     return _get_or_merge_config(config, variant=variant, **kwargs)
diff --git a/conda_build/convert.py b/conda_build/convert.py
index 793f0dc93c..e910d47e21 100644
--- a/conda_build/convert.py
+++ b/conda_build/convert.py
@@ -4,6 +4,8 @@
 Tools for converting conda packages
 """

+from __future__ import annotations
+
 import glob
 import hashlib
 import json
@@ -14,8 +16,12 @@
 import tarfile
 import tempfile
 from pathlib import Path
+from typing import TYPE_CHECKING
+
+from .utils import ensure_list, filter_info_files, walk

-from .utils import filter_info_files, walk
+if TYPE_CHECKING:
+    from typing import Iterable


 def retrieve_c_extensions(file_path, show_imports=False):
@@ -776,31 +782,35 @@ def convert_from_windows_to_unix(


 def conda_convert(
-    file_path,
-    output_dir=".",
-    show_imports=False,
-    platforms=None,
-    force=False,
-    dependencies=None,
-    verbose=False,
-    quiet=False,
-    dry_run=False,
-):
+    file_path: str,
+    output_dir: str = ".",
+    show_imports: bool = False,
+    platforms: str | Iterable[str] | None = None,
+    force: bool = False,
+    dependencies: str | Iterable[str] | None = None,
+    verbose: bool = False,
+    quiet: bool = False,
+    dry_run: bool = False,
+) -> None:
     """Convert a conda package between different platforms and architectures.

     Positional arguments:
     file_path (str) -- the file path to the source package's tar file
     output_dir (str) -- the file path to where to output the converted tar file
     show_imports (bool) -- show all C extensions found in the source package
-    platforms (str) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64',
+    platforms (list[str]) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64',
         'linux-32', 'osx-64', or 'all'
     force (bool) -- force conversion of packages that contain C extensions
-    dependencies (List[str]) -- the new dependencies to add to the source package's
+    dependencies (list[str]) -- the new dependencies to add to the source package's
        existing dependencies
     verbose (bool) -- show output of items that are updated
     quiet (bool) -- hide all output except warnings and errors
     dry_run (bool) -- show which conversions will take place
     """
+
+    platforms = ensure_list(platforms)
+    dependencies = ensure_list(dependencies)
+
     if show_imports:
         imports = retrieve_c_extensions(file_path)
         if len(imports) == 0:
diff --git a/conda_build/create_test.py b/conda_build/create_test.py
index 1a8a0f1c34..441fe4a17c 100644
--- a/conda_build/create_test.py
+++ b/conda_build/create_test.py
@@ -122,7 +122,7 @@ def _create_test_files(
         fo.write(
             f"{comment_char} tests for {m.dist()} (this is a generated file);\n"
         )
-        fo.write("print('===== testing package: %s =====');\n" % m.dist())
+        fo.write(f"print('===== testing package: {m.dist()} =====');\n")

         try:
             with open(test_file) as fi:
@@ -134,7 +134,7 @@ def _create_test_files(
                 fo.write(
                     "# tests were not packaged with this module, and cannot be run\n"
                 )
-        fo.write("\nprint('===== %s OK =====');\n" % m.dist())
+        fo.write(f"\nprint('===== {m.dist()} OK =====');\n")
     return (
         out_file,
         bool(name) and isfile(out_file) and basename(test_file) != "no-file",
@@ -175,8 +175,8 @@ def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool:
     if imports:
         with open(tf, "a") as fo:
             for name in imports:
-                fo.write('print("import: %r")\n' % name)
-                fo.write("import %s\n" % name)
+                fo.write(f'print("import: {name!r}")\n')
+                fo.write(f"import {name}\n")
             fo.write("\n")
     return tf if (tf_exists or imports) else False
@@ -202,8 +202,8 @@ def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool:
     if imports:
         with open(tf, "a") as fo:
             for name in imports:
-                fo.write('print("library(%r)")\n' % name)
-                fo.write("library(%s)\n" % name)
+                fo.write(f'print("library({name!r})")\n')
+                fo.write(f"library({name})\n")
             fo.write("\n")
     return tf if (tf_exists or imports) else False
@@ -225,11 +225,13 @@ def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool:
             break
     if tf_exists or imports:
         with open(tf, "a") as fo:
-            print(r'my $expected_version = "%s";' % m.version().rstrip("0"), file=fo)
+            print(
+                r'my $expected_version = "{}";'.format(m.version().rstrip("0")), file=fo
+            )
             if imports:
                 for name in imports:
-                    print(r'print("import: %s\n");' % name, file=fo)
-                    print("use %s;\n" % name, file=fo)
+                    print(rf'print("import: {name}\n");', file=fo)
+                    print(f"use {name};\n", file=fo)
                     # Don't try to print version for complex imports
                     if " " not in name:
                         print(
@@ -264,8 +266,8 @@ def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool:
     if imports:
         with open(tf, "a+") as fo:
             for name in imports:
-                print(r'print("require \"%s\"\n");' % name, file=fo)
-                print('require "%s"\n' % name, file=fo)
+                print(rf'print("require \"{name}\"\n");', file=fo)
+                print(f'require "{name}"\n', file=fo)
     return tf if (tf_exists or imports) else False
diff --git a/conda_build/develop.py
b/conda_build/develop.py index 5b83185fdc..d0e3d59fd6 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import shutil import sys from os.path import abspath, exists, expanduser, isdir, join @@ -126,21 +128,20 @@ def _uninstall(sp_dir, pkg_path): def execute( - recipe_dirs, - prefix=sys.prefix, - no_pth_file=False, - build_ext=False, - clean=False, - uninstall=False, -): + recipe_dirs: list[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: if not isdir(prefix): sys.exit( - """\ -Error: environment does not exist: %s + f"""\ +Error: environment does not exist: {prefix} # # Use 'conda create' to create the environment first. #""" - % prefix ) assert find_executable("python", prefix=prefix) diff --git a/conda_build/environ.py b/conda_build/environ.py index 36f6b78171..3113ec7f8a 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -3,7 +3,6 @@ from __future__ import annotations import contextlib -import json import logging import multiprocessing import os @@ -38,12 +37,12 @@ PaddingError, UnsatisfiableError, ) -from conda.models.channel import prioritize_channels +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.channel import Channel, prioritize_channels from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord from . import utils -from .conda_interface import Channel, PackageRecord, TemporaryDirectory -from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index @@ -72,9 +71,6 @@ class InstallActionsType(TypedDict): log = getLogger(__name__) -deprecated.constant("24.3", "24.5", "PREFIX_ACTION", _PREFIX_ACTION := "PREFIX") -deprecated.constant("24.3", "24.5", "LINK_ACTION", _LINK_ACTION := "LINK") - # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") @@ -540,8 +536,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." - % var_name, + f"The environment variable '{var_name}' specified in script_env is undefined.", UserWarning, ) else: @@ -818,71 +813,9 @@ def os_vars(m, prefix): return d -@deprecated("24.3", "24.5") -class InvalidEnvironment(Exception): - pass - - -# Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools ) -# Vendored here to avoid the whole dependency for just this bit. -@deprecated("24.3", "24.5") -def _load_json(path): - with open(path) as fin: - x = json.load(fin) - return x - - -@deprecated("24.3", "24.5") -def _load_all_json(path): - """ - Load all json files in a directory. Return dictionary with filenames mapped to json - dictionaries. - """ - root, _, files = next(utils.walk(path)) - result = {} - for f in files: - if f.endswith(".json"): - result[f] = _load_json(join(root, f)) - return result - - -@deprecated("24.3", "24.5", addendum="Use `conda.core.prefix_data.PrefixData` instead.") -class Environment: - def __init__(self, path): - """ - Initialize an Environment object. 
- - To reflect changes in the underlying environment, a new Environment object should be - created. - """ - self.path = path - self._meta = join(path, "conda-meta") - if os.path.isdir(path) and os.path.isdir(self._meta): - self._packages = {} - else: - raise InvalidEnvironment(f"Unable to load environment {path}") - - def _read_package_json(self): - if not self._packages: - self._packages = _load_all_json(self._meta) - - def package_specs(self): - """ - List all package specs in the environment. - """ - self._read_package_json() - json_objs = self._packages.values() - specs = [] - for i in json_objs: - p, v, b = i["name"], i["version"], i["build"] - specs.append(f"{p} {v} {b}") - return specs - - cached_precs: dict[ tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord] ] = {} -deprecated.constant("24.3", "24.5", "cached_actions", cached_precs) last_index_ts = 0 @@ -921,7 +854,7 @@ def get_install_actions( capture = utils.capture for feature, value in feature_list: if value: - specs.append("%s@" % feature) + specs.append(f"{feature}@") bldpkgs_dirs = ensure_list(bldpkgs_dirs) @@ -955,7 +888,8 @@ def get_install_actions( with utils.LoggingContext(conda_log_level): with capture(): try: - precs = _install_actions(prefix, index, specs)["LINK"] + _actions = _install_actions(prefix, index, specs, subdir=subdir) + precs = _actions["LINK"] except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) except ( @@ -968,7 +902,7 @@ def get_install_actions( BuildLockError, ) as exc: if "lock" in str(exc): - log.warn( + log.warning( "failed to get package records, retrying. exception was: %s", str(exc), ) @@ -989,7 +923,7 @@ def get_install_actions( ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn( + log.warning( "I think conda ended up with a partial extraction for %s. " "Removing the folder and retrying", pkg_dir, @@ -997,7 +931,7 @@ def get_install_actions( if pkg_dir in context.pkgs_dirs and os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retries < max_env_retry: - log.warn( + log.warning( "failed to get package records, retrying. exception was: %s", str(exc), ) @@ -1027,7 +961,7 @@ def get_install_actions( # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified if not any( - re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + re.match(rf"^{pkg}(?:$|[\s=].*)", str(dep)) for dep in specs ): precs = [prec for prec in precs if prec.name != pkg] cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() @@ -1130,20 +1064,20 @@ def create_env( or isinstance(exc, PaddingError) ) and config.prefix_length > 80: if config.prefix_length_fallback: - log.warn( + log.warning( "Build prefix failed with prefix length %d", config.prefix_length, ) - log.warn("Error was: ") - log.warn(str(exc)) - log.warn( + log.warning("Error was: ") + log.warning(str(exc)) + log.warning( "One or more of your package dependencies needs to be rebuilt " "with a longer prefix length." ) - log.warn( + log.warning( "Falling back to legacy prefix length of 80 characters." ) - log.warn( + log.warning( "Your package will not install into prefixes > 80 characters." ) config.prefix_length = 80 @@ -1165,7 +1099,7 @@ def create_env( raise elif "lock" in str(exc): if retry < config.max_env_retry: - log.warn( + log.warning( "failed to create env, retrying. 
exception was: %s", str(exc), ) @@ -1191,7 +1125,7 @@ def create_env( ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn( + log.warning( "I think conda ended up with a partial extraction for %s. " "Removing the folder and retrying", pkg_dir, @@ -1199,7 +1133,7 @@ def create_env( if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn( + log.warning( "failed to create env, retrying. exception was: %s", str(exc), ) @@ -1230,7 +1164,7 @@ def create_env( if isinstance(exc, AssertionError): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = os.path.dirname(os.path.dirname(str(exc))) - log.warn( + log.warning( "I think conda ended up with a partial extraction for %s. " "Removing the folder and retrying", pkg_dir, @@ -1238,7 +1172,7 @@ def create_env( if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn( + log.warning( "failed to create env, retrying. exception was: %s", str(exc) ) create_env( @@ -1323,14 +1257,19 @@ def install_actions( prefix: str | os.PathLike | Path, index, specs: Iterable[str | MatchSpec], + subdir: str | None = None, ) -> InstallActionsType: # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471 # but reduced to only the functionality actually used within conda-build. + subdir_kwargs = {} + if subdir not in (None, "", "noarch"): + subdir_kwargs["CONDA_SUBDIR"] = subdir with env_vars( { "CONDA_ALLOW_NON_CHANNEL_URLS": "true", "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false", + **subdir_kwargs, }, callback=reset_context, ): @@ -1377,7 +1316,6 @@ def install_actions( del install_actions -@deprecated.argument("24.3", "24.5", "actions", rename="precs") def _execute_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 # but reduced to only the functionality actually used within conda-build. @@ -1402,14 +1340,13 @@ def _execute_actions(prefix, precs): unlink_link_transaction.execute() -@deprecated.argument("24.3", "24.5", "actions", rename="precs") def _display_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 # but reduced to only the functionality actually used within conda-build. builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(" environment location: %s" % prefix) + builder.append(f" environment location: {prefix}") builder.append("") print("\n".join(builder)) @@ -1453,9 +1390,9 @@ def channel_filt(s): # string with new-style string formatting. 
fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" if maxchannels: - fmt[pkg] += " {channel:<%s}" % maxchannels + fmt[pkg] += f" {{channel:<{maxchannels}}}" if features[pkg]: - fmt[pkg] += " [{features:<%s}]" % maxfeatures + fmt[pkg] += f" [{{features:<{maxfeatures}}}]" lead = " " * 4 diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index f38706786a..c815b401a7 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -2,12 +2,14 @@ # SPDX-License-Identifier: BSD-3-Clause import textwrap +from conda import CondaError + SEPARATOR = "-" * 70 indent = lambda s: textwrap.fill(textwrap.dedent(s)) -class CondaBuildException(Exception): +class CondaBuildException(CondaError): pass @@ -107,22 +109,30 @@ class BuildLockError(CondaBuildException): """Raised when we failed to acquire a lock.""" -class OverLinkingError(RuntimeError): +class OverLinkingError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "overlinking check failed \n%s" % (error) + self.msg = f"overlinking check failed \n{error}" super().__init__(self.msg) -class OverDependingError(RuntimeError): +class OverDependingError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "overdepending check failed \n%s" % (error) + self.msg = f"overdepending check failed \n{error}" super().__init__(self.msg) -class RunPathError(RuntimeError): +class RunPathError(RuntimeError, CondaBuildException): def __init__(self, error, *args): self.error = error - self.msg = "runpaths check failed \n%s" % (error) + self.msg = f"runpaths check failed \n{error}" super().__init__(self.msg) + + +class BuildScriptException(CondaBuildException): + pass + + +class CondaBuildUserError(CondaBuildException): + pass diff --git a/conda_build/gui-32.exe b/conda_build/gui-32.exe index bee7e543c4..289c77ca9c 100755 Binary files a/conda_build/gui-32.exe and b/conda_build/gui-32.exe differ diff --git a/conda_build/gui-64.exe b/conda_build/gui-64.exe index 366a721736..c6cdccd2c5 100755 Binary files a/conda_build/gui-64.exe and b/conda_build/gui-64.exe differ diff --git a/conda_build/index.py b/conda_build/index.py index 28a470f5c4..fc72a3fd0d 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,58 +1,29 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import json import logging import os -import time -from concurrent.futures import Executor from functools import partial from os.path import dirname from conda.base.context import context from conda.core.index import get_index from conda.exceptions import CondaHTTPError +from conda.utils import url_path from conda_index.index import update_index as _update_index from . import utils -from .conda_interface import url_path -from .deprecations import deprecated from .utils import ( - CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, - JSONDecodeError, get_logger, - on_win, ) log = get_logger(__name__) -@deprecated("24.3", "24.5") -class DummyExecutor(Executor): - def map(self, func, *iterables): - for iterable in iterables: - for thing in iterable: - yield func(thing) - - local_index_timestamp = 0 cached_index = None local_subdir = "" local_output_folder = "" cached_channels = [] -_channel_data = {} -deprecated.constant("24.1", "24.5", "channel_data", _channel_data) - - -# TODO: support for libarchive seems to have broken ability to use multiple threads here. -# The new conda format is so much faster that it more than makes up for it. 
However, it -# would be nice to fix this at some point. -_MAX_THREADS_DEFAULT = os.cpu_count() or 1 -if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a - _MAX_THREADS_DEFAULT = min(48, _MAX_THREADS_DEFAULT) -deprecated.constant("24.3", "24.5", "MAX_THREADS_DEFAULT", _MAX_THREADS_DEFAULT) -deprecated.constant("24.3", "24.5", "LOCK_TIMEOUT_SECS", 3 * 3600) -deprecated.constant("24.3", "24.5", "LOCKFILE_NAME", ".lock") # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" @@ -79,7 +50,6 @@ def get_build_index( global local_output_folder global cached_index global cached_channels - global _channel_data mtime = 0 channel_urls = list(utils.ensure_list(channel_urls)) @@ -154,55 +124,11 @@ def get_build_index( platform=subdir, ) - expanded_channels = {rec.channel for rec in cached_index} - - superchannel = {} - # we need channeldata.json too, as it is a more reliable source of run_exports data - for channel in expanded_channels: - if channel.scheme == "file": - location = channel.location - if utils.on_win: - location = location.lstrip("/") - elif not os.path.isabs(channel.location) and os.path.exists( - os.path.join(os.path.sep, channel.location) - ): - location = os.path.join(os.path.sep, channel.location) - channeldata_file = os.path.join( - location, channel.name, "channeldata.json" - ) - retry = 0 - max_retries = 1 - if os.path.isfile(channeldata_file): - while retry < max_retries: - try: - with open(channeldata_file, "r+") as f: - _channel_data[channel.name] = json.load(f) - break - except (OSError, JSONDecodeError): - time.sleep(0.2) - retry += 1 - else: - # download channeldata.json for url - if not context.offline: - try: - _channel_data[channel.name] = utils.download_channeldata( - channel.base_url + "/channeldata.json" - ) - except CondaHTTPError: - continue - # collapse defaults metachannel back into one superchannel, merging channeldata - if channel.base_url in context.default_channels and _channel_data.get( - channel.name - ): - packages = superchannel.get("packages", {}) - packages.update(_channel_data[channel.name]) - superchannel["packages"] = packages - _channel_data["defaults"] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir local_output_folder = output_folder cached_channels = channel_urls - return cached_index, local_index_timestamp, _channel_data + return cached_index, local_index_timestamp, None def _ensure_valid_channel(local_folder, subdir): @@ -251,54 +177,3 @@ def _delegated_update_index( current_index_versions=current_index_versions, debug=debug, ) - - -@deprecated( - "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." 
-) -def _apply_instructions(subdir, repodata, instructions): - repodata.setdefault("removed", []) - utils.merge_or_update_dict( - repodata.get("packages", {}), - instructions.get("packages", {}), - merge=False, - add_missing_keys=False, - ) - # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume - # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes - new_pkg_fixes = { - k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get("packages", {}).items() - } - - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - new_pkg_fixes, - merge=False, - add_missing_keys=False, - ) - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - instructions.get("packages.conda", {}), - merge=False, - add_missing_keys=False, - ) - - for fn in instructions.get("revoke", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - if fn in repodata[key]: - repodata[key][fn]["revoked"] = True - repodata[key][fn]["depends"].append("package_has_been_revoked") - - for fn in instructions.get("remove", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - popped = repodata[key].pop(fn, None) - if popped: - repodata["removed"].append(fn) - repodata["removed"].sort() - - return repodata diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7d7c61f8f9..5747f3a7b8 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -15,13 +15,11 @@ from conda.api import Solver from conda.base.context import context +from conda.cli.common import specs_from_args from conda.core.index import get_index from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord -from .conda_interface import ( - specs_from_args, -) from .os_utils.ldd import ( get_linkages, get_package_obj_files, @@ -134,7 +132,7 @@ def print_linkages( else sort_order.get(key[0], (4, key[0])) ), ): - output_string += "%s:\n" % prec + output_string += f"{prec}:\n" if show_files: for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" @@ -218,8 +216,8 @@ def inspect_linkages( all_packages: bool = False, show_files: bool = False, groupby: Literal["package", "dependency"] = "package", - sysroot="", -): + sysroot: str = "", +) -> str: if not packages and not untracked and not all_packages: sys.exit("At least one package or --untracked or --all must be provided") elif on_win: @@ -260,7 +258,7 @@ def inspect_linkages( if relative: precs = list(which_package(relative, prefix)) if len(precs) > 1: - get_logger(__name__).warn( + get_logger(__name__).warning( "Warning: %s comes from multiple packages: %s", path, comma_join(map(str, precs)), @@ -298,7 +296,7 @@ def inspect_linkages( output_string += print_linkages(inverted_map[dep], show_files=show_files) else: - raise ValueError("Unrecognized groupby: %s" % groupby) + raise ValueError(f"Unrecognized groupby: {groupby}") if hasattr(output_string, "decode"): output_string = output_string.decode("utf-8") return output_string diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index cc5c3b24c7..307a13ecc9 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ 
-10,24 +10,19 @@ import time from functools import partial from io import StringIO, TextIOBase +from subprocess import CalledProcessError +from typing import TYPE_CHECKING from warnings import warn import jinja2 import yaml - -try: - import tomllib # Python 3.11 -except: - import tomli as tomllib - -from typing import TYPE_CHECKING +from frozendict import deepfreeze from . import _load_setup_py_data from .environ import get_dict as get_environ from .exceptions import CondaBuildException from .render import get_env_dependencies from .utils import ( - HashableDict, apply_pin_expressions, check_call_env, copy_into, @@ -38,6 +33,11 @@ ) from .variants import DEFAULT_COMPILERS +try: + import tomllib # Python 3.11 +except: + import tomli as tomllib + if TYPE_CHECKING: from typing import IO, Any @@ -166,7 +166,12 @@ def load_setup_py_data( args.extend(["--recipe-dir", recipe_dir]) if permit_undefined_jinja: args.append("--permit-undefined-jinja") - check_call_env(args, env=env) + try: + check_call_env(args, env=env) + except CalledProcessError as exc: + raise CondaBuildException( + "Could not run load_setup_py_data in subprocess" + ) from exc # this is a file that the subprocess will have written with open( os.path.join(m.config.work_dir, "conda_build_loaded_setup_py.json") @@ -298,7 +303,7 @@ def pin_compatible( # There are two cases considered here (so far): # 1. Good packages that follow semver style (if not philosophy). For example, 1.2.3 # 2. Evil packages that cram everything alongside a single major version. For example, 9b - key = (m.name(), HashableDict(m.config.variant)) + key = (m.name(), deepfreeze(m.config.variant)) if key in cached_env_dependencies: pins = cached_env_dependencies[key] else: diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 976cc1b33a..ab101274ae 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -29,7 +29,7 @@ gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 cc_regex = re.compile(r"CC\w+") # match CC -punk_regex = re.compile("[%s]" % re.escape(string.punctuation)) # removes punks +punk_regex = re.compile(f"[{re.escape(string.punctuation)}]") # removes punks def match_gpl3(family): diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 01f3367d03..d3ee86f214 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -13,20 +13,21 @@ from collections import OrderedDict from functools import lru_cache from os.path import isfile, join -from typing import TYPE_CHECKING, overload +from typing import TYPE_CHECKING, NamedTuple, overload from bs4 import UnicodeDammit from conda.base.context import context from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec +from frozendict import deepfreeze -from . import exceptions, utils, variants -from .conda_interface import MatchSpec +from . 
import exceptions, utils from .config import Config, get_or_merge_config +from .deprecations import deprecated from .features import feature_list from .license_family import ensure_valid_license_family from .utils import ( DEFAULT_SUBDIRS, - HashableDict, ensure_list, expand_globs, find_recipe, @@ -34,9 +35,21 @@ insert_variant_versions, on_win, ) +from .variants import ( + dict_of_lists_to_list_of_dicts, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, + find_used_variables_in_text, + get_default_variant, + get_vars, + list_of_dicts_to_dict_of_lists, +) if TYPE_CHECKING: - from typing import Literal + from typing import Any, Literal, Self + + OutputDict = dict[str, Any] + OutputTuple = tuple[OutputDict, "MetaData"] try: import yaml @@ -156,7 +169,7 @@ def get_selectors(config: Config) -> dict[str, bool]: if arch == "32": d["x86"] = plat.endswith(("-32", "-64")) - defaults = variants.get_default_variant(config) + defaults = get_default_variant(config) py = config.variant.get("python", defaults["python"]) # there are times when python comes in as a tuple if not hasattr(py, "split"): @@ -186,7 +199,7 @@ def get_selectors(config: Config) -> dict[str, bool]: if not np: np = defaults["numpy"] if config.verbose: - utils.get_logger(__name__).warn( + utils.get_logger(__name__).warning( "No numpy version specified in conda_build_config.yaml. " "Falling back to default numpy value of {}".format(defaults["numpy"]) ) @@ -268,38 +281,68 @@ def eval_selector(selector_string, namespace, variants_in_place): return eval_selector(next_string, namespace, variants_in_place) -def select_lines(data, namespace, variants_in_place): - lines = [] - - for i, line in enumerate(data.splitlines()): +@lru_cache(maxsize=None) +def _split_line_selector(text: str) -> tuple[tuple[str | None, str], ...]: + lines: list[tuple[str | None, str]] = [] + for line in text.splitlines(): line = line.rstrip() + # skip comment lines, include a blank line as a placeholder + if line.lstrip().startswith("#"): + lines.append((None, "")) + continue + + # include blank lines + if not line: + lines.append((None, "")) + continue + + # user may have quoted entire line to make YAML happy trailing_quote = "" if line and line[-1] in ("'", '"'): trailing_quote = line[-1] - if line.lstrip().startswith("#"): - # Don't bother with comment only lines - continue - m = sel_pat.match(line) - if m: - cond = m.group(3) - try: - if eval_selector(cond, namespace, variants_in_place): - lines.append(m.group(1) + trailing_quote) - except Exception as e: - sys.exit( - """\ -Error: Invalid selector in meta.yaml line %d: -offending line: -%s -exception: -%s -""" - % (i + 1, line, str(e)) - ) + # Checking for "[" and "]" before regex matching every line is a bit faster. + if ( + ("[" in line and "]" in line) + and (match := sel_pat.match(line)) + and (selector := match.group(3)) + ): + # found a selector + lines.append((selector, (match.group(1) + trailing_quote).rstrip())) else: + # no selector found + lines.append((None, line)) + return tuple(lines) + + +def select_lines(text: str, namespace: dict[str, Any], variants_in_place: bool) -> str: + lines = [] + selector_cache: dict[str, bool] = {} + for i, (selector, line) in enumerate(_split_line_selector(text)): + if not selector: + # no selector? 
include line as is
+            lines.append(line)
+        else:
+            # include lines with a selector that evaluates to True
+            try:
+                if selector_cache[selector]:
+                    lines.append(line)
+            except KeyError:
+                # KeyError: cache miss
+                try:
+                    value = bool(eval_selector(selector, namespace, variants_in_place))
+                    selector_cache[selector] = value
+                    if value:
+                        lines.append(line)
+                except Exception as e:
+                    sys.exit(
+                        f"Error: Invalid selector in meta.yaml line {i + 1}:\n"
+                        f"offending line:\n"
+                        f"{line}\n"
+                        f"exception:\n"
+                        f"{e.__class__.__name__}: {e}\n"
+                    )
     return "\n".join(lines) + "\n"
@@ -358,7 +401,7 @@ def ensure_valid_noarch_value(meta):
     build_noarch = meta.get("build", {}).get("noarch")
     if build_noarch and build_noarch not in NOARCH_TYPES:
         raise exceptions.CondaBuildException(
-            "Invalid value for noarch: %s" % build_noarch
+            f"Invalid value for noarch: {build_noarch}"
         )
@@ -369,7 +412,17 @@ def _get_all_dependencies(metadata, envs=("host", "build", "run")):
     return reqs


-def check_circular_dependencies(render_order, config=None):
+@deprecated(
+    "24.5.1",
+    "24.7.0",
+    addendum="Use `conda_build.metadata._check_circular_dependencies` instead.",
+)
+def check_circular_dependencies(
+    render_order: dict[dict[str, Any], MetaData],
+    config: Config | None = None,
+):
+    # deprecated since the input type (render_order) changed
+    envs: tuple[str, ...]
     if config and config.host_subdir != config.build_subdir:
         # When cross compiling build dependencies are already built
         # and cannot come from the recipe as subpackages
@@ -394,6 +447,57 @@ def check_circular_dependencies(render_order, config=None):
         raise exceptions.RecipeError(error)


+def _check_circular_dependencies(
+    render_order: list[OutputTuple],
+    config: Config | None = None,
+) -> None:
+    envs: tuple[str, ...]
+    if config and config.host_subdir != config.build_subdir:
+        # When cross compiling build dependencies are already built
+        # and cannot come from the recipe as subpackages
+        envs = ("host", "run")
+    else:
+        envs = ("build", "host", "run")
+
+    pairs: list[tuple[str, str]] = []
+    for idx, (_, metadata) in enumerate(render_order):
+        name = metadata.name()
+        for _, other_metadata in render_order[idx + 1 :]:
+            other_name = other_metadata.name()
+            if any(
+                name == dep.split(" ")[0]
+                for dep in _get_all_dependencies(other_metadata, envs=envs)
+            ) and any(
+                other_name == dep.split(" ")[0]
+                for dep in _get_all_dependencies(metadata, envs=envs)
+            ):
+                pairs.append((name, other_name))
+
+    if pairs:
+        error = "Circular dependencies in recipe: \n"
+        for pair in pairs:
+            error += "  {} <-> {}\n".format(*pair)
+        raise exceptions.RecipeError(error)
+
+
+def _check_run_constrained(metadata_tuples):
+    errors = []
+    for _, metadata in metadata_tuples:
+        for dep in _get_all_dependencies(metadata, envs=("run_constrained",)):
+            if "{{" in dep:
+                # skip Jinja content; it might not have been rendered yet; we'll get it next call
+                continue
+            try:
+                MatchSpec(dep)
+            except ValueError as exc:
+                errors.append(
+                    f"- Output '{metadata.name()}' has invalid run_constrained item: {dep}. 
" + f"Reason: {exc}" + ) + if errors: + raise exceptions.RecipeError("\n".join(["", *errors])) + + def _variants_equal(metadata, output_metadata): match = True for key, val in metadata.config.variant.items(): @@ -789,7 +893,7 @@ def _get_env_path(env_name_or_path): break bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta") if not os.path.isdir(bootstrap_metadir): - print("Bootstrap environment '%s' not found" % env_name_or_path) + print(f"Bootstrap environment '{env_name_or_path}' not found") sys.exit(1) return env_name_or_path @@ -807,15 +911,14 @@ def _get_dependencies_from_environment(env_name_or_path): return {"requirements": {"build": bootstrap_requirements}} -def toposort(output_metadata_map): - """This function is used to work out the order to run the install scripts - for split packages based on any interdependencies. The result is just - a re-ordering of outputs such that we can run them in that order and - reset the initial set of files in the install prefix after each. This - will naturally lead to non-overlapping files in each package and also - the correct files being present during the install and test procedures, - provided they are run in this order.""" - from .conda_interface import _toposort +@deprecated( + "24.5.1", + "24.7.0", + addendum="Use `conda_build.metadata.toposort_outputs` instead.", +) +def toposort(output_metadata_map: dict[OutputDict, MetaData]): + # deprecated since input type (output_metadata_map) and output changed + from conda.common.toposort import _toposort # We only care about the conda packages built by this recipe. Non-conda # packages get sorted to the end. @@ -824,9 +927,9 @@ def toposort(output_metadata_map): for output_d in output_metadata_map if output_d.get("type", "conda").startswith("conda") ] - topodict = dict() - order = dict() - endorder = set() + topodict: dict[str, set[str]] = dict() + order: dict[str, int] = dict() + endorder: set[int] = set() for idx, (output_d, output_m) in enumerate(output_metadata_map.items()): if output_d.get("type", "conda").startswith("conda"): @@ -868,7 +971,69 @@ def toposort(output_metadata_map): return result -def get_output_dicts_from_metadata(metadata, outputs=None): +def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]: + """This function is used to work out the order to run the install scripts + for split packages based on any interdependencies. The result is just + a re-ordering of outputs such that we can run them in that order and + reset the initial set of files in the install prefix after each. This + will naturally lead to non-overlapping files in each package and also + the correct files being present during the install and test procedures, + provided they are run in this order.""" + from conda.common.toposort import _toposort + + # We only care about the conda packages built by this recipe. Non-conda + # packages get sorted to the end. + conda_outputs: dict[str, list[OutputTuple]] = {} + non_conda_outputs: list[OutputTuple] = [] + for output_tuple in output_tuples: + output_d, _ = output_tuple + if output_d.get("type", "conda").startswith("conda"): + # conda packages must have a name + # the same package name may be seen multiple times (variants) + conda_outputs.setdefault(output_d["name"], []).append(output_tuple) + elif "name" in output_d: + non_conda_outputs.append(output_tuple) + else: + # TODO: is it even possible to get here? and if so should we silently ignore or error? 
+ utils.get_logger(__name__).warning( + "Found an output without a name, skipping" + ) + + # Iterate over conda packages, creating a mapping of package names to their + # dependencies to be used in toposort + name_to_dependencies: dict[str, set[str]] = {} + for name, same_name_outputs in conda_outputs.items(): + for output_d, output_metadata in same_name_outputs: + # dependencies for all of the variants + dependencies = ( + *output_metadata.get_value("requirements/run", []), + *output_metadata.get_value("requirements/host", []), + *( + output_metadata.get_value("requirements/build", []) + if not output_metadata.is_cross + else [] + ), + ) + name_to_dependencies.setdefault(name, set()).update( + dependency_name + for dependency in dependencies + if (dependency_name := dependency.split(" ")[0]) in conda_outputs + ) + + return [ + *( + output + for name in _toposort(name_to_dependencies) + for output in conda_outputs[name] + ), + *non_conda_outputs, + ] + + +def get_output_dicts_from_metadata( + metadata: MetaData, + outputs: list[dict[str, Any]] | None = None, +) -> list[dict[str, Any]]: outputs = outputs or metadata.get_section("outputs") if not outputs: @@ -889,8 +1054,8 @@ def get_output_dicts_from_metadata(metadata, outputs=None): outputs.append(OrderedDict(name=metadata.name())) for out in outputs: if ( - "package:" in metadata.get_recipe_text() - and out.get("name") == metadata.name() + out.get("name") == metadata.name() + and "package:" in metadata.get_recipe_text() ): combine_top_level_metadata_with_output(metadata, out) return outputs @@ -956,35 +1121,26 @@ def finalize_outputs_pass( fm = om if not output_d.get("type") or output_d.get("type").startswith("conda"): outputs[ - ( - fm.name(), - HashableDict( - { - k: copy.deepcopy(fm.config.variant[k]) - for k in fm.get_used_vars() - } - ), - ) + fm.name(), + deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}), ] = (output_d, fm) except exceptions.DependencyNeedsBuildingError as e: if not permit_unsatisfiable_variants: raise else: log = utils.get_logger(__name__) - log.warn( + log.warning( "Could not finalize metadata due to missing dependencies: " f"{e.packages}" ) outputs[ - ( - metadata.name(), - HashableDict( - { - k: copy.deepcopy(metadata.config.variant[k]) - for k in metadata.get_used_vars() - } - ), - ) + metadata.name(), + deepfreeze( + { + k: metadata.config.variant[k] + for k in metadata.get_used_vars() + } + ), ] = (output_d, metadata) # in-place modification base_metadata.other_outputs = outputs @@ -992,12 +1148,8 @@ def finalize_outputs_pass( final_outputs = OrderedDict() for k, (out_d, m) in outputs.items(): final_outputs[ - ( - m.name(), - HashableDict( - {k: copy.deepcopy(m.config.variant[k]) for k in m.get_used_vars()} - ), - ) + m.name(), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (out_d, m) return final_outputs @@ -1015,6 +1167,7 @@ def get_updated_output_dict_from_reparsed_metadata(original_dict, new_outputs): return output_d +@lru_cache(maxsize=200) def _filter_recipe_text(text, extract_pattern=None): if extract_pattern: match = re.search(extract_pattern, text, flags=re.MULTILINE | re.DOTALL) @@ -1197,7 +1350,7 @@ def parse_again( log = utils.get_logger(__name__) if kw: - log.warn( + log.warning( "using unsupported internal conda-build function `parse_again`. Please use " "conda_build.api.render instead." ) @@ -1411,7 +1564,7 @@ def get_value(self, name, default=None, autotype=True): # is passed in with an index, e.g. 
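The selector branch above evaluates each distinct selector string at most
once per call: results are memoized in selector_cache, so only a cache miss
pays for eval_selector(). A toy sketch of the same pattern, where
eval_selector, namespace, and variants_in_place stand in for the objects
defined earlier in metadata.py:

    selector_cache: dict[str, bool] = {}

    def selector_value(selector: str) -> bool:
        try:
            return selector_cache[selector]
        except KeyError:
            # cache miss: evaluate once and remember the result
            value = bool(eval_selector(selector, namespace, variants_in_place))
            selector_cache[selector] = value
            return value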
@@ -358,7 +401,7 @@ def ensure_valid_noarch_value(meta):
     build_noarch = meta.get("build", {}).get("noarch")
     if build_noarch and build_noarch not in NOARCH_TYPES:
         raise exceptions.CondaBuildException(
-            "Invalid value for noarch: %s" % build_noarch
+            f"Invalid value for noarch: {build_noarch}"
         )
 
 
@@ -369,7 +412,17 @@ def _get_all_dependencies(metadata, envs=("host", "build", "run")):
     return reqs
 
 
-def check_circular_dependencies(render_order, config=None):
+@deprecated(
+    "24.5.1",
+    "24.7.0",
+    addendum="Use `conda_build.metadata._check_circular_dependencies` instead.",
+)
+def check_circular_dependencies(
+    render_order: dict[dict[str, Any], MetaData],
+    config: Config | None = None,
+):
+    # deprecated since the input type (render_order) changed
+    envs: tuple[str, ...]
     if config and config.host_subdir != config.build_subdir:
         # When cross compiling build dependencies are already built
         # and cannot come from the recipe as subpackages
@@ -394,6 +447,57 @@ def check_circular_dependencies(render_order, config=None):
     raise exceptions.RecipeError(error)
 
 
+def _check_circular_dependencies(
+    render_order: list[OutputTuple],
+    config: Config | None = None,
+) -> None:
+    envs: tuple[str, ...]
+    if config and config.host_subdir != config.build_subdir:
+        # When cross compiling build dependencies are already built
+        # and cannot come from the recipe as subpackages
+        envs = ("host", "run")
+    else:
+        envs = ("build", "host", "run")
+
+    pairs: list[tuple[str, str]] = []
+    for idx, (_, metadata) in enumerate(render_order):
+        name = metadata.name()
+        for _, other_metadata in render_order[idx + 1 :]:
+            other_name = other_metadata.name()
+            if any(
+                name == dep.split(" ")[0]
+                for dep in _get_all_dependencies(other_metadata, envs=envs)
+            ) and any(
+                other_name == dep.split(" ")[0]
+                for dep in _get_all_dependencies(metadata, envs=envs)
+            ):
+                pairs.append((name, other_name))
+
+    if pairs:
+        error = "Circular dependencies in recipe: \n"
+        for pair in pairs:
+            error += " {} <-> {}\n".format(*pair)
+        raise exceptions.RecipeError(error)
+
+
+def _check_run_constrained(metadata_tuples):
+    errors = []
+    for _, metadata in metadata_tuples:
+        for dep in _get_all_dependencies(metadata, envs=("run_constrained",)):
+            if "{{" in dep:
+                # skip Jinja content; it might have not been rendered yet; we'll get it next call
+                continue
+            try:
+                MatchSpec(dep)
+            except ValueError as exc:
+                errors.append(
+                    f"- Output '{metadata.name()}' has invalid run_constrained item: {dep}. "
+                    f"Reason: {exc}"
+                )
+    if errors:
+        raise exceptions.RecipeError("\n".join(["", *errors]))
+
+
 def _variants_equal(metadata, output_metadata):
     match = True
     for key, val in metadata.config.variant.items():
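_check_run_constrained above simply asks conda's MatchSpec parser to accept
each run_constrained entry and collects the failures. A quick illustration,
assuming conda is importable and that a malformed spec (here an empty
version component) is rejected with a ValueError subclass:

    from conda.models.match_spec import MatchSpec

    for spec in ("python >=3.8", "python >=3.8,,<3.12"):
        try:
            MatchSpec(spec)
            print(f"ok:      {spec}")
        except ValueError as exc:
            print(f"invalid: {spec} ({exc})")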
@@ -789,7 +893,7 @@ def _get_env_path(env_name_or_path):
                 break
         bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta")
         if not os.path.isdir(bootstrap_metadir):
-            print("Bootstrap environment '%s' not found" % env_name_or_path)
+            print(f"Bootstrap environment '{env_name_or_path}' not found")
             sys.exit(1)
     return env_name_or_path
 
@@ -807,15 +911,14 @@ def _get_dependencies_from_environment(env_name_or_path):
     return {"requirements": {"build": bootstrap_requirements}}
 
 
-def toposort(output_metadata_map):
-    """This function is used to work out the order to run the install scripts
-    for split packages based on any interdependencies. The result is just
-    a re-ordering of outputs such that we can run them in that order and
-    reset the initial set of files in the install prefix after each. This
-    will naturally lead to non-overlapping files in each package and also
-    the correct files being present during the install and test procedures,
-    provided they are run in this order."""
-    from .conda_interface import _toposort
+@deprecated(
+    "24.5.1",
+    "24.7.0",
+    addendum="Use `conda_build.metadata.toposort_outputs` instead.",
+)
+def toposort(output_metadata_map: dict[OutputDict, MetaData]):
+    # deprecated since input type (output_metadata_map) and output changed
+    from conda.common.toposort import _toposort
 
     # We only care about the conda packages built by this recipe. Non-conda
     # packages get sorted to the end.
@@ -824,9 +927,9 @@
         for output_d in output_metadata_map
         if output_d.get("type", "conda").startswith("conda")
     ]
-    topodict = dict()
-    order = dict()
-    endorder = set()
+    topodict: dict[str, set[str]] = dict()
+    order: dict[str, int] = dict()
+    endorder: set[int] = set()
 
     for idx, (output_d, output_m) in enumerate(output_metadata_map.items()):
         if output_d.get("type", "conda").startswith("conda"):
@@ -868,7 +971,69 @@
     return result
 
 
-def get_output_dicts_from_metadata(metadata, outputs=None):
+def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]:
+    """This function is used to work out the order to run the install scripts
+    for split packages based on any interdependencies. The result is just
+    a re-ordering of outputs such that we can run them in that order and
+    reset the initial set of files in the install prefix after each. This
+    will naturally lead to non-overlapping files in each package and also
+    the correct files being present during the install and test procedures,
+    provided they are run in this order."""
+    from conda.common.toposort import _toposort
+
+    # We only care about the conda packages built by this recipe. Non-conda
+    # packages get sorted to the end.
+    conda_outputs: dict[str, list[OutputTuple]] = {}
+    non_conda_outputs: list[OutputTuple] = []
+    for output_tuple in output_tuples:
+        output_d, _ = output_tuple
+        if output_d.get("type", "conda").startswith("conda"):
+            # conda packages must have a name
+            # the same package name may be seen multiple times (variants)
+            conda_outputs.setdefault(output_d["name"], []).append(output_tuple)
+        elif "name" in output_d:
+            non_conda_outputs.append(output_tuple)
+        else:
+            # TODO: is it even possible to get here? and if so should we silently ignore or error?
+            utils.get_logger(__name__).warning(
+                "Found an output without a name, skipping"
+            )
+
+    # Iterate over conda packages, creating a mapping of package names to their
+    # dependencies to be used in toposort
+    name_to_dependencies: dict[str, set[str]] = {}
+    for name, same_name_outputs in conda_outputs.items():
+        for output_d, output_metadata in same_name_outputs:
+            # dependencies for all of the variants
+            dependencies = (
+                *output_metadata.get_value("requirements/run", []),
+                *output_metadata.get_value("requirements/host", []),
+                *(
+                    output_metadata.get_value("requirements/build", [])
+                    if not output_metadata.is_cross
+                    else []
+                ),
+            )
+            name_to_dependencies.setdefault(name, set()).update(
+                dependency_name
+                for dependency in dependencies
+                if (dependency_name := dependency.split(" ")[0]) in conda_outputs
+            )
+
+    return [
+        *(
+            output
+            for name in _toposort(name_to_dependencies)
+            for output in conda_outputs[name]
+        ),
+        *non_conda_outputs,
+    ]
+
+
+def get_output_dicts_from_metadata(
+    metadata: MetaData,
+    outputs: list[dict[str, Any]] | None = None,
+) -> list[dict[str, Any]]:
     outputs = outputs or metadata.get_section("outputs")
 
     if not outputs:
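_toposort_outputs above only orders the conda outputs among themselves;
non-conda outputs are appended unsorted at the end. A small, self-contained
illustration of the core step, using conda's toposort helper exactly as the
new code does (the three output names are made up):

    from conda.common.toposort import _toposort

    name_to_dependencies = {
        "libfoo": set(),        # depends on no sibling output
        "foo": {"libfoo"},      # links against libfoo
        "foo-docs": {"foo"},    # built from foo
    }
    print(list(_toposort(name_to_dependencies)))
    # expected: ['libfoo', 'foo', 'foo-docs']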
@@ -889,8 +1054,8 @@ def get_output_dicts_from_metadata(metadata, outputs=None):
         outputs.append(OrderedDict(name=metadata.name()))
     for out in outputs:
         if (
-            "package:" in metadata.get_recipe_text()
-            and out.get("name") == metadata.name()
+            out.get("name") == metadata.name()
+            and "package:" in metadata.get_recipe_text()
         ):
             combine_top_level_metadata_with_output(metadata, out)
     return outputs
@@ -956,35 +1121,26 @@ def finalize_outputs_pass(
                 fm = om
             if not output_d.get("type") or output_d.get("type").startswith("conda"):
                 outputs[
-                    (
-                        fm.name(),
-                        HashableDict(
-                            {
-                                k: copy.deepcopy(fm.config.variant[k])
-                                for k in fm.get_used_vars()
-                            }
-                        ),
-                    )
+                    fm.name(),
+                    deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}),
                 ] = (output_d, fm)
         except exceptions.DependencyNeedsBuildingError as e:
             if not permit_unsatisfiable_variants:
                 raise
             else:
                 log = utils.get_logger(__name__)
-                log.warn(
+                log.warning(
                     "Could not finalize metadata due to missing dependencies: "
                     f"{e.packages}"
                 )
                 outputs[
-                    (
-                        metadata.name(),
-                        HashableDict(
-                            {
-                                k: copy.deepcopy(metadata.config.variant[k])
-                                for k in metadata.get_used_vars()
-                            }
-                        ),
-                    )
+                    metadata.name(),
+                    deepfreeze(
+                        {
+                            k: metadata.config.variant[k]
+                            for k in metadata.get_used_vars()
+                        }
+                    ),
                 ] = (output_d, metadata)
     # in-place modification
     base_metadata.other_outputs = outputs
@@ -992,12 +1148,8 @@
     final_outputs = OrderedDict()
     for k, (out_d, m) in outputs.items():
         final_outputs[
-            (
-                m.name(),
-                HashableDict(
-                    {k: copy.deepcopy(m.config.variant[k]) for k in m.get_used_vars()}
-                ),
-            )
+            m.name(),
+            deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}),
         ] = (out_d, m)
     return final_outputs
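The deepfreeze() calls above replace HashableDict as the cache key for
variant mappings; because the frozen value is a new immutable structure, it
can neither mutate nor be corrupted by the input dict, which is why the
copy.deepcopy() calls could be dropped. A rough stand-in for the helper
(the real deepfreeze is imported by metadata.py outside these hunks):

    def deepfreeze(obj):
        # recursively convert mappings and sequences into hashable tuples
        if isinstance(obj, dict):
            return tuple(sorted((k, deepfreeze(v)) for k, v in obj.items()))
        if isinstance(obj, (list, tuple, set)):
            return tuple(deepfreeze(v) for v in obj)
        return obj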
@@ -1015,6 +1167,7 @@ def get_updated_output_dict_from_reparsed_metadata(original_dict, new_outputs):
     return output_d
 
 
+@lru_cache(maxsize=200)
 def _filter_recipe_text(text, extract_pattern=None):
     if extract_pattern:
         match = re.search(extract_pattern, text, flags=re.MULTILINE | re.DOTALL)
@@ -1197,7 +1350,7 @@ def parse_again(
         log = utils.get_logger(__name__)
 
         if kw:
-            log.warn(
+            log.warning(
                 "using unsupported internal conda-build function `parse_again`. Please use "
                 "conda_build.api.render instead."
            )
@@ -1411,7 +1564,7 @@ def get_value(self, name, default=None, autotype=True):
             # is passed in with an index, e.g. get_value('source/0/git_url')
             if index is None:
                 log = utils.get_logger(__name__)
-                log.warn(
+                log.warning(
                     f"No index specified in get_value('{name}'). Assuming index 0."
                 )
                 index = 0
@@ -1448,7 +1601,7 @@ def check_field(key, section):
             if section == "extra":
                 continue
             if section not in FIELDS:
-                raise ValueError("unknown section: %s" % section)
+                raise ValueError(f"unknown section: {section}")
             for key_or_dict in submeta:
                 if section in OPTIONALLY_ITERABLE_FIELDS and isinstance(
                     key_or_dict, dict
@@ -1462,17 +1615,17 @@ def check_field(key, section):
     def name(self) -> str:
         name = self.get_value("package/name", "")
         if not name and self.final:
-            sys.exit("Error: package/name missing in: %r" % self.meta_path)
+            sys.exit(f"Error: package/name missing in: {self.meta_path!r}")
         name = str(name)
         if name != name.lower():
-            sys.exit("Error: package/name must be lowercase, got: %r" % name)
+            sys.exit(f"Error: package/name must be lowercase, got: {name!r}")
         check_bad_chrs(name, "package/name")
         return name
 
     def version(self) -> str:
         version = self.get_value("package/version", "")
         if not version and not self.get_section("outputs") and self.final:
-            sys.exit("Error: package/version missing in: %r" % self.meta_path)
+            sys.exit(f"Error: package/version missing in: {self.meta_path!r}")
         version = str(version)
         check_bad_chrs(version, "package/version")
         if self.final and version.startswith("."):
@@ -1541,7 +1694,7 @@ def ms_depends(self, typ="run"):
             try:
                 ms = MatchSpec(spec)
             except AssertionError:
-                raise RuntimeError("Invalid package specification: %r" % spec)
+                raise RuntimeError(f"Invalid package specification: {spec!r}")
             except (AttributeError, ValueError) as e:
                 raise RuntimeError(
                     "Received dictionary as spec. Note that pip requirements are "
@@ -1550,7 +1703,7 @@ def ms_depends(self, typ="run"):
             if ms.name == self.name() and not (
                 typ == "build" and self.config.host_subdir != self.config.build_subdir
             ):
-                raise RuntimeError("%s cannot depend on itself" % self.name())
+                raise RuntimeError(f"{self.name()} cannot depend on itself")
             for name, ver in name_ver_list:
                 if ms.name == name:
                     if self.noarch:
@@ -1665,7 +1818,6 @@ def build_id(self):
                 raise RuntimeError(
                     f"Couldn't extract raw recipe text for {self.name()} output"
                 )
-            raw_recipe_text = self.extract_package_and_build_text()
             raw_manual_build_string = re.search(r"\s*string:", raw_recipe_text)
             # user setting their own build string. Don't modify it.
             if manual_build_string and not (
@@ -1679,7 +1831,7 @@ def build_id(self):
         out = build_string_from_metadata(self)
         if self.config.filename_hashing and self.final:
             hash_ = self.hash_dependencies()
-            if not re.findall("h[0-9a-f]{%s}" % self.config.hash_length, out):
+            if not re.findall(f"h[0-9a-f]{{{self.config.hash_length}}}", out):
                 ret = out.rsplit("_", 1)
                 try:
                     int(ret[0])
@@ -1689,14 +1841,14 @@
                 if len(ret) > 1:
                     out = "_".join([out] + ret[1:])
             else:
-                out = re.sub("h[0-9a-f]{%s}" % self.config.hash_length, hash_, out)
+                out = re.sub(f"h[0-9a-f]{{{self.config.hash_length}}}", hash_, out)
         return out
 
     def dist(self):
         return f"{self.name()}-{self.version()}-{self.build_id()}"
 
     def pkg_fn(self):
-        return "%s.tar.bz2" % self.dist()
+        return f"{self.dist()}.tar.bz2"
 
     def is_app(self):
         return bool(self.get_value("app/entry"))
@@ -1704,8 +1856,8 @@ def is_app(self):
     def app_meta(self):
         d = {"type": "app"}
         if self.get_value("app/icon"):
-            d["icon"] = "%s.png" % compute_sum(
-                join(self.path, self.get_value("app/icon")), "md5"
+            d["icon"] = "{}.png".format(
+                compute_sum(join(self.path, self.get_value("app/icon")), "md5")
             )
 
         for field, key in [
@@ -2087,8 +2239,11 @@ def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None:
         return None
 
     def get_recipe_text(
-        self, extract_pattern=None, force_top_level=False, apply_selectors=True
-    ):
+        self,
+        extract_pattern: str | None = None,
+        force_top_level: bool = False,
+        apply_selectors: bool = True,
+    ) -> str:
         meta_path = self.meta_path
         if meta_path:
             recipe_text = read_meta_file(meta_path)
@@ -2173,7 +2328,7 @@ def extract_single_output_text(
             output = output_matches[output_index] if output_matches else ""
         except ValueError:
             if not self.path and self.meta.get("extra", {}).get("parent_recipe"):
-                utils.get_logger(__name__).warn(
+                utils.get_logger(__name__).warning(
                     f"Didn't match any output in raw metadata. Target value was: {output_name}"
                 )
                 output = ""
@@ -2236,7 +2391,7 @@ def validate_features(self):
                 "character in your recipe. "
             )
 
-    def copy(self):
+    def copy(self: Self) -> MetaData:
         new = copy.copy(self)
         new.config = self.config.copy()
         new.config.variant = copy.deepcopy(self.config.variant)
@@ -2287,7 +2442,7 @@ def variant_in_source(self):
                 # constrain the stored variants to only this version in the output
                 # variant mapping
                 if re.search(
-                    r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, self.extract_source_text()
+                    rf"\s*\{{\{{\s*{key}\s*(?:.*?)?\}}\}}", self.extract_source_text()
                 ):
                     return True
         return False
@@ -2448,9 +2603,7 @@ def append_parent_metadata(self, out_metadata):
 
     def get_reduced_variant_set(self, used_variables):
         # reduce variable space to limit work we need to do
-        full_collapsed_variants = variants.list_of_dicts_to_dict_of_lists(
-            self.config.variants
-        )
+        full_collapsed_variants = list_of_dicts_to_dict_of_lists(self.config.variants)
         reduced_collapsed_variants = full_collapsed_variants.copy()
 
         reduce_keys = set(self.config.variants[0].keys()) - set(used_variables)
@@ -2482,21 +2635,21 @@
                 # save only one element from this key
                 reduced_collapsed_variants[key] = utils.ensure_list(next(iter(values)))
 
-        out = variants.dict_of_lists_to_list_of_dicts(reduced_collapsed_variants)
+        out = dict_of_lists_to_list_of_dicts(reduced_collapsed_variants)
         return out
 
     def get_output_metadata_set(
         self,
-        permit_undefined_jinja=False,
-        permit_unsatisfiable_variants=False,
-        bypass_env_check=False,
-    ):
+        permit_undefined_jinja: bool = False,
+        permit_unsatisfiable_variants: bool = False,
+        bypass_env_check: bool = False,
+    ) -> list[OutputTuple]:
         from .source import provide
 
-        out_metadata_map = {}
+        output_tuples: list[OutputTuple] = []
         if self.final:
-            outputs = get_output_dicts_from_metadata(self)[0]
-            output_tuples = [(outputs, self)]
+            outputs = get_output_dicts_from_metadata(self)
+            output_tuples = [(outputs[0], self)]
         else:
             all_output_metadata = OrderedDict()
@@ -2541,49 +2694,41 @@
                     # also refine this collection as each output metadata object is
                     # finalized - see the finalize_outputs_pass function
                     all_output_metadata[
-                        (
-                            out_metadata.name(),
-                            HashableDict(
-                                {
-                                    k: copy.deepcopy(out_metadata.config.variant[k])
-                                    for k in out_metadata.get_used_vars()
-                                }
-                            ),
-                        )
+                        out_metadata.name(),
+                        deepfreeze(
+                            {
+                                k: out_metadata.config.variant[k]
+                                for k in out_metadata.get_used_vars()
+                            }
+                        ),
                     ] = (out, out_metadata)
-                    out_metadata_map[HashableDict(out)] = out_metadata
+                    output_tuples.append((out, out_metadata))
                     ref_metadata.other_outputs = out_metadata.other_outputs = (
                         all_output_metadata
                    )
             except SystemExit:
                 if not permit_undefined_jinja:
                     raise
-                out_metadata_map = {}
+                output_tuples = []
 
-        assert out_metadata_map, (
+        assert output_tuples, (
            "Error: output metadata set is empty. Please file an issue"
            " on the conda-build tracker at https://github.com/conda/conda-build/issues"
        )
 
-        # format here is {output_dict: metadata_object}
-        render_order = toposort(out_metadata_map)
-        check_circular_dependencies(render_order, config=self.config)
+        render_order: list[OutputTuple] = _toposort_outputs(output_tuples)
+        _check_circular_dependencies(render_order, config=self.config)
         conda_packages = OrderedDict()
         non_conda_packages = []
-        for output_d, m in render_order.items():
+        for output_d, m in render_order:
             if not output_d.get("type") or output_d["type"] in (
                 "conda",
                 "conda_v2",
             ):
                 conda_packages[
                     m.name(),
-                    HashableDict(
-                        {
-                            k: copy.deepcopy(m.config.variant[k])
-                            for k in m.get_used_vars()
-                        }
-                    ),
+                    deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}),
                 ] = (output_d, m)
             elif output_d.get("type") == "wheel":
                 if not output_d.get("requirements", {}).get("build") or not any(
@@ -2630,24 +2775,18 @@
                     m.final = True
                     final_conda_packages.append((out_d, m))
         output_tuples = final_conda_packages + non_conda_packages
+        _check_run_constrained(output_tuples)
         return output_tuples
 
     def get_loop_vars(self):
-        _variants = (
-            self.config.input_variants
-            if hasattr(self.config, "input_variants")
-            else self.config.variants
-        )
-        return variants.get_vars(_variants, loop_only=True)
+        return get_vars(getattr(self.config, "input_variants", self.config.variants))
 
     def get_used_loop_vars(self, force_top_level=False, force_global=False):
-        return {
-            var
-            for var in self.get_used_vars(
-                force_top_level=force_top_level, force_global=force_global
-            )
-            if var in self.get_loop_vars()
-        }
+        loop_vars = self.get_loop_vars()
+        used_vars = self.get_used_vars(
+            force_top_level=force_top_level, force_global=force_global
+        )
+        return set(loop_vars).intersection(used_vars)
 
     def get_rendered_recipe_text(
         self, permit_undefined_jinja=False, extract_pattern=None
@@ -2720,11 +2859,7 @@ def get_used_vars(self, force_top_level=False, force_global=False):
         global used_vars_cache
         recipe_dir = self.path
 
-        # `HashableDict` does not handle lists of other dictionaries correctly. Also it
-        # is constructed inplace, taking references to sub-elements of the input dict
-        # and thus corrupting it. Also, this was being called in 3 places in this function
-        # so caching it is probably a good thing.
-        hashed_variants = HashableDict(copy.deepcopy(self.config.variant))
+        hashed_variants = deepfreeze(self.config.variant)
         if hasattr(self.config, "used_vars"):
             used_vars = self.config.used_vars
         elif (
@@ -2827,7 +2962,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False):
             apply_selectors=False,
         )
 
-        all_used_selectors = variants.find_used_variables_in_text(
+        all_used_selectors = find_used_variables_in_text(
            variant_keys, recipe_text, selectors_only=True
        )
 
@@ -2836,7 +2971,7 @@
             force_global=force_global,
             apply_selectors=True,
         )
-        all_used_reqs = variants.find_used_variables_in_text(
+        all_used_reqs = find_used_variables_in_text(
            variant_keys, recipe_text, selectors_only=False
        )
 
@@ -2847,9 +2982,7 @@
         if force_global:
             used = all_used
         else:
-            requirements_used = variants.find_used_variables_in_text(
-                variant_keys, reqs_text
-            )
+            requirements_used = find_used_variables_in_text(variant_keys, reqs_text)
             outside_reqs_used = all_used - requirements_used
 
             requirements_used = trim_build_only_deps(self, requirements_used)
@@ -2862,16 +2995,12 @@ def _get_used_vars_build_scripts(self):
         buildsh = os.path.join(self.path, "build.sh")
         if os.path.isfile(buildsh):
             used_vars.update(
-                variants.find_used_variables_in_shell_script(
-                    self.config.variant, buildsh
-                )
+                find_used_variables_in_shell_script(self.config.variant, buildsh)
             )
         bldbat = os.path.join(self.path, "bld.bat")
         if self.config.platform == "win" and os.path.isfile(bldbat):
             used_vars.update(
-                variants.find_used_variables_in_batch_script(
-                    self.config.variant, bldbat
-                )
+                find_used_variables_in_batch_script(self.config.variant, bldbat)
             )
         return used_vars
@@ -2884,26 +3013,22 @@ def _get_used_vars_output_script(self):
             script = os.path.join(self.path, this_output["script"])
             if os.path.splitext(script)[1] == ".sh":
                 used_vars.update(
-                    variants.find_used_variables_in_shell_script(
-                        self.config.variant, script
-                    )
+                    find_used_variables_in_shell_script(self.config.variant, script)
                 )
             elif os.path.splitext(script)[1] == ".bat":
                 used_vars.update(
-                    variants.find_used_variables_in_batch_script(
-                        self.config.variant, script
-                    )
+                    find_used_variables_in_batch_script(self.config.variant, script)
                 )
             else:
                 log = utils.get_logger(__name__)
-                log.warn(
+                log.warning(
                     f"Not detecting used variables in output script {script}; conda-build only knows "
                     "how to search .sh and .bat files right now."
                )
         return used_vars
 
     def get_variants_as_dict_of_lists(self):
-        return variants.list_of_dicts_to_dict_of_lists(self.config.variants)
+        return list_of_dicts_to_dict_of_lists(self.config.variants)
 
     def clean(self):
         """This ensures that clean is called with the correct build id"""
@@ -2973,3 +3098,9 @@ def get_test_deps(self, py_files, pl_files, lua_files, r_files):
             specs.extend(utils.ensure_list(self.config.extra_deps))
 
         return specs
+
+
+class MetaDataTuple(NamedTuple):
+    metadata: MetaData
+    need_download: bool
+    need_reparse: bool
diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py
index fb81565b3d..1e80fcd2e4 100644
--- a/conda_build/noarch_python.py
+++ b/conda_build/noarch_python.py
@@ -26,7 +26,7 @@ def rewrite_script(fn, prefix):
         try:
             data = fi.read()
         except UnicodeDecodeError:  # file is binary
-            sys.exit("[noarch_python] Noarch package contains binary script: %s" % fn)
+            sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}")
     src_mode = os.stat(src).st_mode
     os.unlink(src)
 
@@ -83,7 +83,7 @@ def handle_file(f, d, prefix):
     else:
         # this should be the built-in logging module, not conda-build's stuff, because this file is standalone.
         log = logging.getLogger(__name__)
-        log.debug("Don't know how to handle file: %s. Including it as-is." % f)
+        log.debug(f"Don't know how to handle file: {f}. Including it as-is.")
 
 
 def populate_files(m, files, prefix, entry_point_scripts=None):
@@ -119,7 +119,7 @@ def transform(m, files, prefix):
 
     # Create *nix prelink script
     # Note: it's important to use LF newlines or it wont work if we build on Win
-    with open(join(bin_dir, ".%s-pre-link.sh" % name), "wb") as fo:
+    with open(join(bin_dir, f".{name}-pre-link.sh"), "wb") as fo:
         fo.write(
             b"""\
 #!/bin/bash
@@ -128,7 +128,7 @@ def transform(m, files, prefix):
     )
 
     # Create windows prelink script (be nice and use Windows newlines)
-    with open(join(scripts_dir, ".%s-pre-link.bat" % name), "wb") as fo:
+    with open(join(scripts_dir, f".{name}-pre-link.bat"), "wb") as fo:
         fo.write(
             """\
 @echo off
diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py
index 6f15173f29..84e80b8e90 100644
--- a/conda_build/os_utils/ldd.py
+++ b/conda_build/os_utils/ldd.py
@@ -9,7 +9,8 @@
 from pathlib import Path
 from typing import TYPE_CHECKING
 
-from ..conda_interface import untracked
+from conda.misc import untracked
+
 from ..utils import on_linux, on_mac
 from .macho import otool
 from .pyldd import codefile_class, inspect_linkages, machofile
@@ -43,7 +44,7 @@ def ldd(path):
             continue
         if "ld-linux" in line:
             continue
-        raise RuntimeError("Unexpected output from ldd: %s" % line)
+        raise RuntimeError(f"Unexpected output from ldd: {line}")
 
     return res
 
@@ -51,7 +52,7 @@ def ldd(path):
 def get_linkages(
     obj_files: Iterable[str],
     prefix: str | os.PathLike | Path,
-    sysroot,
+    sysroot: str,
 ) -> dict[str, list[tuple[str, str]]]:
     return _get_linkages(tuple(obj_files), Path(prefix), sysroot)
 
@@ -60,7 +61,7 @@ def get_linkages(
 def _get_linkages(
     obj_files: tuple[str],
     prefix: Path,
-    sysroot,
+    sysroot: str,
 ) -> dict[str, list[tuple[str, str]]]:
     linkages = {}
     for file in obj_files:
diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py
index 9b14454c4f..b474a4897a 100644
--- a/conda_build/os_utils/liefldd.py
+++ b/conda_build/os_utils/liefldd.py
@@ -13,6 +13,8 @@
 from pathlib import Path
 from subprocess import PIPE, Popen
 
+from conda.models.version import VersionOrder
+
 from ..utils import on_mac, on_win, rec_glob
 from .external import find_executable
 
@@ -351,12 +353,12 @@ def _get_path_dirs(prefix):
     yield "/".join((prefix, "bin"))
 
 
-def get_uniqueness_key(file):
+def get_uniqueness_key(filename, file):
     binary = ensure_binary(file)
     if not binary:
         return EXE_FORMATS.UNKNOWN
     elif binary.format == EXE_FORMATS.MACHO:
-        return str(file)
+        return filename
     elif binary.format == EXE_FORMATS.ELF and (  # noqa
         binary.type == lief.ELF.ELF_CLASS.CLASS32
         or binary.type == lief.ELF.ELF_CLASS.CLASS64
@@ -367,8 +369,8 @@ def get_uniqueness_key(file):
         ]
         if result:
             return result[0]
-        return str(file)
-    return str(file)
+        return filename
+    return filename
 
 
 def _get_resolved_location(
@@ -503,13 +505,13 @@
     for element in todo:
         todo.pop(0)
         filename2 = element[0]
-        binary = element[1]
-        if not binary:
+        binary2 = element[1]
+        if not binary2:
             continue
-        uniqueness_key = get_uniqueness_key(binary)
+        uniqueness_key = get_uniqueness_key(filename2, binary2)
         if uniqueness_key not in already_seen:
             parent_exe_dirname = None
-            if binary.format == EXE_FORMATS.PE:
+            if binary2.format == EXE_FORMATS.PE:
                 tmp_filename = filename2
                 while tmp_filename:
                     if (
@@ -525,17 +527,17 @@
             if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2:
                 parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs"
             rpaths_by_binary[filename2] = get_rpaths(
-                binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
+                binary2, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
             )
             tmp_filename = filename2
             rpaths_transitive = []
-            if binary.format == EXE_FORMATS.PE:
+            if binary2.format == EXE_FORMATS.PE:
                 rpaths_transitive = rpaths_by_binary[tmp_filename]
             else:
                 while tmp_filename:
                     rpaths_transitive[:0] = rpaths_by_binary[tmp_filename]
                     tmp_filename = parents_by_filename[tmp_filename]
-            libraries = get_libraries(binary)
+            libraries = get_libraries(binary2)
             if filename2 in libraries:  # Happens on macOS, leading to cycles.
                 libraries.remove(filename2)
             # RPATH is implicit everywhere except macOS, make it explicit to simplify things.
@@ -544,14 +546,14 @@
                     "$RPATH/" + lib
                     if not lib.startswith("/")
                     and not lib.startswith("$")
-                    and binary.format != EXE_FORMATS.MACHO  # noqa
+                    and binary2.format != EXE_FORMATS.MACHO  # noqa
                     else lib
                 )
                 for lib in libraries
             ]
             for lib, orig in zip(libraries, these_orig):
                 resolved = _get_resolved_location(
-                    binary,
+                    binary2,
                     orig,
                     exedir,
                     exedir,
@@ -566,7 +568,7 @@
                     # can be run case-sensitively if the user wishes.
                    # """
-                    if binary.format == EXE_FORMATS.PE:
+                    if binary2.format == EXE_FORMATS.PE:
                         import random
 
                         path_fixed = (
                             os.path.dirname(path_fixed)
@@ -594,7 +596,7 @@
                 if recurse:
                     if os.path.exists(resolved[0]):
                         todo.append([resolved[0], lief.parse(resolved[0])])
-        already_seen.add(get_uniqueness_key(binary))
+        already_seen.add(uniqueness_key)
     return results
@@ -963,7 +965,6 @@ def get_static_lib_exports_dumpbin(filename):
                 results.append((result, version))
         except:
             pass
-    from ..conda_interface import VersionOrder
 
     results = sorted(results, key=lambda x: VersionOrder(x[1]))
     dumpbin_exe = results[-1][0]
@@ -1124,9 +1125,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat
             )
             if binary.__class__ != lief.MachO.Binary:
                 if isinstance(s, str):
-                    s_name = "%s" % s
+                    s_name = f"{s}"
                 else:
-                    s_name = "%s" % s.name
+                    s_name = f"{s.name}"
                 if s.exported and s.imported:
                     print(f"Weird, symbol {s.name} is both imported and exported")
                 if s.exported:
@@ -1135,16 +1136,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat
                 elif s.imported:
                     is_undefined = False
             else:
-                s_name = "%s" % s.name
+                s_name = f"{s.name}"
                 is_notexported = False if s.type & 1 else True
 
             # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, s.value))
             # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, s.type, s.name))
             if notexported is True or is_notexported is False:
                 if is_undefined and undefined:
-                    res.append("%s" % s_name)
+                    res.append(f"{s_name}")
                 elif not is_undefined and defined:
-                    res.append("%s" % s_name)
+                    res.append(f"{s_name}")
 
     return res
@@ -1173,6 +1174,10 @@ def __call__(self, *args, **kw):
                     if not data:
                         break
                     sha1.update(data)
+                # update with file name, if its a different
+                # file with the same contents, we don't want
+                # to treat it as cached
+                sha1.update(os.path.realpath(arg).encode("utf-8"))
                 arg = sha1.hexdigest()
             if isinstance(arg, list):
                 newargs.append(tuple(arg))
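The sha1 update above fixes a cache-collision: two different files with
identical contents previously shared one memoized result. Mixing the
resolved path into the digest makes the key unique per file. A standalone
version of the key computation (file_path is a hypothetical argument):

    import hashlib
    import os

    def cache_key(file_path: str) -> str:
        sha1 = hashlib.sha1()
        with open(file_path, "rb") as fh:
            for chunk in iter(lambda: fh.read(1 << 16), b""):
                sha1.update(chunk)
        # same contents at a different location -> different key
        sha1.update(os.path.realpath(file_path).encode("utf-8"))
        return sha1.hexdigest()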
diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py
index 516df7a0a6..8e02c8ee86 100644
--- a/conda_build/os_utils/macho.py
+++ b/conda_build/os_utils/macho.py
@@ -258,7 +258,7 @@ def _chmod(filename, mode):
         os.chmod(filename, mode)
     except (OSError, utils.PermissionError) as e:
         log = utils.get_logger(__name__)
-        log.warn(str(e))
+        log.warning(str(e))
 
 
 def install_name_tool(args, build_prefix=None, verbose=False):
@@ -286,7 +286,7 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False):
     args = ["-add_rpath", rpath, path]
     code, _, stderr = install_name_tool(args, build_prefix)
     if "Mach-O dynamic shared library stub file" in stderr:
-        print("Skipping Mach-O dynamic shared library stub file %s\n" % path)
+        print(f"Skipping Mach-O dynamic shared library stub file {path}\n")
         return
     elif "would duplicate path, file already has LC_RPATH for:" in stderr:
         print("Skipping -add_rpath, file already has LC_RPATH set")
@@ -304,7 +304,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False):
     args = ["-delete_rpath", rpath, path]
     code, _, stderr = install_name_tool(args, build_prefix)
     if "Mach-O dynamic shared library stub file" in stderr:
-        print("Skipping Mach-O dynamic shared library stub file %s\n" % path)
+        print(f"Skipping Mach-O dynamic shared library stub file {path}\n")
         return
     elif "no LC_RPATH load command with path:" in stderr:
         print("Skipping -delete_rpath, file doesn't contain that LC_RPATH")
@@ -341,7 +341,7 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False):
         args.extend(("-change", dylibs[index]["name"], new_name, path))
         code, _, stderr = install_name_tool(args, build_prefix)
     if "Mach-O dynamic shared library stub file" in stderr:
-        print("Skipping Mach-O dynamic shared library stub file %s" % path)
+        print(f"Skipping Mach-O dynamic shared library stub file {path}")
         ret = False
         continue
     else:
diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py
index ceffb1dbc6..ff48d5f891 100644
--- a/conda_build/os_utils/pyldd.py
+++ b/conda_build/os_utils/pyldd.py
@@ -1048,7 +1048,7 @@ def _get_magic_bit(path: Path) -> bytes:
     return None
 
 
-def _trim_sysroot(sysroot):
+def _trim_sysroot(sysroot: str) -> str:
     if sysroot:
         while sysroot.endswith("/") or sysroot.endswith("\\"):
             sysroot = sysroot[:-1]
@@ -1066,7 +1066,7 @@ def _get_arch_if_native(arch):
 
 # TODO :: Consider memoizing instead of repeatedly scanning
 # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True)
-def _inspect_linkages_this(filename, sysroot="", arch="native"):
+def _inspect_linkages_this(filename, sysroot: str = "", arch="native"):
     """
 
     :param filename:
@@ -1100,7 +1100,7 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"):
 
 # TODO :: Consider returning a tree structure or a dict when recurse is True?
 def inspect_linkages(
-    filename, resolve_filenames=True, recurse=True, sysroot="", arch="native"
+    filename, resolve_filenames=True, recurse=True, sysroot: str = "", arch="native"
 ):
     already_seen = set()
     todo = {filename}
diff --git a/conda_build/post.py b/conda_build/post.py
index eea8a584b6..7ed419eebf 100644
--- a/conda_build/post.py
+++ b/conda_build/post.py
@@ -35,15 +35,13 @@
 from typing import TYPE_CHECKING
 
 from conda.core.prefix_data import PrefixData
+from conda.gateways.disk.create import TemporaryDirectory
+from conda.gateways.disk.link import lchmod
 from conda.gateways.disk.read import compute_sum
+from conda.misc import walk_prefix
 from conda.models.records import PrefixRecord
 
 from . import utils
-from .conda_interface import (
-    TemporaryDirectory,
-    lchmod,
-    walk_prefix,
-)
 from .exceptions import OverDependingError, OverLinkingError, RunPathError
 from .inspect_pkg import which_package
 from .os_utils import external, macho
@@ -152,11 +150,11 @@ def write_pth(egg_path, config):
     with open(
         join(
             utils.get_site_packages(config.host_prefix, py_ver),
-            "%s.pth" % (fn.split("-")[0]),
+            "{}.pth".format(fn.split("-")[0]),
         ),
         "w",
     ) as fo:
-        fo.write("./%s\n" % fn)
+        fo.write(f"./{fn}\n")
 
 
 def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False):
@@ -370,7 +368,7 @@ def find_lib(link, prefix, files, path=None):
         if link.startswith(prefix):
             link = normpath(link[len(prefix) + 1 :])
             if not any(link == normpath(w) for w in files):
-                sys.exit("Error: Could not find %s" % link)
+                sys.exit(f"Error: Could not find {link}")
             return link
         if link.startswith("/"):  # but doesn't start with the build prefix
             return
@@ -384,7 +382,7 @@ def find_lib(link, prefix, files, path=None):
         for f in files:
             file_names[basename(f)].append(f)
         if link not in file_names:
-            sys.exit("Error: Could not find %s" % link)
+            sys.exit(f"Error: Could not find {link}")
         if len(file_names[link]) > 1:
             if path and basename(path) == link:
                 # The link is for the file itself, just use it
@@ -405,7 +403,7 @@ def find_lib(link, prefix, files, path=None):
                 "Choosing the first one."
             )
         return file_names[link][0]
-    print("Don't know how to find %s, skipping" % link)
+    print(f"Don't know how to find {link}, skipping")
@@ -419,8 +417,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files):
         )
         if not codefile_class(link, skip_symlinks=True):
             sys.exit(
-                "Error: Compiler runtime library in build prefix not found in host prefix %s"
-                % link
+                f"Error: Compiler runtime library in build prefix not found in host prefix {link}"
             )
         else:
             print(f".. fixing linking of {link} in {path} instead")
@@ -431,7 +428,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files):
         return
 
     print(f"Fixing linking of {link} in {path}")
-    print("New link location is %s" % (link_loc))
+    print(f"New link location is {link_loc}")
 
     lib_to_link = relpath(dirname(link_loc), "lib")
     # path_to_lib = utils.relative(path[len(prefix) + 1:])
@@ -649,7 +646,7 @@ def assert_relative_osx(path, host_prefix, build_prefix):
     for prefix in (host_prefix, build_prefix):
         if prefix and name.startswith(prefix):
             raise RuntimeError(
-                "library at %s appears to have an absolute path embedded" % path
+                f"library at {path} appears to have an absolute path embedded"
             )
 
 
@@ -1598,7 +1595,7 @@ def post_process_shared_lib(m, f, files, host_prefix=None):
     elif codefile == machofile:
         if m.config.host_platform != "osx":
             log = utils.get_logger(__name__)
-            log.warn(
+            log.warning(
                 "Found Mach-O file but patching is only supported on macOS, skipping: %s",
                 path,
             )
@@ -1634,7 +1631,7 @@ def fix_permissions(files, prefix):
                 lchmod(path, new_mode)
             except (OSError, utils.PermissionError) as e:
                 log = utils.get_logger(__name__)
-                log.warn(str(e))
+                log.warning(str(e))
 
 
 def check_menuinst_json(files, prefix) -> None:
@@ -1772,7 +1769,7 @@ def check_symlinks(files, prefix, croot):
 
     if msgs:
         for msg in msgs:
-            print("Error: %s" % msg, file=sys.stderr)
+            print(f"Error: {msg}", file=sys.stderr)
         sys.exit(1)
diff --git a/conda_build/render.py b/conda_build/render.py
index be17eaa461..0c80df0005 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -10,12 +10,10 @@
 import subprocess
 import sys
 import tarfile
-import tempfile
 from collections import OrderedDict, defaultdict
 from contextlib import contextmanager
 from functools import lru_cache
 from os.path import (
-    dirname,
     isabs,
     isdir,
     isfile,
@@ -27,14 +25,17 @@
 import yaml
 from conda.base.context import context
+from conda.cli.common import specs_from_url
 from conda.core.package_cache_data import ProgressiveFetchExtract
-from conda.exceptions import UnsatisfiableError
+from conda.exceptions import NoPackagesFoundError, UnsatisfiableError
+from conda.gateways.disk.create import TemporaryDirectory
+from conda.models.records import PackageRecord
+from conda.models.version import VersionOrder
 
 from . import environ, exceptions, source, utils
-from .conda_interface import PackageRecord, TemporaryDirectory, specs_from_url
 from .exceptions import DependencyNeedsBuildingError
 from .index import get_build_index
-from .metadata import MetaData, combine_top_level_metadata_with_output
+from .metadata import MetaData, MetaDataTuple, combine_top_level_metadata_with_output
 from .utils import (
     CONDA_PACKAGE_EXTENSION_V1,
     CONDA_PACKAGE_EXTENSION_V2,
@@ -47,7 +48,8 @@
 )
 
 if TYPE_CHECKING:
-    from typing import Iterator
+    import os
+    from typing import Any, Iterable, Iterator
 
     from .config import Config
 
@@ -61,7 +63,7 @@ def odict_representer(dumper, data):
 yaml.add_representer(OrderedDict, odict_representer)
 
 
-def bldpkg_path(m):
+def bldpkg_path(m: MetaData) -> str:
     """
     Returns path to built package's tarball given its ``Metadata``.
     """
@@ -113,7 +115,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant):
             # for sake of comparison, ignore dashes and underscores
             if dash_or_under.sub("", key) == dash_or_under.sub(
                 "", spec_name
-            ) and not re.search(r"%s\s+[0-9a-zA-Z\_\.\<\>\=\*]" % spec_name, spec):
+            ) and not re.search(rf"{spec_name}\s+[0-9a-zA-Z\_\.\<\>\=\*]", spec):
                 dependencies.append(" ".join((spec_name, value)))
         elif exclude_pattern.match(spec):
             pass_through_deps.append(spec)
@@ -737,14 +739,14 @@ def finalize_metadata(
     if build_unsat or host_unsat:
         m.final = False
         log = utils.get_logger(__name__)
-        log.warn(
+        log.warning(
             f"Returning non-final recipe for {m.dist()}; one or more dependencies "
             "was unsatisfiable:"
         )
         if build_unsat:
-            log.warn(f"Build: {build_unsat}")
+            log.warning(f"Build: {build_unsat}")
         if host_unsat:
-            log.warn(f"Host: {host_unsat}")
+            log.warning(f"Host: {host_unsat}")
     else:
         m.final = True
     if is_top_level:
@@ -793,42 +795,28 @@ def reparse(metadata):
 
 
 def distribute_variants(
-    metadata,
+    metadata: MetaData,
     variants,
-    permit_unsatisfiable_variants=False,
-    allow_no_other_outputs=False,
-    bypass_env_check=False,
-):
-    rendered_metadata = {}
+    permit_unsatisfiable_variants: bool = False,
+    allow_no_other_outputs: bool = False,
+    bypass_env_check: bool = False,
+) -> list[MetaDataTuple]:
+    rendered_metadata: dict[
+        tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple
+    ] = {}
     need_source_download = True
 
    # don't bother distributing python if it's a noarch package, and figure out
    # which python version we prefer. `python_age` can be used to tweak which
    # python gets used here.
     if metadata.noarch or metadata.noarch_python:
-        from .conda_interface import VersionOrder
-
-        age = int(
-            metadata.get_value(
-                "build/noarch_python_build_age", metadata.config.noarch_python_build_age
-            )
-        )
-        versions = []
-        for variant in variants:
-            if "python" in variant:
-                vo = variant["python"]
-                if vo not in versions:
-                    versions.append(vo)
-        version_indices = sorted(
-            range(len(versions)), key=lambda k: VersionOrder(versions[k].split(" ")[0])
-        )
-        if age < 0:
-            age = 0
-        elif age > len(versions) - 1:
-            age = len(versions) - 1
-        build_ver = versions[version_indices[len(versions) - 1 - age]]
+        # filter variants by the newest Python version
+        version = sorted(
+            {version for variant in variants if (version := variant.get("python"))},
+            key=lambda key: VersionOrder(key.split(" ")[0]),
+        )[-1]
         variants = filter_by_key_value(
-            variants, "python", build_ver, "noarch_python_reduction"
+            variants, "python", version, "noarch_python_reduction"
        )
 
     # store these for reference later
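The rewritten noarch branch above drops the old age-based selection and
always keeps the newest Python variant, compared with conda's VersionOrder
rather than as plain strings. A condensed, runnable form of the selection
(the variant dicts are made up):

    from conda.models.version import VersionOrder

    variants = [{"python": "3.9"}, {"python": "3.11"}, {"python": "3.10"}]
    newest = sorted(
        {v["python"] for v in variants if v.get("python")},
        key=lambda key: VersionOrder(key.split(" ")[0]),
    )[-1]
    print(newest)  # 3.11 (string sorting would wrongly pick 3.9)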
@@ -906,23 +894,25 @@
             mv.config.variant.get("target_platform", mv.config.subdir),
             tuple((var, mv.config.variant.get(var)) for var in mv.get_used_vars()),
         )
-    ] = (mv, need_source_download, None)
+    ] = MetaDataTuple(mv, need_source_download, False)
 
     # list of tuples.
     # each tuple item is a tuple of 3 items:
-    #    metadata, need_download, need_reparse_in_env
+    #    metadata, need_download, need_reparse
     return list(rendered_metadata.values())
 
 
-def expand_outputs(metadata_tuples):
+def expand_outputs(
+    metadata_tuples: Iterable[MetaDataTuple],
+) -> list[tuple[dict, MetaData]]:
     """Obtain all metadata objects for all outputs from recipe.  Useful for outputting paths."""
-    expanded_outputs = OrderedDict()
+    from copy import deepcopy
 
-    for _m, download, reparse in metadata_tuples:
-        from .build import get_all_replacements
+    from .build import get_all_replacements
 
-        get_all_replacements(_m.config)
-        from copy import deepcopy
+    expanded_outputs: dict[str, tuple[dict, MetaData]] = {}
 
+    for _m, download, reparse in metadata_tuples:
+        get_all_replacements(_m.config)
         for output_dict, m in deepcopy(_m).get_output_metadata_set(
             permit_unsatisfiable_variants=False
         ):
@@ -943,7 +933,7 @@ def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]:
         yield recipe
     elif recipe.suffixes in [[".tar"], [".tar", ".gz"], [".tgz"], [".tar", ".bz2"]]:
         # extract the recipe to a temporary directory
-        with tempfile.TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar:
+        with TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar:
             tar.extractall(path=tmp)
             yield Path(tmp)
     elif recipe.suffix == ".yaml":
@@ -957,11 +947,11 @@ def render_recipe(
     recipe_dir: str | os.PathLike | Path,
     config: Config,
     no_download_source: bool = False,
-    variants: dict | None = None,
+    variants: dict[str, Any] | None = None,
     permit_unsatisfiable_variants: bool = True,
     reset_build_id: bool = True,
     bypass_env_check: bool = False,
-) -> list[tuple[MetaData, bool, bool]]:
+) -> list[MetaDataTuple]:
     """Returns a list of tuples, each consisting of
 
     (metadata-object, needs_download, needs_render_in_env)
@@ -994,7 +984,7 @@
             m.config.variant_config_files = [cbc_yaml]
             m.config.variants = get_package_variants(m, variants=variants)
             m.config.variant = m.config.variants[0]
-            return [(m, False, False)]
+            return [MetaDataTuple(m, False, False)]
         else:
             # merge any passed-in variants with any files found
             variants = get_package_variants(m, variants=variants)
@@ -1010,6 +1000,59 @@
     )
 
 
+def render_metadata_tuples(
+    metadata_tuples: Iterable[MetaDataTuple],
+    config: Config,
+    permit_unsatisfiable_variants: bool = True,
+    finalize: bool = True,
+    bypass_env_check: bool = False,
+) -> list[MetaDataTuple]:
+    output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {}
+    for meta, download, render_in_env in metadata_tuples:
+        if not meta.skip() or not config.trim_skip:
+            for od, om in meta.get_output_metadata_set(
+                permit_unsatisfiable_variants=permit_unsatisfiable_variants,
+                permit_undefined_jinja=not finalize,
+                bypass_env_check=bypass_env_check,
+            ):
+                if not om.skip() or not config.trim_skip:
+                    if "type" not in od or od["type"] == "conda":
+                        if finalize and not om.final:
+                            try:
+                                om = finalize_metadata(
+                                    om,
+                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants,
+                                )
+                            except (DependencyNeedsBuildingError, NoPackagesFoundError):
+                                if not permit_unsatisfiable_variants:
+                                    raise
+
+                        # remove outputs section from output objects for simplicity
+                        if not om.path and (outputs := om.get_section("outputs")):
+                            om.parent_outputs = outputs
+                            del om.meta["outputs"]
+
+                        output_metas[
+                            om.dist(),
+                            om.config.variant.get("target_platform"),
+                            tuple(
+                                (var, om.config.variant[var])
+                                for var in om.get_used_vars()
+                            ),
+                        ] = MetaDataTuple(om, download, render_in_env)
+                    else:
+                        output_metas[
+                            f"{om.type}: {om.name()}",
+                            om.config.variant.get("target_platform"),
+                            tuple(
+                                (var, om.config.variant[var])
+                                for var in om.get_used_vars()
+                            ),
+                        ] = MetaDataTuple(om, download, render_in_env)
+
+    return list(output_metas.values())
+
+
 # Keep this out of the function below so it can be imported by other modules.
 FIELDS = [
     "package",
@@ -1055,7 +1098,11 @@ def ignore_aliases(self, data):
     unicode = None  # silence pyflakes about unicode not existing in py3
 
 
-def output_yaml(metadata, filename=None, suppress_outputs=False):
+def output_yaml(
+    metadata: MetaData,
+    filename: str | os.PathLike | Path | None = None,
+    suppress_outputs: bool = False,
+) -> str:
     local_metadata = metadata.copy()
     if (
         suppress_outputs
@@ -1070,13 +1117,9 @@ def output_yaml(metadata, filename=None, suppress_outputs=False):
         indent=2,
     )
     if filename:
-        if any(sep in filename for sep in ("\\", "/")):
-            try:
-                os.makedirs(dirname(filename))
-            except OSError:
-                pass
-        with open(filename, "w") as f:
-            f.write(output)
-        return "Wrote yaml to %s" % filename
+        filename = Path(filename)
+        filename.parent.mkdir(parents=True, exist_ok=True)
+        filename.write_text(output)
+        return f"Wrote yaml to {filename}"
     else:
         return output
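output_yaml's new filename handling above also covers bare filenames: the
old any(sep in filename ...) guard skipped directory creation for them,
while Path(...).parent of a bare name is "." and mkdir(exist_ok=True) is a
no-op. The same pattern in isolation, with a made-up target path:

    from pathlib import Path

    target = Path("output/subdir/meta.yaml")  # hypothetical location
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text("name: example\n")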
diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py
index 507086e4fe..31213054d1 100644
--- a/conda_build/skeletons/cpan.py
+++ b/conda_build/skeletons/cpan.py
@@ -4,6 +4,8 @@
 Tools for converting CPAN packages to conda recipes.
 """
 
+from __future__ import annotations
+
 import codecs
 import gzip
 import hashlib
@@ -21,15 +23,12 @@
 import requests
 from conda.core.index import get_index
 from conda.exceptions import CondaError, CondaHTTPError
+from conda.gateways.connection.download import TmpDownload, download
+from conda.gateways.disk.create import TemporaryDirectory
+from conda.models.match_spec import MatchSpec
+from conda.resolve import Resolve
 
 from .. import environ
-from ..conda_interface import (
-    MatchSpec,
-    Resolve,
-    TemporaryDirectory,
-    TmpDownload,
-    download,
-)
 from ..config import Config, get_or_merge_config
 from ..utils import check_call_env, on_linux, on_win
 from ..variants import get_default_variant
@@ -355,19 +354,22 @@ def install_perl_get_core_modules(version):
             "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); "
             'print join "\n", @modules;',
         ]
-        all_core_modules = (
-            subprocess.check_output(args, shell=False)
-            .decode("utf-8")
-            .replace("\r\n", "\n")
-            .split("\n")
-        )
+        try:
+            all_core_modules = (
+                subprocess.check_output(args, shell=False)
+                .decode("utf-8")
+                .replace("\r\n", "\n")
+                .split("\n")
+            )
+        except Exception as e:
+            print(
+                f"Failed to query perl={version} for core modules list, ran:\n"
+                f"{' '.join(args)}"
+            )
+            print(e.message)
         return all_core_modules
     except Exception as e:
-        print(
-            "Failed to query perl={} for core modules list, attempted command was:\n{}".format(
-                version, " ".join(args)
-            )
-        )
+        print(f"Failed to query perl={version} for core modules list.")
         print(e.message)
 
     return []
@@ -384,15 +386,15 @@ def get_core_modules_for_this_perl_version(version, cache_dir):
 
 # meta_cpan_url="http://api.metacpan.org",
 def skeletonize(
-    packages,
-    output_dir=".",
-    version=None,
-    meta_cpan_url="https://fastapi.metacpan.org/v1",
-    recursive=False,
-    force=False,
-    config=None,
-    write_core=False,
-):
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    meta_cpan_url: str = "https://fastapi.metacpan.org/v1",
+    recursive: bool = False,
+    force: bool = False,
+    config: Config | None = None,
+    write_core: bool = False,
+) -> None:
     """
     Loops over packages, outputting conda recipes converted from CPAN metata.
     """
@@ -509,9 +511,7 @@ def skeletonize(
             # packages, unless we're newer than what's in core
             if metacpan_api_is_core_version(meta_cpan_url, package):
                 if not write_core:
-                    print(
-                        "We found core module %s. Skipping recipe creation." % packagename
-                    )
+                    print(f"We found core module {packagename}. Skipping recipe creation.")
                     continue
 
             d["useurl"] = "#"
@@ -575,12 +575,11 @@ def skeletonize(
                 version = None
             if exists(dir_path) and not force:
                 print(
-                    "Directory %s already exists and you have not specified --force "
-                    % dir_path
+                    f"Directory {dir_path} already exists and you have not specified --force "
                 )
                 continue
             elif exists(dir_path) and force:
-                print("Directory %s already exists, but forcing recipe creation" % dir_path)
+                print(f"Directory {dir_path} already exists, but forcing recipe creation")
 
         try:
             d["homeurl"] = release_data["resources"]["homepage"]
@@ -754,7 +753,7 @@ def deps_for_package(
     }
     packages_to_append = set()
 
-    print("Processing dependencies for %s..." % package, end="")
+    print(f"Processing dependencies for {package}...", end="")
     sys.stdout.flush()
 
     if not release_data.get("dependency"):
@@ -1050,11 +1049,8 @@ def metacpan_api_is_core_version(cpan_url, module):
         return True
     else:
         sys.exit(
-            (
-                "Error: Could not find module or distribution named"
-                " %s on MetaCPAN."
-            )
-            % (module)
+            "Error: Could not find module or distribution named"
+            f" {module} on MetaCPAN."
        )
diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py
index 7140c9a89f..93958333fb 100755
--- a/conda_build/skeletons/cran.py
+++ b/conda_build/skeletons/cran.py
@@ -41,10 +41,11 @@
 from typing import TYPE_CHECKING
 
+from conda.base.context import context
 from conda.common.io import dashlist
+from conda.gateways.disk.create import TemporaryDirectory
 
 from .. import source
-from ..conda_interface import TemporaryDirectory, cc_conda_build
 from ..config import get_or_merge_config
 from ..license_family import allowed_license_families, guess_license_family
 from ..metadata import MetaData
@@ -54,6 +55,8 @@
 if TYPE_CHECKING:
     from typing import Literal
 
+    from ..config import Config
+
 SOURCE_META = """\
   {archive_keys}
   {git_url_key} {git_url}
@@ -454,7 +457,7 @@ def add_parser(repos):
     cran.add_argument(
         "-m",
         "--variant-config-files",
-        default=cc_conda_build.get("skeleton_config_yaml", None),
+        default=context.conda_build.get("skeleton_config_yaml", None),
         help="""Variant config file to add.  These yaml files can contain
         keys such as `cran_mirror`.  Only one can be provided here.""",
     )
@@ -486,7 +489,7 @@ def dict_from_cran_lines(lines):
             # - Suggests in corpcor
             (k, v) = line.split(":", 1)
         except ValueError:
-            sys.exit("Error: Could not parse metadata (%s)" % line)
+            sys.exit(f"Error: Could not parse metadata ({line})")
         d[k] = v
         # if k not in CRAN_KEYS:
         #     print("Warning: Unknown key %s" % k)
@@ -594,7 +597,7 @@ def read_description_contents(fp):
 
 def get_archive_metadata(path, verbose=True):
     if verbose:
-        print("Reading package metadata from %s" % path)
+        print(f"Reading package metadata from {path}")
     if basename(path) == "DESCRIPTION":
         with open(path, "rb") as fp:
             return read_description_contents(fp)
@@ -611,8 +614,8 @@ def get_archive_metadata(path, verbose=True):
                 fp = zf.open(member, "r")
                 return read_description_contents(fp)
     else:
-        sys.exit("Cannot extract a DESCRIPTION from file %s" % path)
-    sys.exit("%s does not seem to be a CRAN package (no DESCRIPTION) file" % path)
+        sys.exit(f"Cannot extract a DESCRIPTION from file {path}")
+    sys.exit(f"{path} does not seem to be a CRAN package (no DESCRIPTION) file")
 
 
 def get_latest_git_tag(config):
@@ -635,12 +638,12 @@ def get_latest_git_tag(config):
     stdout = stdout.decode("utf-8")
     stderr = stderr.decode("utf-8")
     if stderr or p.returncode:
-        sys.exit("Error: git tag failed (%s)" % stderr)
+        sys.exit(f"Error: git tag failed ({stderr})")
     tags = stdout.strip().splitlines()
     if not tags:
         sys.exit("Error: no tags found")
 
-    print("Using tag %s" % tags[-1])
+    print(f"Using tag {tags[-1]}")
     return tags[-1]
 
 
@@ -680,7 +683,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True):
         r.raise_for_status()
     except requests.exceptions.HTTPError as e:
         if e.response.status_code == 404:
-            print("No archive directory for package %s" % package)
+            print(f"No archive directory for package {package}")
             return []
         raise
     versions = []
@@ -695,7 +698,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True):
 
 def get_cran_index(cran_url, session, verbose=True):
     if verbose:
-        print("Fetching main index from %s" % cran_url)
+        print(f"Fetching main index from {cran_url}")
     r = session.get(cran_url + "/src/contrib/")
     r.raise_for_status()
     records = {}
@@ -772,7 +775,7 @@ def package_to_inputs_dict(
     """
     if isfile(package):
         return None
-    print("Parsing input package %s:" % package)
+    print(f"Parsing input package {package}:")
     package = strip_end(package, "/")
     package = strip_end(package, sep)
     if "github.com" in package:
@@ -862,28 +865,36 @@ def remove_comments(template):
 
 
 def skeletonize(
-    in_packages,
-    output_dir=".",
-    output_suffix="",
-    add_maintainer=None,
-    version=None,
-    git_tag=None,
-    cran_url=None,
-    recursive=False,
-    archive=True,
-    version_compare=False,
-    update_policy="",
-    r_interp="r-base",
-    use_binaries_ver=None,
-    use_noarch_generic=False,
-    use_when_no_binary: Literal["error" | "src" | "old" | "old-src"] = "src",
-    use_rtools_win=False,
-    config=None,
-    variant_config_files=None,
-    allow_archived=False,
-    add_cross_r_base=False,
-    no_comments=False,
-):
+    in_packages: list[str],
+    output_dir: str = ".",
+    output_suffix: str = "",
+    add_maintainer: str | None = None,
+    version: str | None = None,
+    git_tag: str | None = None,
+    cran_url: str | None = None,
+    recursive: bool = False,
+    archive: bool = True,
+    version_compare: bool = False,
+    update_policy: Literal[
+        "error",
+        "skip-up-to-date",
+        "skip-existing",
+        "overwrite",
+        "merge-keep-build-num",
+        "merge-incr-build-num",
+    ]
+    | None = None,
+    r_interp: str = "r-base",
+    use_binaries_ver: str | None = None,
+    use_noarch_generic: bool = False,
+    use_when_no_binary: Literal["error", "src", "old", "old-src"] = "src",
+    use_rtools_win: bool = False,
+    config: Config | None = None,
+    variant_config_files: list[str] | None = None,
+    allow_archived: bool = False,
+    add_cross_r_base: bool = False,
+    no_comments: bool = False,
+) -> None:
     if (
         use_when_no_binary != "error"
         and use_when_no_binary != "src"
@@ -1026,7 +1037,7 @@ def skeletonize(
         session = get_session(output_dir)
         cran_index = get_cran_index(cran_url, session)
         if pkg_name.lower() not in cran_index:
-            sys.exit("Package %s not found" % pkg_name)
+            sys.exit(f"Package {pkg_name} not found")
         package, cran_version = cran_index[pkg_name.lower()]
         if cran_version and (not version or version == cran_version):
             version = cran_version
@@ -1037,8 +1048,7 @@ def skeletonize(
             sys.exit(1)
         elif not version and not cran_version and not allow_archived:
             print(
-                "ERROR: Package %s is archived; to build, use --allow-archived or a --version value"
-                % pkg_name
+                f"ERROR: Package {pkg_name} is archived; to build, use --allow-archived or a --version value"
             )
             sys.exit(1)
     else:
@@ -1088,7 +1098,11 @@ def skeletonize(
         script_env = []
         extra_recipe_maintainers = []
         build_number = 0
-        if update_policy.startswith("merge") and inputs["old-metadata"]:
+        if (
+            update_policy
+            and update_policy.startswith("merge")
+            and inputs["old-metadata"]
+        ):
             m = inputs["old-metadata"]
             patches = make_array(m, "source/patches")
             script_env = make_array(m, "build/script_env")
@@ -1310,7 +1324,7 @@ def skeletonize(
         if cran_package is None:
             cran_package = get_archive_metadata(description_path)
         d["cran_metadata"] = "\n".join(
-            ["# %s" % line for line in cran_package["orig_lines"] if line]
+            [f"# {line}" for line in cran_package["orig_lines"] if line]
        )
 
         # Render the source and binaryN keys
@@ -1362,7 +1376,7 @@ def skeletonize(
             d["summary"] = " " + yaml_quote_string(cran_package["Description"])
 
         if "Suggests" in cran_package and not no_comments:
-            d["suggests"] = "# Suggests: %s" % cran_package["Suggests"]
+            d["suggests"] = "# Suggests: {}".format(cran_package["Suggests"])
         else:
             d["suggests"] = ""
@@ -1574,7 +1588,7 @@ def skeletonize(
                 )
                 package_list.append(lower_name)
 
-        d["%s_depends" % dep_type] = "".join(deps)
+        d[f"{dep_type}_depends"] = "".join(deps)
 
     if no_comments:
         global CRAN_BUILD_SH_SOURCE, CRAN_META
@@ -1588,7 +1602,7 @@ def skeletonize(
         if update_policy == "error":
             raise RuntimeError(
                 "directory already exists "
-                "(and --update-policy is 'error'): %s" % dir_path
+                f"(and --update-policy is 'error'): {dir_path}"
            )
         elif update_policy == "overwrite":
             rm_rf(dir_path)
@@ -1611,7 +1625,7 @@ def skeletonize(
             makedirs(join(dir_path))
         except:
             pass
-        print("Writing recipe for %s" % package.lower())
+        print(f"Writing recipe for {package.lower()}")
         with open(join(dir_path, "meta.yaml"), "w") as f:
             f.write(clear_whitespace(CRAN_META.format(**d)))
         if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite":
@@ -1668,14 +1682,14 @@ def get_outdated(output_dir, cran_index, packages=()):
             continue
 
         if recipe_name not in cran_index:
-            print("Skipping %s, not found on CRAN" % recipe)
+            print(f"Skipping {recipe}, not found on CRAN")
             continue
 
         version_compare(
             join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_")
        )
 
-        print("Updating %s" % recipe)
+        print(f"Updating {recipe}")
         to_update.append(recipe_name)
 
     return to_update
diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py
index 14d9c44f77..41ec499bad 100644
--- a/conda_build/skeletons/luarocks.py
+++ b/conda_build/skeletons/luarocks.py
@@ -8,6 +8,8 @@
 # - mingw32 support (really any windows support, completely untested)
 # - replace manual "luajit -e require 'blah'" with built-in entry-point testing
 
+from __future__ import annotations
+
 import json
 import os
 import subprocess
@@ -172,7 +174,7 @@ def package_exists(package_name):
 
 def getval(spec, k):
     if k not in spec:
-        raise Exception("Required key %s not in spec" % k)
+        raise Exception(f"Required key {k} not in spec")
     else:
         return spec[k]
 
@@ -182,7 +184,7 @@ def warn_against_branches(branch):
     print("=========================================")
     print("")
     print("WARNING:")
-    print("Building a rock referenced to branch %s." % branch)
+    print(f"Building a rock referenced to branch {branch}.")
     print("This is not a tag. This is dangerous, because rebuilding")
     print("at a later date may produce a different package.")
     print("Please replace with a tag, git commit, or tarball.")
@@ -224,7 +226,12 @@ def ensure_base_deps(deps):
     return deps
 
 
-def skeletonize(packages, output_dir=".", version=None, recursive=False):
+def skeletonize(
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+) -> None:
     # Check that we have Lua installed (any version)
     # Check that we have luarocks installed
 
@@ -246,7 +253,7 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False):
         package = packages.pop()
 
         packagename = (
-            "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower()
+            f"lua-{package.lower()}" if package[:4] != "lua-" else package.lower()
         )
         d = package_dicts.setdefault(
             package,
@@ -365,13 +372,13 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False):
                 modules = spec["build"]["platforms"][our_plat]["modules"]
             if modules:
                 d["test_commands"] = INDENT.join(
-                    [""] + ["""lua -e "require '%s'\"""" % r for r in modules.keys()]
+                    [""] + [f"""lua -e "require '{r}'\"""" for r in modules.keys()]
                )
 
         # If we didn't find any modules to import, import the base name
         if d["test_commands"] == "":
             d["test_commands"] = INDENT.join(
-                [""] + ["""lua -e "require '%s'" """ % d["rockname"]]
+                [""] + ["""lua -e "require '{}'" """.format(d["rockname"])]
            )
 
     # Build the luarocks skeleton
diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py
index 8dc6719f63..d3b716bc8b 100644
--- a/conda_build/skeletons/pypi.py
+++ b/conda_build/skeletons/pypi.py
@@ -4,6 +4,8 @@
 Tools for converting PyPI packages to conda recipes.
 """
 
+from __future__ import annotations
+
 import configparser
 import keyword
 import logging
@@ -12,27 +14,25 @@
 import subprocess
 import sys
 from collections import OrderedDict, defaultdict
+from io import StringIO
 from os import chdir, getcwd, listdir, makedirs
 from os.path import abspath, exists, isdir, isfile, join
 from shutil import copy2
 from tempfile import mkdtemp
+from typing import TYPE_CHECKING
 from urllib.parse import urljoin, urlsplit
 
 import pkginfo
 import requests
 import yaml
 from conda.base.context import context
+from conda.cli.common import spec_from_line
+from conda.gateways.connection.download import download
 from conda.gateways.disk.read import compute_sum
+from conda.models.version import normalized_version
+from conda.utils import human_bytes
 from requests.packages.urllib3.util.url import parse_url
 
-from ..conda_interface import (
-    StringIO,
-    download,
-    human_bytes,
-    input,
-    normalized_version,
-    spec_from_line,
-)
 from ..config import Config
 from ..environ import create_env
 from ..license_family import allowed_license_families, guess_license_family
@@ -49,6 +49,9 @@
 )
 from ..version import _parse as parse_version
 
+if TYPE_CHECKING:
+    from typing import Iterable
+
 pypi_example = """
 Examples:
 
@@ -254,30 +257,27 @@ def _formating_value(attribute_name, attribute_value):
 
 
 def skeletonize(
-    packages,
-    output_dir=".",
-    version=None,
-    recursive=False,
-    all_urls=False,
-    pypi_url="https://pypi.io/pypi/",
-    noprompt=True,
-    version_compare=False,
-    python_version=None,
-    manual_url=False,
-    all_extras=False,
-    noarch_python=False,
-    config=None,
-    setup_options=None,
-    extra_specs=[],
-    pin_numpy=False,
-):
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+    all_urls: bool = False,
+    pypi_url: str = "https://pypi.io/pypi/",
+    noprompt: bool = True,
+    version_compare: bool = False,
+    python_version: str | None = None,
+    manual_url: bool = False,
+    all_extras: bool = False,
+    noarch_python: bool = False,
+    config: Config | None = None,
+    setup_options: str | Iterable[str] | None = None,
+    extra_specs: str | Iterable[str] | None = None,
+    pin_numpy: bool = False,
+) -> None:
     package_dicts = {}
 
-    if not setup_options:
-        setup_options = []
-
-    if isinstance(setup_options, str):
-        setup_options = [setup_options]
+    setup_options = ensure_list(setup_options)
+    extra_specs = ensure_list(extra_specs)
 
     if not config:
         config = Config()
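ensure_list above collapses the legacy normalization branches and also
normalizes extra_specs, whose old mutable [] default is gone. A minimal
equivalent of the behavior relied on here, assuming the semantics of
conda_build.utils.ensure_list:

    def ensure_list(value):
        # None -> [], "str" -> ["str"], other iterables -> list(iterable)
        if value is None:
            return []
        if isinstance(value, str):
            return [value]
        try:
            return list(value)
        except TypeError:
            return [value]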
- print("Using url %s" % package) + print(f"Using url {package}") pypiurl = package U = parse_url(package) digest = U.fragment.split("=") @@ -711,7 +709,7 @@ def version_compare(package, versions): recipe_dir = abspath(package.lower()) if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) + sys.exit(f"Error: no such directory: {recipe_dir}") m = MetaData(recipe_dir) local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") @@ -721,11 +719,11 @@ def version_compare(package, versions): # Comparing normalized versions, displaying non normalized ones new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: - print("Following new versions of %s are avaliable" % (package)) + print(f"Following new versions of {package} are available") for ver in new_versions: print(ver) else: - print("No new version for %s is available" % (package)) + print(f"No new version for {package} is available") sys.exit() @@ -828,7 +826,7 @@ def get_package_metadata( config, setup_options, ): - print("Downloading %s" % package) + print(f"Downloading {package}") print("PyPI URL: ", metadata["pypiurl"]) pkginfo = get_pkginfo( package, @@ -982,7 +980,7 @@ def _spec_from_line(line): ) spec = _spec_from_line(dep_orig) if spec is None: - sys.exit("Error: Could not parse: %s" % dep) + sys.exit(f"Error: Could not parse: {dep}") if marker: spec = " ".join((spec, marker)) @@ -1058,10 +1056,10 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): if no_prompt: return license_name elif "\n" not in license_name: - print('Using "%s" for the license' % license_name) + print(f'Using "{license_name}" for the license') else: # Some projects put the whole license text in this field - print("This is the license for %s" % package) + print(f"This is the license for {package}") print() print(license_name) print() @@ -1070,8 +1068,8 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_name = "UNKNOWN" else: license_name = input( - "No license could be found for %s on PyPI or in the source. " - "What license should I use? " % package + f"No license could be found for {package} on PyPI or in the source. " + "What license should I use? " ) return license_name @@ -1175,7 +1173,7 @@ def unpack(src_path, tempdir): if src_path.lower().endswith(decompressible_exts): tar_xf(src_path, tempdir) else: - raise Exception("not a valid source: %s" % src_path) + raise Exception(f"not a valid source: {src_path}") def get_dir(tempdir): @@ -1209,7 +1207,7 @@ def get_requirements(package, pkginfo, all_extras=True): try: extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ",".join(extras)) + sys.exit("Error: Invalid extra features: [{}]".format(",".join(extras))) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator @@ -1297,10 +1295,10 @@ def get_pkginfo( else: new_hash_value = "" - print("Unpacking %s..." 
% package) + print(f"Unpacking {package}...") unpack(join(config.src_cache, filename), tempdir) print("done") - print("working in %s" % tempdir) + print(f"working in {tempdir}") src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy run_setuppy( @@ -1366,7 +1364,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op ) stdlib_dir = join( config.host_prefix, - "Lib" if on_win else "lib/python%s" % python_version, + "Lib" if on_win else f"lib/python{python_version}", ) patch = join(temp_dir, "pypi-distutils.patch") @@ -1421,8 +1419,8 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print("$PYTHONPATH = %s" % env["PYTHONPATH"]) - sys.exit("Error: command failed: %s" % " ".join(cmdargs)) + print("$PYTHONPATH = {}".format(env["PYTHONPATH"])) + sys.exit("Error: command failed: {}".format(" ".join(cmdargs))) finally: chdir(cwd) diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index f0abb8c747..d44477171f 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import gzip import hashlib @@ -9,13 +11,20 @@ from os import chmod, makedirs from os.path import basename, dirname, exists, join, splitext from textwrap import wrap +from typing import TYPE_CHECKING from urllib.request import urlopen from xml.etree import ElementTree as ET from ..license_family import guess_license_family from ..source import download_to_cache +from ..utils import ensure_list from .cran import yaml_quote_string +if TYPE_CHECKING: + from typing import Iterable + + from ..config import Config + # This is used in two places default_architecture = "x86_64" default_distro = "centos6" @@ -637,14 +646,14 @@ def write_conda_recipes( # Do I want to pass just the package name, the CDT and the arch and rely on # expansion to form the URL? I have been going backwards and forwards here. 
def write_conda_recipe( - packages, - distro, - output_dir, - architecture, - recursive, - override_arch, - dependency_add, - config, + packages: list[str], + distro: str, + output_dir: str, + architecture: str, + recursive: bool, + override_arch: bool, + dependency_add: list[str], + config: Config | None, ): cdt_name = distro bits = "32" if architecture in ("armv6", "armv7a", "i686", "i386") else "64" @@ -706,16 +715,18 @@ def write_conda_recipe( def skeletonize( - packages, - output_dir=".", - version=None, - recursive=False, - architecture=default_architecture, - override_arch=True, - dependency_add=[], - config=None, - distro=default_distro, + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + architecture: str = default_architecture, + override_arch: bool = True, + dependency_add: str | Iterable[str] | None = None, + config: Config | None = None, + distro: str = default_distro, ): + dependency_add = ensure_list(dependency_add) + write_conda_recipe( packages, distro, diff --git a/conda_build/source.py b/conda_build/source.py index 984fb239e8..983188dd5a 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -16,9 +16,11 @@ from urllib.parse import urljoin from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum +from conda.utils import url_path -from .conda_interface import TemporaryDirectory, download, url_path from .exceptions import MissingDependency from .os_utils import external from .utils import ( @@ -53,7 +55,7 @@ def append_hash_to_fn(fn, hash_value): def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): """Download a source to the local cache.""" if verbose: - log.info("Source cache directory is: %s" % cache_folder) + log.info(f"Source cache directory is: {cache_folder}") if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) @@ -72,17 +74,17 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): hash_added = True break else: - log.warn( + log.warning( f"No hash (md5, sha1, sha256) provided for {unhashed_fn}. Source download forced. " "Add hash to recipe to use source cache." 
) path = join(cache_folder, fn) if isfile(path): if verbose: - log.info("Found source in cache: %s" % fn) + log.info(f"Found source in cache: {fn}") else: if verbose: - log.info("Downloading source to cache: %s" % fn) + log.info(f"Downloading source to cache: {fn}") for url in source_urls: if "://" not in url: @@ -96,14 +98,14 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: - log.info("Downloading %s" % url) + log.info(f"Downloading {url}") with LoggingContext(): download(url, path) except CondaHTTPError as e: - log.warn("Error: %s" % str(e).strip()) + log.warning(f"Error: {str(e).strip()}") rm_rf(path) except RuntimeError as e: - log.warn("Error: %s" % str(e).strip()) + log.warning(f"Error: {str(e).strip()}") rm_rf(path) else: if verbose: @@ -111,7 +113,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: # no break rm_rf(path) - raise RuntimeError("Could not download %s" % url) + raise RuntimeError(f"Could not download {url}") hashed = None for tp in ("md5", "sha1", "sha256"): @@ -342,7 +344,7 @@ def git_mirror_checkout_recursive( ) checkout = output.decode("utf-8") if verbose: - print("checkout: %r" % checkout) + print(f"checkout: {checkout!r}") if checkout: check_call_env( [git, "checkout", checkout], @@ -465,7 +467,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): if not git: git = external.find_executable("git", build_prefix) if not git: - log.warn( + log.warning( "git not installed in root environment. Skipping recording of git info." ) return @@ -490,7 +492,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: if check_error: - raise Exception("git error: %s" % str(e)) + raise Exception(f"git error: {str(e)}") encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding @@ -533,7 +535,7 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): # now clone in to work directory update = source_dict.get("hg_tag") or "tip" if verbose: - print("checkout: %r" % update) + print(f"checkout: {update!r}") check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) check_call_env( @@ -951,7 +953,7 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): exception = None if not isfile(path): - raise RuntimeError("Error: no such patch: %s" % path) + raise RuntimeError(f"Error: no such patch: {path}") if config.verbose: stdout = None diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 3a98559187..374422f1e1 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -13,7 +13,7 @@ def dist_fn(fn): elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception("did not expect filename: %r" % fn) + raise Exception(f"did not expect filename: {fn!r}") class TarCheck: @@ -51,9 +51,9 @@ def info_files(self): return for p in sorted(seta | setb): if p not in seta: - print("%r not in info/files" % p) + print(f"{p!r} not in info/files") if p not in setb: - print("%r not in tarball" % p) + print(f"{p!r} not in tarball") raise Exception("info/files") def index_json(self): diff --git a/conda_build/utils.py b/conda_build/utils.py index 05b0d827ff..e5e8d9c8c2 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -23,6 +23,7 @@ from collections import OrderedDict, defaultdict from functools import lru_cache from glob import 
glob +from io import StringIO from itertools import filterfalse from json.decoder import JSONDecodeError from locale import getpreferredencoding @@ -54,27 +55,24 @@ KNOWN_SUBDIRS, ) from conda.base.context import context +from conda.common.path import win_path_to_unix from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda.models.channel import Channel from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder +from conda.utils import unix_path_to_win -from .conda_interface import ( - PackageRecord, - StringIO, - TemporaryDirectory, - VersionOrder, - cc_conda_build, - download, - unix_path_to_win, - win_path_to_unix, -) -from .conda_interface import rm_rf as _rm_rf from .exceptions import BuildLockError if TYPE_CHECKING: from typing import Mapping, TypeVar + from .metadata import MetaData + T = TypeVar("T") K = TypeVar("K") V = TypeVar("V") @@ -259,8 +257,8 @@ def _execute(self, *args, **kwargs): psutil = None psutil_exceptions = (OSError, ValueError) log = get_logger(__name__) - log.warn(f"psutil import failed. Error was {e}") - log.warn( + log.warning(f"psutil import failed. Error was {e}") + log.warning( "only disk usage and time statistics will be available. Install psutil to " "get CPU time and memory usage statistics." ) @@ -426,7 +424,7 @@ def bytes2human(n): if n >= prefix[s]: value = float(n) / prefix[s] return f"{value:.1f}{s}" - return "%sB" % n + return f"{n}B" def seconds2human(s): @@ -459,7 +457,7 @@ def get_recipe_abspath(recipe): tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: - print("Ignoring non-recipe: %s" % recipe) + print(f"Ignoring non-recipe: {recipe}") return (None, None) else: recipe_dir = abspath(os.path.join(os.getcwd(), recipe)) @@ -596,7 +594,7 @@ def copy_into( src_folder = os.getcwd() if os.path.islink(src) and not os.path.exists(os.path.realpath(src)): - log.warn("path %s is a broken symlink - ignoring copy", src) + log.warning("path %s is a broken symlink - ignoring copy", src) return if not lock and locking: @@ -1055,7 +1053,7 @@ def iter_entry_points(items): for item in items: m = entry_pat.match(item) if m is None: - sys.exit("Error cound not match entry point: %r" % item) + sys.exit(f"Error: could not match entry point: {item!r}") yield m.groups() @@ -1077,7 +1075,7 @@ def create_entry_point(path, module, func, config): os.remove(path) with open(path, "w") as fo: if not config.noarch: - fo.write("#!%s\n" % config.host_python) + fo.write(f"#!{config.host_python}\n") fo.write(pyscript) os.chmod(path, 0o775) @@ -1127,7 +1125,7 @@ def convert_path_for_cygwin_or_msys2(exe, path): return path -def get_skip_message(m): +def get_skip_message(m: MetaData) -> str: return ( f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration " f"({({k: m.config.variant[k] for k in m.get_used_vars()})})." 
@@ -1253,9 +1251,13 @@ def tmp_chdir(dest): os.chdir(curdir) -def expand_globs(path_list, root_dir): +def expand_globs( + path_list: str | os.PathLike | Path | Iterable[str | os.PathLike | Path], + root_dir: str | os.PathLike | Path, +) -> list[str]: files = [] for path in ensure_list(path_list): + path = str(path) if not os.path.isabs(path): path = os.path.join(root_dir, path) if os.path.isfile(path): @@ -1279,11 +1281,10 @@ def expand_globs(path_list, root_dir): # Avoid this potential ambiguity by sorting. (see #4185) files.extend(sorted(glob_files)) prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}")) - files = [prefix_path_re.sub("", f, 1) for f in files] - return files + return [prefix_path_re.sub("", f, 1) for f in files] -def find_recipe(path): +def find_recipe(path: str) -> str: """recurse through a folder, locating valid meta files (see VALID_METAS). Raises error if more than one is found. Returns full path to meta file to be built. @@ -1317,7 +1318,7 @@ def find_recipe(path): metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))] if len(metas) == 1: - get_logger(__name__).warn( + get_logger(__name__).warning( "Multiple meta files found. " f"The {metas[0]} file in the base directory ({path}) " "will be used." @@ -1407,44 +1408,6 @@ def get_installed_packages(path): return installed -def _convert_lists_to_sets(_dict): - for k, v in _dict.items(): - if hasattr(v, "keys"): - _dict[k] = HashableDict(_convert_lists_to_sets(v)) - elif hasattr(v, "__iter__") and not isinstance(v, str): - try: - _dict[k] = sorted(list(set(v))) - except TypeError: - _dict[k] = sorted(list({tuple(_) for _ in v})) - return _dict - - -class HashableDict(dict): - """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self = _convert_lists_to_sets(self) - - def __hash__(self): - return hash(json.dumps(self, sort_keys=True)) - - -def represent_hashabledict(dumper, data): - value = [] - - for item_key, item_value in data.items(): - node_key = dumper.represent_data(item_key) - node_value = dumper.represent_data(item_value) - - value.append((node_key, node_value)) - - return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) - - -yaml.add_representer(HashableDict, represent_hashabledict) - - # http://stackoverflow.com/a/10743550/1170370 @contextlib.contextmanager def capture(): @@ -1617,8 +1580,12 @@ def filter_info_files(files_list, prefix): ) -def rm_rf(path, config=None): - return _rm_rf(path) +def rm_rf(path): + from conda.core.prefix_data import delete_prefix_from_linked_data + from conda.gateways.disk.delete import rm_rf as rm_rf + + rm_rf(path) + delete_prefix_from_linked_data(path) # https://stackoverflow.com/a/31459386/1170370 @@ -1676,10 +1643,8 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None - if cc_conda_build.get("log_config_file"): - config_file = abspath( - expanduser(expandvars(cc_conda_build.get("log_config_file"))) - ) + if log_config_file := context.conda_build.get("log_config_file"): + config_file = abspath(expanduser(expandvars(log_config_file))) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. 
if config_file: @@ -1780,22 +1745,24 @@ def merge_dicts_of_lists( return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} -def prefix_files(prefix): +def prefix_files(prefix: str | os.PathLike | Path) -> set[str]: """ Returns a set of all files in prefix. """ - res = set() - prefix_rep = prefix + os.path.sep - for root, dirs, files in walk(prefix): - for fn in files: - # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, "", 1)) - for dn in dirs: - path = join(root, dn) - if islink(path): - res.add(path.replace(prefix_rep, "", 1)) - res.update(expand_globs((path,), prefix)) - return res + prefix = f"{os.path.abspath(prefix)}{os.path.sep}" + prefix_files: set[str] = set() + for root, directories, files in walk(prefix): + # this is effectively os.path.relpath, just hacked to be faster + relroot = root[len(prefix) :].lstrip(os.path.sep) + # add all files + prefix_files.update(join(relroot, file) for file in files) + # add all symlink directories (they are "files") + prefix_files.update( + join(relroot, directory) + for directory in directories + if islink(join(root, directory)) + ) + return prefix_files def mmap_mmap( @@ -1923,7 +1890,7 @@ def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchS if "*" not in spec: if match.group(1) not in ("python", "vc") and warn: log = get_logger(__name__) - log.warn( + log.warning( f"Adding .* to spec '{spec}' to ensure satisfiability. Please " "consider putting {{{{ var_name }}}}.* or some relational " "operator (>/=/<=) on this spec in meta.yaml, or if req is " @@ -1941,7 +1908,7 @@ def insert_variant_versions(requirements_dict, variant, env): ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) + regex = re.compile(r"^({})(?:\s*$)".format(key.replace("_", "[-_]"))) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): diff --git a/conda_build/variants.py b/conda_build/variants.py index c5bbe9a41e..b185a7eb34 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -3,6 +3,8 @@ """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" +from __future__ import annotations + import os.path import re import sys @@ -10,14 +12,18 @@ from copy import copy from functools import lru_cache from itertools import product +from pathlib import Path +from typing import TYPE_CHECKING import yaml from conda.base.context import context -from .conda_interface import cc_conda_build from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version +if TYPE_CHECKING: + from typing import Any, Iterable + DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", "numpy": { @@ -224,8 +230,8 @@ def find_config_files(metadata_or_path, config): if not files and not config.ignore_system_variants: # user config - if cc_conda_build.get("config_file"): - cfg = resolve(cc_conda_build["config_file"]) + if config_file := context.conda_build.get("config_file"): + cfg = resolve(config_file) else: cfg = resolve(os.path.join("~", "conda_build_config.yaml")) if os.path.isfile(cfg): @@ -694,21 +700,21 @@ def get_package_variants(recipedir_or_metadata, config=None, variants=None): return filter_combined_spec_to_used_keys(combined_spec, specs=specs) -def get_vars(variants, loop_only=False): +def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]: """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" - special_keys = {"pin_run_as_build", "zip_keys", "ignore_version"} - special_keys.update(set(ensure_list(variants[0].get("extend_keys")))) - loop_vars = [ - k - for k in variants[0] - if k not in special_keys - and ( - not loop_only - or any(variant[k] != variants[0][k] for variant in variants[1:]) - ) - ] - return loop_vars + first, *others = variants + special_keys = { + "pin_run_as_build", + "zip_keys", + "ignore_version", + *ensure_list(first.get("extend_keys")), + } + return { + var + for var in set(first) - special_keys + if any(first[var] != other[var] for other in others) + } @@ -737,13 +743,13 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): continue v_regex = re.escape(v) v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) - variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex - selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?<![_\w\d])%s[=\s<>!\]]" % v_regex + variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}" + selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?<![_\w\d]){v_regex}[=\s<>!\]]" conditional_regex = ( r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" ) # plain req name, no version spec. Look for end of line after name, or comment or selector - requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex + requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)" if selectors_only: all_res.insert(0, selector_regex) else: @@ -758,23 +764,39 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): return used_variables -def find_used_variables_in_shell_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"(^[^$]*?\$\{?\s*%s\s*[\s|\}])" % v - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_shell_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"(^[^$]*?\$\{{?\s*{re.escape(variant)}\s*[\s|\}}])", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } -def find_used_variables_in_batch_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = r"\%" + v + r"\%" - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_batch_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"\%{re.escape(variant)}\%", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } diff --git a/conda_build/windows.py b/conda_build/windows.py index 706b499265..8643431a5b 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -56,16 +56,13 @@ def fix_staged_scripts(scripts_dir, config): # If it's a #!python script if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print( - "Adjusting unix-style #! script %s, " - "and adding a .bat file for it" % fn - ) + print(f"Adjusting unix-style #! 
script {fn}, and adding a .bat file for it") # copy it with a .py extension (skipping that first #! line) with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file copy_into( - join(dirname(__file__), "cli-%s.exe" % config.host_arch), + join(dirname(__file__), f"cli-{config.host_arch}.exe"), join(scripts_dir, fn + ".exe"), ) @@ -105,16 +102,16 @@ def msvc_env_cmd(bits, config, override=None): # there's clear user demand, it's not clear that we should invest the # effort into updating a known deprecated function for a new platform. log = get_logger(__name__) - log.warn( + log.warning( "Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. " "If this recipe does not use a compiler, this message is safe to ignore. " "Otherwise, use {{compiler('')}} jinja2 in requirements/build." ) if bits not in ["64", "32"]: - log.warn(f"The legacy MSVC compiler setup does not support {bits} builds. ") + log.warning(f"The legacy MSVC compiler setup does not support {bits} builds. ") return "" if override: - log.warn( + log.warning( "msvc_compiler key in meta.yaml is deprecated. Use the new" "variant-powered compiler configuration instead. Note that msvc_compiler" "is incompatible with the new {{{{compiler('c')}}}} jinja scheme." @@ -338,7 +335,7 @@ def build(m, bld_bat, stats, provision_only=False): rewrite_env = { k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) + print(f"Rewriting env in output: {pprint.pformat(rewrite_env)}") check_call_env( cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env ) diff --git a/docs/requirements.txt b/docs/requirements.txt index 37666a374b..58f1311df7 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,11 +1,12 @@ +Pillow==10.0.1 +PyYAML==6.0.1 +Sphinx==7.3.7 conda-sphinx-theme==0.2.1 linkify-it-py==2.0.2 myst-parser==2.0.0 -Pillow==10.0.1 -PyYAML==6.0.1 +pylint==2.17.5 requests==2.31.0 ruamel.yaml==0.17.32 -Sphinx==7.2.6 sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 sphinx-sitemap==2.5.1 @@ -18,4 +19,3 @@ sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 -pylint==2.17.5 diff --git a/docs/scrape_help.py b/docs/scrape_help.py index 2f99fbb403..66d5af1e57 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -112,7 +112,7 @@ def external_commands(): def get_help(command): command_help[command] = conda_command_help(command) - print("Checked for subcommand help for %s" % command) + print(f"Checked for subcommand help for {command}") with ThreadPoolExecutor(len(commands)) as executor: # list() is needed for force exceptions to be raised @@ -164,7 +164,7 @@ def generate_man(command): [ "help2man", "--name", - "conda %s" % command, + f"conda {command}", "--section", "1", "--source", @@ -172,36 +172,34 @@ def generate_man(command): "--version-string", conda_version, "--no-info", - "conda %s" % command, + f"conda {command}", ] ) retries -= 1 if not manpage: - sys.exit("Error: Could not get help for conda %s" % command) + sys.exit(f"Error: Could not get help for conda {command}") replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: + with open(join(manpath, "conda-{}.1".format(command.replace(" ", "-"))), "w") as f: f.write(manpage) - print("Generated manpage for 
conda %s" % command) + print(f"Generated manpage for conda {command}") def generate_html(command): command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen( - ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE - ) + man = Popen(["man", abspath(join(manpath, f"conda-{command_file}.1"))], stdout=PIPE) htmlpage = check_output( [ "man2html", "-bare", # Don't use HTML, HEAD, or BODY tags "title", - "conda-%s" % command_file, + f"conda-{command_file}", "-topm", "0", # No top margin "-botm", @@ -210,14 +208,14 @@ def generate_html(command): stdin=man.stdout, ) - with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: + with open(join(manpath, f"conda-{command_file}.html"), "wb") as f: f.write(htmlpage) - print("Generated html for conda %s" % command) + print(f"Generated html for conda {command}") def write_rst(command, sep=None): command_file = command.replace(" ", "-") - with open(join(manpath, "conda-%s.html" % command_file)) as f: + with open(join(manpath, f"conda-{command_file}.html")) as f: html = f.read() rp = rstpath @@ -225,13 +223,13 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, "conda-%s.rst" % command_file), "w") as f: + with open(join(rp, f"conda-{command_file}.rst"), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): f.write(" ") f.write(line) f.write("\n") - print("Generated rst for conda %s" % command) + print(f"Generated rst for conda {command}") def main(): diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index e4b599c438..d7b5e9ff70 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1313,7 +1313,10 @@ build prefix. Explicit file lists support glob expressions. Directory names are also supported, and they recursively include contents. -.. code-block:: none +.. warning:: + When defining ``outputs/files`` as a list without specifying ``outputs/script``, any file in the prefix (including those installed by host dependencies) matching one of the glob expressions is included in the output. + +.. code-block:: yaml outputs: - name: subpackage-name files: - a-file - a-folder - *.some-extension - somefolder/*.some-extension +Greater control over file matching may be +achieved by defining ``files`` as a dictionary separating files to +``include`` from those to ``exclude``. +When using ``include``/``exclude``, only files installed by +the current recipe are considered; i.e., files installed into the prefix +by host dependencies are excluded. ``include``/``exclude`` must not be used +simultaneously with glob expressions listed directly in ``outputs/files``. +Files matching both ``include`` and ``exclude`` expressions will be excluded. + +.. code-block:: yaml + + outputs: + - name: subpackage-name + files: + include: + - a-file + - a-folder + - *.some-extension + - somefolder/*.some-extension + exclude: + - *.exclude-extension + - a-folder/**/*.some-extension + Scripts that create or move files into the build prefix can be any kind of script. Known script types need only specify the script name. Currently the list of recognized extensions is @@ -1372,10 +1398,9 @@ A subpackage does not automatically inherit any dependencies from its top-level recipe, so any build or run requirements needed by the subpackage must be explicitly specified. -.. code-block:: none +.. 
code-block:: yaml outputs: - - name: subpackage-name requirements: build: diff --git a/docs/source/user-guide/recipes/build-without-recipe.rst b/docs/source/user-guide/recipes/build-without-recipe.rst deleted file mode 100644 index 51c465db4a..0000000000 --- a/docs/source/user-guide/recipes/build-without-recipe.rst +++ /dev/null @@ -1,157 +0,0 @@ -================================================= -Building a package without a recipe (bdist_conda) -================================================= - -You can use conda-build to build packages for Python to install -rather than conda by using ``setup.py bdist_conda``. This is a -quick way to build packages without using a recipe, but it has -limitations. The script is limited to the Python version used in -the build and it is not as reproducible as using a recipe. We -recommend using a recipe with conda-build. - -.. note:: - If you use Setuptools, you must first import Setuptools and - then import ``conda_build.bdist_conda``, because Setuptools - monkey patches ``distutils.dist.Distribution``. - -EXAMPLE: A minimal ``setup.py`` file using the setup options -``name`` and ``version``: - -.. code:: - - from setuptools import setup - import conda_build.bdist_conda - - setup( - name="foo", - version="1.0", - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - ) - - -Setup options -============= - -You can pass the following options to ``setup()``. You must -include ``distclass=conda_build.bdist_conda.CondaDistribution``. - -Build number ------------- - -The number of the build. Can be overridden on the command line -with the ``--buildnum`` flag. Defaults to ``0``. - -.. code:: - - conda_buildnum=1 - - -Build string ------------- - -The build string. Default is generated automatically from the -Python version, NumPy version---if relevant---and the build -number, such as ``py34_0``. - -.. code:: - - conda_buildstr=py34_0 - - -Import tests ------------- - -Whether to automatically run import tests. The default is -``True``, which runs import tests for all the modules in -``packages``. Also allowed are ``False``, which runs no tests, or -a list of module names to be tested on import. - -.. code:: - - conda_import_tests=False - - -Command line tests ------------------- - -Command line tests to run. Default is ``True``, which runs -``command --help`` for each command in the console_scripts and -gui_scripts entry_points. Also allowed are ``False``, which does -not run any command tests, or a list of command tests to run. - -.. code:: - - conda_command_tests=False - - -Binary files relocatable ------------------------- - -Whether binary files should be made relocatable, using -install_name_tool on macOS or patchelf on Linux. The default is -``True``. - -.. code:: - - conda_binary_relocation=False - -For more information, see :ref:`Making packages relocatable `. - - -Preserve egg directory ----------------------- - -Whether to preserve the egg directory as installed by Setuptools. -The default is ``True`` if the package depends on Setuptools or -has Setuptools entry_points other than console_scripts and -gui_scripts. - -.. code:: - - conda_preserve_egg_dir=False - - -Command line options -==================== - -Build number ------------- - -Set the build number. Defaults to the conda_buildnum passed -to ``setup()`` or ``0``. Overrides any conda_buildnum passed to -``setup()``. - -.. 
code:: - - --buildnum=1 - - -Notes -===== - -* You must install ``bdist_conda`` into a root conda environment, - as it imports ``conda`` and ``conda_build``. It is included as - part of the ``conda-build`` package. - -* All metadata is gathered from the standard metadata from the - ``setup()`` function. Metadata that are not directly supported - by ``setup()`` can be added using one of the options specified - above. - -* By default, import tests are run for each subpackage specified - by packages, and command line tests ``command --help`` are run - for each ``setuptools entry_points`` command. This is done to - ensure that the package is built correctly. You can disable or - change these using the ``conda_import_tests`` and - ``conda_command_tests`` options specified above. - -* The Python version used in the build must be the same as where - conda is installed, as ``bdist_conda`` uses ``conda-build``. - -* ``bdist_conda`` uses the metadata provided to the ``setup()`` - function. - -* If you want to pass any ``bdist_conda`` specific options to - ``setup()``, in ``setup()`` you must set - ``distclass=conda_build.bdist_conda.CondaDistribution``. diff --git a/docs/source/user-guide/recipes/index.rst b/docs/source/user-guide/recipes/index.rst index d482f27415..876b500396 100644 --- a/docs/source/user-guide/recipes/index.rst +++ b/docs/source/user-guide/recipes/index.rst @@ -8,6 +8,5 @@ conda-build recipes. .. toctree:: :maxdepth: 1 - build-without-recipe sample-recipes debugging diff --git a/news/4821-include-file-hash b/news/4821-include-file-hash new file mode 100644 index 0000000000..fa155e4ac3 --- /dev/null +++ b/news/4821-include-file-hash @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Include file path in addition to the content when generating the file hash to avoid unwanted caching during linkage analysis. (#4821) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5216-files-exclude b/news/5216-files-exclude new file mode 100644 index 0000000000..4641c59ffb --- /dev/null +++ b/news/5216-files-exclude @@ -0,0 +1,19 @@ +### Enhancements + +* Add new include/exclude sections for glob expressions in multi-output `outputs/files`. (#4196 via #5216) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface deleted file mode 100644 index d7737f9368..0000000000 --- a/news/5222-deprecating-conda_interface +++ /dev/null @@ -1,32 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) -* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. (#5222) -* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) -* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) -* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) -* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) -* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) -* Deprecate `conda_build.conda_interface.root_writable` constant. 
Use `conda.base.context.context.root_writable` instead. (#5222) -* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) -* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) -* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222) -* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) -* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) -* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) - -### Docs - -* - -### Other - -* diff --git a/news/5238-open_recipe b/news/5238-open_recipe deleted file mode 100644 index 9d5d42c4c5..0000000000 --- a/news/5238-open_recipe +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. (#5238) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5251-deprecating-conda_interface b/news/5251-deprecating-conda_interface deleted file mode 100644 index 9f5e48d6cd..0000000000 --- a/news/5251-deprecating-conda_interface +++ /dev/null @@ -1,34 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251) -* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) -* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) -* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) -* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251) -* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) -* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) -* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) -* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) -* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) -* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) -* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) -* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) -* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251) -* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) -* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. 
(#5251) - -### Docs - -* - -### Other - -* diff --git a/news/5333-remove-24.7.x-deprecations b/news/5333-remove-24.7.x-deprecations new file mode 100644 index 0000000000..332176a70c --- /dev/null +++ b/news/5333-remove-24.7.x-deprecations @@ -0,0 +1,95 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Remove the following deprecations (#5333): + * `conda_build.config.Config.override_channels` (use `conda.base.context.context.channels` instead) + * `conda_build.config.noarch_python_build_age_default` + * `conda_build.conda_interface.add_parser_channels` (use `conda.cli.helpers.add_parser_channels` instead) + * `conda_build.conda_interface.add_parser_prefix` (use `conda.cli.helpers.add_parser_prefix` instead) + * `conda_build.conda_interface.ArgumentParser` (use `conda.cli.conda_argparse.ArgumentParser` instead) + * `conda_build.conda_interface.binstar_upload` (use `conda.base.context.context.binstar_upload` instead) + * `conda_build.conda_interface.cc_conda_build` (use `conda.base.context.context.conda_build` instead) + * `conda_build.conda_interface.cc_platform` (use `conda.base.context.context.platform` instead) + * `conda_build.conda_interface.Channel` (use `conda.models.channel.Channel` instead) + * `conda_build.conda_interface.Completer` + * `conda_build.conda_interface.configparser` (use `configparser` instead) + * `conda_build.conda_interface.CondaError` (use `conda.exceptions.CondaError` instead) + * `conda_build.conda_interface.CondaHTTPError` (use `conda.exceptions.CondaHTTPError` instead) + * `conda_build.conda_interface.CondaSession` (use `conda.gateways.connection.session.CondaSession` instead) + * `conda_build.conda_interface.CONDA_VERSION` (use `conda.__version__` instead) + * `conda_build.conda_interface.context` (use `conda.base.context.context` instead) + * `conda_build.conda_interface.create_default_packages` (use `conda.base.context.context.create_default_packages` instead) + * `conda_build.conda_interface.default_python` (use `conda.base.context.context.default_python` instead) + * `conda_build.conda_interface.determine_target_prefix` (use `conda.base.context.determine_target_prefix` instead) + * `conda_build.conda_interface.download` (use `conda.gateways.connection.download.download` instead) + * `conda_build.conda_interface.env_path_backup_var_exists` + * `conda_build.conda_interface.envs_dirs` (use `conda.base.context.context.envs_dirs` instead) + * `conda_build.conda_interface.EntityEncoder` (use `conda.auxlib.entity.EntityEncoder` instead) + * `conda_build.conda_interface.FileMode` (use `conda.models.enums.FileMode` instead) + * `conda_build.conda_interface.get_conda_build_local_url` (use `conda.models.channel.get_conda_build_local_url` instead) + * `conda_build.conda_interface.get_conda_channel` (use `conda.models.channel.Channel.from_value` instead) + * `conda_build.conda_interface.get_prefix` (use `conda.base.context.context.target_prefix` instead) + * `conda_build.conda_interface.get_rc_urls` (use `conda.base.context.context.channels` instead) + * `conda_build.conda_interface.human_bytes` (use `conda.utils.human_bytes` instead) + * `conda_build.conda_interface.import_module` (use `importlib.import_module` instead) + * `conda_build.conda_interface.input` (use `input` instead) + * `conda_build.conda_interface.InstalledPackages` + * `conda_build.conda_interface.lchmod` (use `conda.gateways.disk.link.lchmod` instead) + * `conda_build.conda_interface.LinkError` (use `conda.exceptions.LinkError` instead) + * 
`conda_build.conda_interface.LockError` (use `conda.exceptions.LockError` instead) + * `conda_build.conda_interface.MatchSpec` (use `conda.models.match_spec.MatchSpec` instead) + * `conda_build.conda_interface.non_x86_linux_machines` (use `conda.base.context.non_x86_machines` instead) + * `conda_build.conda_interface.NoPackagesFound` (use `conda.exceptions.ResolvePackageNotFound` instead) + * `conda_build.conda_interface.NoPackagesFoundError` (use `conda.exceptions.NoPackagesFoundError` instead) + * `conda_build.conda_interface.normalized_version` (use `conda.models.version.normalized_version` instead) + * `conda_build.conda_interface.os` (use `os` instead) + * `conda_build.conda_interface.PackageRecord` (use `conda.models.records.PackageRecord` instead) + * `conda_build.conda_interface.PaddingError` (use `conda.exceptions.PaddingError` instead) + * `conda_build.conda_interface.partial` (use `functools.partial` instead) + * `conda_build.conda_interface.PathType` (use `conda.models.enums.PathType` instead) + * `conda_build.conda_interface.pkgs_dirs` (use `conda.base.context.context.pkgs_dirs` instead) + * `conda_build.conda_interface.prefix_placeholder` (use `conda.base.constants.PREFIX_PLACEHOLDER` instead) + * `conda_build.conda_interface.ProgressiveFetchExtract` (use `conda.core.package_cache_data.ProgressiveFetchExtract` instead) + * `conda_build.conda_interface.reset_context` (use `conda.base.context.reset_context` instead) + * `conda_build.conda_interface.Resolve` (use `conda.resolve.Resolve` instead) + * `conda_build.conda_interface.rm_rf` (use `conda_build.utils.rm_rf` instead) + * `conda_build.conda_interface.root_dir` (use `conda.base.context.context.root_prefix` instead) + * `conda_build.conda_interface.root_writable` (use `conda.base.context.context.root_writable` instead) + * `conda_build.conda_interface.spec_from_line` (use `conda.cli.common.spec_from_line` instead) + * `conda_build.conda_interface.specs_from_args` (use `conda.cli.common.specs_from_args` instead) + * `conda_build.conda_interface.specs_from_url` (use `conda.cli.common.specs_from_url` instead) + * `conda_build.conda_interface.StringIO` (use `io.StringIO` instead) + * `conda_build.conda_interface.subdir` (use `conda.base.context.context.subdir` instead) + * `conda_build.conda_interface.symlink_conda` + * `conda_build.conda_interface.TemporaryDirectory` (use `conda.gateways.disk.create.TemporaryDirectory` instead) + * `conda_build.conda_interface.TmpDownload` (use `conda.gateways.connection.download.TmpDownload` instead) + * `conda_build.conda_interface._toposort` (use `conda.common.toposort._toposort` instead) + * `conda_build.conda_interface.unix_path_to_win` (use `conda.utils.unix_path_to_win` instead) + * `conda_build.conda_interface.untracked` (use `conda.misc.untracked` instead) + * `conda_build.conda_interface.Unsatisfiable` (use `conda.exceptions.UnsatisfiableError` instead) + * `conda_build.conda_interface.UnsatisfiableError` (use `conda.exceptions.UnsatisfiableError` instead) + * `conda_build.conda_interface.url_path` (use `conda.utils.url_path` instead) + * `conda_build.conda_interface.VersionOrder` (use `conda.models.version.VersionOrder` instead) + * `conda_build.conda_interface.walk_prefix` (use `conda.misc.walk_prefix` instead) + * `conda_build.conda_interface.win_path_to_unix` (use `conda.common.path.win_path_to_unix` instead) + * `conda_build.index.channel_data`; `conda_build.index.get_build_index` return value for `channel_data` is now always `None` + * `conda_build.utils._convert_lists_to_sets` 
(use `frozendict.deepfreeze` instead) + * `conda_build.utils.HashableDict` (use `frozendict.deepfreeze` instead) + * `conda_build.utils.represent_hashabledict` (use `frozendict.deepfreeze` instead) + * `conda_build.utils.rm_rf(config)` + * `conda_build.variants.get_vars(loop_only)` + +### Docs + +* + +### Other + +* diff --git a/news/5233-enable-codspeed b/news/5344-report-rendered similarity index 58% rename from news/5233-enable-codspeed rename to news/5344-report-rendered index efb32df4d1..7dccf4b960 100644 --- a/news/5233-enable-codspeed +++ b/news/5344-report-rendered @@ -1,6 +1,6 @@ ### Enhancements -* +* Report fully rendered recipe to stdout before the build process starts. (#3798 via #5344) ### Bug fixes @@ -16,4 +16,4 @@ ### Other -* Enable CodSpeed benchmarks for select tests. (#5233) +* diff --git a/news/5350-subdir-cross b/news/5350-subdir-cross new file mode 100644 index 0000000000..66824fb733 --- /dev/null +++ b/news/5350-subdir-cross @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Ensure cross-building recipes select the correct noarch package variants. (#5341 via #5350) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/news/5355-deprecate-check_external b/news/5355-deprecate-check_external new file mode 100644 index 0000000000..2a6939ce8e --- /dev/null +++ b/news/5355-deprecate-check_external @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.build.check_external` for deprecation. `patchelf` is an explicit conda-build dependency on Linux so it will always be installed. (#5355) + +### Docs + +* + +### Other + +* diff --git a/news/5359-validate-run-constrained b/news/5359-validate-run-constrained new file mode 100644 index 0000000000..637f985553 --- /dev/null +++ b/news/5359-validate-run-constrained @@ -0,0 +1,19 @@ +### Enhancements + +* Validate `run_constrained` dependencies to prevent faulty specs reaching final repodata. 
(#5047 via #5359) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 0360aaad4b..9bc5272e8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,16 +1,12 @@ [build-system] +build-backend = "hatchling.build" requires = [ "hatchling >=1.12.2", "hatch-vcs >=0.2.0", ] -build-backend = "hatchling.build" [project] -name = "conda-build" -description="tools for building conda packages" -readme = "README.md" authors = [{name = "Anaconda, Inc.", email = "conda@continuum.io"}] -license = {file = "LICENSE"} classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -24,16 +20,16 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Implementation :: PyPy", ] -requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", - "conda >=23.5.0", + "conda >=23.7.0", "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", + "frozendict >=2.4.2", "jinja2", "jsonschema >=4.19", "libarchive-c", @@ -47,34 +43,43 @@ dependencies = [ "tomli ; python_version<'3.11'", "tqdm", ] +description = "tools for building conda packages" dynamic = ["version"] +license = {file = "LICENSE"} +name = "conda-build" +readme = "README.md" +requires-python = ">=3.8" -[project.urls] -documentation = "https://docs.conda.io/projects/conda-build/en/stable/" -repository = "https://github.com/conda/conda-build" -changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +[project.entry-points.conda] +conda-build = "conda_build.plugin" [project.scripts] conda-build = "conda_build.cli.main_build:execute" conda-convert = "conda_build.cli.main_convert:execute" +conda-debug = "conda_build.cli.main_debug:execute" conda-develop = "conda_build.cli.main_develop:execute" conda-inspect = "conda_build.cli.main_inspect:execute" conda-metapackage = "conda_build.cli.main_metapackage:execute" conda-render = "conda_build.cli.main_render:execute" conda-skeleton = "conda_build.cli.main_skeleton:execute" -conda-debug = "conda_build.cli.main_debug:execute" -[project.entry-points."distutils.commands"] -bdist_conda = "conda_build.bdist_conda:bdist_conda" - -[project.entry-points.conda] -conda-build = "conda_build.plugin" +[project.urls] +changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +documentation = "https://docs.conda.io/projects/conda-build/en/stable/" +repository = "https://github.com/conda/conda-build" -[tool.hatch.version] -source = "vcs" +[tool.coverage.report] +exclude_lines = [ + "if TYPE_CHECKING:", # ignoring type checking imports +] +omit = ["conda_build/skeletons/_example_skeleton.py"] +show_missing = true +skip_covered = true +sort = "Miss" -[tool.hatch.version.raw-options] -local_scheme = "dirty-tag" +[tool.coverage.run] +# store relative paths in coverage information +relative_files = true [tool.hatch.build] include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] @@ -82,44 +87,13 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] [tool.hatch.build.hooks.vcs] version-file = "conda_build/_version.py" -[tool.coverage.run] -# store relative paths in coverage information -relative_files = true - -[tool.coverage.report] -show_missing = true -sort = "Miss" -skip_covered = true -omit = 
["conda_build/skeletons/_example_skeleton.py"] -exclude_lines = [ - "if TYPE_CHECKING:", # ignoring type checking imports -] - -[tool.ruff] -target-version = "py38" +[tool.hatch.version] +source = "vcs" -[tool.ruff.lint] -# E, W = pycodestyle errors and warnings -# F = pyflakes -# I = isort -# UP = pyupgrade -# ISC = flake8-implicit-str-concat -# TCH = flake8-type-checking -# T10 = flake8-debugger -# FA = flake8-future-annotations -# see also https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "UP", "ISC", "TCH", "T10", "FA"] -# E402 module level import not at top of file -# E722 do not use bare 'except' -# E731 do not assign a lambda expression, use a def -ignore = ["E402", "E722", "E731"] -pycodestyle = {max-line-length = 120} -flake8-type-checking = {exempt-modules = [], strict = true} +[tool.hatch.version.raw-options] +local_scheme = "dirty-tag" [tool.pytest.ini_options] -minversion = 3.0 -testpaths = ["tests"] -norecursedirs = ["tests/test-recipes/*"] addopts = [ "--color=yes", # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) @@ -143,13 +117,6 @@ doctest_optionflags = [ "ALLOW_UNICODE", "ELLIPSIS", ] -markers = [ - "serial: execute test serially (to avoid race conditions)", - "slow: execute the slow tests if active", - "sanity: execute the sanity tests", - "no_default_testing_config: used internally to disable monkeypatching for testing_config", - "benchmark: execute the benchmark tests", -] filterwarnings = [ # elevate conda's deprecated warning to an error "error::PendingDeprecationWarning:conda", @@ -159,4 +126,41 @@ filterwarnings = [ "error::DeprecationWarning:conda_build", # ignore numpy.distutils error 'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data', + # ignore conda-index error + "ignore::PendingDeprecationWarning:conda_index", + "ignore::DeprecationWarning:conda_index", +] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "benchmark: execute the benchmark tests", +] +minversion = 3.0 +norecursedirs = ["tests/test-recipes/*"] +testpaths = ["tests"] + +[tool.ruff] +target-version = "py38" + +[tool.ruff.lint] +flake8-type-checking = {exempt-modules = [], strict = true} +ignore = [ + "E402", # module level import not at top of file + "E722", # do not use bare 'except' + "E731", # do not assign a lambda expression, use a def +] +pycodestyle = {max-line-length = 120} +# see https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "FA", # flake8-future-annotations + "I", # isort + "ISC", # flake8-implicit-str-concat + "T10", # flake8-debugger + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle warnings ] diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml index 42847d7ead..3959a519bd 100644 --- a/recipe/conda_build_config.yaml +++ b/recipe/conda_build_config.yaml @@ -1,6 +1,6 @@ python: - - "3.8" - - "3.9" - - "3.10" - - "3.11" - - "3.12" + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - '3.12' diff --git a/recipe/meta.yaml b/recipe/meta.yaml index a9062803cb..33f8fe9125 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -30,10 +30,11 @@ requirements: run: - beautifulsoup4 - chardet - - conda >=23.5.0 + - conda >=23.7.0 - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock + - frozendict >=2.4.2 - 
jinja2 - jsonschema >=4.19 - m2-patch >=2.6 # [win] @@ -63,8 +64,6 @@ test: requires: - setuptools - pip - files: - - test_bdist_conda_setup.py commands: - python -m pip check # subcommands @@ -90,8 +89,6 @@ test: - conda-render --help - conda-skeleton --help - conda-debug --help - # bdist_conda - - python test_bdist_conda_setup.py bdist_conda --help about: home: https://conda.org diff --git a/recipe/test_bdist_conda_setup.py b/recipe/test_bdist_conda_setup.py deleted file mode 100644 index c7b3d34abf..0000000000 --- a/recipe/test_bdist_conda_setup.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup - -import conda_build.bdist_conda - -setup( - name="package", - version="1.0.0", - distclass=conda_build.bdist_conda.CondaDistribution, -) diff --git a/tests/bdist-recipe/bin/test-script-setup.py b/tests/bdist-recipe/bin/test-script-setup.py deleted file mode 100644 index c515fb849e..0000000000 --- a/tests/bdist-recipe/bin/test-script-setup.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build_test - -conda_build_test - -print("Test script setup.py") - -if __name__ == "__main__": - from conda_build_test import manual_entry - - manual_entry.main() diff --git a/tests/bdist-recipe/conda_build_test/__init__.py b/tests/bdist-recipe/conda_build_test/__init__.py deleted file mode 100644 index 1f22b11325..0000000000 --- a/tests/bdist-recipe/conda_build_test/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -conda build test package -""" - -print("conda_build_test has been imported") diff --git a/tests/bdist-recipe/conda_build_test/empty.py b/tests/bdist-recipe/conda_build_test/empty.py deleted file mode 100644 index 3f48e8b789..0000000000 --- a/tests/bdist-recipe/conda_build_test/empty.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause diff --git a/tests/bdist-recipe/conda_build_test/manual_entry.py b/tests/bdist-recipe/conda_build_test/manual_entry.py deleted file mode 100644 index 1a63c8a8e9..0000000000 --- a/tests/bdist-recipe/conda_build_test/manual_entry.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -def main(): - import argparse - - # Just picks them up from `sys.argv`. 
- parser = argparse.ArgumentParser(description="Basic parser.") - parser.parse_args() - - print("Manual entry point") diff --git a/tests/bdist-recipe/setup.py b/tests/bdist-recipe/setup.py deleted file mode 100644 index 74982e5865..0000000000 --- a/tests/bdist-recipe/setup.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup - -import conda_build.bdist_conda - -setup( - name="conda-build-test-project", - version="1.0", - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - conda_features=[], - author="Continuum Analytics, Inc.", - url="https://github.com/conda/conda-build", - license="BSD", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - ], - description="test package for testing conda-build", - packages=["conda_build_test"], - scripts=[ - "bin/test-script-setup.py", - ], -) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 9da5b48418..ed56cabceb 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -8,15 +8,15 @@ from typing import TYPE_CHECKING import pytest +from conda.exceptions import PackagesNotFoundError from conda_build import api from conda_build.cli import main_build, main_render -from conda_build.conda_interface import TemporaryDirectory from conda_build.config import ( Config, zstd_compression_level_default, ) -from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.exceptions import CondaBuildUserError, DependencyNeedsBuildingError from conda_build.os_utils.external import find_executable from conda_build.utils import get_build_folders, on_win, package_has_file @@ -165,7 +165,7 @@ def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): main_build.execute(args) args.append("--no-long-test-prefix") - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): main_build.execute(args) @@ -264,20 +264,19 @@ def test_purge(testing_workdir, testing_metadata): @pytest.mark.serial -def test_purge_all(testing_workdir, testing_metadata): +def test_purge_all( + testing_workdir: str, testing_metadata: MetaData, tmp_path: Path +) -> None: """ purge-all clears out build folders as well as build packages in the osx-64 folders and such """ api.output_yaml(testing_metadata, "meta.yaml") - with TemporaryDirectory() as tmpdir: - testing_metadata.config.croot = tmpdir - outputs = api.build( - testing_workdir, config=testing_metadata.config, notest=True - ) - args = ["purge-all", "--croot", tmpdir] - main_build.execute(args) - assert not get_build_folders(testing_metadata.config.croot) - assert not any(os.path.isfile(fn) for fn in outputs) + testing_metadata.config.croot = str(tmp_path) + outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) + args = ["purge-all", f"--croot={tmp_path}"] + main_build.execute(args) + assert not get_build_folders(testing_metadata.config.croot) + assert not any(os.path.isfile(fn) for fn in outputs) @pytest.mark.serial @@ -484,7 +483,7 @@ def test_test_extra_dep(testing_metadata): main_build.execute(args) # missing click dep will fail tests - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): args = [output, 
"-t"] # extra_deps will add it in main_build.execute(args) @@ -551,3 +550,14 @@ def test_user_warning(tmpdir, recwarn): main_build.parse_args([str(dir_recipe_path)]) assert not recwarn.list + + +def test_build_with_empty_channel_fails(empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_build.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + ) diff --git a/tests/cli/test_main_convert.py b/tests/cli/test_main_convert.py index 0be658b9d3..9ff65849d9 100644 --- a/tests/cli/test_main_convert.py +++ b/tests/cli/test_main_convert.py @@ -3,9 +3,9 @@ import os import pytest +from conda.gateways.connection.download import download from conda_build.cli import main_convert -from conda_build.conda_interface import download from conda_build.tarcheck import TarCheck from conda_build.utils import on_win diff --git a/tests/cli/test_main_develop.py b/tests/cli/test_main_develop.py index ede3758cfb..c0c3cdca3d 100644 --- a/tests/cli/test_main_develop.py +++ b/tests/cli/test_main_develop.py @@ -3,8 +3,9 @@ import os import sys +from conda.gateways.connection.download import download + from conda_build.cli import main_develop -from conda_build.conda_interface import download from conda_build.utils import get_site_packages, tar_xf diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 59fff7901c..ef5fdf077d 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -1,69 +1,64 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os import sys +from typing import TYPE_CHECKING import pytest import yaml +from conda.exceptions import PackagesNotFoundError from conda_build import api from conda_build.cli import main_render -from conda_build.conda_interface import TemporaryDirectory from ..utils import metadata_dir +if TYPE_CHECKING: + from pathlib import Path + -def test_render_add_channel(): +def test_render_add_channel(tmp_path: Path) -> None: """This recipe requires the conda_build_test_requirement package, which is only on the conda_build_test channel. 
This verifies that the -c argument works for rendering.""" - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, "out.yaml") - args = [ - "-c", - "conda_build_test", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - rendered_filename, - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta["requirements"]["build"] - if "conda_build_test_requirement" in pkg - ][0] - required_package_details = required_package_string.split(" ") - assert len(required_package_details) > 1, ( - "Expected version number on successful " - f"rendering, but got only {required_package_details}" - ) - assert ( - required_package_details[1] == "1.0" - ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" - - -def test_render_without_channel_fails(tmp_path): - # do make extra channel available, so the required package should not be found - rendered_filename = tmp_path / "out.yaml" + rendered_filename = os.path.join(tmp_path, "out.yaml") args = [ - "--override-channels", + "-c", + "conda_build_test", os.path.join(metadata_dir, "_recipe_requiring_external_channel"), "--file", - str(rendered_filename), + rendered_filename, ] main_render.execute(args) with open(rendered_filename) as rendered_file: rendered_meta = yaml.safe_load(rendered_file) required_package_string = [ pkg - for pkg in rendered_meta.get("requirements", {}).get("build", []) + for pkg in rendered_meta["requirements"]["build"] if "conda_build_test_requirement" in pkg ][0] + required_package_details = required_package_string.split(" ") + assert len(required_package_details) > 1, ( + "Expected version number on successful " + f"rendering, but got only {required_package_details}" + ) assert ( - required_package_string == "conda_build_test_requirement" - ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" + required_package_details[1] == "1.0" + ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" + + +def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_render.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + f"--file={tmp_path / 'out.yaml'}", + ] + ) def test_render_output_build_path( diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py index 0333d77c1f..c2dd0a65b5 100644 --- a/tests/cli/test_main_skeleton.py +++ b/tests/cli/test_main_skeleton.py @@ -54,6 +54,6 @@ def test_skeleton_pypi_arguments_work(testing_workdir): assert os.path.isdir("photutils") # Check that the setup option occurs in bld.bat and build.sh. 
- m = api.render("photutils")[0][0] - assert "--offline" in m.meta["build"]["script"] - assert m.version() == "1.10.0" + metadata = api.render("photutils")[0][0] + assert "--offline" in metadata.meta["build"]["script"] + assert metadata.version() == "1.10.0" diff --git a/tests/conftest.py b/tests/conftest.py index f055b05d80..465cab6fcc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ import pytest from conda.common.compat import on_mac, on_win +from conda_index.api import update_index from pytest import MonkeyPatch import conda_build @@ -26,7 +27,6 @@ filename_hashing_default, ignore_verify_codes_default, no_rewrite_stdout_env_default, - noarch_python_build_age_default, ) from conda_build.metadata import MetaData from conda_build.utils import check_call_env, copy_into, prepend_bin_path @@ -99,7 +99,6 @@ def boolify(v): _src_cache_root=_src_cache_root_default, error_overlinking=boolify(error_overlinking_default), error_overdepending=boolify(error_overdepending_default), - noarch_python_build_age=noarch_python_build_age_default, enable_static=boolify(enable_static_default), no_rewrite_stdout_env=boolify(no_rewrite_stdout_env_default), ignore_verify_codes=ignore_verify_codes_default, @@ -111,7 +110,6 @@ def boolify(v): assert result.no_rewrite_stdout_env is False assert result._src_cache_root is None assert result.src_cache_root == testing_workdir - assert result.noarch_python_build_age == 0 return result @@ -251,3 +249,11 @@ def conda_build_test_recipe_envvar( name = "CONDA_BUILD_TEST_RECIPE_PATH" monkeypatch.setenv(name, str(conda_build_test_recipe_path)) return name + + +@pytest.fixture(scope="session") +def empty_channel(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Create a temporary, empty conda channel.""" + channel = tmp_path_factory.mktemp("empty_channel", numbered=False) + update_index(channel) + return channel diff --git a/tests/requirements.txt b/tests/requirements.txt index 5e94d4111a..acb3317206 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,10 +1,11 @@ beautifulsoup4 chardet -conda >=23.5.0 +conda >=23.7.0 conda-index >=0.4.0 conda-libmamba-solver # ensure we use libmamba conda-package-handling >=1.3 filelock +frozendict >=2.4.2 jinja2 jsonschema >=4.19 menuinst >=2 diff --git a/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml new file mode 100644 index 0000000000..406ba464c0 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_build_script/meta.yaml @@ -0,0 +1,10 @@ +package: + name: pkg + version: '1.0' +source: + path: . 
+outputs: + - name: pkg-output + build: + script: + - exit 1 diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat new file mode 100644 index 0000000000..6dedc57766 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.bat @@ -0,0 +1 @@ +exit 1 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh new file mode 100644 index 0000000000..6dedc57766 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/exit_1.sh @@ -0,0 +1 @@ +exit 1 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml new file mode 100644 index 0000000000..43c2f9d054 --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/output_script/meta.yaml @@ -0,0 +1,9 @@ +package: + name: pkg + version: '1.0' +source: + path: . +outputs: + - name: pkg-output + script: exit_1.sh # [unix] + script: exit_1.bat # [win] diff --git a/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml new file mode 100644 index 0000000000..df710d103b --- /dev/null +++ b/tests/test-recipes/metadata/_build_script_errors/toplevel/meta.yaml @@ -0,0 +1,7 @@ +package: + name: pkg + version: '1.0' +source: + path: . +build: + script: exit 1 diff --git a/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml b/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml new file mode 100644 index 0000000000..ad7ed0e836 --- /dev/null +++ b/tests/test-recipes/metadata/_cross_unix_windows_mingw/conda_build_config.yaml @@ -0,0 +1,2 @@ +target_platform: + - win-64 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml b/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml new file mode 100644 index 0000000000..c180f95383 --- /dev/null +++ b/tests/test-recipes/metadata/_cross_unix_windows_mingw/meta.yaml @@ -0,0 +1,18 @@ +package: + name: foo + version: 0.0.1 + +build: + number: 0 + script: + - echo 'hello' + - ls $PREFIX + # this is the unix layout. + - test ! 
-d $PREFIX/x86_64-w64-mingw32 + - test -d $PREFIX/Library + +requirements: + build: + host: + - m2w64-sysroot_win-64 + run: diff --git a/tests/test-recipes/metadata/_run_constrained_error/meta.yaml b/tests/test-recipes/metadata/_run_constrained_error/meta.yaml new file mode 100644 index 0000000000..7d8dd7f759 --- /dev/null +++ b/tests/test-recipes/metadata/_run_constrained_error/meta.yaml @@ -0,0 +1,10 @@ +package: + name: test_run_constrained_error + version: 1.0 + +requirements: + run_constrained: + # obtained from https://github.com/conda-forge/willow-feedstock/blob/67d9ac1c5232295ccaac41b131e3982a335b365b/recipe/meta.yaml#L29 + - pillow-heif >=0.10.0,<1.0.0=0.13.0,<1.0.0>=py312 + - {{ 'another-package' }} {{ '>=0.20.0,<2.0.0=0.23.0,<2.0.0>=py310' }} + diff --git a/tests/test-recipes/metadata/gh-5342/meta.yaml b/tests/test-recipes/metadata/gh-5342/meta.yaml new file mode 100644 index 0000000000..f083f1c95e --- /dev/null +++ b/tests/test-recipes/metadata/gh-5342/meta.yaml @@ -0,0 +1,15 @@ +{% set name = "gh-5342" %} + +package: + name: {{ name }} + version: 1.0 + +outputs: + - name: {{ name }} + build: + skip: true + + - name: {{ name }}-dev + build: + files: + - file diff --git a/tests/test-recipes/split-packages/_test-file-hash/build.sh b/tests/test-recipes/split-packages/_test-file-hash/build.sh new file mode 100644 index 0000000000..5cf09c5b51 --- /dev/null +++ b/tests/test-recipes/split-packages/_test-file-hash/build.sh @@ -0,0 +1,8 @@ +echo "int main() {}" > main.c +mkdir -p $PREFIX/bin +$CC main.c -o $PREFIX/bin/_file_hash + +echo "int foo() {return 2;}" > foo.c +echo "int foo(); int bar() {return foo()*2;}" > bar.c +$CC -shared foo.c -o libupstream.so +$CC -shared bar.c -o libdownstream.so -L$PWD -lupstream '-Wl,-rpath,$ORIGIN' diff --git a/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml b/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml new file mode 100644 index 0000000000..5b99fedac2 --- /dev/null +++ b/tests/test-recipes/split-packages/_test-file-hash/conda_build_config.yaml @@ -0,0 +1,3 @@ +python: +- 3.10 +- 3.11 diff --git a/tests/test-recipes/split-packages/_test-file-hash/install-py.sh b/tests/test-recipes/split-packages/_test-file-hash/install-py.sh new file mode 100644 index 0000000000..7a37b6050b --- /dev/null +++ b/tests/test-recipes/split-packages/_test-file-hash/install-py.sh @@ -0,0 +1,4 @@ +mkdir -p $SP_DIR/_py_file_hash +cp libdownstream.so $SP_DIR/_py_file_hash/ +cp libupstream.so $SP_DIR/_py_file_hash/ + diff --git a/tests/test-recipes/split-packages/_test-file-hash/meta.yaml b/tests/test-recipes/split-packages/_test-file-hash/meta.yaml new file mode 100644 index 0000000000..db0c9a89d1 --- /dev/null +++ b/tests/test-recipes/split-packages/_test-file-hash/meta.yaml @@ -0,0 +1,30 @@ +package: + name: _file_hash-split + version: 0.0.1 + +build: + number: 0 + skip: True # [not linux64] + error_overlinking: true + +requirements: + build: + - {{ compiler('c') }} + host: + run: + +outputs: + - name: py-file-hash + script: install-py.sh + requirements: + build: + - {{ compiler('c') }} + host: + - python + run: + - python + + - name: _file_hash + requirements: + build: + - {{ compiler('c') }} diff --git a/tests/test-recipes/split-packages/copying_files/bld.bat b/tests/test-recipes/split-packages/copying_files/bld.bat index e1084a2a6f..b516b7d6c3 100644 --- a/tests/test-recipes/split-packages/copying_files/bld.bat +++ b/tests/test-recipes/split-packages/copying_files/bld.bat @@ -2,4 +2,14 @@ echo "weee" > 
%PREFIX%\subpackage_file1 mkdir %PREFIX%\somedir echo "weee" > %PREFIX%\somedir\subpackage_file1 echo "weee" > %PREFIX%\subpackage_file1.ext -echo "weee" > %PREFIX%\subpackage_file2.ext \ No newline at end of file +echo "weee" > %PREFIX%\subpackage_file2.ext +echo "weee" > %PREFIX%\subpackage_file3.ext + +echo "weee" > %PREFIX%\subpackage_include_exclude1 +mkdir %PREFIX%\anotherdir +echo "weee" > %PREFIX%\anotherdir\subpackage_include_exclude1 +echo "weee" > %PREFIX%\subpackage_include_exclude1.wav +echo "weee" > %PREFIX%\subpackage_include_exclude2.wav +echo "weee" > %PREFIX%\subpackage_include_exclude3.wav +mkdir %PREFIX%\Library\bin +echo "weee" > %PREFIX%\Library\bin\dav1d.fake diff --git a/tests/test-recipes/split-packages/copying_files/build.sh b/tests/test-recipes/split-packages/copying_files/build.sh index 529dc13092..6dbde0d63f 100644 --- a/tests/test-recipes/split-packages/copying_files/build.sh +++ b/tests/test-recipes/split-packages/copying_files/build.sh @@ -6,3 +6,18 @@ echo "weee" > $PREFIX/somedir/subpackage_file1 # test glob patterns echo "weee" > $PREFIX/subpackage_file1.ext echo "weee" > $PREFIX/subpackage_file2.ext +echo "weee" > $PREFIX/subpackage_file3.ext + +# The files used to test the two subpackages must be disjoint because they are +# coinstalled +# test copying filename +echo "weee" > $PREFIX/subpackage_include_exclude1 +# test copying by folder name +mkdir $PREFIX/anotherdir +echo "weee" > $PREFIX/anotherdir/subpackage_include_exclude1 +# test glob patterns +echo "weee" > $PREFIX/subpackage_include_exclude1.wav +echo "weee" > $PREFIX/subpackage_include_exclude2.wav +echo "weee" > $PREFIX/subpackage_include_exclude3.wav +mkdir $PREFIX/lib +echo "weee" > $PREFIX/lib/libdav1d.fake diff --git a/tests/test-recipes/split-packages/copying_files/meta.yaml b/tests/test-recipes/split-packages/copying_files/meta.yaml index 9ab2e45957..4226e8a161 100644 --- a/tests/test-recipes/split-packages/copying_files/meta.yaml +++ b/tests/test-recipes/split-packages/copying_files/meta.yaml @@ -4,14 +4,44 @@ package: requirements: run: - - my_script_subpackage + - my_script_subpackage_files + - my_script_subpackage_include_exclude outputs: - - name: my_script_subpackage + - name: my_script_subpackage_files + build: + ignore_run_exports_from: + - libpng + requirements: + host: + - libpng=1.6.39 files: - subpackage_file1 - somedir - "*.ext" + # Libs should match because they are in the prefix + - "lib/libpng*" # [unix] + - "Library/bin/libpng*" # [win] + test: + script: subpackage_test.py + script_interpreter: python + - name: my_script_subpackage_include_exclude + build: + ignore_run_exports_from: + - dav1d + requirements: + host: + - dav1d=1.2.1 + files: + include: + - subpackage_include_exclude1 + - anotherdir + - "*.wav" + # Libs should not match because they come from a different package + - "lib/libdav1d*" # [unix] + - "Library/bin/dav1d*" # [win] + exclude: + - "*3.wav" test: script: subpackage_test.py script_interpreter: python diff --git a/tests/test-recipes/split-packages/copying_files/subpackage_test.py b/tests/test-recipes/split-packages/copying_files/subpackage_test.py index 91157c0642..9148e40947 100644 --- a/tests/test-recipes/split-packages/copying_files/subpackage_test.py +++ b/tests/test-recipes/split-packages/copying_files/subpackage_test.py @@ -1,33 +1,76 @@ import os +import sys -print(os.getenv('PREFIX')) -filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1') -assert os.path.isfile(filename) +if os.getenv("PKG_NAME") == "my_script_subpackage_files": + 
file_basename = "subpackage_file" + dirname = "somedir" + extension = "ext" + + if "darwin" in sys.platform: + external_host_file = "lib/libpng16.dylib" + elif "win32" in sys.platform: + external_host_file = "Library/bin/libpng16.dll" + else: + external_host_file = "lib/libpng16.so" + + filename = os.path.join(os.environ["PREFIX"], f"{file_basename}3.{extension}") + print(filename) + assert os.path.isfile(filename), filename + " is missing" + print("glob files OK") + + filename = os.path.join(os.environ["PREFIX"], external_host_file) + print(filename) + assert os.path.isfile(filename), filename + " is missing" + print("glob files prefix OK") + +if os.getenv("PKG_NAME") == "my_script_subpackage_include_exclude": + file_basename = "subpackage_include_exclude" + dirname = "anotherdir" + extension = "wav" + + if "darwin" in sys.platform: + external_host_file = "lib/libdav1d.6.dylib" + elif "win32" in sys.platform: + external_host_file = "Library/bin/dav1d.dll" + else: + external_host_file = "lib/libdav1d.so.6" + + filename = os.path.join(os.environ["PREFIX"], f"{file_basename}3.{extension}") + assert not os.path.isfile(filename), filename + " is missing" + print("glob exclude OK") + + filename = os.path.join(os.environ["PREFIX"], external_host_file) + assert not os.path.isfile(filename), filename + " is missing" + print("glob exclude prefix OK") + +print(os.getenv("PREFIX")) +filename = os.path.join(os.environ["PREFIX"], f"{file_basename}1") +assert os.path.isfile(filename), filename + " is missing" contents = open(filename).read().rstrip() -if hasattr(contents, 'decode'): +if hasattr(contents, "decode"): contents = contents.decode() -assert "weee" in contents, 'incorrect file contents: %s' % contents +assert "weee" in contents, "incorrect file contents: %s" % contents print("plain file OK") -filename = os.path.join(os.environ['PREFIX'], 'somedir', 'subpackage_file1') +filename = os.path.join(os.environ["PREFIX"], dirname, f"{file_basename}1") assert os.path.isfile(filename), filename + " is missing" contents = open(filename).read().rstrip() -if hasattr(contents, 'decode'): +if hasattr(contents, "decode"): contents = contents.decode() -assert "weee" in contents, 'incorrect file contents: %s' % contents +assert "weee" in contents, "incorrect file contents: %s" % contents print("subfolder file OK") -filename = os.path.join(os.environ['PREFIX'], 'subpackage_file1.ext') -assert os.path.isfile(filename) +filename = os.path.join(os.environ["PREFIX"], f"{file_basename}1.{extension}") +assert os.path.isfile(filename), filename + " is missing" contents = open(filename).read().rstrip() -if hasattr(contents, 'decode'): +if hasattr(contents, "decode"): contents = contents.decode() -assert "weee" in contents, 'incorrect file contents: %s' % contents +assert "weee" in contents, "incorrect file contents: %s" % contents -filename = os.path.join(os.environ['PREFIX'], 'subpackage_file2.ext') -assert os.path.isfile(filename) +filename = os.path.join(os.environ["PREFIX"], f"{file_basename}2.{extension}") +assert os.path.isfile(filename), filename + " is missing" contents = open(filename).read().rstrip() -if hasattr(contents, 'decode'): +if hasattr(contents, "decode"): contents = contents.decode() -assert "weee" in contents, 'incorrect file contents: %s' % contents +assert "weee" in contents, "incorrect file contents: %s" % contents print("glob OK") diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 5932bf4f1a..514db4f223 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ 
-30,16 +30,19 @@ from conda.base.context import context, reset_context from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaError, CondaMultiError, LinkError +from conda.utils import url_path from conda_index.api import update_index from conda_build import __version__, api, exceptions -from conda_build.conda_interface import url_path from conda_build.config import Config from conda_build.exceptions import ( + BuildScriptException, CondaBuildException, + CondaBuildUserError, DependencyNeedsBuildingError, OverDependingError, OverLinkingError, + RecipeError, ) from conda_build.os_utils.external import find_executable from conda_build.render import finalize_metadata @@ -237,8 +240,8 @@ def test_offline( def test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") - m = api.render(recipe_path, config=testing_config)[0][0] - output = api.get_output_file_paths(m)[0] + metadata = api.render(recipe_path, config=testing_config)[0][0] + output = api.get_output_file_paths(metadata)[0] # missing hash because we set custom build string in meta.yaml test_path = os.path.join( testing_config.croot, @@ -277,7 +280,7 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): )[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): # we are testing that even with the recipe excluded, we still get the tests in place output_file = api.build( os.path.join(metadata_dir, "_no_include_recipe"), config=testing_config @@ -383,7 +386,7 @@ def test_dirty_variable_available_in_build_scripts(testing_config): testing_config.dirty = True api.build(recipe, config=testing_config) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): testing_config.dirty = False api.build(recipe, config=testing_config) @@ -543,7 +546,7 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): def test_failed_tests_exit_build(testing_config): """https://github.com/conda/conda-build/issues/1112""" - with pytest.raises(SystemExit, match="TESTS FAILED"): + with pytest.raises(CondaBuildUserError, match="TESTS FAILED"): api.build( os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config ) @@ -613,13 +616,13 @@ def test_numpy_setup_py_data(testing_config): # - cython subprocess.call("conda remove -y cython".split()) with pytest.raises(CondaBuildException) as exc_info: - api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] + api.render(recipe_path, config=testing_config, numpy="1.16") assert exc_info.match("Cython") subprocess.check_call(["conda", "install", "-y", "cython"]) - m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] - _hash = m.hash_dependencies() + metadata = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] + _hash = metadata.hash_dependencies() assert ( - os.path.basename(api.get_output_file_paths(m)[0]) + os.path.basename(api.get_output_file_paths(metadata)[0]) == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2" ) @@ -816,13 +819,13 @@ def test_disable_pip(testing_metadata): testing_metadata.meta["build"]["script"] = ( 'python -c "import pip; print(pip.__version__)"' ) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): api.build(testing_metadata) testing_metadata.meta["build"]["script"] = ( 
'python -c "import setuptools; print(setuptools.__version__)"' ) - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises(BuildScriptException): api.build(testing_metadata) @@ -1178,9 +1181,9 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): # will be merged when build subdir == host_subdir, the weak run_exports should be present. testing_metadata.meta["requirements"]["build"] = ["test_has_run_exports"] api.output_yaml(testing_metadata, "meta.yaml") - m = api.render(testing_workdir, config=testing_config)[0][0] - assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] - assert "weak_pinned_package 1.0.*" in m.meta["requirements"]["run"] + metadata = api.render(testing_workdir, config=testing_config)[0][0] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. @@ -1190,10 +1193,12 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): ] testing_metadata.meta["requirements"]["host"] = ["python"] api.output_yaml(testing_metadata, "host_present_weak/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "host_present_weak"), config=testing_config )[0][0] - assert "weak_pinned_package 2.0.*" not in m.meta["requirements"].get("run", []) + assert "weak_pinned_package 2.0.*" not in metadata.meta["requirements"].get( + "run", [] + ) # 3. host present, and deps in build have "strong" run_exports section. use host, add # in "strong" from build. @@ -1205,15 +1210,15 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): "test_has_run_exports_implicit_weak" ] api.output_yaml(testing_metadata, "host_present_strong/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "host_present_strong"), config=testing_config )[0][0] - assert "strong_pinned_package 1.0 0" in m.meta["requirements"]["host"] - assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] + assert "strong_pinned_package 1.0 0" in metadata.meta["requirements"]["host"] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports should be excluded, since it is a build dep - assert "weak_pinned_package 1.0.*" not in m.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" not in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports_implicit_weak should be present, since it is a host dep - assert "weak_pinned_package 2.0.*" in m.meta["requirements"]["run"] + assert "weak_pinned_package 2.0.*" in metadata.meta["requirements"]["run"] @pytest.mark.sanity @@ -1279,20 +1284,20 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi testing_metadata.meta["requirements"]["build"] = ["run_exports_constrains"] testing_metadata.meta["requirements"]["host"] = [] api.output_yaml(testing_metadata, "in_build/meta.yaml") - m = api.render(os.path.join(testing_workdir, "in_build"), config=testing_config)[0][ - 0 - ] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + metadata = api.render( + os.path.join(testing_workdir, "in_build"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"strong_run_export"} == reqs_set("run") assert {"strong_constrains_export"} == reqs_set("run_constrained") 
testing_metadata.meta["requirements"]["build"] = [] testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains"] api.output_yaml(testing_metadata, "in_host/meta.yaml") - m = api.render(os.path.join(testing_workdir, "in_host"), config=testing_config)[0][ - 0 - ] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + metadata = api.render( + os.path.join(testing_workdir, "in_host"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"strong_run_export", "weak_run_export"} == reqs_set("run") assert {"strong_constrains_export", "weak_constrains_export"} == reqs_set( "run_constrained" @@ -1303,32 +1308,32 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi ] testing_metadata.meta["requirements"]["host"] = [] api.output_yaml(testing_metadata, "only_weak_in_build/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "only_weak_in_build"), config=testing_config )[0][0] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert set() == reqs_set("run") assert set() == reqs_set("run_constrained") testing_metadata.meta["requirements"]["build"] = [] testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains_only_weak"] api.output_yaml(testing_metadata, "only_weak_in_host/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "only_weak_in_host"), config=testing_config )[0][0] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"weak_run_export"} == reqs_set("run") assert {"weak_constrains_export"} == reqs_set("run_constrained") def test_pin_subpackage_exact(testing_config): recipe = os.path.join(metadata_dir, "_pin_subpackage_exact") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 assert any( re.match(r"run_exports_subpkg\ 1\.0\ 0", req) - for (m, _, _) in ms - for req in m.meta.get("requirements", {}).get("run", []) + for metadata, _, _ in metadata_tuples + for req in metadata.meta.get("requirements", {}).get("run", []) ) @@ -1420,12 +1425,12 @@ def test_unknown_selectors(testing_config): @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_failed_recipe_leaves_folders(testing_config): recipe = os.path.join(fail_dir, "recursive-build") - m = api.render(recipe, config=testing_config)[0][0] - locks = get_conda_operation_locks(m.config) + metadata = api.render(recipe, config=testing_config)[0][0] + locks = get_conda_operation_locks(metadata.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): - api.build(m) - assert os.path.isdir(m.config.build_folder), "build folder was removed" - assert os.listdir(m.config.build_folder), "build folder has no files" + api.build(metadata) + assert os.path.isdir(metadata.config.build_folder), "build folder was removed" + assert os.listdir(metadata.config.build_folder), "build folder has no files" # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations @@ -1461,6 +1466,12 @@ def test_run_constrained_stores_constrains_info(testing_config): assert info_contents["constrains"][0] == "bzip2 1.*" +def 
test_run_constrained_is_validated(testing_config): + recipe = os.path.join(metadata_dir, "_run_constrained_error") + with pytest.raises(RecipeError): + api.build(recipe, config=testing_config) + + @pytest.mark.sanity def test_no_locking(testing_config): recipe = os.path.join(metadata_dir, "source_git_jinja2") @@ -1537,7 +1548,7 @@ def test_setup_py_data_in_env(testing_config): # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic - with pytest.raises(subprocess.CalledProcessError): + with pytest.raises((BuildScriptException, CondaBuildException)): api.build(recipe, config=testing_config, python="3.5") @@ -1646,13 +1657,14 @@ def test_pin_depends(testing_config): """ dependencies """ recipe = os.path.join(metadata_dir, "_pin_depends_record") - m = api.render(recipe, config=testing_config)[0][0] + metadata = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record # will not show it in record assert not any( - re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] ) - output = api.build(m, config=testing_config)[0] + output = api.build(metadata, config=testing_config)[0] requires = package_has_file(output, "info/requires") assert requires if hasattr(requires, "decode"): @@ -1797,7 +1809,7 @@ def test_downstream_tests(testing_config): upstream = os.path.join(metadata_dir, "_test_downstreams/upstream") downstream = os.path.join(metadata_dir, "_test_downstreams/downstream") api.build(downstream, config=testing_config, notest=True) - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): api.build(upstream, config=testing_config) @@ -1942,7 +1954,7 @@ def test_add_pip_as_python_dependency_from_condarc_file( testing_metadata, testing_workdir, add_pip_as_python_dependency, monkeypatch ): """ - Test whether settings from .condarc files are needed. + Test whether settings from .condarc files are heeded. ref: https://github.com/conda/conda-libmamba-solver/issues/393 """ # TODO: SubdirData._cache_ clearing might not be needed for future conda versions.
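A recurring pattern in the test hunks above: failures that previously leaked out of api.build() as SystemExit or subprocess.CalledProcessError are now asserted as conda-build's own exception types. A minimal sketch of the pattern, reusing the _build_script_errors/toplevel recipe added by this patch; the fixtures and the metadata_dir helper are the same ones the surrounding test module already imports, and the exact exception hierarchy is assumed rather than shown here:

import os

import pytest

from conda_build import api
from conda_build.exceptions import BuildScriptException

from .utils import metadata_dir  # same helper the surrounding module imports


def test_toplevel_build_script_failure(testing_config):
    # a recipe whose build script runs `exit 1` now surfaces as
    # BuildScriptException instead of a bare CalledProcessError
    recipe = os.path.join(metadata_dir, "_build_script_errors", "toplevel")
    with pytest.raises(BuildScriptException):
        api.build(recipe, config=testing_config)

Asserting on a specific exception keeps the tests from passing accidentally when an unrelated error aborts the build, which a bare pytest.raises(SystemExit) could not distinguish.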
@@ -1958,10 +1970,64 @@ def test_add_pip_as_python_dependency_from_condarc_file( if add_pip_as_python_dependency: check_build_fails = nullcontext() else: - check_build_fails = pytest.raises(subprocess.CalledProcessError) + check_build_fails = pytest.raises(BuildScriptException) conda_rc = Path(testing_workdir, ".condarc") conda_rc.write_text(f"add_pip_as_python_dependency: {add_pip_as_python_dependency}") with env_var("CONDARC", conda_rc, reset_context): with check_build_fails: api.build(testing_metadata) + + +def test_rendered_is_reported(testing_config, capsys): + recipe_dir = os.path.join(metadata_dir, "outputs_overwrite_base_file") + api.build(recipe_dir, config=testing_config) + + captured = capsys.readouterr() + assert "Rendered as:" in captured.out + assert "name: base-outputs_overwrite_base_file" in captured.out + assert "- name: base-outputs_overwrite_base_file" in captured.out + assert "- base-outputs_overwrite_base_file >=1.0,<2.0a0" in captured.out + + +@pytest.mark.skipif(on_win, reason="Tests cross-compilation targeting Windows") +def test_cross_unix_windows_mingw(testing_config): + recipe = os.path.join(metadata_dir, "_cross_unix_windows_mingw") + testing_config.channel_urls = [ + "conda-forge", + ] + api.build(recipe, config=testing_config) + + +@pytest.mark.parametrize( + "recipe", sorted(Path(metadata_dir, "_build_script_errors").glob("*")) +) +@pytest.mark.parametrize("debug", (False, True)) +def test_conda_build_script_errors_without_conda_info_handlers(tmp_path, recipe, debug): + env = os.environ.copy() + if debug: + env["CONDA_VERBOSITY"] = "3" + process = subprocess.run( + ["conda", "build", recipe], + env=env, + capture_output=True, + text=True, + check=False, + cwd=tmp_path, + ) + assert process.returncode > 0 + all_output = process.stdout + "\n" + process.stderr + + # These should NOT appear in the output + assert ">>> ERROR REPORT <<<" not in all_output + assert "An unexpected error has occurred." not in all_output + assert "Conda has prepared the above report." 
not in all_output + + # These should appear + assert "returned non-zero exit status 1" in all_output + + # With verbose mode, we should actually see the traceback + if debug: + assert "Traceback" in all_output + assert "CalledProcessError" in all_output + assert "returned non-zero exit status 1" in all_output diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 4c0c09b9ac..dc4078e61f 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -40,4 +40,4 @@ def test_conda_pkg_format( # Verify that test pass ran through api assert "Manual entry point" in out - assert "TEST END: %s" % output_file in out + assert f"TEST END: {output_file}" in out diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 9d88b60eee..9dac14351c 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -42,7 +42,7 @@ def test_api_output_yaml(): assert argspec.defaults == (None, False) -def test_api_get_output_file_path(): +def test_api_get_output_file_paths(): argspec = getargspec(api.get_output_file_paths) assert argspec.args == [ "recipe_path_or_metadata", diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index 7da9ede2d3..c0e46b7bf3 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -7,9 +7,9 @@ import tarfile import pytest +from conda.gateways.connection.download import download from conda_build import api -from conda_build.conda_interface import download from conda_build.utils import on_win, package_has_file from .utils import assert_package_consistency, metadata_dir diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 7849daa01c..0882de0df1 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -15,7 +15,6 @@ from conda.common.compat import on_win from conda_build import api, render -from conda_build.conda_interface import cc_conda_build from conda_build.variants import validate_spec from .utils import metadata_dir, variants_dir @@ -57,7 +56,7 @@ def test_render_yaml_output(testing_workdir, testing_config): assert "package:" in open(os.path.join(testing_workdir, "output.yaml")).read() -def test_get_output_file_path(testing_workdir, testing_metadata): +def test_get_output_file_paths(testing_workdir, testing_metadata): testing_metadata = render.finalize_metadata(testing_metadata) api.output_yaml(testing_metadata, "recipe/meta.yaml") @@ -69,21 +68,21 @@ def test_get_output_file_path(testing_workdir, testing_metadata): assert build_path == os.path.join( testing_metadata.config.croot, testing_metadata.config.host_subdir, - "test_get_output_file_path-1.0-1.tar.bz2", + "test_get_output_file_paths-1.0-1.tar.bz2", ) -def test_get_output_file_path_metadata_object(testing_metadata): +def test_get_output_file_paths_metadata_object(testing_metadata): testing_metadata.final = True build_path = api.get_output_file_paths(testing_metadata)[0] assert build_path == os.path.join( testing_metadata.config.croot, testing_metadata.config.host_subdir, - "test_get_output_file_path_metadata_object-1.0-1.tar.bz2", + "test_get_output_file_paths_metadata_object-1.0-1.tar.bz2", ) -def test_get_output_file_path_jinja2(testing_config): +def test_get_output_file_paths_jinja2(testing_config): # If this test does not raise, it's an indicator that the workdir is not # being cleaned as it should. 
recipe = os.path.join(metadata_dir, "source_git_jinja2") @@ -140,9 +139,9 @@ def test_resolved_packages_recipe(testing_config): @pytest.mark.slow def test_host_entries_finalized(testing_config): recipe = os.path.join(metadata_dir, "_host_entries_finalized") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - outputs = api.get_output_file_paths(metadata) + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + outputs = api.get_output_file_paths(metadata_tuples) assert any("py27" in out for out in outputs) assert any("py39" in out for out in outputs) @@ -160,10 +159,11 @@ def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir) def test_pin_depends(testing_config): """This is deprecated functionality - replaced by the more general variants pinning scheme""" recipe = os.path.join(metadata_dir, "_pin_depends_strict") - m = api.render(recipe, config=testing_config)[0][0] + metadata = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, but having pin_depends set will force it to be. assert any( - re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] ) @@ -190,10 +190,10 @@ def test_noarch_with_platform_deps(testing_workdir, testing_config): build_ids = {} for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]: platform, arch = subdir_.split("-") - m = api.render( + metadata = api.render( recipe_path, config=testing_config, platform=platform, arch=arch )[0][0] - build_ids[subdir_] = m.build_id() + build_ids[subdir_] = metadata.build_id() # one hash for each platform, plus one for the archspec selector assert len(set(build_ids.values())) == 4 @@ -207,13 +207,15 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config): recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps") build_ids = set() for platform in ["osx", "linux", "win"]: - m = api.render(recipe_path, config=testing_config, platform=platform)[0][0] - build_ids.add(m.build_id()) + metadata = api.render(recipe_path, config=testing_config, platform=platform)[0][ + 0 + ] + build_ids.add(metadata.build_id()) assert len(build_ids) == 1 -def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): +def test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it python_versions = ["2.6", "3.4", "3.11"] @@ -221,33 +223,31 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) - cc_conda_build_backup = cc_conda_build.copy() - # hacky equivalent of changing condarc - # careful, this is global and affects other tests! make sure to clear it! 
- cc_conda_build.update( - {"config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml"} + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={ + "config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml", + **context.conda_build, + }, ) os.environ["TEST_WORKDIR"] = testing_workdir - try: - m = api.render( - os.path.join(variants_dir, "19_used_variables"), - bypass_env_check=True, - finalize=False, - )[0][0] - # this one should have gotten clobbered by the values in the recipe - assert m.config.variant["python"] not in python_versions - # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants["bzip2"]) == 2 - finally: - cc_conda_build.clear() - cc_conda_build.update(cc_conda_build_backup) + metadata = api.render( + os.path.join(variants_dir, "19_used_variables"), + bypass_env_check=True, + finalize=False, + )[0][0] + # this one should have gotten clobbered by the values in the recipe + assert metadata.config.variant["python"] not in python_versions + # this confirms that we loaded the config file correctly + assert len(metadata.config.squished_variants["bzip2"]) == 2 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): recipe = os.path.join(metadata_dir, "_self_reference_run_exports") - m = api.render(recipe)[0][0] - run_exports = m.meta.get("build", {}).get("run_exports", []) + metadata = api.render(recipe)[0][0] + run_exports = metadata.meta.get("build", {}).get("run_exports", []) assert run_exports assert len(run_exports) == 1 assert run_exports[0].split()[1] == ">=1.0.0,<2.0a0" @@ -255,11 +255,11 @@ def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): def test_run_exports_with_pin_compatible_in_subpackages(testing_config): recipe = os.path.join(metadata_dir, "_run_exports_in_outputs") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - if m.name().startswith("gfortran_"): + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + if metadata.name().startswith("gfortran_"): run_exports = set( - m.meta.get("build", {}).get("run_exports", {}).get("strong", []) + metadata.meta.get("build", {}).get("run_exports", {}).get("strong", []) ) assert len(run_exports) == 1 # len after splitting should be more than one because of pin_compatible. 
If it's only zlib, we've lost the @@ -269,38 +269,46 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): def test_ignore_build_only_deps(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "python_in_build_only"), bypass_env_check=True, finalize=False, ) - assert len(ms) == 1 + assert len(metadata_tuples) == 1 def test_merge_build_host_build_key(): - m = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_merge_build_host_empty_host_section(): - m = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_pin_expression_works_with_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "foo >=3.10.0.rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "foo >=3.10.0.rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] def test_pin_expression_works_with_python_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease_python") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "python >=3.10.0rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "python >=3.10.0rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] @pytest.mark.benchmark @@ -329,7 +337,7 @@ def create_variants(): validate_spec("", variant) return variant - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, channels=[], variants=create_variants() ) - assert len(ms) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests + assert len(metadata_tuples) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index a8273492b0..963312ee44 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -234,8 +234,8 @@ def test_sympy(package: str, version: str | None, tmp_path: Path, testing_config config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] - assert m.version() == "1.10" + metadata = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] + assert metadata.version() == "1.10" def test_get_entry_points(pylint_pkginfo, pylint_metadata): @@ -350,8 +350,8 @@ def test_pypi_with_setup_options(tmp_path: Path, testing_config): ) # Check that the setup option occurs in bld.bat and build.sh. 
- m = api.render(str(tmp_path / "photutils"))[0][0] - assert "--offline" in m.meta["build"]["script"] + metadata = api.render(str(tmp_path / "photutils"))[0][0] + assert "--offline" in metadata.meta["build"]["script"] def test_pypi_pin_numpy(tmp_path: Path, testing_config: Config): @@ -377,8 +377,8 @@ def test_pypi_version_sorting(tmp_path: Path, testing_config: Config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "fasttext"))[0][0] - assert parse_version(m.version()) >= parse_version("0.9.2") + metadata = api.render(str(tmp_path / "fasttext"))[0][0] + assert parse_version(metadata.version()) >= parse_version("0.9.2") def test_list_skeletons(): @@ -394,8 +394,8 @@ def test_pypi_with_entry_points(tmp_path: Path): def test_pypi_with_version_arg(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1442 api.skeletonize("PrettyTable", "pypi", version="0.7.2", output_dir=tmp_path) - m = api.render(str(tmp_path / "prettytable"))[0][0] - assert parse_version(m.version()) == parse_version("0.7.2") + metadata = api.render(str(tmp_path / "prettytable"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.7.2") @pytest.mark.slow @@ -415,10 +415,10 @@ def test_pypi_with_extra_specs(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "bigfile"))[0][0] - assert parse_version(m.version()) == parse_version("0.1.24") - assert any("cython" in req for req in m.meta["requirements"]["host"]) - assert any("mpi4py" in req for req in m.meta["requirements"]["host"]) + metadata = api.render(str(tmp_path / "bigfile"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.1.24") + assert any("cython" in req for req in metadata.meta["requirements"]["host"]) + assert any("mpi4py" in req for req in metadata.meta["requirements"]["host"]) @pytest.mark.slow @@ -438,17 +438,17 @@ def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "mpi4py_test"))[0][0] - assert parse_version(m.version()) == parse_version("0.0.10") + metadata = api.render(str(tmp_path / "mpi4py_test"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.0.10") def test_pypi_with_basic_environment_markers(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1974 api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) - m = api.render(tmp_path / "coconut")[0][0] + metadata = api.render(tmp_path / "coconut")[0][0] - build_reqs = str(m.meta["requirements"]["host"]) - run_reqs = str(m.meta["requirements"]["run"]) + build_reqs = str(metadata.meta["requirements"]["host"]) + run_reqs = str(metadata.meta["requirements"]["run"]) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs @@ -458,8 +458,8 @@ def test_pypi_with_basic_environment_markers(tmp_path: Path): def test_setuptools_test_requirements(tmp_path: Path): api.skeletonize(packages="hdf5storage", repo="pypi", output_dir=tmp_path) - m = api.render(str(tmp_path / "hdf5storage"))[0][0] - assert m.meta["test"]["requires"] == ["nose >=1.0"] + metadata = api.render(str(tmp_path / "hdf5storage"))[0][0] + assert metadata.meta["test"]["requires"] == ["nose >=1.0"] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py index 
9f08ccbae6..5945158023 100644 --- a/tests/test_api_skeleton_cpan.py +++ b/tests/test_api_skeleton_cpan.py @@ -17,8 +17,8 @@ def test_xs_needs_c_compiler(testing_config): """Perl packages with XS files need a C compiler""" # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) - m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][ - 0 - ] - build_requirements = m.get_value("requirements/build") + metadata = api.render( + "perl-sub-identify/0.14", finalize=False, bypass_env_check=True + )[0][0] + build_requirements = metadata.get_value("requirements/build") assert compiler("c", testing_config) in build_requirements diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 912b2bee0c..57e9d02550 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -41,12 +41,12 @@ def test_cran_license( api.skeletonize( packages=package, repo="cran", output_dir=tmp_path, config=testing_config ) - m = api.render(str(tmp_path / package / "meta.yaml"))[0][0] + metadata = api.render(str(tmp_path / package / "meta.yaml"))[0][0] - assert m.get_value("about/license") == license_id - assert m.get_value("about/license_family") == license_family + assert metadata.get_value("about/license") == license_id + assert metadata.get_value("about/license_family") == license_family assert { - Path(license).name for license in m.get_value("about/license_file", "") + Path(license).name for license in metadata.get_value("about/license_file", "") } == set(license_files) diff --git a/tests/test_api_test.py b/tests/test_api_test.py index 2bb76838aa..10200d5a99 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -9,6 +9,7 @@ import pytest from conda_build import api +from conda_build.exceptions import CondaBuildUserError from .utils import metadata_dir @@ -63,5 +64,5 @@ def test_api_extra_dep(testing_metadata): api.test(output, config=testing_metadata.config, extra_deps=["click"]) # missing click dep will fail tests - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): api.test(output, config=testing_metadata.config) diff --git a/tests/test_build.py b/tests/test_build.py index eca9441af8..f7c3f2ba8c 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -12,40 +12,29 @@ import sys from contextlib import nullcontext from pathlib import Path +from typing import TYPE_CHECKING import pytest from conda.common.compat import on_win from conda_build import api, build +from conda_build.exceptions import CondaBuildUserError from .utils import get_noarch_python_meta, metadata_dir -PREFIX_TESTS = {"normal": os.path.sep} -if on_win: - PREFIX_TESTS.update({"double_backslash": "\\\\", "forward_slash": "/"}) +if TYPE_CHECKING: + from pytest_mock import MockerFixture - -def test_find_prefix_files(testing_workdir): - """ - Write test output that has the prefix to be found, then verify that the prefix finding - identified the correct number of files. 
- """ - # create text files to be replaced - files = [] - for style, replacement in PREFIX_TESTS.items(): - filename = Path(testing_workdir, f"{style}.txt") - filename.write_text(testing_workdir.replace(os.path.sep, replacement)) - files.append(str(filename)) - - assert len(list(build.have_prefix_files(files, testing_workdir))) == len(files) + from conda_build.config import Config + from conda_build.metadata import MetaData def test_build_preserves_PATH(testing_config): - m = api.render(os.path.join(metadata_dir, "source_git"), config=testing_config)[0][ - 0 - ] + metadata = api.render( + os.path.join(metadata_dir, "source_git"), config=testing_config + )[0][0] ref_path = os.environ["PATH"] - build.build(m, stats=None) + build.build(metadata, stats=None) assert os.environ["PATH"] == ref_path @@ -344,3 +333,61 @@ def test_guess_interpreter( ): with pytest.raises(error) if error else nullcontext(): assert build.guess_interpreter(script) == interpreter + + +def test_check_external(): + with pytest.deprecated_call(): + build.check_external() + + +@pytest.mark.parametrize("readme", ["README.md", "README.rst", "README"]) +def test_copy_readme(testing_metadata: MetaData, readme: str): + testing_metadata.meta["about"]["readme"] = readme + with pytest.raises(CondaBuildUserError): + build.copy_readme(testing_metadata) + + Path(testing_metadata.config.work_dir, readme).touch() + build.copy_readme(testing_metadata) + assert Path(testing_metadata.config.info_dir, readme).exists() + + +@pytest.mark.skipif(not on_win, reason="WSL is only on Windows") +def test_wsl_unsupported( + testing_metadata: MetaData, + mocker: MockerFixture, + tmp_path: Path, +): + mocker.patch( + "conda_build.os_utils.external.find_executable", + return_value="C:\\Windows\\System32\\bash.exe", + ) + + (script := tmp_path / "install.sh").touch() + with pytest.raises(CondaBuildUserError): + build.bundle_conda( + output={"script": script}, + metadata=testing_metadata, + env={}, + stats={}, + ) + + +def test_handle_anaconda_upload(testing_config: Config, mocker: MockerFixture): + mocker.patch( + "conda_build.os_utils.external.find_executable", + return_value=None, + ) + testing_config.anaconda_upload = True + + with pytest.raises(CondaBuildUserError): + build.handle_anaconda_upload((), testing_config) + + +def test_tests_failed(testing_metadata: MetaData, tmp_path: Path): + with pytest.raises(CondaBuildUserError): + build.tests_failed( + package_or_metadata=testing_metadata, + move_broken=True, + broken_dir=tmp_path, + config=testing_metadata.config, + ) diff --git a/tests/test_codesigned.py b/tests/test_codesigned.py new file mode 100644 index 0000000000..3ed13086da --- /dev/null +++ b/tests/test_codesigned.py @@ -0,0 +1,97 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os +from functools import lru_cache +from pathlib import Path +from shutil import which +from subprocess import CalledProcessError, check_output, run + +import pytest + +from conda_build.utils import on_win + +HERE = os.path.abspath(os.path.dirname(__file__)) +REPO_ROOT = (Path(HERE) / "..").resolve().absolute() +STUB_FOLDER = REPO_ROOT / "conda_build" + + +@lru_cache(maxsize=None) +def find_signtool() -> str | None: + """Tries to find signtool + + Prefers signtool on PATH otherwise searches system. 
+    Ref:
+    - https://learn.microsoft.com/en-us/dotnet/framework/tools/signtool-exe
+    - https://learn.microsoft.com/en-us/windows/win32/seccrypto/signtool
+    - https://learn.microsoft.com/en-us/windows/win32/seccrypto/using-signtool-to-verify-a-file-signature
+    """
+    signtool_path = which("signtool")
+    if signtool_path:
+        return signtool_path
+
+    # Common installation directories where signtool might be located
+    common_paths = [
+        "C:\\Program Files (x86)\\Windows Kits\\10\\bin",
+        "C:\\Program Files\\Windows Kits\\10\\bin",
+        "C:\\Windows\\System32",
+    ]
+
+    signtool_path = None
+    # Search for signtool in common paths
+    for path in common_paths:
+        if signtool_path:
+            # We found one already
+            return signtool_path
+        if not os.path.exists(path):
+            continue
+        signtool_path = os.path.join(path, "signtool.exe")
+        if os.path.exists(signtool_path):
+            return signtool_path
+        # reset so a candidate path that does not exist is never returned
+        signtool_path = None
+        if "Windows Kits" in path:
+            max_version = 0
+            for dirname in os.listdir(path):
+                # Use most recent signtool version
+                if not dirname.endswith(".0"):
+                    continue  # next dirname
+                if int(dirname.replace(".", "")) < max_version:
+                    continue  # next dirname
+
+                maybe_signtool_path = os.path.join(path, dirname, "x64", "signtool.exe")
+                if os.path.exists(maybe_signtool_path):
+                    signtool_path = maybe_signtool_path
+                    # track the newest version accepted so far
+                    max_version = int(dirname.replace(".", ""))
+    return signtool_path
+
+
+@lru_cache(maxsize=None)
+def signtool_unsupported_because() -> str:
+    reason = ""
+    if not on_win:
+        reason = "Only verifying signatures of stub exes on Windows"
+        return reason
+    signtool = find_signtool()
+    if not signtool:
+        reason = "signtool: unable to locate signtool.exe"
+        return reason
+    try:
+        check_output([signtool, "verify", "/?"])
+    except CalledProcessError as exc:
+        reason = f"signtool: something went wrong while running 'signtool verify /?', output:\n{exc.output}\n"
+    return reason
+
+
+def signtool_unsupported() -> bool:
+    return bool(signtool_unsupported_because())
+
+
+@pytest.mark.skipif(signtool_unsupported(), reason=signtool_unsupported_because())
+@pytest.mark.parametrize(
+    "stub_file_name", ["cli-32.exe", "cli-64.exe", "gui-32.exe", "gui-64.exe"]
+)
+def test_stub_exe_signatures(stub_file_name: str) -> None:
+    """Verify that signtool verifies the signature of the stub exes"""
+    stub_file = STUB_FOLDER / stub_file_name
+    signtool_exe = find_signtool()
+    completed_process = run([signtool_exe, "verify", "/pa", "/v", stub_file])
+    assert completed_process.returncode == 0
diff --git a/tests/test_environ.py b/tests/test_environ.py
index 327accaeea..f446420feb 100644
--- a/tests/test_environ.py
+++ b/tests/test_environ.py
@@ -1,14 +1,8 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
 import os
-import sys
-import pytest
-from conda.core.prefix_data import PrefixData
-from packaging.version import parse
-
-import conda_build
-from conda_build.environ import Environment, create_env
+from conda_build.environ import create_env


 def test_environment_creation_preserves_PATH(testing_workdir, testing_config):
@@ -21,17 +15,3 @@
         subdir=testing_config.build_subdir,
     )
     assert os.environ["PATH"] == ref_path
-
-
-def test_environment():
-    """Asserting PrefixData can accomplish the same thing as Environment."""
-    with pytest.warns(
-        PendingDeprecationWarning
-        if parse(conda_build.__version__) < parse("24.3")
-        else DeprecationWarning,
-    ):
-        assert (specs := Environment(sys.prefix).package_specs())
-        assert specs == [
-            f"{prec.name} {prec.version} {prec.build}"
-            for prec in
PrefixData(sys.prefix).iter_records() - ] diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 18ae32f7ab..f19ea31997 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -5,9 +5,9 @@ from typing import TYPE_CHECKING import pytest +from frozendict import deepfreeze from conda_build import jinja_context -from conda_build.utils import HashableDict if TYPE_CHECKING: from pathlib import Path @@ -99,7 +99,7 @@ def test_pin_subpackage_exact(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True) assert len(pin.split()) == 3 @@ -111,7 +111,7 @@ def test_pin_subpackage_expression(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name) assert len(pin.split()) == 2 diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 0f6da9b089..1b9fc34258 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -57,14 +57,14 @@ def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): def test_select_lines(): lines = "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test ' ", ' " test " ', - "", - "# comment line", + "", # preserve newline + "# comment line", # preserve comment line (but not the comment) "test [abc]", " 'quoted # [abc] '", ' "quoted # [abc] yes "', @@ -74,19 +74,20 @@ def test_select_lines(): "test {{ JINJA_VAR[:2] }} # stuff yes [abc]", "test {{ JINJA_VAR[:2] }} # [abc] stuff yes", '{{ environ["test"] }} # [abc]', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": True}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test", " 'quoted'", ' "quoted"', @@ -96,20 +97,21 @@ def test_select_lines(): "test {{ JINJA_VAR[:2] }}", "test {{ JINJA_VAR[:2] }}", '{{ environ["test"] }}', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": False}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test {{ JINJA_VAR[:2] }}", - "", # trailing newline + "", # preserve trailing newline ) ) diff --git a/tests/test_misc.py b/tests/test_misc.py index bcdafcb196..4a5bb0d95c 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -4,9 +4,10 @@ from pathlib import Path import pytest +from conda.auxlib.entity import EntityEncoder +from conda.models.enums import PathType from conda_build._link import pyc_f -from conda_build.conda_interface import EntityEncoder, PathType @pytest.mark.parametrize( diff --git a/tests/test_post.py b/tests/test_post.py index 97ef1448fc..eb2672218a 100644 --- a/tests/test_post.py +++ 
b/tests/test_post.py @@ -18,7 +18,7 @@ package_has_file, ) -from .utils import add_mangling, metadata_dir +from .utils import add_mangling, metadata_dir, subpackage_path @pytest.mark.skipif( @@ -156,6 +156,26 @@ def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path): assert "JSONDecodeError" in captured_text +def test_file_hash(testing_config, caplog, tmp_path): + "check that the post-link check caching takes the file path into consideration" + recipe = Path(subpackage_path, "_test-file-hash") + recipe_tmp = tmp_path / "test-file-hash" + shutil.copytree(recipe, recipe_tmp) + + variants = {"python": ["3.11", "3.12"]} + testing_config.ignore_system_config = True + testing_config.activate = True + + with caplog.at_level(logging.INFO): + api.build( + str(recipe_tmp), + config=testing_config, + notest=True, + variants=variants, + activate=True, + ) + + @pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.") def test_rpath_symlink(mocker, testing_config): if on_linux: diff --git a/tests/test_source.py b/tests/test_source.py index 711407d153..1cae2f9997 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,10 +5,10 @@ import tarfile import pytest +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 4fe966c054..4307eb0f5b 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -11,6 +11,8 @@ from conda.base.context import context from conda_build import api, utils +from conda_build.exceptions import CondaBuildUserError +from conda_build.metadata import MetaDataTuple from conda_build.render import finalize_metadata from .utils import get_valid_recipes, subpackage_dir @@ -56,7 +58,10 @@ def test_output_pkg_path_shows_all_subpackages(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 @@ -65,7 +70,10 @@ def test_subpackage_version_provided(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 1 assert "a-2.0-1" in outputs[0] @@ -79,7 +87,10 @@ def test_subpackage_independent_hash(testing_metadata): out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 assert outputs[0][-15:] != outputs[1][-15:] @@ -120,34 +131,34 @@ def test_intradependencies(testing_config): def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): recipe = os.path.join(subpackage_dir, "_git_in_output_version") - outputs = api.render( + 
metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(outputs) == 1 - assert outputs[0][0].version() == "1.22.0" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].version() == "1.22.0" def test_intradep_with_templated_output_name(testing_config): recipe = os.path.join(subpackage_dir, "_intradep_with_templated_output_name") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 expected_names = { "test_templated_subpackage_name", "templated_subpackage_nameabc", "depends_on_templated", } - assert {m.name() for (m, _, _) in metadata} == expected_names + assert {metadata.name() for metadata, _, _ in metadata_tuples} == expected_names def test_output_specific_subdir(testing_config): recipe = os.path.join(subpackage_dir, "_output_specific_subdir") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 - for m, _, _ in metadata: - if m.name() in ("default_subdir", "default_subdir_2"): - assert m.config.target_subdir == context.subdir - elif m.name() == "custom_subdir": - assert m.config.target_subdir == "linux-aarch64" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("default_subdir", "default_subdir_2"): + assert metadata.config.target_subdir == context.subdir + elif metadata.name() == "custom_subdir": + assert metadata.config.target_subdir == "linux-aarch64" else: raise AssertionError( "Test for output_specific_subdir written incorrectly - " @@ -157,17 +168,17 @@ def test_output_specific_subdir(testing_config): def test_about_metadata(testing_config): recipe = os.path.join(subpackage_dir, "_about_metadata") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - for m, _, _ in metadata: - if m.name() == "abc": - assert "summary" in m.meta["about"] - assert m.meta["about"]["summary"] == "weee" - assert "home" not in m.meta["about"] - elif m.name() == "def": - assert "home" in m.meta["about"] - assert "summary" not in m.meta["about"] - assert m.meta["about"]["home"] == "http://not.a.url" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + for metadata, _, _ in metadata_tuples: + if metadata.name() == "abc": + assert "summary" in metadata.meta["about"] + assert metadata.meta["about"]["summary"] == "weee" + assert "home" not in metadata.meta["about"] + elif metadata.name() == "def": + assert "home" in metadata.meta["about"] + assert "summary" not in metadata.meta["about"] + assert metadata.meta["about"]["home"] == "http://not.a.url" outs = api.build(recipe, config=testing_config) for out in outs: about_meta = utils.package_has_file(out, "info/about.json") @@ -282,29 +293,30 @@ def test_per_output_tests(testing_config): @pytest.mark.sanity def test_per_output_tests_script(testing_config): recipe_dir = os.path.join(subpackage_dir, "_output_test_script") - with pytest.raises(SystemExit): + with pytest.raises(CondaBuildUserError): api.build(recipe_dir, config=testing_config) def test_pin_compatible_in_outputs(testing_config): recipe_dir = os.path.join(subpackage_dir, "_pin_compatible_in_output") - m = api.render(recipe_dir, config=testing_config)[0][0] + metadata = api.render(recipe_dir, config=testing_config)[0][0] assert any( - re.search(r"numpy\s*>=.*,<.*", req) for req in 
m.meta["requirements"]["run"] + re.search(r"numpy\s*>=.*,<.*", req) + for req in metadata.meta["requirements"]["run"] ) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config): recipe_dir = os.path.join(subpackage_dir, "_output_named_same_as_top_level") - ms = api.render(recipe_dir, config=testing_config) + metadata_tuples = api.render(recipe_dir, config=testing_config) # TODO: need to decide what best behavior is for saying whether the # top-level build reqs or the output reqs for the similarly naemd output # win. I think you could have both, but it means rendering a new, extra, # build-only metadata in addition to all the outputs - for m, _, _ in ms: - if m.name() == "ipp": + for metadata, _, _ in metadata_tuples: + if metadata.name() == "ipp": for env in ("build", "host", "run"): - assert not m.meta.get("requirements", {}).get(env) + assert not metadata.meta.get("requirements", {}).get(env) def test_subpackage_order_natural(testing_config): @@ -361,23 +373,34 @@ def test_strong_run_exports_from_build_applies_to_host(testing_config): def test_python_line_up_with_compiled_lib(recipe, testing_config): recipe = os.path.join(subpackage_dir, recipe) # we use windows so that we have 2 libxyz results (VS2008, VS2015) - ms = api.render(recipe, config=testing_config, platform="win", arch="64") + metadata_tuples = api.render( + recipe, config=testing_config, platform="win", arch="64" + ) # 2 libxyz, 3 py-xyz, 3 xyz - assert len(ms) == 8 - for m, _, _ in ms: - if m.name() in ("py-xyz" or "xyz"): - deps = m.meta["requirements"]["run"] + assert len(metadata_tuples) == 8 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("py-xyz" or "xyz"): + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("libxyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) - assert any(dep.startswith("zlib >") for dep in deps), (m.name(), deps) - if m.name() == "xyz": - deps = m.meta["requirements"]["run"] + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) + assert any(dep.startswith("zlib >") for dep in deps), ( + metadata.name(), + deps, + ) + if metadata.name() == "xyz": + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("py-xyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) @pytest.mark.xfail( @@ -385,17 +408,17 @@ def test_python_line_up_with_compiled_lib(recipe, testing_config): ) def test_merge_build_host_applies_in_outputs(testing_config): recipe = os.path.join(subpackage_dir, "_merge_build_host") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: # top level - if m.name() == "test_build_host_merge": - assert not m.meta.get("requirements", {}).get("run") + if metadata.name() == "test_build_host_merge": + assert not metadata.meta.get("requirements", {}).get("run") # output else: - run_exports = set(m.meta.get("build", {}).get("run_exports", [])) + run_exports = set(metadata.meta.get("build", {}).get("run_exports", [])) assert len(run_exports) == 2 assert all(len(export.split()) > 1 for export in run_exports) - run_deps = 
set(m.meta.get("requirements", {}).get("run", [])) + run_deps = set(metadata.meta.get("requirements", {}).get("run", [])) assert len(run_deps) == 2 assert all(len(dep.split()) > 1 for dep in run_deps) @@ -411,11 +434,13 @@ def test_activation_in_output_scripts(testing_config): def test_inherit_build_number(testing_config): recipe = os.path.join(subpackage_dir, "_inherit_build_number") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - assert "number" in m.meta["build"], "build number was not inherited at all" + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + assert ( + "number" in metadata.meta["build"] + ), "build number was not inherited at all" assert ( - int(m.meta["build"]["number"]) == 1 + int(metadata.meta["build"]["number"]) == 1 ), "build number should have been inherited as '1'" diff --git a/tests/test_utils.py b/tests/test_utils.py index d245e65796..98733546b5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -163,7 +163,7 @@ def test_logger_filtering(caplog, capfd): log.info("test info message") log.info("test duplicate message") log.info("test duplicate message") - log.warn("test warn message") + log.warning("test warn message") log.error("test error message") out, err = capfd.readouterr() assert "test debug message" in out @@ -204,11 +204,14 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): handlers: [console] """ ) - cc_conda_build = mocker.patch.object(utils, "cc_conda_build") - cc_conda_build.get.return_value = test_file + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={"log_config_file": test_file}, + ) log = utils.get_logger(__name__) # default log level is INFO, but our config file should set level to DEBUG - log.warn("test message") + log.warning("test message") # output should have gone to stdout according to config above. 
out, err = capfd.readouterr() assert "test message" in out @@ -433,3 +436,25 @@ def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: fp.write("test") assert utils.is_conda_pkg(value) == expected + + +def test_prefix_files(tmp_path: Path): + # all files within the prefix are found + (prefix := tmp_path / "prefix1").mkdir() + (file1 := prefix / "file1").touch() + (dirA := prefix / "dirA").mkdir() + (file2 := dirA / "file2").touch() + (dirB := prefix / "dirB").mkdir() + (file3 := dirB / "file3").touch() + + # files outside of the prefix are not found + (prefix2 := tmp_path / "prefix2").mkdir() + (prefix2 / "file4").touch() + (dirC := prefix2 / "dirC").mkdir() + (dirC / "file5").touch() + + # even if they are symlinked + (link1 := prefix / "dirC").symlink_to(dirC) + + paths = {str(path.relative_to(prefix)) for path in (file1, file2, file3, link1)} + assert paths == utils.prefix_files(str(prefix)) diff --git a/tests/test_variants.py b/tests/test_variants.py index 50e9cea4f2..3c79e36e16 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -17,7 +17,10 @@ combine_specs, dict_of_lists_to_list_of_dicts, filter_combined_spec_to_used_keys, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, get_package_variants, + get_vars, validate_spec, ) @@ -70,7 +73,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): testing_config.variant_config_files = [str(variants_path)] # render the metadata - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "variant_recipe"), no_download_source=False, config=testing_config, @@ -79,14 +82,14 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): ) # we should have one package/metadata per python version - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 # there should only be one run requirement for each package/metadata - assert len(metadata[0][0].meta["requirements"]["run"]) == 1 - assert len(metadata[1][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[0][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[1][0].meta["requirements"]["run"]) == 1 # the run requirements should be python ranges assert { - *metadata[0][0].meta["requirements"]["run"], - *metadata[1][0].meta["requirements"]["run"], + *metadata_tuples[0][0].meta["requirements"]["run"], + *metadata_tuples[1][0].meta["requirements"]["run"], } == {"python >=3.11,<3.12.0a0", "python >=3.12,<3.13.0a0"} @@ -106,7 +109,7 @@ def test_use_selectors_in_variants(testing_workdir, testing_config): ) ) def test_variant_with_ignore_version_reduces_matrix(): - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "03_ignore_version_reduces_matrix"), variants={ "packageA": ["1.2", "3.4"], @@ -116,13 +119,13 @@ def test_variant_with_ignore_version_reduces_matrix(): }, finalize=False, ) - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 def test_variant_with_numpy_pinned_has_matrix(): recipe = os.path.join(variants_dir, "04_numpy_matrix_pinned") - metadata = api.render(recipe, finalize=False) - assert len(metadata) == 4 + metadata_tuples = api.render(recipe, finalize=False) + assert len(metadata_tuples) == 4 def test_pinning_in_build_requirements(): @@ -220,13 +223,13 @@ def test_validate_spec(): def test_cross_compilers(): recipe = os.path.join(variants_dir, "09_cross") - ms = api.render( + metadata_tuples = api.render( recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert 
len(ms) == 3 + assert len(metadata_tuples) == 3 def test_variants_in_output_names(): @@ -249,11 +252,11 @@ def test_variants_in_versions_with_setup_py_data(): def test_git_variables_with_variants(testing_config): recipe = os.path.join(variants_dir, "13_git_vars") - m = api.render( + metadata = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True )[0][0] - assert m.version() == "1.20.2" - assert m.build_number() == 0 + assert metadata.version() == "1.20.2" + assert metadata.build_number() == 0 def test_variant_input_with_zip_keys_keeps_zip_keys_list(): @@ -302,57 +305,109 @@ def test_serial_builds_have_independent_configs(testing_config): def test_subspace_selection(testing_config): recipe = os.path.join(variants_dir, "18_subspace_selection") testing_config.variant = {"a": "coffee"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, so we should end up with 2 variants - assert len(ms) == 2 + assert len(metadata_tuples) == 2 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test compound selection testing_config.variant = {"a": "coffee", "b": "123"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, but one with both 'coffee' for a, and '123' for b, # so we should end up with 1 variants - assert len(ms) == 1 + assert len(metadata_tuples) == 1 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 0 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 0 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for 
metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test when configuration leads to no valid combinations - only c provided, and its value # doesn't match any other existing values of c, so it's then ambiguous which zipped # values to choose testing_config.variant = {"c": "not an animal"} with pytest.raises(ValueError): - ms = api.render( - recipe, config=testing_config, finalize=False, bypass_env_check=True - ) + api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) # all zipped keys provided by the new variant. It should clobber the old one. testing_config.variant = {"a": "some", "b": "new", "c": "animal"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(ms) == 1 - assert ms[0][0].config.variant["a"] == "some" - assert ms[0][0].config.variant["b"] == "new" - assert ms[0][0].config.variant["c"] == "animal" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].config.variant["a"] == "some" + assert metadata_tuples[0][0].config.variant["b"] == "new" + assert metadata_tuples[0][0].config.variant["c"] == "animal" def test_get_used_loop_vars(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "19_used_variables"), finalize=False, bypass_env_check=True, @@ -360,9 +415,9 @@ def test_get_used_loop_vars(): # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable - assert m.get_used_loop_vars() == {"python", "some_package"} + assert metadata.get_used_loop_vars() == {"python", "some_package"} # these are all used vars - including those with only one value (and thus not loop vars) - assert m.get_used_vars() == { + assert metadata.get_used_vars() == { "python", "some_package", "zlib", @@ -377,49 +432,63 @@ def test_reprovisioning_source(): def test_reduced_hashing_behavior(testing_config): # recipes using any compiler jinja2 function need a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_compiler"), finalize=False, bypass_env_check=True, )[0][0] assert ( - "c_compiler" in m.get_hash_contents() + "c_compiler" in metadata.get_hash_contents() ), "hash contents should contain c_compiler" assert re.search( - "h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id() + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() ), "hash should be present when compiler jinja2 function is used" # recipes that use some variable in conda_build_config.yaml to control what # versions are present at build time also must have a hash (except # python, r_base, and the other stuff covered by legacy build string # behavior) - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_pinned"), finalize=False, bypass_env_check=True, )[0][0] - assert "zlib" in m.get_hash_contents() - assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert "zlib" in metadata.get_hash_contents() + assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id()) # anything else does not get a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_no_python"), finalize=False, bypass_env_check=True, )[0][0] - assert not m.get_hash_contents() 
- assert not re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert not metadata.get_hash_contents() + assert not re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() + ) def test_variants_used_in_jinja2_conditionals(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "21_conditional_sections"), finalize=False, bypass_env_check=True, ) - assert len(ms) == 2 - assert sum(m.config.variant["blas_impl"] == "mkl" for m, _, _ in ms) == 1 - assert sum(m.config.variant["blas_impl"] == "openblas" for m, _, _ in ms) == 1 + assert len(metadata_tuples) == 2 + assert ( + sum( + metadata.config.variant["blas_impl"] == "mkl" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["blas_impl"] == "openblas" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) def test_build_run_exports_act_on_host(caplog): @@ -433,14 +502,14 @@ def test_build_run_exports_act_on_host(caplog): def test_detect_variables_in_build_and_output_scripts(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="linux", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -449,7 +518,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -458,14 +527,14 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" in used_vars # on windows, we find variables in bat scripts as well as shell scripts - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="win", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -475,7 +544,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -519,11 +588,11 @@ def test_exclusive_config_files(): os.path.join("config_dir", "config-0.yaml"), os.path.join("config_dir", "config-1.yaml"), ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_files=exclusive_config_files, )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? assert "cwd" not in variant # did we load the exclusive configs? 
@@ -544,11 +613,11 @@ def test_exclusive_config_file(): yaml.dump( {"abc": ["super"], "exclusive": ["someval"]}, f, default_flow_style=False ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_file=os.path.join("config_dir", "config.yaml"), )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? assert "cwd" not in variant # did we load the exclusive config @@ -607,27 +676,27 @@ def test_inner_python_loop_with_output(testing_config): def test_variant_as_dependency_name(testing_config): - outputs = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "27_requirements_host"), config=testing_config ) - assert len(outputs) == 2 + assert len(metadata_tuples) == 2 def test_custom_compiler(): recipe = os.path.join(variants_dir, "28_custom_compiler") - ms = api.render( + metadata_tuples = api.render( recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert len(ms) == 3 + assert len(metadata_tuples) == 3 def test_different_git_vars(): recipe = os.path.join(variants_dir, "29_different_git_vars") - ms = api.render(recipe) - versions = [m[0].version() for m in ms] + metadata_tuples = api.render(recipe) + versions = [metadata[0].version() for metadata in metadata_tuples] assert "1.20.0" in versions assert "1.21.11" in versions @@ -644,7 +713,7 @@ def test_top_level_finalized(testing_config): def test_variant_subkeys_retained(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "31_variant_subkeys"), finalize=False, bypass_env_check=True, @@ -652,11 +721,11 @@ def test_variant_subkeys_retained(): found_replacements = False from conda_build.build import get_all_replacements - for variant in m.config.variants: + for variant in metadata.config.variants: found_replacements = get_all_replacements(variant) assert len(found_replacements), "Did not find replacements" - m.final = False - outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False) + metadata.final = False + outputs = metadata.get_output_metadata_set(permit_unsatisfiable_variants=False) get_all_replacements(outputs[0][1].config.variant) @@ -700,3 +769,39 @@ def test_zip_key_filtering( } assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected + + +def test_get_vars(): + variants = [ + { + "python": "3.12", + "nodejs": "20", + "zip_keys": [], # ignored + }, + {"python": "3.12", "nodejs": "18"}, + {"python": "3.12", "nodejs": "20"}, + ] + + assert get_vars(variants) == {"nodejs"} + + +def test_find_used_variables_in_shell_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"${variants[0]}\n" + f"${{{variants[1]}}}\n" + f"${{{{{variants[2]}}}}}\n" + f"$${variants[3]}\n" + ) + assert find_used_variables_in_shell_script(variants, script) == {"FOO", "BAR"} + + +def test_find_used_variables_in_batch_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"%{variants[0]}%\n" + f"%%{variants[1]}%%\n" + f"${variants[2]}\n" + f"${{{variants[3]}}}\n" + ) + assert find_used_variables_in_batch_script(variants, script) == {"FOO", "BAR"} diff --git a/tests/utils.py b/tests/utils.py index 125cda7c91..4d6803f09d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,10 +8,9 @@ from pathlib import Path from typing import TYPE_CHECKING -from conda.base.context import context, reset_context +from 
conda.base.context import reset_context from conda.common.compat import on_mac -from conda_build.conda_interface import cc_conda_build from conda_build.metadata import MetaData if TYPE_CHECKING: @@ -92,8 +91,7 @@ def assert_package_consistency(package_path): has_prefix_present = False except tarfile.ReadError: raise RuntimeError( - "Could not extract metadata from %s. " - "File probably corrupt." % package_path + f"Could not extract metadata from {package_path}. File probably corrupt." ) errors = [] member_set = set(member_list) # The tar format allows duplicates in member_list @@ -102,7 +100,7 @@ def assert_package_consistency(package_path): file_set = set(file_list) # Check that there are no duplicates in info/files if len(file_list) != len(file_set): - errors.append("Duplicate files in info/files in %s" % package_path) + errors.append(f"Duplicate files in info/files in {package_path}") # Compare the contents of files and members unlisted_members = member_set.difference(file_set) missing_members = file_set.difference(member_set) @@ -110,14 +108,16 @@ def assert_package_consistency(package_path): missing_files = [m for m in unlisted_members if not m.startswith("info/")] if len(missing_files) > 0: errors.append( - "The following package files are not listed in " - "info/files: %s" % ", ".join(missing_files) + "The following package files are not listed in info/files: {}".format( + ", ".join(missing_files) + ) ) # Find any files missing in the archive if len(missing_members) > 0: errors.append( - "The following files listed in info/files are missing: " - "%s" % ", ".join(missing_members) + "The following files listed in info/files are missing: {}".format( + ", ".join(missing_members) + ) ) # Find any files in has_prefix that are not present in files if has_prefix_present: @@ -130,15 +130,15 @@ def assert_package_consistency(package_path): elif len(parts) == 3: prefix_path_list.append(parts[2]) else: - errors.append("Invalid has_prefix file in package: %s" % package_path) + errors.append(f"Invalid has_prefix file in package: {package_path}") prefix_path_set = set(prefix_path_list) if len(prefix_path_list) != len(prefix_path_set): - errors.append("Duplicate files in info/has_prefix in %s" % package_path) + errors.append(f"Duplicate files in info/has_prefix in {package_path}") prefix_not_in_files = prefix_path_set.difference(file_set) if len(prefix_not_in_files) > 0: errors.append( "The following files listed in info/has_prefix are missing " - "from info/files: %s" % ", ".join(prefix_not_in_files) + "from info/files: {}".format(", ".join(prefix_not_in_files)) ) # Assert that no errors are detected @@ -153,7 +153,3 @@ def get_noarch_python_meta(meta): def reset_config(search_path=None): reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, "conda_build") else {} - )
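
Note on the convention the renames above standardize: api.render() returns a list of
MetaDataTuple entries, which the tests now unpack explicitly as
"for metadata, _, _ in metadata_tuples" instead of indexing anonymous tuples. A minimal
sketch of the pattern follows; the recipe path "some_recipe" is a placeholder, and the
tuple field names reflect a reading of this diff rather than an authoritative API
reference:

    from conda_build import api

    # each entry is a MetaDataTuple(metadata, need_download, need_reparse)
    metadata_tuples = api.render("some_recipe", finalize=False, bypass_env_check=True)
    for metadata, _, _ in metadata_tuples:
        # MetaData exposes the rendered recipe sections
        print(metadata.name(), metadata.version())
        print(metadata.meta.get("requirements", {}).get("run", []))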