diff --git a/ci/github/.condarc b/.github/condarc
similarity index 73%
rename from ci/github/.condarc
rename to .github/condarc
index 44a36fcc35..a76e773f8f 100644
--- a/ci/github/.condarc
+++ b/.github/condarc
@@ -2,7 +2,5 @@ auto_update_conda: False
auto_activate_base: True
notify_outdated_conda: False
changeps1: False
-pkgs_dirs:
-- /usr/share/miniconda/envs/test/pkgs
always_yes: True
local_repodata_ttl: 7200
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 6bf8249c4c..090c389a6b 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -1,6 +1,3 @@
-# this is the sibling workflow to tests-skip.yml, it is required to work around
-# the skipped but required checks issue:
-# https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks
name: Tests
on:
@@ -32,20 +29,29 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
+env:
+ # https://conda.github.io/conda-libmamba-solver/user-guide/configuration/#advanced-options
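+  # when set, channels recorded on already-installed packages are not added to the solver's channel list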
+ CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED: true
+
jobs:
# detect whether any code changes are included in this PR
changes:
runs-on: ubuntu-latest
permissions:
+ # necessary to detect changes
+ # https://github.com/dorny/paths-filter#supported-workflows
pull-requests: read
outputs:
code: ${{ steps.filter.outputs.code }}
steps:
- - uses: actions/checkout@v3
+ - name: Checkout Source
+ uses: actions/checkout@v4
# dorny/paths-filter needs git clone for non-PR events
- # https://github.com/marketplace/actions/paths-changes-filter#supported-workflows
+ # https://github.com/dorny/paths-filter#supported-workflows
if: github.event_name != 'pull_request'
- - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
+
+ - name: Filter Changes
+ uses: dorny/paths-filter@v3
id: filter
with:
filters: |
@@ -65,6 +71,7 @@ jobs:
runs-on: ubuntu-latest
defaults:
run:
+ # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell
shell: bash -el {0}
strategy:
fail-fast: false
@@ -76,10 +83,10 @@ jobs:
include:
# minimum Python/conda combo
- python-version: '3.8'
- conda-version: 22.11.0
+ conda-version: 23.5.0
test-type: serial
- python-version: '3.8'
- conda-version: 22.11.0
+ conda-version: 23.5.0
test-type: parallel
# maximum Python/conda combo
- python-version: '3.12'
@@ -89,75 +96,70 @@ jobs:
conda-version: canary
test-type: parallel
env:
- CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
- CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }}
- REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
- REPLAY_DIR: ${{ github.workspace }}/pytest-replay
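+      # channel prefix ("channel/label::") prepended to CONDA_VERSION below; an empty string means default channels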
+ CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev::' || '' }}
+ CONDA_VERSION: ${{ contains('canary|release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }}
PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }}
- PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
+ - name: Checkout Source
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - name: Timestamp
- run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV
- shell: bash
+ - name: Hash + Timestamp
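+        # unique string used for cache keys and artifact names; the %Y%m date suffix rotates caches monthly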
+ run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV
- - name: Cache conda
- uses: actions/cache@v3
+ - name: Cache Conda
+ uses: actions/cache@v4
with:
path: ~/conda_pkgs_dir
- key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }}
+ key: cache-${{ env.HASH }}
- - name: Setup miniconda
- uses: conda-incubator/setup-miniconda@v2
+ - name: Setup Miniconda
+ uses: conda-incubator/setup-miniconda@v3
with:
- condarc-file: ./ci/github/.condarc
- python-version: ${{ matrix.python-version }}
+ condarc-file: .github/condarc
run-post: false # skip post cleanup
- - name: Setup environment
- run: |
- conda install -q -y -c defaults \
- --file ./tests/requirements.txt \
- --file ./tests/requirements-linux.txt \
- ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }}
- pip install -e . --no-deps
-
- - name: Show info
- run: |
- conda info -a
- conda list --show-channel-urls
-
- - name: Run tests
- run: |
- pytest \
- --color=yes \
- -v \
- -n "${{ env.PYTEST_NUMPROCESSES }}" \
- --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \
- --cov conda_build \
- --cov-append \
- --cov-branch \
- --cov-report xml \
- --replay-record-dir="${{ env.REPLAY_DIR }}" \
- --replay-base-name="${{ env.REPLAY_NAME }}" \
- -m "${{ env.PYTEST_MARKER }}" \
- ./tests
-
- - uses: codecov/codecov-action@v3
+ - name: Conda Install
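+        # install the test requirements plus the matrix-selected Python and conda in one transaction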
+ run: conda install
+ --yes
+ --file tests/requirements.txt
+ --file tests/requirements-${{ runner.os }}.txt
+ --file tests/requirements-ci.txt
+ python=${{ matrix.python-version }}
+ ${{ env.CONDA_CHANNEL_LABEL }}${{ env.CONDA_VERSION }}
+
+ # TODO: how can we remove this step?
+ - name: Install Self
+ run: pip install -e .
+
+ - name: Conda Info
+ run: conda info --verbose
+
+ - name: Conda List
+ run: conda list --show-channel-urls
+
+ - name: Run Tests
+ run: pytest
+ --cov=conda_build
+ -n auto
+ -m "${{ env.PYTEST_MARKER }}"
+
+ - name: Upload Coverage
+ uses: codecov/codecov-action@v4
with:
- flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64
+ flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}
- - name: Upload Pytest Replay
+ - name: Upload Test Results
if: '!cancelled()'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }}
- path: ${{ env.REPLAY_DIR }}
+ name: test-results-${{ env.HASH }}
+ path: |
+ .coverage
+ test-report.xml
+ retention-days: 1 # temporary, combined in aggregate below
# windows test suite
windows:
@@ -181,77 +183,77 @@ jobs:
conda-version: canary
test-type: parallel
env:
+ ErrorActionPreference: Stop # powershell exit on first error
CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
- REPLAY_NAME: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
- REPLAY_DIR: ${{ github.workspace }}\pytest-replay
PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial and not slow' }}
- PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
+ - name: Checkout Source
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - name: Timestamp
- run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV
- shell: bash
+ - name: Hash + Timestamp
+ shell: bash # use bash to run date command
+ run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV
- - name: Cache conda
- uses: actions/cache@v3
+ - name: Cache Conda
+ uses: actions/cache@v4
with:
path: ~/conda_pkgs_dir
- key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }}
+ key: cache-${{ env.HASH }}
- - name: Setup miniconda
- uses: conda-incubator/setup-miniconda@v2
+ - name: Setup Miniconda
+ uses: conda-incubator/setup-miniconda@v3
with:
- condarc-file: .\ci\github\.condarc
- python-version: ${{ matrix.python-version }}
+ condarc-file: .github\condarc
run-post: false # skip post cleanup
- - name: Setup environment
- shell: cmd /C CALL {0}
- run: |
- @echo on
- CALL choco install visualstudio2017-workload-vctools || exit 1
- CALL conda install -q -y -c defaults ^
- --file .\tests\requirements.txt ^
- --file .\tests\requirements-windows.txt ^
- ${{ env.CONDA_CHANNEL_LABEL }}::conda || exit 1
- CALL pip install -e . --no-deps || exit 1
-
- - name: Show info
- run: |
- conda info -a
- conda list --show-channel-urls
-
- - name: Run tests
- run: |
- pytest `
- --color=yes `
- -v `
- -n "${{ env.PYTEST_NUMPROCESSES }}" `
- --basetemp "${{ runner.temp }}\${{ matrix.test-type}}" `
- --cov conda_build `
- --cov-append `
- --cov-branch `
- --cov-report xml `
- --replay-record-dir="${{ env.REPLAY_DIR }}" `
- --replay-base-name="${{ env.REPLAY_NAME }}" `
- -m "${{ env.PYTEST_MARKER }}" `
- .\tests
-
- - uses: codecov/codecov-action@v3
+ - name: Choco Install
+ run: choco install visualstudio2017-workload-vctools
+
+ - name: Conda Install
+ run: conda install
+ --yes
+ --file tests\requirements.txt
+ --file tests\requirements-${{ runner.os }}.txt
+ --file tests\requirements-ci.txt
+ python=${{ matrix.python-version }}
+ ${{ env.CONDA_CHANNEL_LABEL }}::conda
+
+ # TODO: how can we remove this step?
+ - name: Install Self
+ run: pip install -e .
+
+ - name: Conda Info
+ run: conda info --verbose
+
+ - name: Conda List
+ run: conda list --show-channel-urls
+
+ - name: Run Tests
+        # Windows is sensitive to long paths; use `--basetemp=${{ runner.temp }}` to
+        # keep the test directories shorter
+ run: pytest
+ --cov=conda_build
+ --basetemp=${{ runner.temp }}
+ -n auto
+ -m "${{ env.PYTEST_MARKER }}"
+
+ - name: Upload Coverage
+ uses: codecov/codecov-action@v4
with:
- flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64
+ flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}
- - name: Upload Pytest Replay
+ - name: Upload Test Results
if: '!cancelled()'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- path: ${{ env.REPLAY_DIR }}
- name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }}
+ name: test-results-${{ env.HASH }}
+ path: |
+ .coverage
+ test-report.xml
+ retention-days: 1 # temporary, combined in aggregate below
# macos test suite
macos:
@@ -262,6 +264,7 @@ jobs:
runs-on: macos-11
defaults:
run:
+ # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell
shell: bash -el {0}
strategy:
fail-fast: false
@@ -279,74 +282,71 @@ jobs:
test-type: parallel
env:
CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }}
- REPLAY_NAME: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}
- REPLAY_DIR: ${{ github.workspace }}/pytest-replay
PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }}
- PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
+ - name: Checkout Source
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - name: Timestamp
- run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV
- shell: bash
+ - name: Hash + Timestamp
+ run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV
- - name: Cache conda
- uses: actions/cache@v3
+ - name: Cache Conda
+ uses: actions/cache@v4
with:
path: ~/conda_pkgs_dir
- key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }}
+ key: cache-${{ env.HASH }}
- - name: Setup miniconda
- uses: conda-incubator/setup-miniconda@v2
+ - name: Setup Miniconda
+ uses: conda-incubator/setup-miniconda@v3
with:
- condarc-file: ./ci/github/.condarc
- python-version: ${{ matrix.python-version }}
+ condarc-file: .github/condarc
run-post: false # skip post cleanup
- - name: Setup environment
- run: |
- sudo xcode-select --switch /Applications/Xcode_11.7.app
- conda install -q -y -c defaults \
- --file ./tests/requirements.txt \
- --file ./tests/requirements-macos.txt \
- ${{ env.CONDA_CHANNEL_LABEL }}::conda
- pip install -e . --no-deps
-
- - name: Show info
- run: |
- conda info -a
- conda list --show-channel-urls
+ - name: Xcode Install
+ run: sudo xcode-select --switch /Applications/Xcode_11.7.app
- - name: Run tests
- run: |
- pytest \
- --color=yes \
- -v \
- -n "${{ env.PYTEST_NUMPROCESSES }}" \
- --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \
- --cov conda_build \
- --cov-append \
- --cov-branch \
- --cov-report xml \
- --replay-record-dir="${{ env.REPLAY_DIR }}" \
- --replay-base-name="${{ env.REPLAY_NAME }}" \
- -m "${{ env.PYTEST_MARKER }}" \
- ./tests
-
- - uses: codecov/codecov-action@v3
+ - name: Conda Install
+ run: conda install
+ --yes
+ --file tests/requirements.txt
+ --file tests/requirements-${{ runner.os }}.txt
+ --file tests/requirements-ci.txt
+ python=${{ matrix.python-version }}
+ ${{ env.CONDA_CHANNEL_LABEL }}::conda
+
+ # TODO: how can we remove this step?
+ - name: Install Self
+ run: pip install -e .
+
+ - name: Conda Info
+ run: conda info --verbose
+
+ - name: Conda List
+ run: conda list --show-channel-urls
+
+ - name: Run Tests
+ run: pytest
+ --cov=conda_build
+ -n auto
+ -m "${{ env.PYTEST_MARKER }}"
+
+ - name: Upload Coverage
+ uses: codecov/codecov-action@v4
with:
- flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64
+ flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}
- - name: Upload Pytest Replay
+ - name: Upload Test Results
if: '!cancelled()'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }}
- path: ${{ env.REPLAY_DIR }}
+ name: test-results-${{ env.HASH }}
+ path: |
+ .coverage
+ test-report.xml
+ retention-days: 1 # temporary, combined in aggregate below
# aggregate and upload
aggregate:
@@ -361,40 +361,37 @@ jobs:
runs-on: ubuntu-latest
steps:
- - name: Download test results
- uses: actions/download-artifact@v3
+ - name: Download Artifacts
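+        # without a "name" filter this downloads every artifact from the current run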
+ uses: actions/download-artifact@v4
- - name: Upload combined test results
- # provides one downloadable archive of all .coverage/test-report.xml files
- # of all matrix runs for further analysis.
- uses: actions/upload-artifact@v3
+ - name: Upload Combined Test Results
+        # provides one downloadable archive of the test results from all matrix runs for further analysis
+ uses: actions/upload-artifact@v4
with:
name: test-results-${{ github.sha }}-all
- path: test-results-${{ github.sha }}-*
- retention-days: 90 # default: 90
+ path: test-results-*
- name: Test Summary
uses: test-summary/action@v2
with:
- paths: ./test-results-${{ github.sha }}-**/test-report*.xml
+ paths: test-results-*/test-report.xml
# required check
analyze:
- name: Analyze results
needs: [linux, windows, macos, aggregate]
if: '!cancelled()'
runs-on: ubuntu-latest
steps:
- - name: Decide whether the needed jobs succeeded or failed
- uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe
+ - name: Determine Success
+ uses: re-actors/alls-green@v1.2.2
with:
+ # permit jobs to be skipped if there are no code changes (see changes job)
allowed-skips: ${{ toJSON(needs) }}
jobs: ${{ toJSON(needs) }}
# canary builds
build:
- name: Canary Build
needs: [analyze]
# only build canary build if
# - prior steps succeeded,
@@ -415,24 +412,28 @@ jobs:
subdir: linux-64
- runner: macos-latest
subdir: osx-64
+ - runner: macos-14
+ subdir: osx-arm64
- runner: windows-latest
subdir: win-64
runs-on: ${{ matrix.runner }}
steps:
# Clean checkout of specific git ref needed for package metadata version
# which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR:
- - uses: actions/checkout@v3
+ - name: Checkout Source
+ uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
clean: true
fetch-depth: 0
# Explicitly use Python 3.12 since each of the OSes has a different default Python
- - uses: actions/setup-python@v4
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: '3.12'
- - name: Detect label
+ - name: Detect Label
shell: python
run: |
from pathlib import Path
@@ -453,8 +454,8 @@ jobs:
Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}")
- - name: Create and upload canary build
- uses: conda/actions/canary-release@v23.7.0
+ - name: Create & Upload
+ uses: conda/actions/canary-release@v24.2.0
with:
package-name: ${{ github.event.repository.name }}
subdir: ${{ matrix.subdir }}
diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md
index d0a4f4266f..46a13ecd98 100644
--- a/HOW_WE_USE_GITHUB.md
+++ b/HOW_WE_USE_GITHUB.md
@@ -225,7 +225,7 @@ This is a duplicate of [link to primary issue]; please feel free to conti
Please uninstall your current version of `conda` and reinstall the latest version.
-Feel free to use either the [miniconda](https://docs.conda.io/en/latest/miniconda.html)
+Feel free to use either the [miniconda](https://docs.anaconda.com/free/miniconda/)
or [anaconda](https://www.anaconda.com/products/individual) installer,
whichever is more appropriate for your needs.
diff --git a/conda_build/api.py b/conda_build/api.py
index 8a1298bbe9..a8fc525e66 100644
--- a/conda_build/api.py
+++ b/conda_build/api.py
@@ -20,13 +20,13 @@
# make the Config class available in the api namespace
from .config import DEFAULT_PREFIX_LENGTH as _prefix_length
from .config import Config, get_channel_urls, get_or_merge_config
+from .deprecations import deprecated
from .utils import (
CONDA_PACKAGE_EXTENSIONS,
LoggingContext,
ensure_list,
expand_globs,
find_recipe,
- get_logger,
get_skip_message,
on_win,
)
@@ -168,6 +168,7 @@ def get_output_file_paths(
return sorted(list(set(outs)))
+@deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.")
def get_output_file_path(
recipe_path_or_metadata,
no_download_source=False,
@@ -180,12 +181,6 @@ def get_output_file_path(
Both split packages (recipes with more than one output) and build matrices,
created with variants, contribute to the list of file paths here.
"""
- log = get_logger(__name__)
- log.warn(
- "deprecation warning: this function has been renamed to get_output_file_paths, "
- "to reflect that potentially multiple paths are returned. This function will be "
- "removed in the conda-build 4.0 release."
- )
return get_output_file_paths(
recipe_path_or_metadata,
no_download_source=no_download_source,
diff --git a/conda_build/build.py b/conda_build/build.py
index a24e468eca..9e62fc7293 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -22,6 +22,7 @@
import yaml
from bs4 import UnicodeDammit
from conda import __version__ as conda_version
+from conda.core.prefix_data import PrefixData
from . import __version__ as conda_build_version
from . import environ, noarch_python, source, tarcheck, utils
@@ -1421,8 +1422,10 @@ def write_about_json(m):
m.config.extra_meta,
)
extra.update(m.config.extra_meta)
- env = environ.Environment(root_dir)
- d["root_pkgs"] = env.package_specs()
+ d["root_pkgs"] = [
+ f"{prec.name} {prec.version} {prec.build}"
+ for prec in PrefixData(root_dir).iter_records()
+ ]
# Include the extra section of the metadata in the about.json
d["extra"] = extra
json.dump(d, fo, indent=2, sort_keys=True)
@@ -3501,7 +3504,7 @@ def test(
AssertionError,
) as exc:
log.warn(
- "failed to get install actions, retrying. exception was: %s", str(exc)
+ "failed to get package records, retrying. exception was: %s", str(exc)
)
tests_failed(
metadata,
diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py
index 61c46c1c4b..933528b114 100644
--- a/conda_build/cli/main_render.py
+++ b/conda_build/cli/main_render.py
@@ -13,7 +13,6 @@
from .. import __version__, api
from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build
from ..config import get_channel_urls, get_or_merge_config
-from ..deprecations import deprecated
from ..utils import LoggingContext
from ..variants import get_package_variants, set_language_env_vars
@@ -193,7 +192,6 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
return parser, parser.parse_args(args)
-@deprecated.argument("24.1.1", "24.3.0", "print_results")
def execute(args: Sequence[str] | None = None) -> int:
_, parsed = parse_args(args)
diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py
index 4fa9fb3777..5d5c455d07 100644
--- a/conda_build/conda_interface.py
+++ b/conda_build/conda_interface.py
@@ -8,9 +8,6 @@
from importlib import import_module # noqa: F401
from conda import __version__ as CONDA_VERSION # noqa: F401
-from conda.auxlib.packaging import ( # noqa: F401
- _get_version_from_git_tag as get_version_from_git_tag,
-)
from conda.base.context import context, determine_target_prefix, reset_context
from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401
from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401
@@ -45,13 +42,9 @@
add_parser_channels,
add_parser_prefix,
download,
- handle_proxy_407,
- hashsum_file,
human_bytes,
input,
lchmod,
- md5_file,
- memoized,
normalized_version,
prefix_placeholder,
rm_rf,
@@ -65,29 +58,13 @@
walk_prefix,
win_path_to_unix,
)
-from conda.exports import display_actions as _display_actions
-from conda.exports import execute_actions as _execute_actions
-from conda.exports import execute_plan as _execute_plan
from conda.exports import get_index as _get_index
-from conda.exports import install_actions as _install_actions
-from conda.exports import linked as _linked
-from conda.exports import linked_data as _linked_data
-from conda.exports import package_cache as _package_cache
+from conda.gateways.disk.read import compute_sum
from conda.models.channel import get_conda_build_local_url # noqa: F401
-from conda.models.dist import Dist as _Dist
from .deprecations import deprecated
-deprecated.constant("24.1.0", "24.3.0", "Dist", _Dist)
-deprecated.constant("24.1.0", "24.3.0", "display_actions", _display_actions)
-deprecated.constant("24.1.0", "24.3.0", "execute_actions", _execute_actions)
-deprecated.constant("24.1.0", "24.3.0", "execute_plan", _execute_plan)
-deprecated.constant("24.1.0", "24.3.0", "get_index", _get_index)
-deprecated.constant("24.1.0", "24.3.0", "install_actions", _install_actions)
-deprecated.constant("24.1.0", "24.3.0", "linked", _linked)
-deprecated.constant("24.1.0", "24.3.0", "linked_data", _linked_data)
-deprecated.constant("24.1.0", "24.3.0", "package_cache", _package_cache)
-
+deprecated.constant("24.1.0", "24.5.0", "get_index", _get_index)
# TODO: Go to references of all properties below and import them from `context` instead
binstar_upload = context.binstar_upload
default_python = context.default_python
@@ -112,3 +89,41 @@
# When deactivating envs (e.g. switching from root to build/test) this env var is used,
# except the PR that removed this has been reverted (for now) and Windows doesn't need it.
env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None)
+
+
+@deprecated(
+ "24.3",
+ "24.5",
+ addendum="Handled by `conda.gateways.connection.session.CondaSession`.",
+)
+def handle_proxy_407(x, y):
+ pass
+
+
+deprecated.constant(
+ "24.3",
+ "24.5",
+ "hashsum_file",
+ compute_sum,
+ addendum="Use `conda.gateways.disk.read.compute_sum` instead.",
+)
+
+
+@deprecated(
+ "24.3",
+ "24.5",
+ addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.",
+)
+def md5_file(path: str | os.PathLike) -> str:
+ return compute_sum(path, "md5")
+
+
+@deprecated(
+ "24.3",
+ "24.5",
+ addendum="Use `conda_build.environ.get_version_from_git_tag` instead.",
+)
+def get_version_from_git_tag(tag):
+ from .environ import get_version_from_git_tag
+
+ return get_version_from_git_tag(tag)
diff --git a/conda_build/environ.py b/conda_build/environ.py
index 762b9c7479..f7260ac92e 100644
--- a/conda_build/environ.py
+++ b/conda_build/environ.py
@@ -1,5 +1,7 @@
# Copyright (C) 2014 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
import contextlib
import json
import logging
@@ -15,6 +17,7 @@
from glob import glob
from logging import getLogger
from os.path import join, normpath
+from typing import TYPE_CHECKING
from conda.base.constants import (
CONDA_PACKAGE_EXTENSIONS,
@@ -26,34 +29,33 @@
from conda.core.link import PrefixSetup, UnlinkLinkTransaction
from conda.core.package_cache_data import PackageCacheData
from conda.core.prefix_data import PrefixData
+from conda.exceptions import (
+ CondaError,
+ LinkError,
+ LockError,
+ NoPackagesFoundError,
+ PaddingError,
+ UnsatisfiableError,
+)
from conda.models.channel import prioritize_channels
+from conda.models.match_spec import MatchSpec
from . import utils
from .conda_interface import (
Channel,
- CondaError,
- LinkError,
- LockError,
- MatchSpec,
- NoPackagesFoundError,
PackageRecord,
- PaddingError,
ProgressiveFetchExtract,
TemporaryDirectory,
- UnsatisfiableError,
context,
create_default_packages,
- get_version_from_git_tag,
pkgs_dirs,
reset_context,
root_dir,
)
-from .config import Config
from .deprecations import deprecated
from .exceptions import BuildLockError, DependencyNeedsBuildingError
from .features import feature_list
from .index import get_build_index
-from .metadata import MetaData
from .os_utils import external
from .utils import (
ensure_list,
@@ -65,10 +67,22 @@
)
from .variants import get_default_variant
+if TYPE_CHECKING:
+ from pathlib import Path
+ from typing import Any, Iterable, TypedDict
+
+ from .config import Config
+ from .metadata import MetaData
+
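+    # shape of the legacy "actions" dict returned by install_actions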
+ class InstallActionsType(TypedDict):
+ PREFIX: str | os.PathLike | Path
+ LINK: list[PackageRecord]
+
+
log = getLogger(__name__)
-PREFIX_ACTION = "PREFIX"
-LINK_ACTION = "LINK"
+deprecated.constant("24.3", "24.5", "PREFIX_ACTION", _PREFIX_ACTION := "PREFIX")
+deprecated.constant("24.3", "24.5", "LINK_ACTION", _LINK_ACTION := "LINK")
# these are things that we provide env vars for more explicitly. This list disables the
# pass-through of variant values to env vars for these keys.
@@ -208,6 +222,24 @@ def verify_git_repo(
return OK
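+# parses `git describe` output such as "1.2.3-4-gabc1234" into version, post-commit count, and commit hash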
+GIT_DESCRIBE_REGEX = re.compile(
+ r"(?:[_-a-zA-Z]*)"
+ r"(?P[a-zA-Z0-9.]+)"
+ r"(?:-(?P\d+)-g(?P[0-9a-f]{7,}))$"
+)
+
+
+def get_version_from_git_tag(tag):
+ """Return a PEP440-compliant version derived from the git status.
+    If the tag cannot be parsed, return None.
+ """
+ m = GIT_DESCRIBE_REGEX.match(tag)
+ if m is None:
+ return None
+ version, post_commit, hash = m.groups()
+ return version if post_commit == "0" else f"{version}.post{post_commit}+{hash}"
+
+
def get_git_info(git_exe, repo, debug):
"""
Given a repo to a git repo, return a dictionary of:
@@ -795,18 +827,21 @@ def os_vars(m, prefix):
return d
+@deprecated("24.3", "24.5")
class InvalidEnvironment(Exception):
pass
# Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools )
# Vendored here to avoid the whole dependency for just this bit.
+@deprecated("24.3", "24.5")
def _load_json(path):
with open(path) as fin:
x = json.load(fin)
return x
+@deprecated("24.3", "24.5")
def _load_all_json(path):
"""
Load all json files in a directory. Return dictionary with filenames mapped to json
@@ -820,6 +855,7 @@ def _load_all_json(path):
return result
+@deprecated("24.3", "24.5", addendum="Use `conda.core.prefix_data.PrefixData` instead.")
class Environment:
def __init__(self, path):
"""
@@ -852,29 +888,35 @@ def package_specs(self):
return specs
-cached_actions = {}
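+# memoized solves keyed by (specs, env, subdir, channel_urls, disable_pip)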
+cached_precs: dict[
+ tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord]
+] = {}
+deprecated.constant("24.3", "24.5", "cached_actions", cached_precs)
last_index_ts = 0
-def get_package_records(
- prefix,
- specs,
- env,
- retries=0,
+# NOTE: The function has to retain the "get_install_actions" name for now since
+# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build
+# checks for this name in the call stack explicitly.
+def get_install_actions(
+ prefix: str | os.PathLike | Path,
+ specs: Iterable[str | MatchSpec],
+ env, # unused
+ retries: int = 0,
subdir=None,
- verbose=True,
- debug=False,
- locking=True,
+ verbose: bool = True,
+ debug: bool = False,
+ locking: bool = True,
bldpkgs_dirs=None,
timeout=900,
- disable_pip=False,
- max_env_retry=3,
+ disable_pip: bool = False,
+ max_env_retry: int = 3,
output_folder=None,
channel_urls=None,
-):
- global cached_actions
+) -> list[PackageRecord]:
+ global cached_precs
global last_index_ts
- actions = {}
+
log = utils.get_logger(__name__)
conda_log_level = logging.WARN
specs = list(specs)
@@ -906,16 +948,15 @@ def get_package_records(
utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@")
)
+ precs: list[PackageRecord] = []
if (
specs,
env,
subdir,
channel_urls,
disable_pip,
- ) in cached_actions and last_index_ts >= index_ts:
- actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy()
- if PREFIX_ACTION in actions:
- actions[PREFIX_ACTION] = prefix
+ ) in cached_precs and last_index_ts >= index_ts:
+ precs = cached_precs[(specs, env, subdir, channel_urls, disable_pip)].copy()
elif specs:
# this is hiding output like:
# Fetching package metadata ...........
@@ -923,7 +964,7 @@ def get_package_records(
with utils.LoggingContext(conda_log_level):
with capture():
try:
- actions = _install_actions(prefix, index, specs)
+ precs = _install_actions(prefix, index, specs)["LINK"]
except (NoPackagesFoundError, UnsatisfiableError) as exc:
raise DependencyNeedsBuildingError(exc, subdir=subdir)
except (
@@ -937,7 +978,7 @@ def get_package_records(
) as exc:
if "lock" in str(exc):
log.warn(
- "failed to get install actions, retrying. exception was: %s",
+ "failed to get package records, retrying. exception was: %s",
str(exc),
)
elif (
@@ -966,12 +1007,12 @@ def get_package_records(
utils.rm_rf(pkg_dir)
if retries < max_env_retry:
log.warn(
- "failed to get install actions, retrying. exception was: %s",
+ "failed to get package records, retrying. exception was: %s",
str(exc),
)
- actions = get_install_actions(
+ precs = get_package_records(
prefix,
- tuple(specs),
+ specs,
env,
retries=retries + 1,
subdir=subdir,
@@ -987,7 +1028,7 @@ def get_package_records(
)
else:
log.error(
- "Failed to get install actions, max retries exceeded."
+ "Failed to get package records, max retries exceeded."
)
raise
if disable_pip:
@@ -997,64 +1038,28 @@ def get_package_records(
if not any(
re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs
):
- actions[LINK_ACTION] = [
- prec for prec in actions[LINK_ACTION] if prec.name != pkg
- ]
- utils.trim_empty_keys(actions)
- cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy()
+ precs = [prec for prec in precs if prec.name != pkg]
+ cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy()
last_index_ts = index_ts
- return actions.get(LINK_ACTION, [])
+ return precs
-@deprecated("24.1.0", "24.3.0", addendum="Use `get_package_records` instead.")
-def get_install_actions(
- prefix,
- specs,
- env,
- retries=0,
- subdir=None,
- verbose=True,
- debug=False,
- locking=True,
- bldpkgs_dirs=None,
- timeout=900,
- disable_pip=False,
- max_env_retry=3,
- output_folder=None,
- channel_urls=None,
-):
- precs = get_package_records(
- prefix=prefix,
- specs=specs,
- env=env,
- retries=retries,
- subdir=subdir,
- verbose=verbose,
- debug=debug,
- locking=locking,
- bldpkgs_dirs=bldpkgs_dirs,
- timeout=timeout,
- disable_pip=disable_pip,
- max_env_retry=max_env_retry,
- output_folder=output_folder,
- channel_urls=channel_urls,
- )
- return {PREFIX_ACTION: prefix, LINK_ACTION: precs}
+get_package_records = get_install_actions
+del get_install_actions
-@deprecated.argument("24.1.0", "24.3.0", "specs_or_actions", rename="specs_or_precs")
def create_env(
- prefix,
- specs_or_precs,
+ prefix: str | os.PathLike | Path,
+ specs_or_precs: Iterable[str | MatchSpec] | Iterable[PackageRecord],
env,
config,
subdir,
- clear_cache=True,
- retry=0,
+ clear_cache: bool = True,
+ retry: int = 0,
locks=None,
- is_cross=False,
- is_conda=False,
-):
+ is_cross: bool = False,
+ is_conda: bool = False,
+) -> None:
"""
Create a conda envrionment for the given prefix and specs.
"""
@@ -1073,6 +1078,7 @@ def create_env(
# if os.path.isdir(prefix):
# utils.rm_rf(prefix)
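+    # normalize to a tuple so single items and generators can be indexed below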
+ specs_or_precs = tuple(ensure_list(specs_or_precs))
if specs_or_precs: # Don't waste time if there is nothing to do
log.debug("Creating environment in %s", prefix)
log.debug(str(specs_or_precs))
@@ -1082,14 +1088,10 @@ def create_env(
try:
with utils.try_acquire_locks(locks, timeout=config.timeout):
# input is a list of specs in MatchSpec format
- if not (
- hasattr(specs_or_precs, "keys")
- or isinstance(specs_or_precs[0], PackageRecord)
- ):
- specs = list(set(specs_or_precs))
- actions = get_install_actions(
+ if not isinstance(specs_or_precs[0], PackageRecord):
+ precs = get_package_records(
prefix,
- tuple(specs),
+ tuple(set(specs_or_precs)),
env,
subdir=subdir,
verbose=config.verbose,
@@ -1103,10 +1105,7 @@ def create_env(
channel_urls=tuple(config.channel_urls),
)
else:
- if not hasattr(specs_or_precs, "keys"):
- actions = {LINK_ACTION: specs_or_precs}
- else:
- actions = specs_or_precs
+ precs = specs_or_precs
index, _, _ = get_build_index(
subdir=subdir,
bldpkgs_dir=config.bldpkgs_dir,
@@ -1117,14 +1116,13 @@ def create_env(
locking=config.locking,
timeout=config.timeout,
)
- utils.trim_empty_keys(actions)
- _display_actions(prefix, actions)
+ _display_actions(prefix, precs)
if utils.on_win:
for k, v in os.environ.items():
os.environ[k] = str(v)
with env_var("CONDA_QUIET", not config.verbose, reset_context):
with env_var("CONDA_JSON", not config.verbose, reset_context):
- _execute_actions(prefix, actions)
+ _execute_actions(prefix, precs)
except (
SystemExit,
PaddingError,
@@ -1159,15 +1157,13 @@ def create_env(
)
config.prefix_length = 80
- host = "_h_env" in prefix
- # Set this here and use to create environ
- # Setting this here is important because we use it below (symlink)
- prefix = config.host_prefix if host else config.build_prefix
- actions[PREFIX_ACTION] = prefix
-
create_env(
- prefix,
- actions,
+ (
+ config.host_prefix
+ if "_h_env" in prefix
+ else config.build_prefix
+ ),
+ specs_or_precs,
config=config,
subdir=subdir,
env=env,
@@ -1308,7 +1304,7 @@ def remove_existing_packages(dirs, fns, config):
def get_pinned_deps(m, section):
with TemporaryDirectory(prefix="_") as tmpdir:
- actions = get_install_actions(
+ precs = get_package_records(
tmpdir,
tuple(m.ms_depends(section)),
section,
@@ -1323,16 +1319,17 @@ def get_pinned_deps(m, section):
output_folder=m.config.output_folder,
channel_urls=tuple(m.config.channel_urls),
)
- runtime_deps = [
- package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, [])
- ]
- return runtime_deps
+ return [package_record_to_requirement(prec) for prec in precs]
# NOTE: The function has to retain the "install_actions" name for now since
# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build
# checks for this name in the call stack explicitly.
-def install_actions(prefix, index, specs):
+def install_actions(
+ prefix: str | os.PathLike | Path,
+ index,
+ specs: Iterable[str | MatchSpec],
+) -> InstallActionsType:
# This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471
# but reduced to only the functionality actually used within conda-build.
@@ -1344,6 +1341,8 @@ def install_actions(prefix, index, specs):
callback=reset_context,
):
# a hack since in conda-build we don't track channel_priority_map
+ channels: tuple[Channel, ...] | None
+ subdirs: tuple[str, ...] | None
if LAST_CHANNEL_URLS:
channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS)
# tuple(dict.fromkeys(...)) removes duplicates while preserving input order.
@@ -1353,7 +1352,7 @@ def install_actions(prefix, index, specs):
subdirs = (
tuple(
dict.fromkeys(
- subdir for subdir in (c.subdir for c in channels) if subdir
+ subdir for channel in channels if (subdir := channel.subdir)
)
)
or context.subdirs
@@ -1361,12 +1360,12 @@ def install_actions(prefix, index, specs):
else:
channels = subdirs = None
- specs = tuple(MatchSpec(spec) for spec in specs)
+ mspecs = tuple(MatchSpec(spec) for spec in specs)
PrefixData._cache_.clear()
solver_backend = context.plugin_manager.get_cached_solver_backend()
- solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs)
+ solver = solver_backend(prefix, channels, subdirs, specs_to_add=mspecs)
if index:
# Solver can modify the index (e.g., Solver._prepare adds virtual
# package) => Copy index (just outer container, not deep copy)
@@ -1374,42 +1373,33 @@ def install_actions(prefix, index, specs):
solver._index = index.copy()
txn = solver.solve_for_transaction(prune=False, ignore_pinned=False)
prefix_setup = txn.prefix_setups[prefix]
- actions = {
- PREFIX_ACTION: prefix,
- LINK_ACTION: [prec for prec in prefix_setup.link_precs],
+ return {
+ "PREFIX": prefix,
+ "LINK": [prec for prec in prefix_setup.link_precs],
}
- return actions
_install_actions = install_actions
del install_actions
-def _execute_actions(prefix, actions):
+@deprecated.argument("24.3", "24.5", "actions", rename="precs")
+def _execute_actions(prefix, precs):
# This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575
# but reduced to only the functionality actually used within conda-build.
-
assert prefix
- if LINK_ACTION not in actions:
- log.debug(f"action {LINK_ACTION} not in actions")
- return
-
- link_precs = actions[LINK_ACTION]
- if not link_precs:
- log.debug(f"action {LINK_ACTION} has None value")
- return
-
# Always link menuinst first/last on windows in case a subsequent
# package tries to import it to create/remove a shortcut
- link_precs = [p for p in link_precs if p.name == "menuinst"] + [
- p for p in link_precs if p.name != "menuinst"
+ precs = [
+ *(prec for prec in precs if prec.name == "menuinst"),
+ *(prec for prec in precs if prec.name != "menuinst"),
]
- progressive_fetch_extract = ProgressiveFetchExtract(link_precs)
+ progressive_fetch_extract = ProgressiveFetchExtract(precs)
progressive_fetch_extract.prepare()
- stp = PrefixSetup(prefix, (), link_precs, (), [], ())
+ stp = PrefixSetup(prefix, (), precs, (), [], ())
unlink_link_transaction = UnlinkLinkTransaction(stp)
log.debug(" %s(%r)", "PROGRESSIVEFETCHEXTRACT", progressive_fetch_extract)
@@ -1418,7 +1408,8 @@ def _execute_actions(prefix, actions):
unlink_link_transaction.execute()
-def _display_actions(prefix, actions):
+@deprecated.argument("24.3", "24.5", "actions", rename="precs")
+def _display_actions(prefix, precs):
# This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58
# but reduced to only the functionality actually used within conda-build.
@@ -1450,7 +1441,7 @@ def channel_filt(s):
features = defaultdict(lambda: "")
channels = defaultdict(lambda: "")
- for prec in actions.get(LINK_ACTION, []):
+ for prec in precs:
assert isinstance(prec, PackageRecord)
pkg = prec["name"]
channels[pkg] = channel_filt(channel_str(prec))
diff --git a/conda_build/index.py b/conda_build/index.py
index 229c5e1632..cd36cc9cac 100644
--- a/conda_build/index.py
+++ b/conda_build/index.py
@@ -1,81 +1,31 @@
# Copyright (C) 2014 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
-import bz2
-import copy
-import fnmatch
-import functools
import json
import logging
import os
-import subprocess
-import sys
import time
-from collections import OrderedDict
-from concurrent.futures import Executor, ProcessPoolExecutor
-from datetime import datetime
+from concurrent.futures import Executor
from functools import partial
-from itertools import groupby
-from numbers import Number
-from os.path import (
- abspath,
- basename,
- dirname,
- getmtime,
- getsize,
- isfile,
- join,
- splitext,
-)
-from pathlib import Path
-from uuid import uuid4
-
-import conda_package_handling.api
-import pytz
-import yaml
+from os.path import dirname
-# Lots of conda internals here. Should refactor to use exports.
-from conda.common.compat import ensure_binary
-
-# BAD BAD BAD - conda internals
from conda.core.index import get_index
-from conda.core.subdir_data import SubdirData
-from conda.models.channel import Channel
from conda_index.index import update_index as _update_index
-from conda_package_handling.api import InvalidArchiveError
-from jinja2 import Environment, PackageLoader
-from tqdm import tqdm
-from yaml.constructor import ConstructorError
-from yaml.parser import ParserError
-from yaml.reader import ReaderError
-from yaml.scanner import ScannerError
from . import conda_interface, utils
-from .conda_interface import (
- CondaError,
- CondaHTTPError,
- MatchSpec,
- Resolve,
- TemporaryDirectory,
- VersionOrder,
- context,
- human_bytes,
- url_path,
-)
+from .conda_interface import CondaHTTPError, context, url_path
from .deprecations import deprecated
from .utils import (
CONDA_PACKAGE_EXTENSION_V1,
CONDA_PACKAGE_EXTENSION_V2,
- CONDA_PACKAGE_EXTENSIONS,
JSONDecodeError,
get_logger,
- glob,
on_win,
)
log = get_logger(__name__)
-# use this for debugging, because ProcessPoolExecutor isn't pdb/ipdb friendly
+@deprecated("24.3", "24.5")
class DummyExecutor(Executor):
def map(self, func, *iterables):
for iterable in iterables:
@@ -83,50 +33,24 @@ def map(self, func, *iterables):
yield func(thing)
-try:
- from conda.base.constants import NAMESPACE_PACKAGE_NAMES, NAMESPACES_MAP
-except ImportError:
- NAMESPACES_MAP = { # base package name, namespace
- "python": "python",
- "r": "r",
- "r-base": "r",
- "mro-base": "r",
- "mro-base_impl": "r",
- "erlang": "erlang",
- "java": "java",
- "openjdk": "java",
- "julia": "julia",
- "latex": "latex",
- "lua": "lua",
- "nodejs": "js",
- "perl": "perl",
- "php": "php",
- "ruby": "ruby",
- "m2-base": "m2",
- "msys2-conda-epoch": "m2w64",
- }
- NAMESPACE_PACKAGE_NAMES = frozenset(NAMESPACES_MAP)
- NAMESPACES = frozenset(NAMESPACES_MAP.values())
-
local_index_timestamp = 0
cached_index = None
local_subdir = ""
local_output_folder = ""
cached_channels = []
_channel_data = {}
-deprecated.constant("24.1.0", "24.3.0", "channel_data", _channel_data)
+deprecated.constant("24.1", "24.5", "channel_data", _channel_data)
# TODO: support for libarchive seems to have broken ability to use multiple threads here.
# The new conda format is so much faster that it more than makes up for it. However, it
# would be nice to fix this at some point.
-MAX_THREADS_DEFAULT = (
- os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1
-)
+_MAX_THREADS_DEFAULT = os.cpu_count() or 1
if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a
- MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT)
-LOCK_TIMEOUT_SECS = 3 * 3600
-LOCKFILE_NAME = ".lock"
+ _MAX_THREADS_DEFAULT = min(48, _MAX_THREADS_DEFAULT)
+deprecated.constant("24.3", "24.5", "MAX_THREADS_DEFAULT", _MAX_THREADS_DEFAULT)
+deprecated.constant("24.3", "24.5", "LOCK_TIMEOUT_SECS", 3 * 3600)
+deprecated.constant("24.3", "24.5", "LOCKFILE_NAME", ".lock")
# TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though.
# os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false"
@@ -327,103 +251,6 @@ def _delegated_update_index(
)
-# Everything below is deprecated to maintain API/feature compatibility.
-
-
-@deprecated("24.1.0", "24.3.0")
-def _determine_namespace(info):
- if info.get("namespace"):
- namespace = info["namespace"]
- else:
- depends_names = set()
- for spec in info.get("depends", []):
- try:
- depends_names.add(MatchSpec(spec).name)
- except CondaError:
- pass
- spaces = depends_names & NAMESPACE_PACKAGE_NAMES
- if len(spaces) == 1:
- namespace = NAMESPACES_MAP[spaces.pop()]
- else:
- namespace = "global"
- info["namespace"] = namespace
-
- if not info.get("namespace_in_name") and "-" in info["name"]:
- namespace_prefix, reduced_name = info["name"].split("-", 1)
- if namespace_prefix == namespace:
- info["name_in_channel"] = info["name"]
- info["name"] = reduced_name
-
- return namespace, info.get("name_in_channel", info["name"]), info["name"]
-
-
-@deprecated("24.1.0", "24.3.0")
-def _make_seconds(timestamp):
- timestamp = int(timestamp)
- if timestamp > 253402300799: # 9999-12-31
- timestamp //= (
- 1000 # convert milliseconds to seconds; see conda/conda-build#1988
- )
- return timestamp
-
-
-# ==========================================================================
-
-
-_REPODATA_VERSION = 1
-_CHANNELDATA_VERSION = 1
-_REPODATA_JSON_FN = "repodata.json"
-_REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json"
-_CHANNELDATA_FIELDS = (
- "description",
- "dev_url",
- "doc_url",
- "doc_source_url",
- "home",
- "license",
- "reference_package",
- "source_url",
- "source_git_url",
- "source_git_tag",
- "source_git_rev",
- "summary",
- "version",
- "subdirs",
- "icon_url",
- "icon_hash", # "md5:abc123:12"
- "run_exports",
- "binary_prefix",
- "text_prefix",
- "activate.d",
- "deactivate.d",
- "pre_link",
- "post_link",
- "pre_unlink",
- "tags",
- "identifiers",
- "keywords",
- "recipe_origin",
- "commits",
-)
-deprecated.constant("24.1.0", "24.3.0", "REPODATA_VERSION", _REPODATA_VERSION)
-deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_VERSION", _CHANNELDATA_VERSION)
-deprecated.constant("24.1.0", "24.3.0", "REPODATA_JSON_FN", _REPODATA_JSON_FN)
-deprecated.constant(
- "24.1.0", "24.3.0", "REPODATA_FROM_PKGS_JSON_FN", _REPODATA_FROM_PKGS_JSON_FN
-)
-deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_FIELDS", _CHANNELDATA_FIELDS)
-
-
-@deprecated("24.1.0", "24.3.0")
-def _clear_newline_chars(record, field_name):
- if field_name in record:
- try:
- record[field_name] = record[field_name].strip().replace("\n", " ")
- except AttributeError:
- # sometimes description gets added as a list instead of just a string
- record[field_name] = record[field_name][0].strip().replace("\n", " ")
-
-
@deprecated(
"24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead."
)
@@ -473,1271 +300,3 @@ def _apply_instructions(subdir, repodata, instructions):
repodata["removed"].sort()
return repodata
-
-
-@deprecated("24.1.0", "24.3.0")
-def _get_jinja2_environment():
- def _filter_strftime(dt, dt_format):
- if isinstance(dt, Number):
- if dt > 253402300799: # 9999-12-31
- dt //= 1000 # convert milliseconds to seconds; see #1988
- dt = datetime.utcfromtimestamp(dt).replace(tzinfo=pytz.timezone("UTC"))
- return dt.strftime(dt_format)
-
- def _filter_add_href(text, link, **kwargs):
- if link:
- kwargs_list = [f'href="{link}"']
- kwargs_list.append(f'alt="{text}"')
- kwargs_list += [f'{k}="{v}"' for k, v in kwargs.items()]
- return "{}".format(" ".join(kwargs_list), text)
- else:
- return text
-
- environment = Environment(
- loader=PackageLoader("conda_build", "templates"),
- )
- environment.filters["human_bytes"] = human_bytes
- environment.filters["strftime"] = _filter_strftime
- environment.filters["add_href"] = _filter_add_href
- environment.trim_blocks = True
- environment.lstrip_blocks = True
-
- return environment
-
-
-@deprecated("24.1.0", "24.3.0")
-def _maybe_write(path, content, write_newline_end=False, content_is_binary=False):
- # Create the temp file next "path" so that we can use an atomic move, see
- # https://github.com/conda/conda-build/issues/3833
- temp_path = f"{path}.{uuid4()}"
-
- if not content_is_binary:
- content = ensure_binary(content)
- with open(temp_path, "wb") as fh:
- fh.write(content)
- if write_newline_end:
- fh.write(b"\n")
- if isfile(path):
- if utils.md5_file(temp_path) == utils.md5_file(path):
- # No need to change mtimes. The contents already match.
- os.unlink(temp_path)
- return False
- # log.info("writing %s", path)
- utils.move_with_fallback(temp_path, path)
- return True
-
-
-@deprecated("24.1.0", "24.3.0")
-def _make_build_string(build, build_number):
- build_number_as_string = str(build_number)
- if build.endswith(build_number_as_string):
- build = build[: -len(build_number_as_string)]
- build = build.rstrip("_")
- build_string = build
- return build_string
-
-
-@deprecated("24.1.0", "24.3.0")
-def _warn_on_missing_dependencies(missing_dependencies, patched_repodata):
- """
- The following dependencies do not exist in the channel and are not declared
- as external dependencies:
-
- dependency1:
- - subdir/fn1.tar.bz2
- - subdir/fn2.tar.bz2
- dependency2:
- - subdir/fn3.tar.bz2
- - subdir/fn4.tar.bz2
-
- The associated packages are being removed from the index.
- """
-
- if missing_dependencies:
- builder = [
- "WARNING: The following dependencies do not exist in the channel",
- " and are not declared as external dependencies:",
- ]
- for dep_name in sorted(missing_dependencies):
- builder.append(" %s" % dep_name)
- for subdir_fn in sorted(missing_dependencies[dep_name]):
- builder.append(" - %s" % subdir_fn)
- subdir, fn = subdir_fn.split("/")
- popped = patched_repodata["packages"].pop(fn, None)
- if popped:
- patched_repodata["removed"].append(fn)
-
- builder.append("The associated packages are being removed from the index.")
- builder.append("")
- log.warn("\n".join(builder))
-
-
-@deprecated("24.1.0", "24.3.0")
-def _cache_post_install_details(paths_cache_path, post_install_cache_path):
- post_install_details_json = {
- "binary_prefix": False,
- "text_prefix": False,
- "activate.d": False,
- "deactivate.d": False,
- "pre_link": False,
- "post_link": False,
- "pre_unlink": False,
- }
- if os.path.lexists(paths_cache_path):
- with open(paths_cache_path) as f:
- paths = json.load(f).get("paths", [])
-
- # get embedded prefix data from paths.json
- for f in paths:
- if f.get("prefix_placeholder"):
- if f.get("file_mode") == "binary":
- post_install_details_json["binary_prefix"] = True
- elif f.get("file_mode") == "text":
- post_install_details_json["text_prefix"] = True
- # check for any activate.d/deactivate.d scripts
- for k in ("activate.d", "deactivate.d"):
- if not post_install_details_json.get(k) and f["_path"].startswith(
- "etc/conda/%s" % k
- ):
- post_install_details_json[k] = True
- # check for any link scripts
- for pat in ("pre-link", "post-link", "pre-unlink"):
- if not post_install_details_json.get(pat) and fnmatch.fnmatch(
- f["_path"], "*/.*-%s.*" % pat
- ):
- post_install_details_json[pat.replace("-", "_")] = True
-
- with open(post_install_cache_path, "w") as fh:
- json.dump(post_install_details_json, fh)
-
-
-@deprecated("24.1.0", "24.3.0")
-def _cache_recipe(tmpdir, recipe_cache_path):
- recipe_path_search_order = (
- "info/recipe/meta.yaml.rendered",
- "info/recipe/meta.yaml",
- "info/meta.yaml",
- )
- for path in recipe_path_search_order:
- recipe_path = os.path.join(tmpdir, path)
- if os.path.lexists(recipe_path):
- break
- recipe_path = None
-
- recipe_json = {}
- if recipe_path:
- with open(recipe_path) as f:
- try:
- recipe_json = yaml.safe_load(f)
- except (ConstructorError, ParserError, ScannerError, ReaderError):
- pass
- try:
- recipe_json_str = json.dumps(recipe_json)
- except TypeError:
- recipe_json.get("requirements", {}).pop("build")
- recipe_json_str = json.dumps(recipe_json)
- with open(recipe_cache_path, "w") as fh:
- fh.write(recipe_json_str)
- return recipe_json
-
-
-@deprecated("24.1.0", "24.3.0")
-def _cache_run_exports(tmpdir, run_exports_cache_path):
- run_exports = {}
- try:
- with open(os.path.join(tmpdir, "info", "run_exports.json")) as f:
- run_exports = json.load(f)
- except (OSError, FileNotFoundError):
- try:
- with open(os.path.join(tmpdir, "info", "run_exports.yaml")) as f:
- run_exports = yaml.safe_load(f)
- except (OSError, FileNotFoundError):
- log.debug("%s has no run_exports file (this is OK)" % tmpdir)
- with open(run_exports_cache_path, "w") as fh:
- json.dump(run_exports, fh)
-
-
-@deprecated("24.1.0", "24.3.0")
-def _cache_icon(tmpdir, recipe_json, icon_cache_path):
- # If a conda package contains an icon, also extract and cache that in an .icon/
- # directory. The icon file name is the name of the package, plus the extension
- # of the icon file as indicated by the meta.yaml `app/icon` key.
- # apparently right now conda-build renames all icons to 'icon.png'
- # What happens if it's an ico file, or a svg file, instead of a png? Not sure!
- app_icon_path = recipe_json.get("app", {}).get("icon")
- if app_icon_path:
- icon_path = os.path.join(tmpdir, "info", "recipe", app_icon_path)
- if not os.path.lexists(icon_path):
- icon_path = os.path.join(tmpdir, "info", "icon.png")
- if os.path.lexists(icon_path):
- icon_cache_path += splitext(app_icon_path)[-1]
- utils.move_with_fallback(icon_path, icon_cache_path)
-
-
-@deprecated("24.1.0", "24.3.0")
-def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths):
- environment = _get_jinja2_environment()
- template = environment.get_template("subdir-index.html.j2")
- rendered_html = template.render(
- title="{}/{}".format(channel_name or "", subdir),
- packages=repodata_packages,
- current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")),
- extra_paths=extra_paths,
- )
- return rendered_html
-
-
-@deprecated("24.1.0", "24.3.0")
-def _make_channeldata_index_html(channel_name, channeldata):
- environment = _get_jinja2_environment()
- template = environment.get_template("channeldata-index.html.j2")
- rendered_html = template.render(
- title=channel_name,
- packages=channeldata["packages"],
- subdirs=channeldata["subdirs"],
- current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")),
- )
- return rendered_html
-
-
-@deprecated("24.1.0", "24.3.0")
-def _get_source_repo_git_info(path):
- is_repo = subprocess.check_output(
- ["git", "rev-parse", "--is-inside-work-tree"], cwd=path
- )
- if is_repo.strip().decode("utf-8") == "true":
- output = subprocess.check_output(
- ["git", "log", "--pretty=format:'%h|%ad|%an|%s'", "--date=unix"], cwd=path
- )
- commits = []
- for line in output.decode("utf-8").strip().splitlines():
- _hash, _time, _author, _desc = line.split("|")
- commits.append(
- {
- "hash": _hash,
- "timestamp": int(_time),
- "author": _author,
- "description": _desc,
- }
- )
- return commits
-
-
-@deprecated("24.1.0", "24.3.0")
-def _cache_info_file(tmpdir, info_fn, cache_path):
- info_path = os.path.join(tmpdir, "info", info_fn)
- if os.path.lexists(info_path):
- utils.move_with_fallback(info_path, cache_path)
-
-
-@deprecated("24.1.0", "24.3.0")
-def _alternate_file_extension(fn):
- cache_fn = fn
- for ext in CONDA_PACKAGE_EXTENSIONS:
- cache_fn = cache_fn.replace(ext, "")
- other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, "")}
- return cache_fn + next(iter(other_ext))
-
-
-@deprecated("24.1.0", "24.3.0")
-def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None):
- packages = {}
- conda_packages = {}
- if file_path:
- with open(file_path) as fi:
- packages = json.load(fi)
- recs = json.load(fi)
- for k, v in recs.items():
- if k.endswith(CONDA_PACKAGE_EXTENSION_V1):
- packages[k] = v
- elif k.endswith(CONDA_PACKAGE_EXTENSION_V2):
- conda_packages[k] = v
- if not repodata:
- repodata = {
- "info": {
- "subdir": subdir,
- "arch": context.arch_name,
- "platform": context.platform,
- },
- "packages": packages,
- "packages.conda": conda_packages,
- }
-
- channel = Channel("https://conda.anaconda.org/dummy-channel/%s" % subdir)
- sd = SubdirData(channel)
- sd._process_raw_repodata_str(json.dumps(repodata))
- sd._loaded = True
- SubdirData._cache_[channel.url(with_credentials=True)] = sd
-
- index = {prec: prec for prec in precs or sd._package_records}
- r = Resolve(index, channels=(channel,))
- return r
-
-
-@deprecated("24.1.0", "24.3.0")
-def _get_newest_versions(r, pins={}):
- groups = {}
- for g_name, g_recs in r.groups.items():
- if g_name in pins:
- matches = []
- for pin in pins[g_name]:
- version = r.find_matches(MatchSpec(f"{g_name}={pin}"))[0].version
- matches.extend(r.find_matches(MatchSpec(f"{g_name}={version}")))
- else:
- version = r.groups[g_name][0].version
- matches = r.find_matches(MatchSpec(f"{g_name}={version}"))
- groups[g_name] = matches
- return [pkg for group in groups.values() for pkg in group]
-
-
-@deprecated("24.1.0", "24.3.0")
-def _add_missing_deps(new_r, original_r):
- """For each package in new_r, if any deps are not satisfiable, backfill them from original_r."""
-
- expanded_groups = copy.deepcopy(new_r.groups)
- seen_specs = set()
- for g_name, g_recs in new_r.groups.items():
- for g_rec in g_recs:
- for dep_spec in g_rec.depends:
- if dep_spec in seen_specs:
- continue
- ms = MatchSpec(dep_spec)
- if not new_r.find_matches(ms):
- matches = original_r.find_matches(ms)
- if matches:
- version = matches[0].version
- expanded_groups[ms.name] = set(
- expanded_groups.get(ms.name, [])
- ) | set(
- original_r.find_matches(MatchSpec(f"{ms.name}={version}"))
- )
- seen_specs.add(dep_spec)
- return [pkg for group in expanded_groups.values() for pkg in group]
-
-
-@deprecated("24.1.0", "24.3.0")
-def _add_prev_ver_for_features(new_r, orig_r):
- expanded_groups = copy.deepcopy(new_r.groups)
- for g_name in new_r.groups:
- if not any(m.track_features or m.features for m in new_r.groups[g_name]):
- # no features so skip
- continue
-
- # versions are sorted here so this is the latest
- latest_version = VersionOrder(str(new_r.groups[g_name][0].version))
- if g_name in orig_r.groups:
- # now we iterate through the list to find the next to latest
- # without a feature
- keep_m = None
- for i in range(len(orig_r.groups[g_name])):
- _m = orig_r.groups[g_name][i]
- if VersionOrder(str(_m.version)) <= latest_version and not (
- _m.track_features or _m.features
- ):
- keep_m = _m
- break
- if keep_m is not None:
- expanded_groups[g_name] = {keep_m} | set(
- expanded_groups.get(g_name, [])
- )
-
- return [pkg for group in expanded_groups.values() for pkg in group]
-
-
-@deprecated("24.1.0", "24.3.0")
-def _shard_newest_packages(subdir, r, pins=None):
- """Captures only the newest versions of software in the resolve object.
-
- For things where more than one version is supported simultaneously (like Python),
- pass pins as a dictionary, with the key being the package name, and the value being
- a list of supported versions. For example:
-
- {'python': ["2.7", "3.6"]}
- """
- groups = {}
- pins = pins or {}
- for g_name, g_recs in r.groups.items():
- # always do the latest implicitly
- version = r.groups[g_name][0].version
- matches = set(r.find_matches(MatchSpec(f"{g_name}={version}")))
- if g_name in pins:
- for pin_value in pins[g_name]:
- version = r.find_matches(MatchSpec(f"{g_name}={pin_value}"))[0].version
- matches.update(r.find_matches(MatchSpec(f"{g_name}={version}")))
- groups[g_name] = matches
-
- # add the deps of the stuff in the index
- new_r = _get_resolve_object(
- subdir, precs=[pkg for group in groups.values() for pkg in group]
- )
- new_r = _get_resolve_object(subdir, precs=_add_missing_deps(new_r, r))
-
- # now for any pkg with features, add at least one previous version
- # also return
- return set(_add_prev_ver_for_features(new_r, r))
-
-
-@deprecated("24.1.0", "24.3.0")
-def _build_current_repodata(subdir, repodata, pins):
- r = _get_resolve_object(subdir, repodata=repodata)
- keep_pkgs = _shard_newest_packages(subdir, r, pins)
- new_repodata = {
- k: repodata[k] for k in set(repodata.keys()) - {"packages", "packages.conda"}
- }
- packages = {}
- conda_packages = {}
- for keep_pkg in keep_pkgs:
- if keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V2):
- conda_packages[keep_pkg.fn] = repodata["packages.conda"][keep_pkg.fn]
- # in order to prevent package churn we consider the md5 for the .tar.bz2 that matches the .conda file
- # This holds when .conda files contain the same files as .tar.bz2, which is an assumption we'll make
- # until it becomes more prevalent that people provide only .conda files and just skip .tar.bz2
- counterpart = keep_pkg.fn.replace(
- CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1
- )
- conda_packages[keep_pkg.fn]["legacy_bz2_md5"] = (
- repodata["packages"].get(counterpart, {}).get("md5")
- )
- elif keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V1):
- packages[keep_pkg.fn] = repodata["packages"][keep_pkg.fn]
- new_repodata["packages"] = packages
- new_repodata["packages.conda"] = conda_packages
- return new_repodata
-
-
-@deprecated("24.1.0", "24.3.0")
-class ChannelIndex:
- def __init__(
- self,
- channel_root,
- channel_name,
- subdirs=None,
- threads=MAX_THREADS_DEFAULT,
- deep_integrity_check=False,
- debug=False,
- ):
- self.channel_root = abspath(channel_root)
- self.channel_name = channel_name or basename(channel_root.rstrip("/"))
- self._subdirs = subdirs
- self.thread_executor = (
- DummyExecutor()
- if debug or sys.version_info.major == 2 or threads == 1
- else ProcessPoolExecutor(threads)
- )
- self.deep_integrity_check = deep_integrity_check
-
- def index(
- self,
- patch_generator,
- hotfix_source_repo=None,
- verbose=False,
- progress=False,
- current_index_versions=None,
- index_file=None,
- ):
- if verbose:
- level = logging.DEBUG
- else:
- level = logging.ERROR
-
- with utils.LoggingContext(level, loggers=[__name__]):
- if not self._subdirs:
- detected_subdirs = {
- subdir.name
- for subdir in os.scandir(self.channel_root)
- if subdir.name in utils.DEFAULT_SUBDIRS and subdir.is_dir()
- }
- log.debug("found subdirs %s" % detected_subdirs)
- self.subdirs = subdirs = sorted(detected_subdirs | {"noarch"})
- else:
- self.subdirs = subdirs = sorted(set(self._subdirs) | {"noarch"})
-
- # Step 1. Lock local channel.
- with utils.try_acquire_locks(
- [utils.get_lock(self.channel_root)], timeout=900
- ):
- channel_data = {}
- channeldata_file = os.path.join(self.channel_root, "channeldata.json")
- if os.path.isfile(channeldata_file):
- with open(channeldata_file) as f:
- channel_data = json.load(f)
- # Step 2. Collect repodata from packages, save to pkg_repodata.json file
- with tqdm(
- total=len(subdirs), disable=(verbose or not progress), leave=False
- ) as t:
- for subdir in subdirs:
- t.set_description("Subdir: %s" % subdir)
- t.update()
- with tqdm(
- total=8, disable=(verbose or not progress), leave=False
- ) as t2:
- t2.set_description("Gathering repodata")
- t2.update()
- _ensure_valid_channel(self.channel_root, subdir)
- repodata_from_packages = self.index_subdir(
- subdir,
- verbose=verbose,
- progress=progress,
- index_file=index_file,
- )
-
- t2.set_description("Writing pre-patch repodata")
- t2.update()
- self._write_repodata(
- subdir,
- repodata_from_packages,
- _REPODATA_FROM_PKGS_JSON_FN,
- )
-
- # Step 3. Apply patch instructions.
- t2.set_description("Applying patch instructions")
- t2.update()
- patched_repodata, patch_instructions = self._patch_repodata(
- subdir, repodata_from_packages, patch_generator
- )
-
- # Step 4. Save patched and augmented repodata.
- # If the contents of repodata have changed, write a new repodata.json file.
- # Also create associated index.html.
-
- t2.set_description("Writing patched repodata")
- t2.update()
- self._write_repodata(
- subdir, patched_repodata, _REPODATA_JSON_FN
- )
- t2.set_description("Building current_repodata subset")
- t2.update()
- current_repodata = _build_current_repodata(
- subdir, patched_repodata, pins=current_index_versions
- )
- t2.set_description("Writing current_repodata subset")
- t2.update()
- self._write_repodata(
- subdir,
- current_repodata,
- json_filename="current_repodata.json",
- )
-
- t2.set_description("Writing subdir index HTML")
- t2.update()
- self._write_subdir_index_html(subdir, patched_repodata)
-
- t2.set_description("Updating channeldata")
- t2.update()
- self._update_channeldata(
- channel_data, patched_repodata, subdir
- )
-
- # Step 7. Create and write channeldata.
- self._write_channeldata_index_html(channel_data)
- self._write_channeldata(channel_data)
-
- def index_subdir(self, subdir, index_file=None, verbose=False, progress=False):
- subdir_path = join(self.channel_root, subdir)
- self._ensure_dirs(subdir)
- repodata_json_path = join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN)
-
- if verbose:
- log.info("Building repodata for %s" % subdir_path)
-
- # gather conda package filenames in subdir
- # we'll process these first, because reading their metadata is much faster
- fns_in_subdir = {
- fn
- for fn in os.listdir(subdir_path)
- if fn.endswith(".conda") or fn.endswith(".tar.bz2")
- }
-
- # load current/old repodata
- try:
- with open(repodata_json_path) as fh:
- old_repodata = json.load(fh) or {}
- except (OSError, JSONDecodeError):
- # log.info("no repodata found at %s", repodata_json_path)
- old_repodata = {}
-
- old_repodata_packages = old_repodata.get("packages", {})
- old_repodata_conda_packages = old_repodata.get("packages.conda", {})
- old_repodata_fns = set(old_repodata_packages) | set(old_repodata_conda_packages)
-
- # Load stat cache. The stat cache has the form
- # {
- # 'package_name.tar.bz2': {
- # 'mtime': 123456,
- # 'md5': 'abd123',
- # },
- # }
- stat_cache_path = join(subdir_path, ".cache", "stat.json")
- try:
- with open(stat_cache_path) as fh:
- stat_cache = json.load(fh) or {}
- except:
- stat_cache = {}
-
- stat_cache_original = stat_cache.copy()
-
- remove_set = old_repodata_fns - fns_in_subdir
- ignore_set = set(old_repodata.get("removed", []))
- try:
- # calculate all the paths and figure out what we're going to do with them
- # add_set: filenames that aren't in the current/old repodata, but exist in the subdir
- if index_file:
- with open(index_file) as fin:
- add_set = set()
- for line in fin:
- fn_subdir, fn = line.strip().split("/")
- if fn_subdir != subdir:
- continue
- if fn.endswith(".conda") or fn.endswith(".tar.bz2"):
- add_set.add(fn)
- else:
- add_set = fns_in_subdir - old_repodata_fns
-
- add_set -= ignore_set
-
- # update_set: Filenames that are in both old repodata and new repodata,
- # and whose contents have changed based on file size or mtime. We're
- # not using md5 here because it takes too long. If needing to do full md5 checks,
- # use the --deep-integrity-check flag / self.deep_integrity_check option.
- update_set = self._calculate_update_set(
- subdir,
- fns_in_subdir,
- old_repodata_fns,
- stat_cache,
- verbose=verbose,
- progress=progress,
- )
- # unchanged_set: packages in old repodata whose information can carry straight
- # across to new repodata
- unchanged_set = set(old_repodata_fns - update_set - remove_set - ignore_set)
-
- assert isinstance(unchanged_set, set) # faster `in` queries
-
- # clean up removed files
- removed_set = old_repodata_fns - fns_in_subdir
- for fn in removed_set:
- if fn in stat_cache:
- del stat_cache[fn]
-
- new_repodata_packages = {
- k: v
- for k, v in old_repodata.get("packages", {}).items()
- if k in unchanged_set
- }
- new_repodata_conda_packages = {
- k: v
- for k, v in old_repodata.get("packages.conda", {}).items()
- if k in unchanged_set
- }
-
- for k in sorted(unchanged_set):
- if not (k in new_repodata_packages or k in new_repodata_conda_packages):
- fn, rec = ChannelIndex._load_index_from_cache(
- self.channel_root, subdir, fn, stat_cache
- )
- # this is how we pass an exception through. When fn == rec, there's been a problem,
- # and we need to reload this file
- if fn == rec:
- update_set.add(fn)
- else:
- if fn.endswith(CONDA_PACKAGE_EXTENSION_V1):
- new_repodata_packages[fn] = rec
- else:
- new_repodata_conda_packages[fn] = rec
-
- # Invalidate cached files for update_set.
- # Extract and cache update_set and add_set, then add to new_repodata_packages.
- # This is also where we update the contents of the stat_cache for successfully
- # extracted packages.
- # Sorting here prioritizes .conda files ('c') over .tar.bz2 files ('b')
- hash_extract_set = (*add_set, *update_set)
-
- extract_func = functools.partial(
- ChannelIndex._extract_to_cache, self.channel_root, subdir
- )
- # split up the set by .conda packages first, then .tar.bz2. This avoids race conditions
- # with execution in parallel that would end up in the same place.
- for conda_format in tqdm(
- CONDA_PACKAGE_EXTENSIONS,
- desc="File format",
- disable=(verbose or not progress),
- leave=False,
- ):
- for fn, mtime, size, index_json in tqdm(
- self.thread_executor.map(
- extract_func,
- (fn for fn in hash_extract_set if fn.endswith(conda_format)),
- ),
- desc="hash & extract packages for %s" % subdir,
- disable=(verbose or not progress),
- leave=False,
- ):
- # fn can be None if the file was corrupt or no longer there
- if fn and mtime:
- stat_cache[fn] = {"mtime": int(mtime), "size": size}
- if index_json:
- if fn.endswith(CONDA_PACKAGE_EXTENSION_V2):
- new_repodata_conda_packages[fn] = index_json
- else:
- new_repodata_packages[fn] = index_json
- else:
- log.error(
- "Package at %s did not contain valid index.json data. Please"
- " check the file and remove/redownload if necessary to obtain "
- "a valid package." % os.path.join(subdir_path, fn)
- )
-
- new_repodata = {
- "packages": new_repodata_packages,
- "packages.conda": new_repodata_conda_packages,
- "info": {
- "subdir": subdir,
- },
- "repodata_version": _REPODATA_VERSION,
- "removed": sorted(list(ignore_set)),
- }
- finally:
- if stat_cache != stat_cache_original:
- # log.info("writing stat cache to %s", stat_cache_path)
- with open(stat_cache_path, "w") as fh:
- json.dump(stat_cache, fh)
- return new_repodata
-
- def _ensure_dirs(self, subdir: str):
- """Create cache directories within a subdir.
-
- Args:
- subdir (str): name of the subdirectory
- """
- # Create all cache directories in the subdir.
- cache_path = Path(self.channel_root, subdir, ".cache")
- cache_path.mkdir(parents=True, exist_ok=True)
- (cache_path / "index").mkdir(exist_ok=True)
- (cache_path / "about").mkdir(exist_ok=True)
- (cache_path / "paths").mkdir(exist_ok=True)
- (cache_path / "recipe").mkdir(exist_ok=True)
- (cache_path / "run_exports").mkdir(exist_ok=True)
- (cache_path / "post_install").mkdir(exist_ok=True)
- (cache_path / "icon").mkdir(exist_ok=True)
- (cache_path / "recipe_log").mkdir(exist_ok=True)
- Path(self.channel_root, "icons").mkdir(exist_ok=True)
-
- def _calculate_update_set(
- self,
- subdir,
- fns_in_subdir,
- old_repodata_fns,
- stat_cache,
- verbose=False,
- progress=True,
- ):
- # Determine the packages that already exist in repodata, but need to be updated.
- # We're not using md5 here because it takes too long.
- candidate_fns = fns_in_subdir & old_repodata_fns
- subdir_path = join(self.channel_root, subdir)
-
- update_set = set()
- for fn in tqdm(
- iter(candidate_fns),
- desc="Finding updated files",
- disable=(verbose or not progress),
- leave=False,
- ):
- if fn not in stat_cache:
- update_set.add(fn)
- else:
- stat_result = os.stat(join(subdir_path, fn))
- if (
- int(stat_result.st_mtime) != int(stat_cache[fn]["mtime"])
- or stat_result.st_size != stat_cache[fn]["size"]
- ):
- update_set.add(fn)
- return update_set
-
- @staticmethod
- def _extract_to_cache(channel_root, subdir, fn, second_try=False):
- # This method WILL reread the tarball. Probably need another one to exit early if
- # there are cases where it's fine not to reread. Like if we just rebuild repodata
- # from the cached files, but don't use the existing repodata.json as a starting point.
- subdir_path = join(channel_root, subdir)
-
- # allow .conda files to reuse cache from .tar.bz2 and vice-versa.
- # Assumes that .tar.bz2 and .conda files have exactly the same
- # contents. This is convention, but not guaranteed, nor checked.
- alternate_cache_fn = _alternate_file_extension(fn)
- cache_fn = fn
-
- abs_fn = os.path.join(subdir_path, fn)
-
- stat_result = os.stat(abs_fn)
- size = stat_result.st_size
- mtime = stat_result.st_mtime
- retval = fn, mtime, size, None
-
- index_cache_path = join(subdir_path, ".cache", "index", cache_fn + ".json")
- about_cache_path = join(subdir_path, ".cache", "about", cache_fn + ".json")
- paths_cache_path = join(subdir_path, ".cache", "paths", cache_fn + ".json")
- recipe_cache_path = join(subdir_path, ".cache", "recipe", cache_fn + ".json")
- run_exports_cache_path = join(
- subdir_path, ".cache", "run_exports", cache_fn + ".json"
- )
- post_install_cache_path = join(
- subdir_path, ".cache", "post_install", cache_fn + ".json"
- )
- icon_cache_path = join(subdir_path, ".cache", "icon", cache_fn)
-
- log.debug("hashing, extracting, and caching %s" % fn)
-
- alternate_cache = False
- if not os.path.exists(index_cache_path) and os.path.exists(
- index_cache_path.replace(fn, alternate_cache_fn)
- ):
- alternate_cache = True
-
- try:
- # allow .tar.bz2 files to use the .conda cache, but not vice-versa.
- # .conda readup is very fast (essentially free), but .conda files come from
- # converting .tar.bz2 files, which can go wrong. Forcing extraction for
- # .conda files gives us a check on the validity of that conversion.
- if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile(
- index_cache_path
- ):
- with open(index_cache_path) as f:
- index_json = json.load(f)
- elif not alternate_cache and (
- second_try or not os.path.exists(index_cache_path)
- ):
- with TemporaryDirectory() as tmpdir:
- conda_package_handling.api.extract(
- abs_fn, dest_dir=tmpdir, components="info"
- )
- index_file = os.path.join(tmpdir, "info", "index.json")
- if not os.path.exists(index_file):
- return retval
- with open(index_file) as f:
- index_json = json.load(f)
-
- _cache_info_file(tmpdir, "about.json", about_cache_path)
- _cache_info_file(tmpdir, "paths.json", paths_cache_path)
- _cache_info_file(tmpdir, "recipe_log.json", paths_cache_path)
- _cache_run_exports(tmpdir, run_exports_cache_path)
- _cache_post_install_details(
- paths_cache_path, post_install_cache_path
- )
- recipe_json = _cache_recipe(tmpdir, recipe_cache_path)
- _cache_icon(tmpdir, recipe_json, icon_cache_path)
-
- # decide what fields to filter out, like has_prefix
- filter_fields = {
- "arch",
- "has_prefix",
- "mtime",
- "platform",
- "ucs",
- "requires_features",
- "binstar",
- "target-triplet",
- "machine",
- "operatingsystem",
- }
- for field_name in filter_fields & set(index_json):
- del index_json[field_name]
- elif alternate_cache:
- # we hit the cache of the other file type. Copy files to this name, and replace
- # the size, md5, and sha256 values
- paths = [
- index_cache_path,
- about_cache_path,
- paths_cache_path,
- recipe_cache_path,
- run_exports_cache_path,
- post_install_cache_path,
- icon_cache_path,
- ]
- bizarro_paths = [_.replace(fn, alternate_cache_fn) for _ in paths]
- for src, dest in zip(bizarro_paths, paths):
- if os.path.exists(src):
- try:
- os.makedirs(os.path.dirname(dest))
- except:
- pass
- utils.copy_into(src, dest)
-
- with open(index_cache_path) as f:
- index_json = json.load(f)
- else:
- with open(index_cache_path) as f:
- index_json = json.load(f)
-
- # calculate extra stuff to add to index.json cache, size, md5, sha256
- # This is done always for all files, whether the cache is loaded or not,
- # because the cache may be from the other file type. We don't store this
- # info in the cache to avoid confusion.
- index_json.update(conda_package_handling.api.get_pkg_details(abs_fn))
-
- with open(index_cache_path, "w") as fh:
- json.dump(index_json, fh)
- retval = fn, mtime, size, index_json
- except (InvalidArchiveError, KeyError, EOFError, JSONDecodeError):
- if not second_try:
- return ChannelIndex._extract_to_cache(
- channel_root, subdir, fn, second_try=True
- )
- return retval
-
- @staticmethod
- def _load_index_from_cache(channel_root, subdir, fn, stat_cache):
- index_cache_path = join(channel_root, subdir, ".cache", "index", fn + ".json")
- try:
- with open(index_cache_path) as fh:
- index_json = json.load(fh)
- except (OSError, JSONDecodeError):
- index_json = fn
-
- return fn, index_json
-
- @staticmethod
- def _load_all_from_cache(channel_root, subdir, fn):
- subdir_path = join(channel_root, subdir)
- try:
- mtime = getmtime(join(subdir_path, fn))
- except FileNotFoundError:
- return {}
- # In contrast to self._load_index_from_cache(), this method reads up pretty much
- # all of the cached metadata, except for paths. It all gets dumped into a single map.
- index_cache_path = join(subdir_path, ".cache", "index", fn + ".json")
- about_cache_path = join(subdir_path, ".cache", "about", fn + ".json")
- recipe_cache_path = join(subdir_path, ".cache", "recipe", fn + ".json")
- run_exports_cache_path = join(
- subdir_path, ".cache", "run_exports", fn + ".json"
- )
- post_install_cache_path = join(
- subdir_path, ".cache", "post_install", fn + ".json"
- )
- icon_cache_path_glob = join(subdir_path, ".cache", "icon", fn + ".*")
- recipe_log_path = join(subdir_path, ".cache", "recipe_log", fn + ".json")
-
- data = {}
- for path in (
- recipe_cache_path,
- about_cache_path,
- index_cache_path,
- post_install_cache_path,
- recipe_log_path,
- ):
- try:
- if os.path.getsize(path) != 0:
- with open(path) as fh:
- data.update(json.load(fh))
- except (OSError, EOFError):
- pass
-
- try:
- icon_cache_paths = glob(icon_cache_path_glob)
- if icon_cache_paths:
- icon_cache_path = sorted(icon_cache_paths)[-1]
- icon_ext = icon_cache_path.rsplit(".", 1)[-1]
- channel_icon_fn = "{}.{}".format(data["name"], icon_ext)
- icon_url = "icons/" + channel_icon_fn
- icon_channel_path = join(channel_root, "icons", channel_icon_fn)
- icon_md5 = utils.md5_file(icon_cache_path)
- icon_hash = f"md5:{icon_md5}:{getsize(icon_cache_path)}"
- data.update(icon_hash=icon_hash, icon_url=icon_url)
- # log.info("writing icon from %s to %s", icon_cache_path, icon_channel_path)
- utils.move_with_fallback(icon_cache_path, icon_channel_path)
- except:
- pass
-
- # have to stat again, because we don't have access to the stat cache here
- data["mtime"] = mtime
-
- source = data.get("source", {})
- try:
- data.update({"source_" + k: v for k, v in source.items()})
- except AttributeError:
- # sometimes source is a list instead of a dict
- pass
- _clear_newline_chars(data, "description")
- _clear_newline_chars(data, "summary")
- try:
- with open(run_exports_cache_path) as fh:
- data["run_exports"] = json.load(fh)
- except (OSError, EOFError):
- data["run_exports"] = {}
- return data
-
- def _write_repodata(self, subdir, repodata, json_filename):
- repodata_json_path = join(self.channel_root, subdir, json_filename)
- new_repodata_binary = (
- json.dumps(
- repodata,
- indent=2,
- sort_keys=True,
- )
- .replace("':'", "': '")
- .encode("utf-8")
- )
- write_result = _maybe_write(
- repodata_json_path, new_repodata_binary, write_newline_end=True
- )
- if write_result:
- repodata_bz2_path = repodata_json_path + ".bz2"
- bz2_content = bz2.compress(new_repodata_binary)
- _maybe_write(repodata_bz2_path, bz2_content, content_is_binary=True)
- return write_result
-
- def _write_subdir_index_html(self, subdir, repodata):
- repodata_packages = repodata["packages"]
- subdir_path = join(self.channel_root, subdir)
-
- def _add_extra_path(extra_paths, path):
- if isfile(join(self.channel_root, path)):
- extra_paths[basename(path)] = {
- "size": getsize(path),
- "timestamp": int(getmtime(path)),
- "sha256": utils.sha256_checksum(path),
- "md5": utils.md5_file(path),
- }
-
- extra_paths = OrderedDict()
- _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN))
- _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN + ".bz2"))
- _add_extra_path(extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN))
- _add_extra_path(
- extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN + ".bz2")
- )
- # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json"))
- _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json"))
- rendered_html = _make_subdir_index_html(
- self.channel_name, subdir, repodata_packages, extra_paths
- )
- index_path = join(subdir_path, "index.html")
- return _maybe_write(index_path, rendered_html)
-
- def _write_channeldata_index_html(self, channeldata):
- rendered_html = _make_channeldata_index_html(self.channel_name, channeldata)
- index_path = join(self.channel_root, "index.html")
- _maybe_write(index_path, rendered_html)
-
- def _update_channeldata(self, channel_data, repodata, subdir):
- legacy_packages = repodata["packages"]
- conda_packages = repodata["packages.conda"]
-
- use_these_legacy_keys = set(legacy_packages.keys()) - {
- k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys()
- }
- all_packages = conda_packages.copy()
- all_packages.update({k: legacy_packages[k] for k in use_these_legacy_keys})
- package_data = channel_data.get("packages", {})
-
- def _append_group(groups, candidates):
- candidate = sorted(candidates, key=lambda x: x[1].get("timestamp", 0))[-1]
- pkg_dict = candidate[1]
- pkg_name = pkg_dict["name"]
-
- run_exports = package_data.get(pkg_name, {}).get("run_exports", {})
- if (
- pkg_name not in package_data
- or subdir not in package_data.get(pkg_name, {}).get("subdirs", [])
- or package_data.get(pkg_name, {}).get("timestamp", 0)
- < _make_seconds(pkg_dict.get("timestamp", 0))
- or run_exports
- and pkg_dict["version"] not in run_exports
- ):
- groups.append(candidate)
-
- groups = []
- for name, group in groupby(all_packages.items(), lambda x: x[1]["name"]):
- if name not in package_data or package_data[name].get("run_exports"):
- # pay special attention to groups that have run_exports - we need to process each version
- # group by version; take newest per version group. We handle groups that are not
-                # in the index at all yet similarly, because we can't check if they have any run_exports
- for _, vgroup in groupby(group, lambda x: x[1]["version"]):
- _append_group(groups, vgroup)
- else:
- # take newest per group
- _append_group(groups, group)
-
- def _replace_if_newer_and_present(pd, data, erec, data_newer, k):
- if data.get(k) and (data_newer or not erec.get(k)):
- pd[k] = data[k]
- else:
- pd[k] = erec.get(k)
-
- # unzipping
- fns, fn_dicts = [], []
- if groups:
- fns, fn_dicts = zip(*groups)
-
- load_func = functools.partial(
- ChannelIndex._load_all_from_cache,
- self.channel_root,
- subdir,
- )
- for fn_dict, data in zip(fn_dicts, self.thread_executor.map(load_func, fns)):
- if data:
- data.update(fn_dict)
- name = data["name"]
- # existing record
- erec = package_data.get(name, {})
- data_v = data.get("version", "0")
- erec_v = erec.get("version", "0")
- data_newer = VersionOrder(data_v) > VersionOrder(erec_v)
-
- package_data[name] = package_data.get(name, {})
- # keep newer value for these
- for k in (
- "description",
- "dev_url",
- "doc_url",
- "doc_source_url",
- "home",
- "license",
- "source_url",
- "source_git_url",
- "summary",
- "icon_url",
- "icon_hash",
- "tags",
- "identifiers",
- "keywords",
- "recipe_origin",
- "version",
- ):
- _replace_if_newer_and_present(
- package_data[name], data, erec, data_newer, k
- )
-
- # keep any true value for these, since we don't distinguish subdirs
- for k in (
- "binary_prefix",
- "text_prefix",
- "activate.d",
- "deactivate.d",
- "pre_link",
- "post_link",
- "pre_unlink",
- ):
- package_data[name][k] = any((data.get(k), erec.get(k)))
-
- package_data[name]["subdirs"] = sorted(
- list(set(erec.get("subdirs", []) + [subdir]))
- )
- # keep one run_exports entry per version of the package, since these vary by version
- run_exports = erec.get("run_exports", {})
- exports_from_this_version = data.get("run_exports")
- if exports_from_this_version:
- run_exports[data_v] = data.get("run_exports")
- package_data[name]["run_exports"] = run_exports
- package_data[name]["timestamp"] = _make_seconds(
- max(
- data.get("timestamp", 0),
- channel_data.get(name, {}).get("timestamp", 0),
- )
- )
-
- channel_data.update(
- {
- "channeldata_version": _CHANNELDATA_VERSION,
- "subdirs": sorted(
- list(set(channel_data.get("subdirs", []) + [subdir]))
- ),
- "packages": package_data,
- }
- )
-
- def _write_channeldata(self, channeldata):
- # trim out commits, as they can take up a ton of space. They're really only for the RSS feed.
- for _pkg, pkg_dict in channeldata.get("packages", {}).items():
- if "commits" in pkg_dict:
- del pkg_dict["commits"]
- channeldata_path = join(self.channel_root, "channeldata.json")
- content = json.dumps(channeldata, indent=2, sort_keys=True).replace(
- "':'", "': '"
- )
- _maybe_write(channeldata_path, content, True)
-
- def _load_patch_instructions_tarball(self, subdir, patch_generator):
- instructions = {}
- with TemporaryDirectory() as tmpdir:
- conda_package_handling.api.extract(patch_generator, dest_dir=tmpdir)
- instructions_file = os.path.join(tmpdir, subdir, "patch_instructions.json")
- if os.path.isfile(instructions_file):
- with open(instructions_file) as f:
- instructions = json.load(f)
- return instructions
-
- def _create_patch_instructions(self, subdir, repodata, patch_generator=None):
- gen_patch_path = patch_generator or join(self.channel_root, "gen_patch.py")
- if isfile(gen_patch_path):
- log.debug(f"using patch generator {gen_patch_path} for {subdir}")
-
- # https://stackoverflow.com/a/41595552/2127762
- try:
- from importlib.util import module_from_spec, spec_from_file_location
-
- spec = spec_from_file_location("a_b", gen_patch_path)
- mod = module_from_spec(spec)
-
- spec.loader.exec_module(mod)
- # older pythons
- except ImportError:
- import imp
-
- mod = imp.load_source("a_b", gen_patch_path)
-
- instructions = mod._patch_repodata(repodata, subdir)
-
- if instructions.get("patch_instructions_version", 0) > 1:
- raise RuntimeError("Incompatible patch instructions version")
-
- return instructions
- else:
- if patch_generator:
- raise ValueError(
- f"Specified metadata patch file '{patch_generator}' does not exist. Please try an absolute "
- "path, or examine your relative path carefully with respect to your cwd."
- )
- return {}
-
- def _write_patch_instructions(self, subdir, instructions):
- new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace(
- "':'", "': '"
- )
- patch_instructions_path = join(
- self.channel_root, subdir, "patch_instructions.json"
- )
- _maybe_write(patch_instructions_path, new_patch, True)
-
- def _load_instructions(self, subdir):
- patch_instructions_path = join(
- self.channel_root, subdir, "patch_instructions.json"
- )
- if isfile(patch_instructions_path):
- log.debug("using patch instructions %s" % patch_instructions_path)
- with open(patch_instructions_path) as fh:
- instructions = json.load(fh)
- if instructions.get("patch_instructions_version", 0) > 1:
- raise RuntimeError("Incompatible patch instructions version")
- return instructions
- return {}
-
- def _patch_repodata(self, subdir, repodata, patch_generator=None):
- if patch_generator and any(
- patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS
- ):
- instructions = self._load_patch_instructions_tarball(
- subdir, patch_generator
- )
- else:
- instructions = self._create_patch_instructions(
- subdir, repodata, patch_generator
- )
- if instructions:
- self._write_patch_instructions(subdir, instructions)
- else:
- instructions = self._load_instructions(subdir)
- if instructions.get("patch_instructions_version", 0) > 1:
- raise RuntimeError("Incompatible patch instructions version")
-
- return _apply_instructions(subdir, repodata, instructions), instructions
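With the deprecated ChannelIndex class and its helpers removed above, channel indexing is assumed to be delegated to the standalone conda-index package (conda-index >=0.4.0 remains a dependency in pyproject.toml below). A minimal sketch of the assumed replacement entry point; the exact import path and signature are assumptions, not confirmed by this diff:

    # Hedged sketch: index a local channel with conda-index instead of the
    # removed conda_build.index.ChannelIndex. The import path is an assumption.
    from conda_index.index import update_index

    update_index("/path/to/my-channel")  # hypothetical channel root containing subdir folders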
diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py
index 1b50a076c6..7a9985fc8a 100644
--- a/conda_build/inspect_pkg.py
+++ b/conda_build/inspect_pkg.py
@@ -22,7 +22,6 @@
from .conda_interface import (
specs_from_args,
)
-from .deprecations import deprecated
from .os_utils.ldd import (
get_linkages,
get_package_obj_files,
@@ -96,9 +95,6 @@ def __str__(self):
untracked_package = _untracked_package()
-@deprecated.argument("24.1.0", "24.3.0", "platform", rename="subdir")
-@deprecated.argument("24.1.0", "24.3.0", "prepend")
-@deprecated.argument("24.1.0", "24.3.0", "minimal_hint")
def check_install(
packages: Iterable[str],
subdir: str | None = None,
diff --git a/conda_build/metadata.py b/conda_build/metadata.py
index f9f0d55438..633b6de8fc 100644
--- a/conda_build/metadata.py
+++ b/conda_build/metadata.py
@@ -16,9 +16,10 @@
from typing import TYPE_CHECKING, overload
from bs4 import UnicodeDammit
+from conda.gateways.disk.read import compute_sum
from . import exceptions, utils, variants
-from .conda_interface import MatchSpec, envs_dirs, md5_file
+from .conda_interface import MatchSpec, envs_dirs
from .config import Config, get_or_merge_config
from .features import feature_list
from .license_family import ensure_valid_license_family
@@ -1704,7 +1705,9 @@ def is_app(self):
def app_meta(self):
d = {"type": "app"}
if self.get_value("app/icon"):
- d["icon"] = "%s.png" % md5_file(join(self.path, self.get_value("app/icon")))
+ d["icon"] = "%s.png" % compute_sum(
+ join(self.path, self.get_value("app/icon")), "md5"
+ )
for field, key in [
("app/entry", "app_entry"),
diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py
index daaf163490..fb81565b3d 100644
--- a/conda_build/noarch_python.py
+++ b/conda_build/noarch_python.py
@@ -6,30 +6,10 @@
import os
import shutil
import sys
-from os.path import basename, dirname, isdir, isfile, join
+from os.path import basename, dirname, isfile, join
-from .deprecations import deprecated
from .utils import on_win
-deprecated.constant(
- "24.1",
- "24.3",
- "ISWIN",
- on_win,
- addendum="Use `conda_build.utils.on_win` instead.",
-)
-
-
-@deprecated("24.1", "24.3", addendum="Use `os.makedirs(exist_ok=True)` instead.")
-def _force_dir(dirname):
- if not isdir(dirname):
- os.makedirs(dirname)
-
-
-@deprecated("24.1", "24.3")
-def _error_exit(exit_message):
- sys.exit("[noarch_python] %s" % exit_message)
-
def rewrite_script(fn, prefix):
"""Take a file from the bin directory and rewrite it into the python-scripts
diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py
index 9f358f619a..9b14454c4f 100644
--- a/conda_build/os_utils/liefldd.py
+++ b/conda_build/os_utils/liefldd.py
@@ -27,6 +27,16 @@
lief.logging.disable()
have_lief = True
+ try:
+ PE_HEADER_CHARACTERISTICS = lief.PE.Header.CHARACTERISTICS
+ except AttributeError:
+ # Fallback for lief<0.14.
+ PE_HEADER_CHARACTERISTICS = lief.PE.HEADER_CHARACTERISTICS
+ try:
+ EXE_FORMATS = lief.Binary.FORMATS
+ except AttributeError:
+ # Fallback for lief<0.14.
+ EXE_FORMATS = lief.EXE_FORMATS
except ImportError:
have_lief = False
@@ -78,15 +88,15 @@ def codefile_class(
if not (binary := ensure_binary(path)):
return None
elif (
- binary.format == lief.EXE_FORMATS.PE
- and lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list
+ binary.format == EXE_FORMATS.PE
+ and PE_HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list
):
return DLLfile
- elif binary.format == lief.EXE_FORMATS.PE:
+ elif binary.format == EXE_FORMATS.PE:
return EXEfile
- elif binary.format == lief.EXE_FORMATS.MACHO:
+ elif binary.format == EXE_FORMATS.MACHO:
return machofile
- elif binary.format == lief.EXE_FORMATS.ELF:
+ elif binary.format == EXE_FORMATS.ELF:
return elffile
else:
return None
@@ -105,7 +115,7 @@ def get_libraries(file):
result = []
binary = ensure_binary(file)
if binary:
- if binary.format == lief.EXE_FORMATS.PE:
+ if binary.format == EXE_FORMATS.PE:
result = binary.libraries
else:
result = [
@@ -113,7 +123,7 @@ def get_libraries(file):
]
# LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that.
binary_name = None
- if binary.format == lief.EXE_FORMATS.MACHO:
+ if binary.format == EXE_FORMATS.MACHO:
binary_name = [
command.name
for command in binary.commands
@@ -174,7 +184,7 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag):
rpaths = []
if binary:
binary_format = binary.format
- if binary_format == lief.EXE_FORMATS.ELF:
+ if binary_format == EXE_FORMATS.ELF:
binary_type = binary.type
if (
binary_type == lief.ELF.ELF_CLASS.CLASS32
@@ -182,7 +192,7 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag):
):
rpaths = _get_elf_rpathy_thing(binary, elf_attribute, elf_dyn_tag)
elif (
- binary_format == lief.EXE_FORMATS.MACHO
+ binary_format == EXE_FORMATS.MACHO
and binary.has_rpath
and elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH
):
@@ -232,7 +242,7 @@ def set_rpath(old_matching, new_rpath, file):
binary = ensure_binary(file)
if not binary:
return
- if binary.format == lief.EXE_FORMATS.ELF and (
+ if binary.format == EXE_FORMATS.ELF and (
binary.type == lief.ELF.ELF_CLASS.CLASS32
or binary.type == lief.ELF.ELF_CLASS.CLASS64
):
@@ -244,7 +254,7 @@ def set_rpath(old_matching, new_rpath, file):
def get_rpaths(file, exe_dirname, envroot, windows_root=""):
rpaths, rpaths_type, binary_format, binary_type = get_runpaths_or_rpaths_raw(file)
- if binary_format == lief.EXE_FORMATS.PE:
+ if binary_format == EXE_FORMATS.PE:
# To allow the unix-y rpath code to work we consider
# exes as having rpaths of env + CONDA_WINDOWS_PATHS
# and consider DLLs as having no rpaths.
@@ -259,9 +269,9 @@ def get_rpaths(file, exe_dirname, envroot, windows_root=""):
rpaths.append("/".join((windows_root, "System32", "downlevel")))
rpaths.append(windows_root)
if envroot:
- # and not lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list:
+ # and not .DLL in binary.header.characteristics_list:
rpaths.extend(list(_get_path_dirs(envroot)))
- elif binary_format == lief.EXE_FORMATS.MACHO:
+ elif binary_format == EXE_FORMATS.MACHO:
rpaths = [rpath.rstrip("/") for rpath in rpaths]
return [from_os_varnames(binary_format, binary_type, rpath) for rpath in rpaths]
@@ -299,13 +309,13 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"):
def to_os_varnames(binary, input_):
"""Don't make these functions - they are methods to match the API for elffiles."""
- if binary.format == lief.EXE_FORMATS.MACHO:
+ if binary.format == EXE_FORMATS.MACHO:
return (
input_.replace("$SELFDIR", "@loader_path")
.replace("$EXEDIR", "@executable_path")
.replace("$RPATH", "@rpath")
)
- elif binary.format == lief.EXE_FORMATS.ELF:
+ elif binary.format == EXE_FORMATS.ELF:
if binary.ehdr.sz_ptr == 8:
libdir = "/lib64"
else:
@@ -315,19 +325,19 @@ def to_os_varnames(binary, input_):
def from_os_varnames(binary_format, binary_type, input_):
"""Don't make these functions - they are methods to match the API for elffiles."""
- if binary_format == lief.EXE_FORMATS.MACHO:
+ if binary_format == EXE_FORMATS.MACHO:
return (
input_.replace("@loader_path", "$SELFDIR")
.replace("@executable_path", "$EXEDIR")
.replace("@rpath", "$RPATH")
)
- elif binary_format == lief.EXE_FORMATS.ELF:
+ elif binary_format == EXE_FORMATS.ELF:
if binary_type == lief.ELF.ELF_CLASS.CLASS64:
libdir = "/lib64"
else:
libdir = "/lib"
return input_.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir)
- elif binary_format == lief.EXE_FORMATS.PE:
+ elif binary_format == EXE_FORMATS.PE:
return input_
@@ -344,10 +354,10 @@ def _get_path_dirs(prefix):
def get_uniqueness_key(file):
binary = ensure_binary(file)
if not binary:
- return lief.EXE_FORMATS.UNKNOWN
- elif binary.format == lief.EXE_FORMATS.MACHO:
- return binary.name
- elif binary.format == lief.EXE_FORMATS.ELF and ( # noqa
+ return EXE_FORMATS.UNKNOWN
+ elif binary.format == EXE_FORMATS.MACHO:
+ return str(file)
+ elif binary.format == EXE_FORMATS.ELF and ( # noqa
binary.type == lief.ELF.ELF_CLASS.CLASS32
or binary.type == lief.ELF.ELF_CLASS.CLASS64
):
@@ -357,8 +367,8 @@ def get_uniqueness_key(file):
]
if result:
return result[0]
- return binary.name
- return binary.name
+ return str(file)
+ return str(file)
def _get_resolved_location(
@@ -467,7 +477,7 @@ def inspect_linkages_lief(
default_paths = []
if not binary:
default_paths = []
- elif binary.format == lief.EXE_FORMATS.ELF:
+ elif binary.format == EXE_FORMATS.ELF:
if binary.type == lief.ELF.ELF_CLASS.CLASS64:
default_paths = [
"$SYSROOT/lib64",
@@ -477,9 +487,9 @@ def inspect_linkages_lief(
]
else:
default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"]
- elif binary.format == lief.EXE_FORMATS.MACHO:
+ elif binary.format == EXE_FORMATS.MACHO:
default_paths = ["$SYSROOT/usr/lib"]
- elif binary.format == lief.EXE_FORMATS.PE:
+ elif binary.format == EXE_FORMATS.PE:
# We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in
# get_rpaths() instead since we need to carefully control the order.
default_paths = [
@@ -499,7 +509,7 @@ def inspect_linkages_lief(
uniqueness_key = get_uniqueness_key(binary)
if uniqueness_key not in already_seen:
parent_exe_dirname = None
- if binary.format == lief.EXE_FORMATS.PE:
+ if binary.format == EXE_FORMATS.PE:
tmp_filename = filename2
while tmp_filename:
if (
@@ -519,7 +529,7 @@ def inspect_linkages_lief(
)
tmp_filename = filename2
rpaths_transitive = []
- if binary.format == lief.EXE_FORMATS.PE:
+ if binary.format == EXE_FORMATS.PE:
rpaths_transitive = rpaths_by_binary[tmp_filename]
else:
while tmp_filename:
@@ -534,7 +544,7 @@ def inspect_linkages_lief(
"$RPATH/" + lib
if not lib.startswith("/")
and not lib.startswith("$")
- and binary.format != lief.EXE_FORMATS.MACHO # noqa
+ and binary.format != EXE_FORMATS.MACHO # noqa
else lib
)
for lib in libraries
@@ -556,7 +566,7 @@ def inspect_linkages_lief(
# can be run case-sensitively if the user wishes.
#
"""
- if binary.format == lief.EXE_FORMATS.PE:
+ if binary.format == EXE_FORMATS.PE:
import random
path_fixed = (
os.path.dirname(path_fixed)
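The aliases introduced at the top of this file (EXE_FORMATS, PE_HEADER_CHARACTERISTICS) follow a small try/except pattern so the module works with both LIEF 0.14 and earlier releases. A standalone sketch of that pattern, assuming LIEF is importable:

    import lief

    try:
        EXE_FORMATS = lief.Binary.FORMATS    # lief >= 0.14
    except AttributeError:
        EXE_FORMATS = lief.EXE_FORMATS       # fallback for lief < 0.14

    # callers then compare against the alias, e.g. binary.format == EXE_FORMATS.ELF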
diff --git a/conda_build/post.py b/conda_build/post.py
index 17edda3d6e..4512c9e508 100644
--- a/conda_build/post.py
+++ b/conda_build/post.py
@@ -35,13 +35,13 @@
from typing import TYPE_CHECKING
from conda.core.prefix_data import PrefixData
+from conda.gateways.disk.read import compute_sum
from conda.models.records import PrefixRecord
from . import utils
from .conda_interface import (
TemporaryDirectory,
lchmod,
- md5_file,
walk_prefix,
)
from .exceptions import OverDependingError, OverLinkingError, RunPathError
@@ -393,7 +393,7 @@ def find_lib(link, prefix, files, path=None):
# multiple places.
md5s = set()
for f in file_names[link]:
- md5s.add(md5_file(join(prefix, f)))
+ md5s.add(compute_sum(join(prefix, f), "md5"))
if len(md5s) > 1:
sys.exit(
f"Error: Found multiple instances of {link}: {file_names[link]}"
diff --git a/conda_build/render.py b/conda_build/render.py
index a46130f4ed..9ba417bf23 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -35,8 +35,6 @@
pkgs_dirs,
specs_from_url,
)
-from .deprecations import deprecated
-from .environ import LINK_ACTION
from .exceptions import DependencyNeedsBuildingError
from .index import get_build_index
from .metadata import MetaData, combine_top_level_metadata_with_output
@@ -91,13 +89,6 @@ def bldpkg_path(m):
return path
-@deprecated("24.1.0", "24.3.0")
-def actions_to_pins(actions):
- if LINK_ACTION in actions:
- return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]]
- return []
-
-
def _categorize_deps(m, specs, exclude_pattern, variant):
subpackages = []
dependencies = []
@@ -158,7 +149,7 @@ def get_env_dependencies(
)
with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
try:
- actions = environ.get_install_actions(
+ precs = environ.get_package_records(
tmpdir,
tuple(dependencies),
env,
@@ -180,19 +171,17 @@ def get_env_dependencies(
else:
unsat = e.message
if permit_unsatisfiable_variants:
- actions = {}
+ precs = []
else:
raise
- specs = [
- package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, [])
- ]
+ specs = [package_record_to_requirement(prec) for prec in precs]
return (
utils.ensure_list(
(specs + subpackages + pass_through_deps)
or m.get_value(f"requirements/{env}", [])
),
- actions,
+ precs,
unsat,
)
@@ -329,7 +318,6 @@ def _read_specs_from_package(pkg_loc, pkg_dist):
return specs
-@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs")
def execute_download_actions(m, precs, env, package_subset=None, require_files=False):
subdir = getattr(m.config, f"{env}_subdir")
index, _, _ = get_build_index(
@@ -359,8 +347,6 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F
pkg_files = {}
- if hasattr(precs, "keys"):
- precs = precs.get(LINK_ACTION, [])
if isinstance(package_subset, PackageRecord):
package_subset = [package_subset]
else:
@@ -409,14 +395,11 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F
return pkg_files
-@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs")
def get_upstream_pins(m: MetaData, precs, env):
"""Download packages from specs, then inspect each downloaded package for additional
downstream dependency specs. Return these additional specs."""
env_specs = m.get_value(f"requirements/{env}", [])
explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else []
- if hasattr(precs, "keys"):
- precs = precs.get(LINK_ACTION, [])
precs = [prec for prec in precs if prec.name in explicit_specs]
ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from"))
@@ -453,7 +436,7 @@ def _read_upstream_pin_files(
permit_unsatisfiable_variants,
exclude_pattern,
):
- deps, actions, unsat = get_env_dependencies(
+ deps, precs, unsat = get_env_dependencies(
m,
env,
m.config.variant,
@@ -462,7 +445,7 @@ def _read_upstream_pin_files(
)
# extend host deps with strong build run exports. This is important for things like
# vc feature activation to work correctly in the host env.
- extra_run_specs = get_upstream_pins(m, actions, env)
+ extra_run_specs = get_upstream_pins(m, precs, env)
return (
list(set(deps)) or m.get_value(f"requirements/{env}", []),
unsat,
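The render.py changes above reflect a data-shape change: callers now receive a flat list of PackageRecord objects rather than an install-actions dictionary keyed by LINK_ACTION. A hedged before/after sketch; argument values are placeholders and the remaining keyword arguments are assumed to keep their defaults:

    from conda_build import environ

    # Sketch only: not a real recipe or prefix.
    precs = environ.get_package_records("/tmp/prefix", ("python",), "host")
    names = [prec.name for prec in precs]  # iterate the flat list directly; no actions["LINK"] lookup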
diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py
index e1c061bf73..891f62f3cb 100644
--- a/conda_build/skeletons/cpan.py
+++ b/conda_build/skeletons/cpan.py
@@ -18,6 +18,7 @@
from os.path import basename, dirname, exists, join
import requests
+from conda.core.index import get_index
from .. import environ
from ..conda_interface import (
@@ -28,7 +29,6 @@
TemporaryDirectory,
TmpDownload,
download,
- get_index,
)
from ..config import Config, get_or_merge_config
from ..utils import check_call_env, on_linux, on_win
diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py
index fbe59199b3..92e2ff9efd 100644
--- a/conda_build/skeletons/pypi.py
+++ b/conda_build/skeletons/pypi.py
@@ -19,6 +19,7 @@
import pkginfo
import requests
import yaml
+from conda.gateways.disk.read import compute_sum
from requests.packages.urllib3.util.url import parse_url
from ..conda_interface import (
@@ -26,7 +27,6 @@
configparser,
default_python,
download,
- hashsum_file,
human_bytes,
input,
normalized_version,
@@ -1276,10 +1276,10 @@ def get_pkginfo(
download_path = join(config.src_cache, filename)
if (
not isfile(download_path)
- or hashsum_file(download_path, hash_type) != hash_value
+ or compute_sum(download_path, hash_type) != hash_value
):
download(pypiurl, join(config.src_cache, filename))
- if hashsum_file(download_path, hash_type) != hash_value:
+ if compute_sum(download_path, hash_type) != hash_value:
raise RuntimeError(
f" Download of {package} failed"
f" checksum type {hash_type} expected value {hash_value}. Please"
@@ -1291,7 +1291,7 @@ def get_pkginfo(
# Needs to be done in this block because this is where we have
# access to the source file.
if hash_type != "sha256":
- new_hash_value = hashsum_file(download_path, "sha256")
+ new_hash_value = compute_sum(download_path, "sha256")
else:
new_hash_value = ""
@@ -1356,7 +1356,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op
create_env(
config.host_prefix,
- specs_or_actions=specs,
+ specs_or_precs=specs,
env="host",
subdir=subdir,
clear_cache=False,
diff --git a/conda_build/source.py b/conda_build/source.py
index d4e1ca5b69..436a4137b2 100644
--- a/conda_build/source.py
+++ b/conda_build/source.py
@@ -15,11 +15,12 @@
from typing import TYPE_CHECKING
from urllib.parse import urljoin
+from conda.gateways.disk.read import compute_sum
+
from .conda_interface import (
CondaHTTPError,
TemporaryDirectory,
download,
- hashsum_file,
url_path,
)
from .exceptions import MissingDependency
@@ -120,7 +121,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
for tp in ("md5", "sha1", "sha256"):
if tp in source_dict:
expected_hash = source_dict[tp]
- hashed = hashsum_file(path, tp)
+ hashed = compute_sum(path, tp)
if expected_hash != hashed:
rm_rf(path)
raise RuntimeError(
@@ -132,7 +133,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
# collisions in our source cache, but the end user will get no benefit from the cache.
if not hash_added:
if not hashed:
- hashed = hashsum_file(path, "sha256")
+ hashed = compute_sum(path, "sha256")
dest_path = append_hash_to_fn(path, hashed)
if not os.path.isfile(dest_path):
shutil.move(path, dest_path)
diff --git a/conda_build/utils.py b/conda_build/utils.py
index 29baa98005..7635c45a6f 100644
--- a/conda_build/utils.py
+++ b/conda_build/utils.py
@@ -41,7 +41,7 @@
)
from pathlib import Path
from threading import Thread
-from typing import TYPE_CHECKING, Iterable
+from typing import TYPE_CHECKING, Iterable, overload
import conda_package_handling.api
import filelock
@@ -53,12 +53,11 @@
CONDA_PACKAGE_EXTENSIONS,
KNOWN_SUBDIRS,
)
-from conda.core.prefix_data import PrefixData
-from conda.models.dist import Dist
+from conda.gateways.disk.read import compute_sum
+from conda.models.match_spec import MatchSpec
from .conda_interface import (
CondaHTTPError,
- MatchSpec,
PackageRecord,
StringIO,
TemporaryDirectory,
@@ -67,19 +66,20 @@
context,
download,
get_conda_channel,
- hashsum_file,
- md5_file,
pkgs_dirs,
root_dir,
unix_path_to_win,
win_path_to_unix,
)
from .conda_interface import rm_rf as _rm_rf
-from .deprecations import deprecated
from .exceptions import BuildLockError
if TYPE_CHECKING:
- from conda.models.records import PrefixRecord
+ from typing import Mapping, TypeVar
+
+ T = TypeVar("T")
+ K = TypeVar("K")
+ V = TypeVar("V")
on_win = sys.platform == "win32"
on_mac = sys.platform == "darwin"
@@ -876,8 +876,8 @@ def tar_xf(tarball, dir_path):
def file_info(path):
return {
"size": getsize(path),
- "md5": md5_file(path),
- "sha256": hashsum_file(path, "sha256"),
+ "md5": compute_sum(path, "md5"),
+ "sha256": compute_sum(path, "sha256"),
"mtime": getmtime(path),
}
@@ -1162,7 +1162,7 @@ def package_has_file(package_path, file_path, refresh_mode="modified"):
return content
-def ensure_list(arg, include_dict=True):
+def ensure_list(arg: T | Iterable[T] | None, include_dict: bool = True) -> list[T]:
"""
Ensure the object is a list. If not return it in a list.
@@ -1181,7 +1181,11 @@ def ensure_list(arg, include_dict=True):
return [arg]
-def islist(arg, uniform=False, include_dict=True):
+def islist(
+ arg: T | Iterable[T],
+ uniform: bool = False,
+ include_dict: bool = True,
+) -> bool:
"""
Check whether `arg` is a `list`. Optionally determine whether the list elements
are all uniform.
@@ -1767,7 +1771,10 @@ def merge_or_update_dict(
return base
-def merge_dicts_of_lists(dol1, dol2):
+def merge_dicts_of_lists(
+ dol1: Mapping[K, Iterable[V]],
+ dol2: Mapping[K, Iterable[V]],
+) -> dict[K, list[V]]:
"""
From Alex Martelli: https://stackoverflow.com/a/1495821/3257826
"""
@@ -1889,7 +1896,17 @@ def sort_list_in_nested_structure(dictionary, omissions=""):
spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$")
-def ensure_valid_spec(spec, warn=False):
+@overload
+def ensure_valid_spec(spec: str, warn: bool = False) -> str:
+ ...
+
+
+@overload
+def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec:
+ ...
+
+
+def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchSpec:
if isinstance(spec, MatchSpec):
if (
hasattr(spec, "version")
@@ -2112,21 +2129,6 @@ def download_channeldata(channel_url):
return data
-@deprecated("24.1.0", "24.3.0")
-def linked_data_no_multichannels(
- prefix: str | os.PathLike | Path,
-) -> dict[Dist, PrefixRecord]:
- """
- Return a dictionary of the linked packages in prefix, with correct channels, hopefully.
- cc @kalefranz.
- """
- prefix = Path(prefix)
- return {
- Dist.from_string(prec.fn, channel_override=prec.channel.name): prec
- for prec in PrefixData(str(prefix)).iter_records()
- }
-
-
def shutil_move_more_retrying(src, dest, debug_name):
log = get_logger(__name__)
log.info(f"Renaming {debug_name} directory '{src}' to '{dest}'")
diff --git a/docs/source/resources/variants.rst b/docs/source/resources/variants.rst
index 3209fd3620..90953126ee 100644
--- a/docs/source/resources/variants.rst
+++ b/docs/source/resources/variants.rst
@@ -323,7 +323,7 @@ your Jinja2 templates. There are two ways that you can feed this information
into the API:
1. Pass the ``variants`` keyword argument to API functions. Currently, the
- ``build``, ``render``, ``get_output_file_path``, and ``check`` functions
+ ``build``, ``render``, ``get_output_file_paths``, and ``check`` functions
accept this argument. ``variants`` should be a dictionary where each value
is a list of versions to iterate over. These are aggregated as detailed in
the `Aggregation of multiple variants`_ section below.
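The corrected name above is get_output_file_paths; a minimal sketch of passing the variants keyword described in this section (the recipe path and versions are placeholders):

    from conda_build.api import get_output_file_paths

    paths = get_output_file_paths(
        "path/to/recipe",                        # placeholder recipe directory
        variants={"python": ["3.10", "3.11"]},   # each value is a list of versions to iterate over
    )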
diff --git a/docs/source/user-guide/getting-started.rst b/docs/source/user-guide/getting-started.rst
index 113632fe73..64c82e292e 100644
--- a/docs/source/user-guide/getting-started.rst
+++ b/docs/source/user-guide/getting-started.rst
@@ -16,16 +16,17 @@ Prerequisites
Before starting the tutorials, you need to install:
-- `Miniconda or Anaconda `_
+- `Miniconda `_ or `Anaconda `_
- conda-build
- Git
The most straightforward way to do this is to install Miniconda or
Anaconda, which contain conda, and then use conda to install conda-build
-and Git. Make sure you install these packages into a new environment
-and not your base environment.::
+and Git. Make sure you install these packages into your base environment.::
- conda create -n my-conda-build-environment conda-build git
+ conda install -n base conda-build git
+
+For more information on installing and updating conda-build, see :doc:`Installing and updating conda-build <../install-conda-build>`.
.. _submissions:
diff --git a/news/5203-remove-deprecations b/news/5203-remove-deprecations
new file mode 100644
index 0000000000..5021c12907
--- /dev/null
+++ b/news/5203-remove-deprecations
@@ -0,0 +1,80 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. (#5203)
+* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203)
+* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203)
+* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203)
+* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203)
+* Mark `conda_build.environ.cache_actions` as deprecated. (#5203)
+* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203)
+* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203)
+* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203)
+* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203)
+* Postpone `conda_build.index.channel_data` deprecation. (#5203)
+* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203)
+* Rename `conda_build.environ._execute_actions('actions' -> 'precs')`. (#5203)
+* Rename `conda_build.environ._display_actions('actions' -> 'precs')`. (#5203)
+* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203)
+* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203)
+* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203)
+* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203)
+* Remove `conda_build.conda_interface.Dist`. (#5203)
+* Remove `conda_build.conda_interface.display_actions`. (#5203)
+* Remove `conda_build.conda_interface.execute_actions`. (#5203)
+* Remove `conda_build.conda_interface.execute_plan`. (#5203)
+* Remove `conda_build.conda_interface.install_actions`. (#5203)
+* Remove `conda_build.conda_interface.linked`. (#5203)
+* Remove `conda_build.conda_interface.linked_data`. (#5203)
+* Remove `conda_build.conda_interface.package_cache`. (#5203)
+* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203)
+* Remove `conda_build.index._determine_namespace`. (#5203)
+* Remove `conda_build.index._make_seconds`. (#5203)
+* Remove `conda_build.index.REPODATA_VERSION`. (#5203)
+* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203)
+* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203)
+* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203)
+* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203)
+* Remove `conda_build.index._clear_newline_chars`. (#5203)
+* Remove `conda_build.index._get_jinja2_environment`. (#5203)
+* Remove `conda_build.index._maybe_write`. (#5203)
+* Remove `conda_build.index._make_build_string`. (#5203)
+* Remove `conda_build.index._warn_on_missing_dependencies`. (#5203)
+* Remove `conda_build.index._cache_post_install_details`. (#5203)
+* Remove `conda_build.index._cache_recipe`. (#5203)
+* Remove `conda_build.index._cache_run_exports`. (#5203)
+* Remove `conda_build.index._cache_icon`. (#5203)
+* Remove `conda_build.index._make_subdir_index_html`. (#5203)
+* Remove `conda_build.index._make_channeldata_index_html`. (#5203)
+* Remove `conda_build.index._get_source_repo_git_info`. (#5203)
+* Remove `conda_build.index._cache_info_file`. (#5203)
+* Remove `conda_build.index._alternate_file_extension`. (#5203)
+* Remove `conda_build.index._get_resolve_object`. (#5203)
+* Remove `conda_build.index._get_newest_versions`. (#5203)
+* Remove `conda_build.index._add_missing_deps`. (#5203)
+* Remove `conda_build.index._add_prev_ver_for_features`. (#5203)
+* Remove `conda_build.index._shard_newest_packages`. (#5203)
+* Remove `conda_build.index._build_current_repodata`. (#5203)
+* Remove `conda_build.index.ChannelIndex`. (#5203)
+* Remove `conda_build.inspect.check_install('prepend')`. (#5203)
+* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203)
+* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203)
+* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203)
+* Remove `conda_build.noarch_python._error_exit`. (#5203)
+* Remove `conda_build.render.actions_to_pins`. (#5203)
+* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203)
+
+### Docs
+
+*
+
+### Other
+
+*
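Most of the removals above name their replacement inline; for the two `conda_build.noarch_python` helpers the swap is a one-liner each. A minimal, hedged sketch (not part of the diff) of those replacements:

```python
# Hedged migration sketch for two removals listed above; the removed names
# (ISWIN, _force_dir) appear only in comments for contrast.
import os

from conda_build.utils import on_win  # replaces conda_build.noarch_python.ISWIN


def ensure_dir(path: str) -> None:
    """Stand-in for the removed conda_build.noarch_python._force_dir helper."""
    os.makedirs(path, exist_ok=True)  # the replacement suggested in the entry above


if on_win:
    print("running on Windows")
```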
diff --git a/news/5208-deprecate-get_output_file_path b/news/5208-deprecate-get_output_file_path
new file mode 100644
index 0000000000..33244e8bf5
--- /dev/null
+++ b/news/5208-deprecate-get_output_file_path
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Mark `conda_build.api.get_output_file_path` as deprecated. Use `conda_build.api.get_output_file_paths` instead. (#5208)
+
+### Docs
+
+*
+
+### Other
+
+*
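The plural API already returned a list (callers later in this diff index `[0]`), so migration is a rename plus, where needed, explicit indexing. A minimal, hedged sketch assuming a hypothetical `my_recipe/` directory:

```python
# Hedged sketch of the get_output_file_path -> get_output_file_paths rename.
# "my_recipe/" is a placeholder recipe directory, not part of this change.
from conda_build import api

# Deprecated spelling:
# paths = api.get_output_file_path("my_recipe/")

# Preferred spelling; returns a list of output package paths.
paths = api.get_output_file_paths("my_recipe/")
print(paths[0])
```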
diff --git a/news/5219-deprecate-Environment b/news/5219-deprecate-Environment
new file mode 100644
index 0000000000..95780c6be3
--- /dev/null
+++ b/news/5219-deprecate-Environment
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Mark `conda_build.environ.Environment` as deprecated. Use `conda.core.prefix_data.PrefixData` instead. (#5219)
+
+### Docs
+
+*
+
+### Other
+
+*
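The new `test_environment` test later in this diff demonstrates the equivalence; a hedged standalone sketch of the `PrefixData` replacement:

```python
# Hedged sketch: building "name version build" spec strings from a prefix with
# PrefixData, matching what environ.Environment(prefix).package_specs() returned.
# sys.prefix is used only as a convenient example prefix.
import sys

from conda.core.prefix_data import PrefixData

specs = [
    f"{prec.name} {prec.version} {prec.build}"
    for prec in PrefixData(sys.prefix).iter_records()
]
print(specs[:5])
```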
diff --git a/news/5221-deprecate-get_version_from_git_tag b/news/5221-deprecate-get_version_from_git_tag
new file mode 100644
index 0000000000..2c1e811a54
--- /dev/null
+++ b/news/5221-deprecate-get_version_from_git_tag
@@ -0,0 +1,19 @@
+### Enhancements
+
+*
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+* Mark `conda_build.conda_interface.get_version_from_git_tag` as deprecated. Use `conda_build.environ.get_version_from_git_tag` instead. (#5221)
+
+### Docs
+
+*
+
+### Other
+
+*
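Only the import location changes; a hedged sketch of the swap:

```python
# Hedged sketch: the function keeps its name, only its home module changes.
# Deprecated import:
# from conda_build.conda_interface import get_version_from_git_tag

# Preferred import going forward:
from conda_build.environ import get_version_from_git_tag  # noqa: F401
```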
diff --git a/news/5228-lief-0.14-compat b/news/5228-lief-0.14-compat
new file mode 100644
index 0000000000..602242c7d4
--- /dev/null
+++ b/news/5228-lief-0.14-compat
@@ -0,0 +1,19 @@
+### Enhancements
+
+* Add compatibility for LIEF=0.14. (#5227 via #5228)
+
+### Bug fixes
+
+*
+
+### Deprecations
+
+*
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/pyproject.toml b/pyproject.toml
index 21d787c86d..dd3e95dd56 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,7 +30,7 @@ requires-python = ">=3.8"
dependencies = [
"beautifulsoup4",
"chardet",
- "conda >=22.11",
+ "conda >=23.5.0",
"conda-index >=0.4.0",
"conda-package-handling >=1.3",
"filelock",
@@ -125,16 +125,24 @@ addopts = [
# "--cov=conda_build", # passed in test runner scripts instead (avoid debugger)
"--cov-append",
"--cov-branch",
- "--cov-report=term-missing",
- "--cov-report=xml",
- "--durations=16",
+ "--cov-report=term", # print summary table to screen
+ "--cov-report=xml", # for codecov/codecov-action upload
+ "--durations=16", # show 16 slowest tests
"--junitxml=junit.xml",
# "--splitting-algorithm=least_duration", # not available yet
# "--store-durations", # not available yet
"--strict-markers",
"--tb=native",
+ "--xdoctest-modules",
+ "--xdoctest-style=google",
"-vv",
]
+doctest_optionflags = [
+ "NORMALIZE_WHITESPACE",
+ "IGNORE_EXCEPTION_DETAIL",
+ "ALLOW_UNICODE",
+ "ELLIPSIS",
+]
markers = [
"serial: execute test serially (to avoid race conditions)",
"slow: execute the slow tests if active",
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 9b1ec2f3bc..a9062803cb 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -30,7 +30,7 @@ requirements:
run:
- beautifulsoup4
- chardet
- - conda >=22.11.0
+ - conda >=23.5.0
- conda-index >=0.4.0
- conda-package-handling >=1.3
- filelock
diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py
index 60f24cf7ca..9da5b48418 100644
--- a/tests/cli/test_main_build.py
+++ b/tests/cli/test_main_build.py
@@ -296,7 +296,7 @@ def test_no_force_upload(
# render recipe
api.output_yaml(testing_metadata, "meta.yaml")
- pkg = api.get_output_file_path(testing_metadata)
+ pkg = api.get_output_file_paths(testing_metadata)
# mock Config.set_keys to always set anaconda_upload to True
# conda's Context + conda_build's MetaData & Config objects interact in such an
diff --git a/tests/requirements-linux.txt b/tests/requirements-Linux.txt
similarity index 100%
rename from tests/requirements-linux.txt
rename to tests/requirements-Linux.txt
diff --git a/tests/requirements-windows.txt b/tests/requirements-Windows.txt
similarity index 100%
rename from tests/requirements-windows.txt
rename to tests/requirements-Windows.txt
diff --git a/tests/requirements-ci.txt b/tests/requirements-ci.txt
new file mode 100644
index 0000000000..23d78bb0b2
--- /dev/null
+++ b/tests/requirements-ci.txt
@@ -0,0 +1,19 @@
+anaconda-client
+conda-forge::xdoctest
+conda-verify
+contextlib2
+coverage
+cytoolz
+git
+numpy
+perl
+pip
+pyflakes
+pytest
+pytest-cov
+pytest-forked
+pytest-mock
+pytest-rerunfailures
+pytest-xdist
+ruamel.yaml
+tomli # [py<3.11] for coverage pyproject.toml
diff --git a/tests/requirements-macos.txt b/tests/requirements-macOS.txt
similarity index 100%
rename from tests/requirements-macos.txt
rename to tests/requirements-macOS.txt
diff --git a/tests/requirements.txt b/tests/requirements.txt
index a4ecdd07a8..5e94d4111a 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,37 +1,22 @@
beautifulsoup4
chardet
-conda >=22.11.0
-conda-forge::anaconda-client
-conda-index
+conda >=23.5.0
+conda-index >=0.4.0
+conda-libmamba-solver # ensure we use libmamba
conda-package-handling >=1.3
-conda-verify
-contextlib2
-cytoolz
filelock
-git
jinja2
jsonschema >=4.19
menuinst >=2
-numpy
packaging
-perl
-pip
pkginfo
psutil
py-lief
-pyflakes
-pytest
-pytest-cov
-pytest-forked
-pytest-mock
-pytest-replay
-pytest-rerunfailures
-pytest-xdist
+python >=3.8
python-libarchive-c
pytz
+pyyaml
requests
-ripgrep
-ruamel.yaml
+ripgrep # for faster grep
setuptools_scm # needed for devenv version detection
-tomli # [py<3.11] for coverage pyproject.toml
tqdm
diff --git a/tests/test_api_build.py b/tests/test_api_build.py
index 6ad6577c50..0d2bd3b5f0 100644
--- a/tests/test_api_build.py
+++ b/tests/test_api_build.py
@@ -242,7 +242,7 @@ def test_offline(
def test_git_describe_info_on_branch(testing_config):
recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch")
m = api.render(recipe_path, config=testing_config)[0][0]
- output = api.get_output_file_path(m)[0]
+ output = api.get_output_file_paths(m)[0]
# missing hash because we set custom build string in meta.yaml
test_path = os.path.join(
testing_config.croot,
@@ -625,7 +625,7 @@ def test_numpy_setup_py_data(testing_config):
m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0]
_hash = m.hash_dependencies()
assert (
- os.path.basename(api.get_output_file_path(m)[0])
+ os.path.basename(api.get_output_file_paths(m)[0])
== f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2"
)
@@ -795,7 +795,7 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke
# This will (after one spin round the loop) install and run 'git' with the
# build env prepended to os.environ[]
metadata = api.render(testing_workdir, config=testing_config)[0][0]
- output = api.get_output_file_path(metadata, config=testing_config)[0]
+ output = api.get_output_file_paths(metadata, config=testing_config)[0]
assert f"relative_submodules-{tag}-" in output
api.build(metadata, config=testing_config)
@@ -811,7 +811,7 @@ def test_noarch(testing_workdir):
)
with open(filename, "w") as outfile:
outfile.write(yaml.dump(data, default_flow_style=False, width=999999999))
- output = api.get_output_file_path(testing_workdir)[0]
+ output = api.get_output_file_paths(testing_workdir)[0]
assert os.path.sep + "noarch" + os.path.sep in output or not noarch
assert os.path.sep + "noarch" + os.path.sep not in output or noarch
diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py
index 56685f66d1..9d88b60eee 100644
--- a/tests/test_api_consistency.py
+++ b/tests/test_api_consistency.py
@@ -43,7 +43,7 @@ def test_api_output_yaml():
def test_api_get_output_file_path():
- argspec = getargspec(api.get_output_file_path)
+ argspec = getargspec(api.get_output_file_paths)
assert argspec.args == [
"recipe_path_or_metadata",
"no_download_source",
diff --git a/tests/test_api_render.py b/tests/test_api_render.py
index 878617e78d..868053876b 100644
--- a/tests/test_api_render.py
+++ b/tests/test_api_render.py
@@ -105,7 +105,7 @@ def test_get_output_file_path_jinja2(testing_config):
def test_output_without_jinja_does_not_download(mocker, testing_config):
mock = mocker.patch("conda_build.source")
- api.get_output_file_path(
+ api.get_output_file_paths(
os.path.join(metadata_dir, "source_git"), config=testing_config
)
mock.assert_not_called()
diff --git a/tests/test_environ.py b/tests/test_environ.py
index 93311ab81b..d45fc8ed7f 100644
--- a/tests/test_environ.py
+++ b/tests/test_environ.py
@@ -1,6 +1,9 @@
# Copyright (C) 2014 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
import os
+import sys
+
+from conda.core.prefix_data import PrefixData
from conda_build import environ
@@ -15,3 +18,12 @@ def test_environment_creation_preserves_PATH(testing_workdir, testing_config):
subdir=testing_config.build_subdir,
)
assert os.environ["PATH"] == ref_path
+
+
+def test_environment():
+ """Asserting PrefixData can accomplish the same thing as Environment."""
+ assert (specs := environ.Environment(sys.prefix).package_specs())
+ assert specs == [
+ f"{prec.name} {prec.version} {prec.build}"
+ for prec in PrefixData(sys.prefix).iter_records()
+ ]
diff --git a/tests/test_render.py b/tests/test_render.py
index 6cfd0abeea..aef9d0e928 100644
--- a/tests/test_render.py
+++ b/tests/test_render.py
@@ -27,7 +27,7 @@
)
def test_noarch_output(build, testing_metadata):
testing_metadata.meta["build"].update(build)
- output = api.get_output_file_path(testing_metadata)
+ output = api.get_output_file_paths(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
diff --git a/tests/test_source.py b/tests/test_source.py
index e32a133b84..711407d153 100644
--- a/tests/test_source.py
+++ b/tests/test_source.py
@@ -5,9 +5,10 @@
import tarfile
import pytest
+from conda.gateways.disk.read import compute_sum
from conda_build import source
-from conda_build.conda_interface import TemporaryDirectory, hashsum_file
+from conda_build.conda_interface import TemporaryDirectory
from conda_build.source import download_to_cache
from conda_build.utils import reset_deduplicator
@@ -142,7 +143,7 @@ def test_source_user_expand():
"url": os.path.join(
prefix, os.path.basename(tmp), "cb-test.tar.bz2"
),
- "sha256": hashsum_file(tbz_name, "sha256"),
+ "sha256": compute_sum(tbz_name, "sha256"),
}
with TemporaryDirectory() as tmp2:
download_to_cache(tmp2, "", source_dict)
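`compute_sum` replaces the `hashsum_file` helper dropped from this test's imports; a hedged usage sketch:

```python
# Hedged sketch of the hashsum_file -> compute_sum swap shown above.
# "archive.tar.bz2" is a placeholder path, not a file from this repository.
from conda.gateways.disk.read import compute_sum

sha256 = compute_sum("archive.tar.bz2", "sha256")
print(sha256)
```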
diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py
index 3937036d14..3c3b011c58 100644
--- a/tests/test_subpackages.py
+++ b/tests/test_subpackages.py
@@ -55,7 +55,7 @@ def test_rm_rf_does_not_remove_relative_source_package_files(
def test_output_pkg_path_shows_all_subpackages(testing_metadata):
testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}]
out_dicts_and_metadata = testing_metadata.get_output_metadata_set()
- outputs = api.get_output_file_path(
+ outputs = api.get_output_file_paths(
[(m, None, None) for (_, m) in out_dicts_and_metadata]
)
assert len(outputs) == 2
@@ -64,7 +64,7 @@ def test_output_pkg_path_shows_all_subpackages(testing_metadata):
def test_subpackage_version_provided(testing_metadata):
testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}]
out_dicts_and_metadata = testing_metadata.get_output_metadata_set()
- outputs = api.get_output_file_path(
+ outputs = api.get_output_file_paths(
[(m, None, None) for (_, m) in out_dicts_and_metadata]
)
assert len(outputs) == 1
@@ -78,7 +78,7 @@ def test_subpackage_independent_hash(testing_metadata):
testing_metadata.meta["requirements"]["run"] = ["a"]
out_dicts_and_metadata = testing_metadata.get_output_metadata_set()
assert len(out_dicts_and_metadata) == 2
- outputs = api.get_output_file_path(
+ outputs = api.get_output_file_paths(
[(m, None, None) for (_, m) in out_dicts_and_metadata]
)
assert len(outputs) == 2
diff --git a/tests/test_variants.py b/tests/test_variants.py
index 89ebb67999..50e9cea4f2 100644
--- a/tests/test_variants.py
+++ b/tests/test_variants.py
@@ -429,7 +429,7 @@ def test_build_run_exports_act_on_host(caplog):
platform="win",
arch="64",
)
- assert "failed to get install actions, retrying" not in caplog.text
+ assert "failed to get package records, retrying" not in caplog.text
def test_detect_variables_in_build_and_output_scripts():