From 36b2d9d511f9e8c5af138561a777bdf41e2ba901 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 30 Nov 2022 17:07:42 -0600 Subject: [PATCH 001/366] Release 3.23.2 (#4661) --- .authors.yml | 8 ++++---- CHANGELOG.md | 14 ++++++++++++++ .../gh-4650-fix-cli-zstd-level-override-gh-4649.md | 3 --- 3 files changed, 18 insertions(+), 7 deletions(-) delete mode 100644 news/gh-4650-fix-cli-zstd-level-override-gh-4649.md diff --git a/.authors.yml b/.authors.yml index 99635e8cc9..6ed0816b9c 100644 --- a/.authors.yml +++ b/.authors.yml @@ -611,7 +611,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 70 + num_commits: 72 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 60 + num_commits: 61 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 21 + num_commits: 22 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 27 + num_commits: 29 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e05ca33f3..0ad9bd8c40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ [//]: # (current developments) +## 3.23.2 (2022-11-30) + +### Bug fixes + +* `conda-build` CLI overrode `condarc`'s `zstd_compression_level` with the default value. (#4650) + +### Contributors + +* @kenodegard +* @mbargull +* @pre-commit-ci[bot] + + + ## 3.23.1 (2022-11-17) ### Bug fixes diff --git a/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md b/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md deleted file mode 100644 index 1bae50d432..0000000000 --- a/news/gh-4650-fix-cli-zstd-level-override-gh-4649.md +++ /dev/null @@ -1,3 +0,0 @@ -### Bug fixes - -* fix: `conda-build` CLI overrode `condarc`'s `zstd_compression_level` with the default value From 53aa0d88ad03a5a05eeeee7fe2d9d99bf5d072cc Mon Sep 17 00:00:00 2001 From: jakirkham Date: Fri, 2 Dec 2022 09:24:58 -0800 Subject: [PATCH 002/366] Change Zstd compression default to 19 (#4663) * Change Zstd compression default to 19 This uses less memory and takes less time while still providing a good level compression. Also lines up with how this was changed in conda-package-handling and conda-forge. --- conda_build/cli/main_build.py | 3 ++- conda_build/config.py | 2 +- news/zstd_def_19 | 19 +++++++++++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 news/zstd_def_19 diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 8582247606..316ff998b1 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -158,7 +158,8 @@ def parse_args(args): p.add_argument( "--zstd-compression-level", help=("When building v2 packages, set the compression level used by " - "conda-package-handling. Defaults to the maximum."), + "conda-package-handling. 
" + f"Defaults to {zstd_compression_level_default}."), type=int, choices=range(1, 23), default=cc_conda_build.get('zstd_compression_level', zstd_compression_level_default), diff --git a/conda_build/config.py b/conda_build/config.py index 0551068b46..db11b5d309 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -50,7 +50,7 @@ def set_invocation_time(): ignore_verify_codes_default = [] exit_on_verify_error_default = False conda_pkg_format_default = None -zstd_compression_level_default = 22 +zstd_compression_level_default = 19 # Python2 silliness: diff --git a/news/zstd_def_19 b/news/zstd_def_19 new file mode 100644 index 0000000000..9087c3d12e --- /dev/null +++ b/news/zstd_def_19 @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Change Zstd default compression to 19 + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 7e9d1231057a6cae01d3116d399d9d52717cff21 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Mon, 5 Dec 2022 17:13:36 +0100 Subject: [PATCH 003/366] Fix: Build environments not activated (#4665) * test: Add test to inspect actual PATH during build * Fix: MetaData should never be used for memoization Its members are very dynamic so it is problematic to use it for cached calculations. * Fix: Remove invalid memoization for MetaData * test: Fix test_activated_prefixes_in_actual_path Needed to set .activate=True on the MetaData's own .config * lint * Add news/4665-fix-activation-path * Fix non-activate prepend_bin_path, path_prepended - Prefix "bin" directory get added on all platforms - Prefix root directory should only be added on Windows * test: Add description in test docstring --- conda_build/build.py | 7 ++++-- conda_build/environ.py | 1 - conda_build/metadata.py | 2 ++ conda_build/utils.py | 10 ++++----- conda_build/windows.py | 3 +++ news/4665-fix-activation-path | 20 ++++++++++++++++++ tests/test_api_build.py | 40 ++++++++++++++++++++++++++++++++++- 7 files changed, 74 insertions(+), 9 deletions(-) create mode 100644 news/4665-fix-activation-path diff --git a/conda_build/build.py b/conda_build/build.py index c171896686..f264a6b549 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -2584,8 +2584,11 @@ def construct_metadata_for_test(recipedir_or_package, config): def write_build_scripts(m, script, build_file): - with utils.path_prepended(m.config.host_prefix): - with utils.path_prepended(m.config.build_prefix): + # TODO: Prepending the prefixes here should probably be guarded by + # if not m.activate_build_script: + # Leaving it as is, for now, since we need a quick, non-disruptive patch release. 
+ with utils.path_prepended(m.config.host_prefix, False): + with utils.path_prepended(m.config.build_prefix, False): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" diff --git a/conda_build/environ.py b/conda_build/environ.py index 348b0bcd97..5a57359a4b 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -663,7 +663,6 @@ def system_vars(env_dict, m, prefix): return os_vars(m, prefix) -@lru_cache(maxsize=None) def os_vars(m, prefix): d = dict() # note the dictionary is passed in here - variables are set in that dict if they are non-null diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 6277601647..99f1b423d9 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1046,6 +1046,8 @@ def stringify_numbers(): class MetaData: + __hash__ = None # declare as non-hashable to avoid its use with memoization + def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] diff --git a/conda_build/utils.py b/conda_build/utils.py index 37d978fca5..c398b7fda6 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -950,11 +950,10 @@ def get_build_folders(croot): def prepend_bin_path(env, prefix, prepend_prefix=False): - # bin_dirname takes care of bin on *nix, Scripts on win - env['PATH'] = join(prefix, bin_dirname) + os.pathsep + env['PATH'] + env['PATH'] = join(prefix, "bin") + os.pathsep + env['PATH'] if sys.platform == "win32": env['PATH'] = join(prefix, "Library", "mingw-w64", "bin") + os.pathsep + \ - join(prefix, "Library", "usr", "bin") + os.pathsep + os.pathsep + \ + join(prefix, "Library", "usr", "bin") + os.pathsep + \ join(prefix, "Library", "bin") + os.pathsep + \ join(prefix, "Scripts") + os.pathsep + \ env['PATH'] @@ -985,9 +984,10 @@ def sys_path_prepended(prefix): @contextlib.contextmanager -def path_prepended(prefix): +def path_prepended(prefix, prepend_prefix=True): + # FIXME: Unclear why prepend_prefix=True for all platforms. old_path = os.environ['PATH'] - os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH'] + os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)['PATH'] try: yield finally: diff --git a/conda_build/windows.py b/conda_build/windows.py index 9bee722f18..c365cc60db 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -250,6 +250,9 @@ def write_build_scripts(m, env, bld_bat): def build(m, bld_bat, stats, provision_only=False): + # TODO: Prepending the prefixes here should probably be guarded by + # if not m.activate_build_script: + # Leaving it as is, for now, since we need a quick, non-disruptive patch release. 
with path_prepended(m.config.host_prefix): with path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) diff --git a/news/4665-fix-activation-path b/news/4665-fix-activation-path new file mode 100644 index 0000000000..d4faa26d26 --- /dev/null +++ b/news/4665-fix-activation-path @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* Fix build/host environment activation broken in >=3.23.0,<=3.23.2 +* Add PREFIX/bin to PATH on Windows and remove PREFIX root from PATH on Unix + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index bf1dc00af9..0812906e66 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -36,7 +36,7 @@ from conda_build.render import finalize_metadata from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, package_has_file, check_output_env, get_conda_operation_locks, rm_rf, - walk, env_var, FileNotFoundError) + prepend_bin_path, walk, env_var, FileNotFoundError) from conda_build.os_utils.external import find_executable from conda_build.exceptions import (DependencyNeedsBuildingError, CondaBuildException, OverLinkingError, OverDependingError) @@ -1652,3 +1652,41 @@ def assert_keyword(keyword): assert_keyword('') finally: os.environ.pop(token) + + +@pytest.mark.slow +def test_activated_prefixes_in_actual_path(testing_config, testing_metadata): + """ + Check if build and host env are properly added to PATH in the correct order. + Do this in an actual build and not just in a unit test to avoid regression. + Currently only tests for single non-"outputs" recipe with build/host split + and proper env activation (Metadata.is_cross and Config.activate both True). + """ + file = "env-path-dump" + testing_metadata.config.activate = True + meta = testing_metadata.meta + meta["requirements"]["host"] = [] + meta["build"]["script"] = [ + f"echo %PATH%>%PREFIX%/{file}" if on_win else f"echo $PATH>$PREFIX/{file}" + ] + outputs = api.build(testing_metadata) + env = {"PATH": ""} + # We get the PATH entries twice: (which we should fix at some point) + # 1. from the environment activation hooks, + # 2. also beforehand from utils.path_prepended at the top of + # - build.write_build_scripts on Unix + # - windows.build on Windows + # And apparently here the previously added build env gets deactivated + # from the activation hook, hence only host is on PATH twice. 
+ prepend_bin_path(env, testing_metadata.config.host_prefix) + if not on_win: + prepend_bin_path(env, testing_metadata.config.build_prefix) + prepend_bin_path(env, testing_metadata.config.host_prefix) + prepend_bin_path(env, testing_metadata.config.build_prefix) + expected_paths = [path for path in env["PATH"].split(os.pathsep) if path] + actual_paths = [ + path + for path in package_has_file(outputs[0], file).strip().split(os.pathsep) + if path in expected_paths + ] + assert actual_paths == expected_paths From d83f18c157f50d1dc141ae6d057e1a74cfb89f95 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 6 Dec 2022 21:09:24 -0600 Subject: [PATCH 004/366] Skip tests for non-code changes (#4664) * Skip tests for non-code changes * Add news --- .github/workflows/tests-skip.yml | 46 ++++++++++++++++++++++++++++++++ .github/workflows/tests.yml | 18 ++++++++++++- news/4664-skip-testing-non-code | 19 +++++++++++++ 3 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/tests-skip.yml create mode 100644 news/4664-skip-testing-non-code diff --git a/.github/workflows/tests-skip.yml b/.github/workflows/tests-skip.yml new file mode 100644 index 0000000000..05cc3ccabe --- /dev/null +++ b/.github/workflows/tests-skip.yml @@ -0,0 +1,46 @@ +# this is the sibling workflow to tests-skip.yml, it is required to work around +# the skipped but required checks issue: +# https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks +name: Tests + +on: + # NOTE: github.event context is push payload: + # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + push: + branches: + - main + - feature/** + paths: # run if src/tests/recipe are modified + - conda_build/** + - tests/** + - '*.py' + - recipe/** + - .github/workflows/tests.yml + + # NOTE: github.event context is pull_request payload: + # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request + pull_request: + paths: + - conda_build/** + - tests/** + - '*.py' + - recipe/** + - .github/workflows/tests.yml + +concurrency: + # Concurrency group that uses the workflow name and PR number if available + # or commit SHA as a fallback. If a new build is triggered under that + # concurrency group while a previous build is running it will be canceled. + # Repeated pushes to a PR will cancel all previous builds, while multiple + # merges to main will not cancel. 
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +jobs: + # required check + analyze: + name: Analyze results + if: always() + runs-on: ubuntu-latest + steps: + - run: 'echo "Nothing to analyze"' diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3ea15b4e7a..b9ee87ef83 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,3 +1,6 @@ +# this is the sibling workflow to tests-skip.yml, it is required to work around +# the skipped but required checks issue: +# https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks name: Tests on: @@ -7,10 +10,22 @@ on: branches: - main - feature/** + paths: # run if src/tests/recipe are modified + - conda_build/** + - tests/** + - '*.py' + - recipe/** + - .github/workflows/tests.yml # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: + paths: + - conda_build/** + - tests/** + - '*.py' + - recipe/** + - .github/workflows/tests.yml concurrency: # Concurrency group that uses the workflow name and PR number if available @@ -348,8 +363,9 @@ jobs: name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} if: always() + # required check analyze: - name: Analyze test results + name: Analyze results needs: [windows, linux, macos] if: always() runs-on: ubuntu-latest diff --git a/news/4664-skip-testing-non-code b/news/4664-skip-testing-non-code new file mode 100644 index 0000000000..a2e6b12549 --- /dev/null +++ b/news/4664-skip-testing-non-code @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Skip test suite for non-code changes. 
(#4664) From 2057e82d216ff4bd355a23306c8f8c0c8ed91a38 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 6 Dec 2022 21:22:48 -0600 Subject: [PATCH 005/366] Fix skip workflow (#4670) --- .github/workflows/tests-skip.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests-skip.yml b/.github/workflows/tests-skip.yml index 05cc3ccabe..9ae5afbac6 100644 --- a/.github/workflows/tests-skip.yml +++ b/.github/workflows/tests-skip.yml @@ -10,7 +10,7 @@ on: branches: - main - feature/** - paths: # run if src/tests/recipe are modified + paths-ignore: # run if src/tests/recipe are modified - conda_build/** - tests/** - '*.py' @@ -20,7 +20,7 @@ on: # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: - paths: + paths-ignore: - conda_build/** - tests/** - '*.py' From 20b08017b9308309f8feb877493db0c09d6a91c0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 6 Dec 2022 21:34:04 -0600 Subject: [PATCH 006/366] Release 3.23.3 (#4669) * Update news snippets * Updated authorship for 3.23.3 * Updated CHANGELOG for 3.23.3 --- .authors.yml | 6 +++--- CHANGELOG.md | 20 ++++++++++++++++++++ news/4664-skip-testing-non-code | 19 ------------------- news/4665-fix-activation-path | 20 -------------------- news/zstd_def_19 | 19 ------------------- 5 files changed, 23 insertions(+), 61 deletions(-) delete mode 100644 news/4664-skip-testing-non-code delete mode 100644 news/4665-fix-activation-path delete mode 100644 news/zstd_def_19 diff --git a/.authors.yml b/.authors.yml index 6ed0816b9c..a5a07b6390 100644 --- a/.authors.yml +++ b/.authors.yml @@ -611,7 +611,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 72 + num_commits: 73 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 136 + num_commits: 138 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 61 + num_commits: 63 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ad9bd8c40..394447e77c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ [//]: # (current developments) +## 3.23.3 (2022-12-06) + +### Bug fixes + +* Change Zstd default compression to 19. (#4663) +* Fix build/host environment activation broken in >=3.23.0,<=3.23.2. (#4665) +* Add `PREFIX/bin` to `PATH` on Windows and remove `PREFIX` root from `PATH` on Unix. (#4665) + +### Other + +* Skip test suite for non-code changes. (#4664) + +### Contributors + +* @jakirkham +* @kenodegard +* @mbargull + + + ## 3.23.2 (2022-11-30) ### Bug fixes diff --git a/news/4664-skip-testing-non-code b/news/4664-skip-testing-non-code deleted file mode 100644 index a2e6b12549..0000000000 --- a/news/4664-skip-testing-non-code +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Skip test suite for non-code changes. 
(#4664) diff --git a/news/4665-fix-activation-path b/news/4665-fix-activation-path deleted file mode 100644 index d4faa26d26..0000000000 --- a/news/4665-fix-activation-path +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix build/host environment activation broken in >=3.23.0,<=3.23.2 -* Add PREFIX/bin to PATH on Windows and remove PREFIX root from PATH on Unix - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/zstd_def_19 b/news/zstd_def_19 deleted file mode 100644 index 9087c3d12e..0000000000 --- a/news/zstd_def_19 +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Change Zstd default compression to 19 - -### Deprecations - -* - -### Docs - -* - -### Other - -* From 8be29efb8002388641f2e63a2d6c665a55e2fa89 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 13 Dec 2022 10:53:00 -0600 Subject: [PATCH 007/366] Update .pre-commit-config.yaml (#4674) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 15c4a460d1..1fca0bd8c2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: rev: 1.5.1 hooks: - id: darker - additional_dependencies: [black] + additional_dependencies: [black==22.10.0] - repo: https://github.com/asottile/blacken-docs rev: v1.12.1 hooks: From daeda52d75323f92af29fb91002b1f683c5606ab Mon Sep 17 00:00:00 2001 From: Srivas Venkatesh <110486050+sven6002@users.noreply.github.com> Date: Wed, 14 Dec 2022 12:50:36 -0600 Subject: [PATCH 008/366] Add Allure reporting, update Python version test matrix, and prune test_api_build.py (#4621) * Updated test.yml file to remove mkdir commands * Adding the allure commands * update tests.yml * revert small test programs; remove autogenerated build script * change "are we newer than old conda version" to True * format utils.py * remove _unicode_in_tarball test recipe * remove obsolete skipif from test_api_build * call api.build() in test_recipe_builds * Test lower and upper Python versions * Bump default numpy to 1.21 Python 3.10 and numpy 1.16 are not compatible so to move forward with supporting Python 3.10 and soon 3.11 we will need to also bump up the default numpy variant. * Modernize tests These tests utilized a bad glob pattern that assumes only 2 digit Python version (e.g., py37, py38, py39) but fails for valid 3 digit versions (e.g., py310, py311). 
* Bump sympy to 1.10 to support Python 3.10 * Skip bad compile test for Python 3.10 * Fix lib/pythonM.M regex pattern to support Python 3.10 Co-authored-by: Bianca Henderson Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Daniel Holth Co-authored-by: Ken Odegard --- .github/workflows/tests.yml | 40 +++++-- conda_build/conda_interface.py | 37 +++--- conda_build/convert.py | 4 +- conda_build/variants.py | 4 +- setup.py | 1 + .../metadata/_unicode_in_tarball/meta.yaml | 58 --------- .../metadata/numpy_build/run_test.bat | 3 + .../metadata/numpy_build/run_test.py | 10 +- .../metadata/numpy_build/run_test.sh | 6 +- .../metadata/numpy_build_run/run_test.bat | 6 +- .../metadata/numpy_build_run/run_test.py | 22 ++-- .../metadata/numpy_build_run/run_test.sh | 6 +- .../metadata/python_build_run/run_test.bat | 5 +- .../metadata/python_build_run/run_test.py | 18 ++- .../metadata/python_build_run/run_test.sh | 8 +- tests/test_api_build.py | 113 +++--------------- tests/test_api_skeleton.py | 24 ++-- tests/test_post.py | 4 + tests/utils.py | 5 + 19 files changed, 136 insertions(+), 238 deletions(-) delete mode 100644 tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b9ee87ef83..d0637b3e25 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,14 +45,15 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8'] + # test lower and upper bounds + python-version: ['3.7', '3.10'] conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.9' + - python-version: '3.10' conda-version: canary test-type: serial - - python-version: '3.9' + - python-version: '3.10' conda-version: canary test-type: parallel env: @@ -84,8 +85,8 @@ jobs: run: | source ci/github/install_conda_build_test_deps pip install -e . + pip install allure-pytest conda info -a - mkdir ./pytest-replay - name: Run Serial tests if: matrix.test-type == 'serial' @@ -98,7 +99,9 @@ jobs: --cov conda_build \ --cov-report xml \ -m "serial" tests \ - ${{ env.pytest-replay }} + ${{ env.pytest-replay }} \ + --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results - name: Run Parallel tests if: matrix.test-type == 'parallel' @@ -112,7 +115,9 @@ jobs: --cov-append \ --cov-report xml \ -m "not serial" tests \ - ${{ env.pytest-replay }} + ${{ env.pytest-replay }} \ + --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay uses: actions/upload-artifact@v2 @@ -125,7 +130,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8'] + python-version: ['3.7', '3.8', '3.9', '3.10'] conda-version: [release] test-type: [serial, parallel] include: @@ -226,6 +231,7 @@ jobs: python --version python -c "import struct; print(struct.calcsize('P') * 8)" pip install --no-deps . + pip install allure-pytest conda-build --version pushd .. 
&& git clone https://github.com/conda/conda_build_test_recipe && popd mkdir %UserProfile%\cbtmp_serial @@ -235,9 +241,9 @@ jobs: call conda create -n blarg -yq --download-only python=2.7||exit 1 call conda create -n blarg -yq --download-only python=3.8||exit 1 call conda create -n blarg -yq --download-only python cmake||exit 1 - mkdir ${{ github.workspace }}\\pytest-replay set "PYTEST_REPLAY_OPTIONS=--replay-record-dir=${{ github.workspace }}\\pytest-replay --replay-base-name=Win-%CONDA_VERSION%-Py%PYTHON_VERSION%" echo "##vso[task.setvariable variable=PYTEST_REPLAY_OPTIONS]%PYTEST_REPLAY_OPTIONS%" + shell: cmd - name: Run Serial Tests @@ -251,7 +257,9 @@ jobs: set PERL= set LUA= set R= - pytest --color=yes -vv -n 0 --basetemp %UserProfile%\cbtmp_serial --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} + pytest --color=yes -vv -n 0 --basetemp %UserProfile%\cbtmp_serial --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results + shell: cmd - name: Run Parallel Tests @@ -265,7 +273,8 @@ jobs: set PERL= set LUA= set R= - pytest --color=yes -vv -n auto --basetemp %UserProfile%\cbtmp --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} + pytest --color=yes -vv -n auto --basetemp %UserProfile%\cbtmp --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results shell: cmd env: VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin @@ -286,7 +295,8 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.9'] + # test lower and upper bounds + python-version: ['3.7', '3.10'] conda-version: [canary] test-type: [serial, parallel] max-parallel: 10 @@ -331,6 +341,8 @@ jobs: conda info source ci/github/install_conda_build_test_deps pip install -e . 
+ echo "Installing Support Libraries" + pip install allure-pytest conda info -a conda list --show-channel-urls @@ -340,7 +352,8 @@ jobs: set -e -u source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" conda install conda-verify -y - pytest --color=yes -v -n 0 --basetemp /tmp/cb_serial --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" + pytest --color=yes -v -n 0 --basetemp /tmp/cb_serial --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results - name: Run Parallel Tests if: matrix.test-type == 'parallel' @@ -354,7 +367,8 @@ jobs: conda create -n blarg1 -yq python=2.7 conda create -n blarg3 -yq python=3.7 conda create -n blarg4 -yq python nomkl numpy pandas svn - pytest --color=yes -v -n auto --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" + pytest --color=yes -v -n auto --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay uses: actions/upload-artifact@v2 diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 591c8ac6df..43b026e926 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -5,10 +5,10 @@ from os import lstat from importlib import import_module -from pkg_resources import parse_version - from conda import __version__ as CONDA_VERSION +CONDA_VERSION = CONDA_VERSION + def try_exports(module, attr): # this assumes conda.exports exists, so only use for conda 4.3 onward @@ -27,13 +27,14 @@ def try_exports(module, attr): # no need to patch if it doesn't exist pass -conda_43 = parse_version(CONDA_VERSION) >= parse_version("4.3.0a0") -conda_44 = parse_version(CONDA_VERSION) >= parse_version("4.4.0a0") -conda_45 = parse_version(CONDA_VERSION) >= parse_version("4.5.0a0") -conda_46 = parse_version(CONDA_VERSION) >= parse_version("4.6.0a0") -conda_47 = parse_version(CONDA_VERSION) >= parse_version("4.7.0a0") -conda_48 = parse_version(CONDA_VERSION) >= parse_version("4.8.0a0") -conda_411 = parse_version(CONDA_VERSION) >= parse_version("4.11.0a0") +# All of these conda's are older than our minimum dependency +conda_43 = True +conda_44 = True +conda_45 = True +conda_46 = True +conda_47 = True +conda_48 = True +conda_411 = True if conda_44: from conda.exports import display_actions, execute_actions, execute_plan, install_actions @@ -43,17 +44,12 @@ def try_exports(module, attr): display_actions, execute_actions, execute_plan = display_actions, execute_actions, execute_plan install_actions = install_actions -try: - # Conda 4.4+ - from conda.exports import _toposort -except ImportError: - from conda.toposort import _toposort +from conda.exports import _toposort # NOQA + _toposort = _toposort -if conda_411: - from conda.auxlib.packaging import _get_version_from_git_tag -else: - from conda._vendor.auxlib.packaging import _get_version_from_git_tag +from conda.auxlib.packaging import _get_version_from_git_tag # NOQA + get_version_from_git_tag = _get_version_from_git_tag from conda.exports import TmpDownload, download, handle_proxy_407 # NOQA @@ -134,10 +130,7 @@ def try_exports(module, attr): get_prefix = partial(context_get_prefix, 
context) cc_conda_build = context.conda_build if hasattr(context, 'conda_build') else {} -try: - from conda.exports import Channel -except: - from conda.models.channel import Channel +from conda.exports import Channel # NOQA get_conda_channel = Channel.from_value # disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. diff --git a/conda_build/convert.py b/conda_build/convert.py index 07fdc5c4a8..9915ca799a 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -85,8 +85,8 @@ def retrieve_python_version(file_path): to the source tar file itself, or the file path to the temporary directory containing the extracted source package contents """ - if 'python' in file_path: - pattern = re.compile(r'python\d\.\d') + if "python" in file_path: + pattern = re.compile(r"python\d\.\d+") matched = pattern.search(file_path) if matched: diff --git a/conda_build/variants.py b/conda_build/variants.py index 2e7cd9b193..19c1e87a5f 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -19,8 +19,8 @@ from conda_build.utils import ensure_list, get_logger, islist, on_win, trim_empty_keys DEFAULT_VARIANTS = { - 'python': f'{sys.version_info.major}.{sys.version_info.minor}', - 'numpy': '1.16', + "python": f"{sys.version_info.major}.{sys.version_info.minor}", + "numpy": "1.21", # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. 'perl': '5.26.2', diff --git a/setup.py b/setup.py index 2a52da6420..52802c523a 100755 --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10" ], python_requires=">=3.6", description="tools for building conda packages", diff --git a/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml b/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml deleted file mode 100644 index 8a4b2b8a87..0000000000 --- a/tests/test-recipes/metadata/_unicode_in_tarball/meta.yaml +++ /dev/null @@ -1,58 +0,0 @@ -{% set name = "pyslet" %} -{% set version = "0.5.20140801" %} -{% set sha256 = "89538ad432d8c51b7d4b419817526f864697580d5eb1471784d15f6c056a88b6" %} - -package: - name: {{ name|lower }} - version: {{ version }} - -source: - fn: {{ name }}-{{ version }}.tar.gz - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz - sha256: {{ sha256 }} - -build: - skip: True # [py3k] - number: 0 - script: python setup.py install --single-version-externally-managed --record record.txt - -requirements: - build: - - python - - setuptools - - run: - - python - -test: - imports: - - pyslet - - pyslet.http - - pyslet.odata2 - - pyslet.qtiv1 - - pyslet.qtiv2 - - pyslet.xml20081126 - -about: - home: http://www.pyslet.org/ - license: BSD-3-Clause - license_family: BSD - summary: 'Pyslet: Python package for Standards in Learning, Education and Training' - description: | - Pyslet is a Python package for Standards in Learning Education and Training - (LET). It implements a number of LET-specific standards, including IMS QTI, - Content Packaging and Basic LTI. It also includes support for some general - standards, including the data access standard OData (see - http://www.odata.org). - - Pyslet was originally written to be the engine behind the QTI migration - tool but it can be used independently as a support module for your own - Python applications. 
-
-    Pyslet currently supports Python 2.6 and 2.7, see docs for details.
-  doc_url: http://pyslet.readthedocs.org
-  dev_url: https://github.com/swl10/pyslet
-
-extra:
-  recipe-maintainers:
-    - stuertz
diff --git a/tests/test-recipes/metadata/numpy_build/run_test.bat b/tests/test-recipes/metadata/numpy_build/run_test.bat
index 326404b171..c7539158cc 100644
--- a/tests/test-recipes/metadata/numpy_build/run_test.bat
+++ b/tests/test-recipes/metadata/numpy_build/run_test.bat
@@ -1,4 +1,7 @@
+:: show list
 conda list -p "%PREFIX%" --canonical
 if errorlevel 1 exit 1
+
+:: grep for package
 conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-1.0-0"
 if errorlevel 1 exit 1
diff --git a/tests/test-recipes/metadata/numpy_build/run_test.py b/tests/test-recipes/metadata/numpy_build/run_test.py
index 7e8e5c3cb8..5e23751d1b 100644
--- a/tests/test-recipes/metadata/numpy_build/run_test.py
+++ b/tests/test-recipes/metadata/numpy_build/run_test.py
@@ -1,15 +1,15 @@
 import os
-from glob import glob
 import json
+from pathlib import Path


 def main():
-    prefix = os.environ['PREFIX']
-    info_file = glob(os.path.join(prefix, 'conda-meta', 'conda-build-test-numpy-build-1.0*.json'))[0]
-    with open(info_file) as fh:
-        info = json.load(fh)
+    info_files = list(Path(os.environ['PREFIX'], 'conda-meta').glob('conda-build-test-numpy-build-1.0-0.json'))
+    assert len(info_files) == 1
+    info = json.loads(info_files[0].read_text())
     assert len(info['depends']) == 0

+
 if __name__ == '__main__':
     main()
diff --git a/tests/test-recipes/metadata/numpy_build/run_test.sh b/tests/test-recipes/metadata/numpy_build/run_test.sh
index 6098c7d093..902155f5f9 100644
--- a/tests/test-recipes/metadata/numpy_build/run_test.sh
+++ b/tests/test-recipes/metadata/numpy_build/run_test.sh
@@ -1 +1,5 @@
-conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-1.0-0"
+# show list
+conda list -p "$PREFIX" --canonical
+
+# grep for package
+conda list -p "$PREFIX" --canonical | grep "conda-build-test-numpy-build-1.0-0"
diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.bat b/tests/test-recipes/metadata/numpy_build_run/run_test.bat
index 069cc9ff13..d2fd25cc5b 100644
--- a/tests/test-recipes/metadata/numpy_build_run/run_test.bat
+++ b/tests/test-recipes/metadata/numpy_build_run/run_test.bat
@@ -1,5 +1,7 @@
-@echo on
+:: show list
 conda list -p "%PREFIX%" --canonical
 if errorlevel 1 exit 1
-conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0"
+
+:: grep for package
+conda list -p "%PREFIX%" --canonical | grep "conda-build-test-numpy-build-run-1.0-py.*_0"
 if errorlevel 1 exit 1
diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.py b/tests/test-recipes/metadata/numpy_build_run/run_test.py
index 54c0b3859f..7e4c6a7f2a 100644
--- a/tests/test-recipes/metadata/numpy_build_run/run_test.py
+++ b/tests/test-recipes/metadata/numpy_build_run/run_test.py
@@ -1,24 +1,20 @@
 import os
 import json
-import glob
+from pathlib import Path


 def main():
-    prefix = os.environ['PREFIX']
-
-    info_files = glob.glob(os.path.join(prefix, 'conda-meta',
-                           'conda-build-test-numpy-build-run-1.0-py*0.json'))
+    info_files = list(Path(os.environ['PREFIX'], "conda-meta").glob('conda-build-test-numpy-build-run-1.0-py*_0.json'))
     assert len(info_files) == 1
-    info_file = info_files[0]
-    with open(info_file) as fh:
-        info = json.load(fh)
-    # numpy with no version, python with no version, python with version pin
+    info = json.loads(info_files[0].read_text())
     assert len(info['depends']) == 2
-    depends = sorted(info['depends'])
-    # With no version
-    assert depends[0] == 'numpy'
-    assert depends[1].startswith('python ')
+
+    # numpy with no version, python with version pin
+    numpy, python = sorted(info['depends'])
+    assert numpy == 'numpy'
+    assert python.startswith('python ')
+


 if __name__ == '__main__':
     main()
diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.sh b/tests/test-recipes/metadata/numpy_build_run/run_test.sh
index d56a5445ba..1079b31761 100644
--- a/tests/test-recipes/metadata/numpy_build_run/run_test.sh
+++ b/tests/test-recipes/metadata/numpy_build_run/run_test.sh
@@ -1,3 +1,5 @@
+# show list
 conda list -p $PREFIX --canonical
-# Test the build string. Should contain NumPy, but not the version
-conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1\.0-py..h......._0"
+
+# grep for package
+conda list -p $PREFIX --canonical | grep "conda-build-test-numpy-build-run-1.0-py.*_0"
diff --git a/tests/test-recipes/metadata/python_build_run/run_test.bat b/tests/test-recipes/metadata/python_build_run/run_test.bat
index 7ef8c3e19c..0fb1f9d9ce 100644
--- a/tests/test-recipes/metadata/python_build_run/run_test.bat
+++ b/tests/test-recipes/metadata/python_build_run/run_test.bat
@@ -1,4 +1,7 @@
+:: show list
 conda list -p "%PREFIX%" --canonical
 if errorlevel 1 exit 1
-conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0"
+
+:: grep for package
+conda list -p "%PREFIX%" --canonical | grep "conda-build-test-python-build-run-1.0-py.*_0"
 if errorlevel 1 exit 1
diff --git a/tests/test-recipes/metadata/python_build_run/run_test.py b/tests/test-recipes/metadata/python_build_run/run_test.py
index b3c64cc2a1..a34db82af7 100644
--- a/tests/test-recipes/metadata/python_build_run/run_test.py
+++ b/tests/test-recipes/metadata/python_build_run/run_test.py
@@ -1,20 +1,18 @@
 import os
 import json
-import glob
+from pathlib import Path


 def main():
-    prefix = os.environ['PREFIX']
-    info_files = glob.glob(os.path.join(prefix, 'conda-meta',
-                           'conda-build-test-python-build-run-1.0-py*0.json'))
+    info_files = list(Path(os.environ['PREFIX'], 'conda-meta').glob('conda-build-test-python-build-run-1.0-py*0.json'))
     assert len(info_files) == 1
-    info_file = info_files[0]
-    with open(info_file) as fh:
-        info = json.load(fh)
-    # one without the version, and another with the version
-    assert len(info['depends']) == 1, info['depends']
-    assert info['depends'][0].startswith('python ')
+    info = json.loads(info_files[0].read_text())
+    assert len(info['depends']) == 1
+
+    # python with version pin
+    python, = info['depends']
+    assert python.startswith('python ')


 if __name__ == '__main__':
diff --git a/tests/test-recipes/metadata/python_build_run/run_test.sh b/tests/test-recipes/metadata/python_build_run/run_test.sh
index ebf0ddfb90..34d6e0d89a 100644
--- a/tests/test-recipes/metadata/python_build_run/run_test.sh
+++ b/tests/test-recipes/metadata/python_build_run/run_test.sh
@@ -1,3 +1,5 @@
-conda list -p $PREFIX --canonical
-# Test the build string.
Should contain Python -conda list -p $PREFIX --canonical | grep "conda-build-test-python-build-run-1\.0-py.._0" +# show list +conda list -p "$PREFIX" --canonical + +# grep for package +conda list -p "$PREFIX" --canonical | grep "conda-build-test-python-build-run-1.0-py.*_0" diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 0812906e66..a74081667b 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -20,19 +20,16 @@ import conda from conda_build.conda_interface import url_path, LinkError, CondaError, cc_conda_build -from conda_build.conda_interface import linked import conda_build from binstar_client.commands import remove, show from binstar_client.errors import NotFound -from pkg_resources import parse_version import pytest import yaml import tarfile from conda_build import api, exceptions, __version__ -from conda_build.build import VersionOrder from conda_build.render import finalize_metadata from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, package_has_file, check_output_env, get_conda_operation_locks, rm_rf, @@ -42,9 +39,9 @@ OverLinkingError, OverDependingError) from conda_build.conda_interface import reset_context from conda.exceptions import ClobberError, CondaMultiError -from conda_build.conda_interface import conda_46, conda_47 +from conda_build.conda_interface import conda_47 -from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling +from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, numpy_installed # define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere empty_sections = os.path.join(metadata_dir, "empty_sections") @@ -103,8 +100,6 @@ def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - if 'unicode_all_over' in recipe and sys.version_info[0] == 2: - pytest.skip('unicode_all_over does not work on Python 2') api.build(recipe, config=testing_config) @@ -132,6 +127,7 @@ def test_ignore_prefix_files(testing_config, monkeypatch): # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. +# Need more time to figure the problem circumventing.. 
def test_ignore_some_prefix_files(testing_config, monkeypatch): recipe = os.path.join(metadata_dir, "_ignore_some_prefix_files") testing_config.activate = True @@ -351,11 +347,7 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = sorted({"32", *platforms}) - compilers = [ - "3.6", - "3.7", - pytest.param("2.7", marks=pytest.mark.skip("Failing for Python 2.7")), - ] + compilers = ["3.9", "3.10"] msvc_vers = ["14.0"] else: msvc_vers = [] @@ -476,6 +468,10 @@ def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config) +@pytest.mark.skipif( + sys.version_info >= (3, 10), + reason="Python 3.10+, py_compile terminates once it finds an invalid file", +) def test_compileall_compiles_all_good_files(testing_workdir, testing_config): output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] good_files = ['f1.py', 'f3.py'] @@ -501,15 +497,12 @@ def test_build_metadata_object(testing_metadata): api.build(testing_metadata) -def numpy_installed(): - return any([True for dist in linked(sys.prefix) if dist.name == 'numpy']) - - @pytest.mark.serial @pytest.mark.skipif(not numpy_installed(), reason="numpy not installed in base environment") def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') # this shows an error that is OK to ignore: + # (Is this Error still relevant) # PackagesNotFoundError: The following packages are missing from the target environment: # - cython @@ -727,14 +720,7 @@ def test_about_json_content(testing_metadata): assert 'tags' in about and about['tags'] == ["a", "b"] # this one comes in as a string - test type coercion assert 'identifiers' in about and about['identifiers'] == ["a"] - try: - assert 'env_vars' in about and about['env_vars'] - except AssertionError: - # new versions of conda support this, so we should raise errors. - if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'): - raise - else: - pass + assert 'env_vars' in about and about['env_vars'] assert 'root_pkgs' in about and about['root_pkgs'] @@ -776,8 +762,6 @@ def test_about_license_file_and_prelink_message(testing_workdir, testing_config, # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. -@pytest.mark.xfail(parse_version(conda.__version__) < parse_version("4.3.14"), - reason="new noarch supported starting with conda 4.3.14") def test_noarch_python_with_tests(testing_config): recipe = os.path.join(metadata_dir, "_noarch_python_with_tests") pkg = api.build(recipe, config=testing_config)[0] @@ -806,25 +790,6 @@ def test_legacy_noarch_python(testing_config): assert os.path.basename(os.path.dirname(output)) == 'noarch' -@pytest.mark.skipif(True, - reason="Re-enable when private application environments are fully implemented " - "in conda. 
" - "See https://github.com/conda/conda/issues/3912#issuecomment-374820599") -def test_preferred_env(testing_config): - recipe = os.path.join(metadata_dir, "_preferred_env") - output = api.build(recipe, config=testing_config)[0] - extra = json.loads(package_has_file(output, 'info/link.json').decode()) - assert 'preferred_env' in extra - assert 'name' in extra['preferred_env'] - assert 'executable_paths' in extra['preferred_env'] - exe_paths = extra['preferred_env']['executable_paths'] - if on_win: - assert exe_paths == ['Scripts/exepath1.bat', 'Scripts/exepath2.bat'] - else: - assert exe_paths == ['bin/exepath1', 'bin/exepath2'] - assert 'package_metadata_version' in extra - - @pytest.mark.sanity def test_skip_compile_pyc(testing_config): outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) @@ -995,22 +960,6 @@ def test_workdir_removal_warning(testing_config, caplog): assert "work dir is removed" in str(exc) -# @pytest.mark.serial -# @pytest.mark.skipif(not sys.platform.startswith('linux'), -# reason="cross compiler packages created only on Linux right now") -# @pytest.mark.xfail(VersionOrder(conda.__version__) < VersionOrder('4.3.2'), -# reason="not completely implemented yet") -# def test_cross_compiler(testing_workdir, testing_config, capfd): -# # TODO: testing purposes. Package from @mingwandroid's channel, copied to conda_build_test -# testing_config.channel_urls = ('conda_build_test', ) -# # activation is necessary to set the appropriate toolchain env vars -# testing_config.activate = True -# # testing_config.debug = True -# recipe_dir = os.path.join(metadata_dir, '_cross_helloworld') -# output = api.build(recipe_dir, config=testing_config)[0] -# assert output.startswith(os.path.join(testing_config.croot, 'linux-imx351uc')) - - @pytest.mark.sanity @pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") def test_append_python_app_osx(testing_config): @@ -1023,23 +972,6 @@ def test_append_python_app_osx(testing_config): api.build(recipe, config=testing_config) -# Not sure about this behavior. Basically, people need to realize that if they -# start with a recipe from disk, they should not then alter the metadata -# object. Later reparsing will clobber their edits to the object. The -# complicated thing is that these edits are indistinguishable from Jinja2 -# templating doing its normal thing. - -# def test_clobbering_manually_set_metadata_raises(testing_metadata, testing_workdir): -# api.output_yaml(testing_metadata, 'meta.yaml') -# metadata = api.render(testing_workdir)[0][0] -# # make the package meta dict out of sync with file contents -# metadata.meta['package']['name'] = 'steve' -# # re-render happens as part of build. 
We should see an error about clobbering our customized -# # meta dict -# with pytest.raises(ValueError): -# api.build(metadata) - - @pytest.mark.sanity def test_run_exports(testing_metadata, testing_config, testing_workdir): api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, notest=True) @@ -1119,7 +1051,7 @@ def test_run_exports_noarch_python(testing_metadata, testing_config): testing_metadata.meta['requirements']['run'] = ['python'] testing_metadata.meta['build']['noarch'] = 'python' testing_metadata.config.index = None - testing_metadata.config.variant["python"] = "3.6 with_run_exports" + testing_metadata.config.variant["python"] = "3.8 with_run_exports" m = finalize_metadata(testing_metadata) assert 'python 3.6 with_run_exports' in m.meta['requirements'].get('host', []) @@ -1238,13 +1170,6 @@ def test_unknown_selectors(testing_config): api.build(recipe, config=testing_config) -@pytest.mark.sanity -def test_extract_tarball_with_unicode_filename(testing_config): - """See https://github.com/conda/conda-build/pull/1779""" - recipe = os.path.join(metadata_dir, '_unicode_in_tarball') - api.build(recipe, config=testing_config) - - def test_failed_recipe_leaves_folders(testing_config, testing_workdir): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] @@ -1466,8 +1391,7 @@ def test_failed_patch_exits_build(testing_config): @pytest.mark.sanity -def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder( - testing_config): +def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config): # unsatisfiable; also not buildable (test_a recipe version is 2.0) testing_config.variant['test_a'] = "1.0" recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b') @@ -1489,8 +1413,6 @@ def test_provides_features_metadata(testing_config): assert index['provides_features'] == {'test2': 'also_ok'} -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="py-lief not available on win for Python 2.7") def test_overlinking_detection(testing_config): testing_config.activate = True testing_config.error_overlinking = True @@ -1509,8 +1431,6 @@ def test_overlinking_detection(testing_config): rm_rf(dest_bat) -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="py-lief not available on win for Python 2.7") def test_overlinking_detection_ignore_patterns(testing_config): testing_config.activate = True testing_config.error_overlinking = True @@ -1541,6 +1461,11 @@ def test_overdepending_detection(testing_config): @pytest.mark.skipif(sys.platform != "darwin", reason="macOS-only test (at present)") def test_macos_tbd_handling(testing_config): + """ + Test path handling after installation... The test case uses a Hello World + example in C/C++ for testing the installation of C libraries... 
+ """ + testing_config.activate = True testing_config.error_overlinking = True testing_config.error_overdepending = True @@ -1573,8 +1498,6 @@ def test_downstream_tests(testing_config): @pytest.mark.sanity -@pytest.mark.xfail(not conda_46, - reason="conda 4.6 changed logger level from info to warn") def test_warning_on_file_clobbering(testing_config, capfd): recipe_dir = os.path.join(metadata_dir, '_overlapping_files_warning') @@ -1589,7 +1512,7 @@ def test_warning_on_file_clobbering(testing_config, capfd): @pytest.mark.sanity -@pytest.mark.serial +@pytest.mark.skip(reason="conda-verify is deprecated because it is unsupported") def test_verify_bad_package(testing_config): from conda_verify.errors import PackageError recipe_dir = os.path.join(fail_dir, 'create_bad_folder_for_conda_verify') diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 2c1a3b0001..18cb57e6d9 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -60,18 +60,24 @@ def test_repo(prefix, repo, package, version, testing_workdir, testing_config): @pytest.mark.slow def test_name_with_version_specified(testing_workdir, testing_config): - api.skeletonize(packages='sympy', repo='pypi', version='0.7.5', - config=testing_config) - m = api.render('sympy/meta.yaml')[0][0] - assert m.version() == "0.7.5" + api.skeletonize( + packages="sympy", + repo="pypi", + version="1.10", + config=testing_config, + ) + m = api.render("sympy/meta.yaml")[0][0] + assert m.version() == "1.10" def test_pypi_url(testing_workdir, testing_config): - api.skeletonize('https://pypi.python.org/packages/source/s/sympy/' - 'sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9', - repo='pypi', config=testing_config) - m = api.render('sympy/meta.yaml')[0][0] - assert m.version() == "0.7.5" + api.skeletonize( + packages="https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29", + repo="pypi", + config=testing_config, + ) + m = api.render("sympy/meta.yaml")[0][0] + assert m.version() == "1.10" @pytest.fixture diff --git a/tests/test_post.py b/tests/test_post.py index a008c6656a..75c6bfc796 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -12,6 +12,10 @@ from .utils import add_mangling, metadata_dir +@pytest.mark.skipif( + sys.version_info >= (3, 10), + reason="Python 3.10+, py_compile terminates once it finds an invalid file", +) def test_compile_missing_pyc(testing_workdir): good_files = ['f1.py', 'f3.py'] bad_file = 'f2_bad.py' diff --git a/tests/utils.py b/tests/utils.py index 5e627299be..c3425f9cec 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,6 +8,11 @@ import pytest from conda_build.metadata import MetaData from conda_build.utils import on_win +from conda_build.conda_interface import linked + + +def numpy_installed(): + return any([True for dist in linked(sys.prefix) if dist.name == "numpy"]) def get_root_dir(): From 9951c3ee0e208cf9c7e3c42a0b88b44560f04989 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 15 Dec 2022 10:46:52 -0600 Subject: [PATCH 009/366] Use GH Action instead of `on` path filtering (#4675) * Use GH Action instead of `on` path filtering * Include git clone for push events * Update conditional syntax * Remove path filtering * Split result aggregation into separate job * Always run aggregate if a code change Co-authored-by: Jannis Leidel --- .github/workflows/tests-skip.yml | 46 -------------------- .github/workflows/tests.yml | 73 ++++++++++++++++++++++++-------- 2 files changed, 55 insertions(+), 64 deletions(-) delete mode 
100644 .github/workflows/tests-skip.yml diff --git a/.github/workflows/tests-skip.yml b/.github/workflows/tests-skip.yml deleted file mode 100644 index 9ae5afbac6..0000000000 --- a/.github/workflows/tests-skip.yml +++ /dev/null @@ -1,46 +0,0 @@ -# this is the sibling workflow to tests-skip.yml, it is required to work around -# the skipped but required checks issue: -# https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks -name: Tests - -on: - # NOTE: github.event context is push payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push - push: - branches: - - main - - feature/** - paths-ignore: # run if src/tests/recipe are modified - - conda_build/** - - tests/** - - '*.py' - - recipe/** - - .github/workflows/tests.yml - - # NOTE: github.event context is pull_request payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request - pull_request: - paths-ignore: - - conda_build/** - - tests/** - - '*.py' - - recipe/** - - .github/workflows/tests.yml - -concurrency: - # Concurrency group that uses the workflow name and PR number if available - # or commit SHA as a fallback. If a new build is triggered under that - # concurrency group while a previous build is running it will be canceled. - # Repeated pushes to a PR will cancel all previous builds, while multiple - # merges to main will not cancel. - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} - cancel-in-progress: true - -jobs: - # required check - analyze: - name: Analyze results - if: always() - runs-on: ubuntu-latest - steps: - - run: 'echo "Nothing to analyze"' diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d0637b3e25..7bc848a0aa 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,22 +10,10 @@ on: branches: - main - feature/** - paths: # run if src/tests/recipe are modified - - conda_build/** - - tests/** - - '*.py' - - recipe/** - - .github/workflows/tests.yml # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: - paths: - - conda_build/** - - tests/** - - '*.py' - - recipe/** - - .github/workflows/tests.yml concurrency: # Concurrency group that uses the workflow name and PR number if available @@ -37,7 +25,35 @@ concurrency: cancel-in-progress: true jobs: + # detect whether any code changes are included in this PR + changes: + runs-on: ubuntu-latest + permissions: + pull-requests: read + outputs: + code: ${{ steps.filter.outputs.code }} + steps: + - uses: actions/checkout@v3 + # dorny/paths-filter needs git clone for push events + # https://github.com/marketplace/actions/paths-changes-filter#supported-workflows + if: github.event_name == 'push' + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 + id: filter + with: + filters: | + code: + - 'conda_build/**' + - 'tests/**' + - '*.py' + - 'recipe/**' + - '.github/workflows/tests.yml' + + # linux test suite linux: + # only run test suite if there are code changes + needs: changes + if: needs.changes.outputs.code == 'true' + runs-on: ubuntu-latest defaults: run: @@ -125,7 +141,12 @@ jobs: name: Linux_replay-py-${{ matrix.python-version }}_${{ matrix.conda-version }}_${{ matrix.test-type 
}} path: pytest-replay/ + # windows test suite windows: + # only run test suite if there are code changes + needs: changes + if: needs.changes.outputs.code == 'true' + runs-on: windows-2019 strategy: fail-fast: false @@ -287,7 +308,12 @@ jobs: name: Win-${{ env.CONDA_VERSION }}-Python${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} if: always() + # macos test suite macos: + # only run test suite if there are code changes + needs: changes + if: needs.changes.outputs.code == 'true' + runs-on: macos-11 defaults: run: @@ -377,11 +403,12 @@ jobs: name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} if: always() - # required check - analyze: - name: Analyze results - needs: [windows, linux, macos] - if: always() + # aggregate and upload + aggregate: + # only aggregate test suite if there are code changes + needs: [changes, linux, windows, macos] + if: needs.changes.outputs.code == 'true' && always() + runs-on: ubuntu-latest steps: - name: Download test results @@ -401,11 +428,21 @@ jobs: with: paths: ./test-results-${{ github.sha }}-**/test-report*.xml + # required check + analyze: + name: Analyze results + needs: [linux, windows, macos, aggregate] + if: always() + + runs-on: ubuntu-latest + steps: - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe with: + allowed-skips: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }} + # canary builds build: name: Canary Build needs: [analyze] From b62c7fa87c635669608b9be130f68772a3b157f9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 16 Dec 2022 03:04:35 -0600 Subject: [PATCH 010/366] Move tar-ing into separate step to avoid shadowing test results (#4679) --- .github/workflows/tests.yml | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7bc848a0aa..ff19c9d304 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -117,7 +117,6 @@ jobs: -m "serial" tests \ ${{ env.pytest-replay }} \ --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results - name: Run Parallel tests if: matrix.test-type == 'parallel' @@ -133,7 +132,9 @@ jobs: -m "not serial" tests \ ${{ env.pytest-replay }} \ --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results + + - name: Tar Allure Results + run: tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay uses: actions/upload-artifact@v2 @@ -279,8 +280,6 @@ jobs: set LUA= set R= pytest --color=yes -vv -n 0 --basetemp %UserProfile%\cbtmp_serial --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results - shell: cmd - name: Run Parallel Tests @@ -295,12 +294,14 @@ jobs: set LUA= set R= pytest --color=yes -vv -n auto --basetemp %UserProfile%\cbtmp --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results shell: cmd env: VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin LIB: + - name: Tar Allure Results + run: tar -zcf allure-results.tar.gz allure-results + - name: Upload Pytest Replay uses: actions/upload-artifact@v2 with: @@ -379,7 +380,6 @@ jobs: source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" conda install 
conda-verify -y pytest --color=yes -v -n 0 --basetemp /tmp/cb_serial --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results - name: Run Parallel Tests if: matrix.test-type == 'parallel' @@ -394,7 +394,9 @@ jobs: conda create -n blarg3 -yq python=3.7 conda create -n blarg4 -yq python nomkl numpy pandas svn pytest --color=yes -v -n auto --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results - tar -zcf allure-results.tar.gz allure-results + + - name: Tar Allure Results + run: tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay uses: actions/upload-artifact@v2 From 16ab6a5bd2de792c2b49da803a9b2374960a16dd Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 20 Dec 2022 00:12:59 -0600 Subject: [PATCH 011/366] Update action versions to avoid Node.js 12 deprecation notices (#4678) * Bump version for actions/checkout * Bump version for actions/upload-artifact --- .github/workflows/docs.yml | 2 +- .github/workflows/tests.yml | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 42e73c5d35..18ea421b87 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ff19c9d304..8eb07ac7c7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -77,7 +77,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -137,7 +137,7 @@ jobs: run: tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: Linux_replay-py-${{ matrix.python-version }}_${{ matrix.conda-version }}_${{ matrix.test-type }} path: pytest-replay/ @@ -168,7 +168,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -303,7 +303,7 @@ jobs: run: tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: path: ${{ github.workspace }}/pytest-replay name: Win-${{ env.CONDA_VERSION }}-Python${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} @@ -333,7 +333,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -399,7 +399,7 @@ jobs: run: tar -zcf allure-results.tar.gz allure-results - name: Upload Pytest Replay - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: path: ${{ github.workspace }}/pytest-replay name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ env. 
parameters.serial_or_parallel }} From db9a728a9e4e6cfc895637ca3221117970fc2663 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 22 Dec 2022 11:29:25 -0600 Subject: [PATCH 012/366] Remove logic branching for `conda_43`/`conda_44`/`conda_45`/`conda_46`/`conda_47` (#4677) --- conda_build/build.py | 90 ++++++++++-------------------- conda_build/conda_interface.py | 10 ++-- conda_build/render.py | 17 +++--- conda_build/utils.py | 89 +++++++---------------------- news/4677-remove-pre-conda413-code | 19 +++++++ tests/test_api_build.py | 10 ---- tests/test_index.py | 25 +++------ 7 files changed, 92 insertions(+), 168 deletions(-) create mode 100644 news/4677-remove-pre-conda413-code diff --git a/conda_build/build.py b/conda_build/build.py index f264a6b549..2722c470c5 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -32,7 +32,7 @@ import conda_package_handling.api # used to get version -from .conda_interface import env_path_backup_var_exists, conda_45, conda_46 +from .conda_interface import env_path_backup_var_exists from .conda_interface import prefix_placeholder from .conda_interface import TemporaryDirectory from .conda_interface import VersionOrder @@ -1811,15 +1811,11 @@ def scan_metadata(path): def _write_sh_activation_text(file_handle, m): cygpath_prefix = "$(cygpath -u " if utils.on_win else "" cygpath_suffix = " )" if utils.on_win else "" - activate_path = ''.join((cygpath_prefix, - os.path.join(utils.root_script_dir, 'activate').replace('\\', '\\\\'), - cygpath_suffix)) - - if conda_46: - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' - file_handle.write( - f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" - ) + + py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' + file_handle.write( + f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + ) if m.is_cross: # HACK: we need both build and host envs "active" - i.e. on PATH, @@ -1846,24 +1842,16 @@ def _write_sh_activation_text(file_handle, m): host_prefix_path = ''.join((cygpath_prefix, m.config.host_prefix.replace('\\', '\\\\'), cygpath_suffix)) - if conda_46: - file_handle.write(f"conda activate \"{host_prefix_path}\"\n") - else: - file_handle.write('source "{}" "{}"\n' .format(activate_path, host_prefix_path)) - file_handle.write('unset CONDA_PATH_BACKUP\n') - file_handle.write('export CONDA_MAX_SHLVL=2\n') + file_handle.write(f"conda activate \"{host_prefix_path}\"\n") # Write build prefix activation AFTER host prefix, so that its executables come first build_prefix_path = ''.join((cygpath_prefix, m.config.build_prefix.replace('\\', '\\\\'), cygpath_suffix)) - if conda_46: - # Do not stack against base env when not cross. - stack = '--stack' if m.is_cross else '' - file_handle.write(f"conda activate {stack} \"{build_prefix_path}\"\n") - else: - file_handle.write(f'source "{activate_path}" "{build_prefix_path}"\n') + # Do not stack against base env when not cross. 
+ stack = '--stack' if m.is_cross else '' + file_handle.write(f"conda activate {stack} \"{build_prefix_path}\"\n") from conda_build.os_utils.external import find_executable ccache = find_executable('ccache', m.config.build_prefix, False) @@ -2723,35 +2711,27 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file if not utils.on_win: tf.write(f'set {trace}-e\n') if metadata.config.activate and not metadata.name() == 'conda': - ext = ".bat" if utils.on_win else "" - if conda_46: - if utils.on_win: - tf.write( - 'set "CONDA_SHLVL=" ' - '&& @CALL {}\\condabin\\conda_hook.bat {}' - '&& set CONDA_EXE={python_exe}' - '&& set CONDA_PYTHON_EXE={python_exe}' - '&& set _CE_I={}' - '&& set _CE_M=-m' - '&& set _CE_CONDA=conda\n'.format( - sys.prefix, - '--dev' if metadata.config.debug else '', - "-i" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "", - python_exe=sys.executable - ) - ) - else: - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' - tf.write( - f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + if utils.on_win: + tf.write( + 'set "CONDA_SHLVL=" ' + '&& @CALL {}\\condabin\\conda_hook.bat {}' + '&& set CONDA_EXE={python_exe}' + '&& set CONDA_PYTHON_EXE={python_exe}' + '&& set _CE_I={}' + '&& set _CE_M=-m' + '&& set _CE_CONDA=conda\n'.format( + sys.prefix, + '--dev' if metadata.config.debug else '', + "-i" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "", + python_exe=sys.executable ) - tf.write(f'conda activate "{metadata.config.test_prefix}"\n') + ) else: - tf.write('{source} "{conda_root}activate{ext}" "{test_env}"\n'.format( - conda_root=utils.root_script_dir + os.path.sep, - source="call" if utils.on_win else "source", - ext=ext, - test_env=metadata.config.test_prefix)) + py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' + tf.write( + f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" + ) + tf.write(f'conda activate "{metadata.config.test_prefix}"\n') if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") # In-case people source this, it's essential errors are not fatal in an interactive shell. 
@@ -3414,13 +3394,5 @@ def is_package_built(metadata, env, include_local=True): spec = MatchSpec(name=metadata.name(), version=metadata.version(), build=metadata.build_id()) - if conda_45: - from conda.api import SubdirData - return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) - else: - index, _, _ = get_build_index(subdir=subdir, bldpkgs_dir=metadata.config.bldpkgs_dir, - output_folder=metadata.config.output_folder, channel_urls=urls, - debug=metadata.config.debug, verbose=metadata.config.verbose, - locking=metadata.config.locking, timeout=metadata.config.timeout, - clear_cache=True) - return any(spec.match(prec) for prec in index.values()) + from conda.api import SubdirData + return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 43b026e926..9a63f9b3ef 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -36,10 +36,12 @@ def try_exports(module, attr): conda_48 = True conda_411 = True -if conda_44: - from conda.exports import display_actions, execute_actions, execute_plan, install_actions -else: - from conda.plan import display_actions, execute_actions, execute_plan, install_actions +from conda.exports import ( # noqa: E402 + display_actions, + execute_actions, + execute_plan, + install_actions, +) display_actions, execute_actions, execute_plan = display_actions, execute_actions, execute_plan install_actions = install_actions diff --git a/conda_build/render.py b/conda_build/render.py index e89103e2a5..9f4e3c7cd2 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -20,7 +20,6 @@ TemporaryDirectory) from .conda_interface import execute_actions from .conda_interface import pkgs_dirs -from .conda_interface import conda_43 from .conda_interface import specs_from_url from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 @@ -68,14 +67,12 @@ def bldpkg_path(m): def actions_to_pins(actions): - specs = [] - if conda_43: - spec_name = lambda x: x.dist_name - else: - spec_name = lambda x: str(x) - if 'LINK' in actions: - specs = [' '.join(spec_name(spec).split()[0].rsplit('-', 2)) for spec in actions['LINK']] - return specs + if "LINK" in actions: + return [ + " ".join(spec.dist_name.split()[0].rsplit("-", 2)) + for spec in actions["LINK"] + ] + return [] def _categorize_deps(m, specs, exclude_pattern, variant): @@ -310,7 +307,7 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files # ran through all pkgs_dirs, and did not find package or folder. Download it. # TODO: this is a vile hack reaching into conda's internals. Replace with # proper conda API when available. - if not pkg_loc and conda_43: + if not pkg_loc: try: pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0] # the conda 4.4 API uses a single `link_prefs` kwarg diff --git a/conda_build/utils.py b/conda_build/utils.py index c398b7fda6..ded2ac728a 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -50,7 +50,7 @@ from .conda_interface import StringIO # noqa from .conda_interface import VersionOrder, MatchSpec # noqa from .conda_interface import cc_conda_build # noqa -from .conda_interface import conda_43, conda_46, Dist # noqa +from .conda_interface import Dist # noqa from .conda_interface import context # noqa from .conda_interface import download, TemporaryDirectory, get_conda_channel, CondaHTTPError # noqa # NOQA because it is not used in this file. 
@@ -1504,36 +1504,7 @@ def filter_info_files(files_list, prefix): def rm_rf(path, config=None): - if conda_46: - return _rm_rf(path) - if os.path.isdir(path): - try: - # subprocessing to delete large folders can be quite a bit faster - if on_win: - subprocess.check_call(f'rd /s /q {path}', shell=True) - else: - try: - os.makedirs('.empty') - except: - pass - del_dir_cmd = 'rsync -a --delete .empty {}/' - subprocess.check_call(del_dir_cmd.format(path).split()) - try: - shutil.rmtree('.empty') - except: - pass - # we don't really care about errors that much. People can and should - # clean out their folders once in a while with "purge" - except: - pass - - conda_log_level = logging.WARN - if config and config.debug: - conda_log_level = logging.DEBUG - with LoggingContext(conda_log_level): - # this clears out the path from conda's cache, which otherwise thinks - # that things are still installed here - _rm_rf(path) + return _rm_rf(path) # https://stackoverflow.com/a/31459386/1170370 @@ -1829,19 +1800,17 @@ def match_peer_job(target_matchspec, other_m, this_m=None): """target_matchspec comes from the recipe. target_variant is the variant from the recipe whose deps we are matching. m is the peer job, which must satisfy conda and also have matching keys for any keys that are shared between target_variant and m.config.variant""" - match_dict = {'name': other_m.name(), - 'version': other_m.version(), - 'build': '', } - if conda_43: - match_dict = Dist(name=match_dict['name'], - dist_name='-'.join((match_dict['name'], - match_dict['version'], - match_dict['build'])), - version=match_dict['version'], - build_string=match_dict['build'], - build_number=other_m.build_number(), - channel=None) - matchspec_matches = target_matchspec.match(match_dict) + name, version, build = other_m.name(), other_m.version(), "" + matchspec_matches = target_matchspec.match( + Dist( + name=name, + dist_name=f"{name}-{version}-{build}", + version=version, + build_string=build, + build_number=other_m.build_number(), + channel=None, + ) + ) variant_matches = True if this_m: @@ -1878,10 +1847,7 @@ def sha256_checksum(filename, buffersize=65536): def write_bat_activation_text(file_handle, m): - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda_hook.bat"\n'.format( - conda_root=root_script_dir, - )) + file_handle.write(f'call "{root_script_dir}\\..\\condabin\\conda_hook.bat"\n') if m.is_cross: # HACK: we need both build and host envs "active" - i.e. on PATH, # and with their activate.d scripts sourced. 
Conda only @@ -1905,29 +1871,14 @@ def write_bat_activation_text(file_handle, m): os.makedirs(dirname(history_file)) open(history_file, 'a').close() - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda.bat" activate "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.host_prefix, - )) - else: - file_handle.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.host_prefix)) - # removing this placeholder should make conda double-activate with conda 4.3 - file_handle.write('set "PATH=%PATH:CONDA_PATH_PLACEHOLDER;=%"\n') - file_handle.write('set CONDA_MAX_SHLVL=2\n') + file_handle.write( + f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate "{m.config.host_prefix}"\n' + ) # Write build prefix activation AFTER host prefix, so that its executables come first - if conda_46: - file_handle.write('call "{conda_root}\\..\\condabin\\conda.bat" activate --stack "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.build_prefix, - )) - else: - file_handle.write('call "{conda_root}\\activate.bat" "{prefix}"\n'.format( - conda_root=root_script_dir, - prefix=m.config.build_prefix)) + file_handle.write( + f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate --stack "{m.config.build_prefix}"\n' + ) from conda_build.os_utils.external import find_executable ccache = find_executable('ccache', m.config.build_prefix, False) if ccache: diff --git a/news/4677-remove-pre-conda413-code b/news/4677-remove-pre-conda413-code new file mode 100644 index 0000000000..27eb7f9aa3 --- /dev/null +++ b/news/4677-remove-pre-conda413-code @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Removed conda <4.13 logic. (#4677) + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index a74081667b..8c76d8df08 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -39,7 +39,6 @@ OverLinkingError, OverDependingError) from conda_build.conda_interface import reset_context from conda.exceptions import ClobberError, CondaMultiError -from conda_build.conda_interface import conda_47 from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, numpy_installed @@ -781,15 +780,6 @@ def test_noarch_python_1(testing_config): assert 'package_metadata_version' in extra -@pytest.mark.sanity -@pytest.mark.xfail(conda_47, reason="parallel verify/execute in conda 4.7 breaks legacy noarch, which depends on having the env files present before pre-link scripts are run.") -def test_legacy_noarch_python(testing_config): - output = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"), - config=testing_config)[0] - # make sure that the package is going into the noarch folder - assert os.path.basename(os.path.dirname(output)) == 'noarch' - - @pytest.mark.sanity def test_skip_compile_pyc(testing_config): outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) diff --git a/tests/test_index.py b/tests/test_index.py index 628717df77..7d2e68579c 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -13,7 +13,7 @@ import conda_package_handling.api import pytest from unittest import mock -from conda_build.conda_interface import conda_47, context +from conda_build.conda_interface import context from conda_build.utils import copy_into, rm_rf import conda_build.api @@ -1134,29 +1134,22 @@ def test_current_index_reduces_space(): "one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2", } # conda 4.7 
removes .tar.bz2 files in favor of .conda files - if conda_47: - tar_bz2_keys.remove("one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2") + tar_bz2_keys.remove("one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2") # .conda files will replace .tar.bz2 files. Older packages that are necessary for satisfiability will remain assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys - if conda_47: - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.3.10-h7b6447c_3.conda" - } + assert set(trimmed_repodata["packages.conda"].keys()) == { + "one-gets-filtered-1.3.10-h7b6447c_3.conda" + } # we can keep more than one version series using a collection of keys trimmed_repodata = conda_build.index._build_current_repodata( "linux-64", repodata, {"one-gets-filtered": ["1.2", "1.3"]} ) - if conda_47: - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.2.11-h7b6447c_3.conda", - "one-gets-filtered-1.3.10-h7b6447c_3.conda", - } - else: - assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys | { - "one-gets-filtered-1.2.11-h7b6447c_3.tar.bz2" - } + assert set(trimmed_repodata["packages.conda"].keys()) == { + "one-gets-filtered-1.2.11-h7b6447c_3.conda", + "one-gets-filtered-1.3.10-h7b6447c_3.conda", + } def test_current_index_version_keys_keep_older_packages(testing_workdir): From 37829e7b0d82fdfa574a9f01962acb4d9e40f461 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 3 Jan 2023 10:41:59 -0600 Subject: [PATCH 013/366] Remove custom temporary directories (#4681) * Remove custom temporary directories * Mark test as flaky --- .github/workflows/tests.yml | 24 ++++++------------------ tests/test_api_build.py | 20 +++++--------------- 2 files changed, 11 insertions(+), 33 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8eb07ac7c7..04e2ee1798 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -111,7 +111,7 @@ jobs: --color=yes \ -vv \ -n 0 \ - --basetemp /tmp/cb_serial \ + --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ --cov conda_build \ --cov-report xml \ -m "serial" tests \ @@ -125,7 +125,7 @@ jobs: --color=yes \ -vv \ -n auto \ - --basetemp /tmp/cb \ + --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ --cov conda_build \ --cov-append \ --cov-report xml \ @@ -172,14 +172,6 @@ jobs: with: fetch-depth: 0 - - name: Set temp dirs correctly - shell: cmd - # https://github.com/actions/virtual-environments/issues/712 - run: | - echo "TMPDIR=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV - echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV - echo "TMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV - - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: @@ -256,10 +248,6 @@ jobs: pip install allure-pytest conda-build --version pushd .. 
&& git clone https://github.com/conda/conda_build_test_recipe && popd - mkdir %UserProfile%\cbtmp_serial - mkdir %UserProfile%\cbtmp - for /d %%F in (%UserProfile%\cbtmp_serial\*) do rd /s /q "%%F" - for /d %%F in (%UserProfile%\cbtmp\*) do rd /s /q "%%F" call conda create -n blarg -yq --download-only python=2.7||exit 1 call conda create -n blarg -yq --download-only python=3.8||exit 1 call conda create -n blarg -yq --download-only python cmake||exit 1 @@ -279,7 +267,7 @@ jobs: set PERL= set LUA= set R= - pytest --color=yes -vv -n 0 --basetemp %UserProfile%\cbtmp_serial --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} --alluredir=allure-results + pytest --color=yes -vv -n 0 --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} --alluredir=allure-results shell: cmd - name: Run Parallel Tests @@ -293,7 +281,7 @@ jobs: set PERL= set LUA= set R= - pytest --color=yes -vv -n auto --basetemp %UserProfile%\cbtmp --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} --alluredir=allure-results + pytest --color=yes -vv -n auto --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} --alluredir=allure-results shell: cmd env: VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin @@ -379,7 +367,7 @@ jobs: set -e -u source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" conda install conda-verify -y - pytest --color=yes -v -n 0 --basetemp /tmp/cb_serial --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + pytest --color=yes -v -n 0 --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results - name: Run Parallel Tests if: matrix.test-type == 'parallel' @@ -393,7 +381,7 @@ jobs: conda create -n blarg1 -yq python=2.7 conda create -n blarg3 -yq python=3.7 conda create -n blarg4 -yq python nomkl numpy pandas svn - pytest --color=yes -v -n auto --basetemp /tmp/cb --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + pytest --color=yes -v -n auto --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results - name: Tar Allure Results run: tar -zcf allure-results.tar.gz allure-results diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 8c76d8df08..dbb9e05012 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -3,9 +3,6 @@ """ This module tests the build API. These are high-level integration tests. 
""" - -import base64 -import locale from collections import OrderedDict from glob import glob import logging @@ -1160,6 +1157,9 @@ def test_unknown_selectors(testing_config): api.build(recipe, config=testing_config) +# the locks can be very flaky on GitHub Windows Runners +# https://github.com/conda/conda-build/issues/4685 +@pytest.mark.flaky(rerun=5, reruns_delay=2) def test_failed_recipe_leaves_folders(testing_config, testing_workdir): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] @@ -1168,20 +1168,10 @@ def test_failed_recipe_leaves_folders(testing_config, testing_workdir): api.build(m) assert os.path.isdir(m.config.build_folder), 'build folder was removed' assert os.listdir(m.config.build_folder), 'build folder has no files' + # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations - any_locks = False - locks_list = set() - locale.getpreferredencoding(False) - for lock in locks: - if os.path.isfile(lock.lock_file): - any_locks = True - dest_path = base64.b64decode(os.path.basename(lock.lock_file)) - if hasattr(dest_path, 'decode'): - dest_path = dest_path.decode(sys.getfilesystemencoding(), errors='backslashreplace') - locks_list.add((lock.lock_file, dest_path)) - assert not any_locks, "remaining locks:\n{}".format('\n'.join('->'.join((l, r)) - for (l, r) in locks_list)) + assert [lock.lock_file for lock in locks if os.path.isfile(lock.lock_file)] == [] @pytest.mark.sanity From ebda76c221646ca49811f8b8c6f555a4aa792e6c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 4 Jan 2023 14:50:24 -0600 Subject: [PATCH 014/366] Combine license tests into parameterized test (#4683) --- tests/test_license_family.py | 194 +++++++++++------------------------ 1 file changed, 60 insertions(+), 134 deletions(-) diff --git a/tests/test_license_family.py b/tests/test_license_family.py index 775e5898a8..bbc4a1a880 100644 --- a/tests/test_license_family.py +++ b/tests/test_license_family.py @@ -1,142 +1,68 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from conda_build.license_family import guess_license_family, allowed_license_families, ensure_valid_license_family +from conda_build.license_family import guess_license_family, ensure_valid_license_family import pytest - -def test_new_vs_previous_guesses_match(): +LICENSE_FAMILY = { + # AGPL + "Affero GPL": "AGPL", + # APACHE + "Apache License (== 2.0)": "APACHE", + "Apache License 2.0": "APACHE", + # BSD + "BSD License": "BSD", + "BSD_2_clause + file LICENSE": "BSD", + "BSD_3_clause + file LICENSE": "BSD", + # CC + "CC0": "CC", + # GPL + "GPL": "GPL", # previously, GPL3 was incorrectly preferred + # GPL2 + "GNU General Public License v2 or later (GPLv2+)": "GPL2", + "GPL-2 | file LICENSE": "GPL2", + "GPL-2": "GPL2", + # GPL3 + "GNU General Public License some stuff then a 3 then stuff": "GPL3", + "GPL (>= 2) | file LICENSE": "GPL3", + "GPL (>= 2)": "GPL3", + "GPL (>= 3) | file LICENCE": "GPL3", + "GPL (>= 3)": "GPL3", + "GPL 3": "GPL3", + "GPL-2 | GPL-3 | file LICENSE": "GPL3", # previously, Public-Domain was incorrectly preferred + "GPL-2 | GPL-3": "GPL3", + "GPL-3 | file LICENSE": "GPL3", + "GPL-3": "GPL3", + # LGPL + "BSD License and GNU Library or Lesser General Public License (LGPL)": "LGPL", + "GNU Lesser General Public License (LGPL)": "LGPL", + "GNU Lesser General Public License": "LGPL", + "LGPL (>= 2)": "LGPL", + "LGPL-2": "LGPL", + "LGPL-2.1": "LGPL", + "LGPL-3": "LGPL", + # MIT + "MIT + file 
LICENSE | Unlimited": "MIT", + "MIT + file LICENSE": "MIT", + "MIT License": "MIT", + "Old MIT": "MIT", + "Unlimited": "MIT", # unfortunate corner case + # NONE + None: "NONE", + # OTHER + "BSL-1.0": "OTHER", + "Custom free software license": "OTHER", + "file LICENSE (FOSS)": "OTHER", + "Free software (X11 License)": "OTHER", + "Lucent Public License": "OTHER", + "Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)": "OTHER", + "zlib (http://zlib.net/zlib_license.html)": "OTHER", +} + + +@pytest.mark.parametrize("license,family", LICENSE_FAMILY.items()) +def test_guess_license_family(license, family): """Test cases where new and deprecated functions match""" - - cens = "GPL (>= 3)" - fam = guess_license_family(cens) - assert fam == 'GPL3' - - cens = 'GNU Lesser General Public License' - fam = guess_license_family(cens) - assert fam == 'LGPL', f'guess_license_family({cens}) is {fam}' - - cens = 'GNU General Public License some stuff then a 3 then stuff' - fam = guess_license_family(cens) - assert fam == 'GPL3', f'guess_license_family({cens}) is {fam}' - - cens = 'Affero GPL' - fam = guess_license_family(cens) - assert fam == 'AGPL', f'guess_license_family({cens}) is {fam}' - - -def test_new_vs_previous_guess_differ_gpl(): - """Test cases where new and deprecated functions differ - - license = 'GPL' - New guess is GPL, which is an allowed family, hence the most accurate. - Previously, GPL3 was chosen over GPL - """ - cens = "GPL" - fam = guess_license_family(cens) - assert fam == 'GPL' - - -def test_new_vs_previous_guess_differ_multiple_gpl(): - """Test cases where new and deprecated functions differ - - license = 'GPL-2 | GPL-3 | file LICENSE' - New guess is GPL-3, which is the most accurate. - Previously, somehow Public-Domain is closer than GPL2 or GPL3! - """ - cens = 'GPL-2 | GPL-3 | file LICENSE' - fam = guess_license_family(cens) - assert fam == 'GPL3', f'guess_license_family_from_index({cens}) is {fam}' - - -def test_old_warnings_no_longer_fail(): - # the following previously threw warnings. 
Came from r/linux-64 - warnings = {'MIT License', 'GNU Lesser General Public License (LGPL)', - 'GPL-2 | GPL-3 | file LICENSE', 'GPL (>= 3) | file LICENCE', - 'BSL-1.0', 'GPL (>= 2)', 'file LICENSE (FOSS)', - 'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', - 'MIT + file LICENSE', 'GPL-2 | GPL-3', 'GPL (>= 2) | file LICENSE', - 'Unlimited', 'GPL-3 | file LICENSE', - 'GNU General Public License v2 or later (GPLv2+)', 'LGPL-2.1', - 'LGPL-2', 'LGPL-3', 'GPL', - 'zlib (http://zlib.net/zlib_license.html)', - 'Free software (X11 License)', 'Custom free software license', - 'Old MIT', 'GPL 3', 'Apache License (== 2.0)', 'GPL (>= 3)', None, - 'LGPL (>= 2)', 'BSD_2_clause + file LICENSE', 'GPL-3', 'GPL-2', - 'BSD License and GNU Library or Lesser General Public License (LGPL)', - 'GPL-2 | file LICENSE', 'BSD_3_clause + file LICENSE', 'CC0', - 'MIT + file LICENSE | Unlimited', 'Apache License 2.0', - 'BSD License', 'Lucent Public License'} - - for cens in warnings: - fam = guess_license_family(cens) - print(f'{cens}:{fam}') - assert fam in allowed_license_families - - -def test_gpl2(): - licenses = {'GPL-2', 'GPL-2 | file LICENSE', - 'GNU General Public License v2 or later (GPLv2+)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'GPL2' - - -def test_not_gpl2(): - licenses = {'GPL (>= 2)', 'LGPL (>= 2)', 'GPL', - 'LGPL-3', 'GPL 3', 'GPL (>= 3)', - 'Apache License (== 2.0)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam != 'GPL2' - - -def test_gpl3(): - licenses = {'GPL 3', 'GPL-3', 'GPL-3 | file LICENSE', - 'GPL-2 | GPL-3 | file LICENSE', 'GPL (>= 3) | file LICENCE', - 'GPL (>= 2)', 'GPL-2 | GPL-3', 'GPL (>= 2) | file LICENSE'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'GPL3' - - -def test_lgpl(): - licenses = {'GNU Lesser General Public License (LGPL)', 'LGPL-2.1', - 'LGPL-2', 'LGPL-3', 'LGPL (>= 2)', - 'BSD License and GNU Library or Lesser General Public License (LGPL)'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'LGPL' - - -def test_mit(): - licenses = {'MIT License', 'MIT + file LICENSE', 'Old MIT'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'MIT' - - -def test_unlimited(): - """The following is an unfortunate case where MIT is in UNLIMITED - - We could add words to filter out, but it would be hard to keep track of... - """ - cens = 'Unlimited' - assert guess_license_family(cens) == 'MIT' - - -def test_cc(): - fam = guess_license_family('CC0') - assert fam == 'CC' - - -def test_other(): - licenses = {'file LICENSE (FOSS)', - 'Open Source (http://www.libpng.org/pub/png/src/libpng-LICENSE.txt)', - 'zlib (http://zlib.net/zlib_license.html)', - 'Free software (X11 License)', 'Custom free software license'} - for cens in licenses: - fam = guess_license_family(cens) - assert fam == 'OTHER' + assert guess_license_family(license) == family def test_ensure_valid_family(testing_metadata): From fc243e6a6df2dfa7b82a5114a0046601f0b08793 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Fri, 6 Jan 2023 07:34:39 -0500 Subject: [PATCH 015/366] Recompress .tar.bz2 test archives; skip enough tests to pass before getting cancelled. 
(#4694) * upload test data * more consistent pytest replay names * compress test data as bz2 * also skip slow tests --- .github/workflows/tests.yml | 34 +++++++++++++++--- tests/archives/flask-0.11.1-py_0.tar.bz2 | Bin 30720 -> 5334 bytes tests/archives/fly-2.5.2-0.tar.bz2 | Bin 40960 -> 6297 bytes tests/archives/nano-2.4.1-0.tar.bz2 | Bin 10240 -> 790 bytes .../spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 | Bin 40960 -> 4460 bytes .../test_index_of_removed_pkg-1.0-1.tar.bz2 | Bin 20480 -> 2236 bytes tests/test_index.py | 6 ++-- 7 files changed, 33 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 04e2ee1798..d9a0531138 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -134,13 +134,22 @@ jobs: --alluredir=allure-results - name: Tar Allure Results + if: always() run: tar -zcf allure-results.tar.gz allure-results + - name: Upload Allure Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: allure-Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} + path: allure-results.tar.gz + - name: Upload Pytest Replay uses: actions/upload-artifact@v3 with: - name: Linux_replay-py-${{ matrix.python-version }}_${{ matrix.conda-version }}_${{ matrix.test-type }} + name: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: pytest-replay/ + if: always() # windows test suite windows: @@ -272,6 +281,7 @@ jobs: - name: Run Parallel Tests if: matrix.test-type == 'parallel' + # coverage disabled to decrease runtime run: | echo on set PATH @@ -281,20 +291,28 @@ jobs: set PERL= set LUA= set R= - pytest --color=yes -vv -n auto --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" --cov conda_build --cov-append --cov-report xml -m "not serial" ${{ env.pytest-replay }} --alluredir=allure-results + pytest --color=yes -vv -n auto --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" -m "not serial and not slow" ${{ env.pytest-replay }} --alluredir=allure-results shell: cmd env: VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin LIB: - name: Tar Allure Results + if: always() run: tar -zcf allure-results.tar.gz allure-results + - name: Upload Allure Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: allure-Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} + path: allure-results.tar.gz + - name: Upload Pytest Replay uses: actions/upload-artifact@v3 with: path: ${{ github.workspace }}/pytest-replay - name: Win-${{ env.CONDA_VERSION }}-Python${{ matrix.python-version }}-${{ env. 
parameters.serial_or_parallel }} + name: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} if: always() # macos test suite @@ -384,13 +402,21 @@ jobs: pytest --color=yes -v -n auto --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results - name: Tar Allure Results + if: always() run: tar -zcf allure-results.tar.gz allure-results + - name: Upload Allure Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: allure-macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} + path: allure-results.tar.gz + - name: Upload Pytest Replay uses: actions/upload-artifact@v3 with: path: ${{ github.workspace }}/pytest-replay - name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ env. parameters.serial_or_parallel }} + name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} if: always() # aggregate and upload diff --git a/tests/archives/flask-0.11.1-py_0.tar.bz2 b/tests/archives/flask-0.11.1-py_0.tar.bz2 index 1e6d8779a79ec09e28130f03ef0514ceeae75f42..0f1994b8b3fa459ccce55ad5f04305d97f11c320 100644 GIT binary patch literal 5334 zcmV;{6e;UMT4*^jL0KkKS)YpemjD+l--Z5Ae*gdffB*mP|NsB~KnQ?<2nb*oe*5PW zdE4%J@3FqGw|za{`*Y6t`>>!I002Jw>+dtu>z4TEynCIt?)vw=y5aY?H&4B-x~F^G zqlgc!%xmq22_z04rGyuc^0U{y*nh4ROLY|cUQ~ga7 zQ%_JHp_-Z+4^RexJtlx2r{d<{x7oYAP2T@;go5lc2!WAwoDicIg%Hu@%HN=JnB~xb z70nCUBJmaoVlKJf?;;|;TDHaVz9@zg8m6WKkYNNND1{_s7{!}gyuY{IuGw#I+si{9 zSCQ4#ulTt>HYiGDqyQjEU|cw${JP$}mALjk`>ucM{G&`;-oHh>cwHrR--GGOgID(J z&oP)unJkoz*W>opTukQAI2|8ez1=a{_G&rk^JE`2o{WEIqtzHR{-!K${Vs;rT;Db- z40eJ!%@)_Jhm{BkF949@VJB0vX){(dv%kX_!^pki!n`IhIa7onJ*+6v%DJW^ zJwmipt;;iVZCg|c`cpbldfdDHN_w*QqrqKFeWsML)$27(xK6VUeyrQ$XK`qp;89! 
zfW2PBLY&9ArR4FBQxZ1lHd&mJ0arwY7TPDUVjxBsH@VGwUEbfpez!-L-T$x{b2|)% zjIUE=6}fR%U8VwqdW#jcLdh`0!uNN}_B+QlHreX#b*s>ktc<=Cy4Q?}Exo zf*`;_ioqBG;P&dm5-dSMKt>9JudsxbA~8s+0>MrWm#&YI z!@6t* zrZ<}etABQL46Fesi8-5p4^HA zCwfg+-B4M!$&$b`fu^DnjN=3&a?MLAc5Wn=IJ2y=d#$x*(UwjuW(!S0TU={8G%q>PFd*pjJr6-L-ZkVxT~=dc{?R@JyP770xwRiVHs zejBtX44AB}QU3(_Kk?f*Mo1ly7&!(zaSC(u$Awy*wbt30#z~668H&<08S5oe}jIG4>Yy!3fS6a za`4OFfkb$jY-8#BXi#+{?jB*y$dVq}-En(h-RlV2c3Q#h8VDITvNIC6t*GrPE)W~a z&StT!=_8tpQ5G_ilHe-IrsCHLSS#JzWTwzQ@PIZ}=!78&y~ z8UxoCsgTFHF(Rxiq`(3rCq*HREA6-Ei+=$Z&?B&q(ax*9_654 ze(P~~DIQwHv4zG5X?uc1qy%N#svS0Yv&QNPxg56#NVrt*3ECUPT`Li8_G0bdTBfZ$ z75ZL-c?-_P0~9>M;+hDJO`=hiFe;)4ceVoZ#Rsf{BHQH~gx88{Jl^al;P8+vK9 z^o7_+&M>L54ek0Z5rc;yLeJ5HlAH6H`MbK)9MSFv?G@UWUcs?$>UN$K8(W%Si?3Dv zAcp#kr#&Sy_Z)UUCoK>nnow*n9ic{XYopt{8?GhCLxp#!mab!L4e=4H%u2GrLubsR z32YuSw4*~t%$QDW_0(eW)b`^+VTgfPX+V(~{NO~wgJQi{5bXwlz?+Q9H@@6cc(q@3 zW@DDy)-{Q>3|8!OBG`cu9R)SQQx#-fbH_UhZFF3=m2rqW0UjZ2&N2##Jtpwg__SwT z(%7rU_YjC7kV5?lK=UDm3|kvZs6zvgX%08Mh&JqyX5FAF~t3Mn9bQ8=sZ3^H1;-fB0cmg7VSipJYsD0H~4Z0ic*(MO1E3e+tEh|%7tmKA4t zPdE+rD-#4Blt*|e0xkF;I){^MNX@pguU1s!A>c5O;x=I^4IJaBr0NYOfQCTDLC+qN zK9;Dmj)ocIn@~n8upsfKA+JCoDsR2krjJu$U72R{gchzFE*>H0Z6zH$tTx&-2KuVv z;TGA0Xy$c<_l*m%ay`$`q_4d~R(GTd6l&Lw>v_mi_}Zw-OW zIzuOkBxDpey;=Q&{S@y3$Ct z)4DWh_d2A*;tp;by01mewaCR~Lp^Mg7&X+HYYc5FZA6laomNXci&_b& zUKa^bw_Jp{uRJ#_7c`YEz{60G&uL8?jssXiy9Ew)SHNA9IQ6I4`6eUu3?v*wJa-))JjkpV2mJ{t`^mfXDU=7 zRX*Q(@VU<&Q1u3b*`eUtk`SZ62fSe=$efXqErTFFu%O{_23D}`6F~_a{WzE59s1eX zV^pwo9k2?9KpERoDe#-`s*btUpr>=ZR5Gl9v*#dZ+L~c)0^`;5UwwpqC+y9q1_#dy zMt*2njq9HH>C~e$b(&z+Ukv#*8nCQXD0I-dpLyQaO#mrdMXD|jkd4uP>^dm!QScvW zgz9q}2W?0C`_!>Ix(+%FUaqmcqSUylOkPpgclA$qx`svO?}F4ILGf3I+!Cv9!2Bm1 z8!)qGu-y;_wo>>+ZaAs3+2$JPip6M2Y*gYI$w7S0yZ$4DGH+N(!i~n6=dj{Ru=B_a zxSFP6_HdhT^TJ#kiyNmid9*8aH7ej|XB!J-mPH_#h7kc#Nf~e3DN@q0YHYQtxFtaW zyNkH#IxWBm>N<(W25TT;v6xk}0f5vj7_yzhH3w^@xYGG#50GXl%SI5I$ErSFt3s9n z-D7FA-v)E!b>dlK+)q8LiCI*F%f`sQNs~(zA|hjf6+!2LC>BeB)K<)DXq6LYTXGVD zMrSoI1B|iT&oI|EOTe_CLjgP33WCKF+yOaU#JG9od1&lZq1G_#sX`aCUF^qE8C+L# zuRO`lgH~K+N6OX-lA9s|h*k`fnuSpG1qUHB+uYzPm!Cdx^mRlSf!iT6@J0-I_;Ph2 zj6iD=MUlDPDAZ|$T%guBI7H40U5Qvo5ZpIwOZC+VzDl5mxTNl9iC%k~;J6_~1Ulr6 zov6s!@EcVH?-4A&T9cx_`2s4&aw`|W-wf~gd;!wVI?m8aJaMcu;p<8WY;;3NW}BG< zwC4efXwzE8jClKlsD0}_3-%%)E#u-q0K$TO#j@y&@1_>RUDvbBJgUxJh^$~qbXsj3 z%Cn}`Qr~4EzOk?L?iYBlJs$vfNsxAle1ns*9sPG)Vmvk^yhROK34gpt9LO@|a@b24 z%NK>&bESF&ji@6Y7HoynWIQGd;6uVZa&| zoo!r}I|Qm=2Wv70y>IBF(d8XUb0dPh{DTWhekJjNdVO1Q5RZhYUJbIhq;IgI-8 zAw!a9vPcBD}Gk+ceiO>*2Pto$bJ-IDo zW4CsuXUI0M4)@t{nYcBedYW0YQ8ZSyYgTlyG;g_FPKNl*sdf~Rx2#ImB%h$*Ua%en z2Tq#0Pl3=XZENdPCCJLN3h=;*WGQB;Boqqi7&g0lai~M3t*k?_3|$Za!cNn@aaSh^ z_VyI1NP(TMFNg>sVJd*c!N5Tgp);`i(T!)sE3+5VjWKhGp+}NuJ{rJbqm+ZMq?vI|Fzcs|6L7hJ>u~vz9H1)uRbcryNSC7+b>BJVY&n zDs(gCb{)VfmwhNn{Q$ROaYL9Zsb1x+HYJBRv5_k6k$7wab12doqgC4K$XU zDm}&eH@v%Z&85aAkW=jDCEuYW8l=Yjdo`ex%At6gdWeSf+Xh*j+7=1Z_AwE7yk#Co z%%$b-Alm7zSRKO)PKJjE^j(v$c^zTp7^cH5GbB1FTmm7-WoWXZ%-Q7^lv&0@IzASf z%hP7uXeO1`-+NVFWQwARs|AS~X}~*)nQ-FWryjeYa*O%CFlREKy3W>=J7qg|Qxa5+ z`9eB9c_*Aov%Tl-MH4OTi@RjBEi_8TqEo!J$VCvCwjkNIb)r$BFs(pxf@E*Oaq1g< zfS`!JZ*m&-@>rRXRvU!G3xAq#K$;Ivo;)|kg1jJuLhl5c7-)+*u?~31mri8l|DV$@v-7$ulBI1^X@seFdg$bFmLg>LFPyrtXokE^%mIt9=VhWXB{7EDe zk3`gp3+%=veNen`?x6~8u)u^OIh3YDl5;@Dhy)GaB%lf~&t`r@GjPWU6<+IHiSw7J;i~!?HHQ#_W~ZfEaWLql zL`g!yTbnSN0~h6vZr;tj)1YK27d5ahDeq%=0|N0JR?{U4$B3mEt~@SfrsQpbd&UNa zBBlpp-c<-AMC+Ph%NM9QgY%r*?HmoLyVS_F{a%9=1iH5-%@Cj**9j={pnF~S-J4M`0nT4q! 
zh7TUi0^r}7A*#-@N))5H3mnAO^-5%dxGa{ay1`iq8iYuDB}Gwao`W+p?X?sgxHw5R z1(Kw*L~8Tf&WlJ*En?1lzmYMpGf=FwCIA?@A75folwv@Od!|xFu~vJwu+$qN#>GW7 zHB7QZQjI+sz`@&u?0&5 zj78g1rJI7^4GcL-9u{R~7)Yku5%Q?E?#a61=`Jg(%C|BpxE;MTxr1RuE#TG!(6QQq zC$D0)1=?>bf(}$%;U$3*6rqHc)*&Pl*5yDV(u6>^r7Iii)E=8Fxse46)WUB$&b=>T zb;dx8vJ3{rhZLFHAfnk321t}OC|<5_B)DQz0xkR)axc0Zj7!f7(m9b&P6WZIxEr9E zIz`+DJWan~ZZ8;c?V=#+4Y>3SNI=4{Y9$;?w(*-5+n~%zGU#v^AtpYS o1?2@ie6(I7n9K$fL?K;>7wx3|=2Q!yp#R0(kxmpOC*r literal 30720 zcmeHP`E%RIm7bsZS9Fw3O|lV5+zn_qsih^_=B_MhNopL|CdKLt6h`$e*GHBBu<-CCLY;P4aTZqk`=7+eV$If`D%X_{(`{! zMt{;EFrSuF{?^YNi+*FfmgD-=_NeuZWxKXxe?zRVM!_>3&GLd}AnBPZzvQC7)d)G8 zq%0E;XBR~O!f!PxviV?|CX*t^)BiC~@jM=}{E_^Dn9qh7B^P6sOwLU4U(U<6#eAyJ z=4?7I9#T{GgDyGSl(E5kmE{R)ToAkV!!Mb||Il+wwEia(sXl&XrLW9d>-cl}?*;Z+ z{{ssBw=K&*>3@ZVKiOBK{|oJ(%tj-lt9+J>WU0gh>umjKFqw_H%1~mNhf+<|M6ON5 z8BBtBB!B#a(sU3X1l=b>_?l$~}8V>75NbdgYRlRfb^0sz-liuxp_p;q<6d#MvXjNJjH4*c{ zD49IwpV_<$UOuzwBrWQnl4(6>vHA>xGxgV5lBv%nv`D9ek$SI2JE$~9tT`AYlcX37 z8q@g(Im>d5zn&=<>3GVDea+c)y03_jW@?&g{I{Zp&!LB62QLz+JB>tZ-cIUDUA;Q#@8Z?v!HQ92wZlf6^VRjwiL-jVZUIC+M! zLXEZWdS-w^<;y)kw-wIsog#Ukvwb<bt16L?0Iy;l;CyGkZ` z*$wQV>35bZg0b>+mI-B^@MOZWInfY3CuvNI2ZiFe_`om(%gF~d8rASAQ!yLE+ruKJ zlXLPRDIN$Y(Vyw807Wv16R{}OZDHNUNl_?Crdj$vkxJHz2UeH?;&e1hKftu=iONKG znG@XzF|mtU?O(}L(hXq?nnfy64RIvQ6!V{${uoc+EA+MCE_N;e7udC&qcq9M2yD=E zs}2Sx>#43O0k6b}C1aI=b=wqRqzbE+6kuv{22Sd(3=tT5MaJh7f!1p>^g`)=pKxBU zSrdFuQ35$&g~}2*u{DoLxiqF>W%bRR`sht>+*S)+6*F`;*Ax@32*d>JNeT9w5I}{6 z24$G*I4!WSX3hmf0%JqcNQ{D#(JR4OibXzELd%6|o@jZ=v`kD&=`j8ol=WYCdZc&z zvj4W#ZIjl`6}h^-ynoZa>9_iw+Z%$?yYB6GovZd0dGU_)U$@ES?cKX>=hf>zd3}3* z)$aCmv)bkDO~2cDao>lckh4|~EzeAUt($kG{inNbyVoPPUDA1Tcill}7_Zy9>37<_ zbJDrFyuQEc+`Kv`=%99U+b7qZH=RDJ^>5G3pxe#J?Mw2e-MxH`URp0Y*PZ@5Gv3Qi z|3;7X^0r%Rk-Jv6-?_ZMZgt7sefRFRhe=|hSDoJFb*uBHebvC|M6mmPemwg1!x z3tHWG=b9bpv)BIbdsIRpxoW*>y=wQ!Z=Z4m^gZE74NG!)-)-w<(roD6zv%Tl{ri5K zyt=);GJNi}yWhdv_5O`q-)hdkB=^1cIY#KW^uW3ca0CThV|doN>9@Px`#Zh# zzr*sr1y?|03r(*Kb0HCWst|+Q?mKi^(>yS|KPPWrxA7b+d{c6&rJ2!#P+V%_SJf~c zWTao2W9_E>>bmo)eRJ8?)ZFS`-gbKJ-(jgbpzM|IQV;vKg)#5Vl(is&Dm^bP!nOS( zd~TMObY7Cy)pzLis%+CH5Qbi7AvlJKm#=H(0yJuy-sAN9pPd5+|1l1S`FD%|Qpa;P z;y;N01}FUYbI6v7|B44TnW#}Eko!;+)BK{@)cAxo7;vEZl4iqZoGJN`K8)F@}Q* z?KY%(Z@a&j!WND#tJM+vc@HFqfOrfnr%W`w?5GkG-`|N;xyX{KM$tN^Zo-NLu$Pf> zBC<5j0Qo+o*`o&e-`%GU%_j9sg<02!KY4Z}UG-5Rd`@)Bq3dhN{rP z4x$fmHK+$=bAm{mcpS~mM&g6sBlv^ThG^DqNTmc}t+=}+}J zOVeVYO&@wJkND4)hQ1D6O5%Cl)@DK<1o(?{Ku9B^gcB%~J|)mg|D+64)!L}boVv4| zHBTSagL&3)HyW4&!+OvNww}+ivp3^An?>3a)jf=EZ<1zKzHTg|k<2|6N}qCZQC5dC zqH=(_tj;XUf&~v2BK~BaQ0zyDCE9KCM_}WhA)5vyZjlblQl||s!eFaYP^m6+t=G8F?X#DE*+C9TUT>61K&@S~ajCR2x!kHJzb&1$9({!}Zmp8aHM+qPNG z)xtf<*hhn~Vbe9eNqIzD_t)#cn#-bWFjSLSfk@C!A*r4S>nV8~Gb)dTK~-25ta{gx z_Z$JKF+(AS-)uJnWhxthord{a(rI-2;^Gm@lVe)emeh?|_BfmB(;rROLRMGY7z@F; zE^)Vp9wV+Xo9HsS6!8_+i&vfYXcurqqAf$^ZZn|&f$Gc5*1nvNb5t@KtS+N^@KqAV3SKf$QmOK3bFt7>cbh$P`1eO>zh2bM#hcEDIwq-2 z1By3i0r|{}IxGZ=x20&ISo+kpqN0nZV>K2!8Y?lZPq#ZpvQ-178KnygQJG8esgB5Q z(AaJ^XX8;Flx)sFQ%q`y0n=PDqwu2gAsZ~S0loj08lWQY603{Il`H2q6?sN8%3o_hK3K-#RVYhTyk{1O>prDg@rJO zl@T+R6e9%|Lze%e7-! 
zjk8=NOnUX~(#We;M(h}N(K7Q$cdDr!EOU&3LbR89Jp19CRH=6Yi&_)} z2_C6XFk6Yp=5`!p6kx2gB38+{S=l_aR6 zCt%r}dDMT%S9V*)sNc=uw9;?ot|B;JfmL60VJ`vyED$wCg;Fqv!CCvlIvmt+LaOCZ94ph@qvvvN;cYpLwnyvygJ=2zMOiIi4L@ z4#!8NA`U%{EZ>SnDDc#B5JTKv0zdeoC8(y!9Z!D{hny+TSD{72z>j?2^*muSKMX@h zx*;f4+>1lsk9|m$9e56>(hHr)VsS)w(f?)SKS|d+n*K<{QS9R{U|v9R#RYQC!`P=W zja=b?elDahLMnVW4npREbT9BjPdWj$-C!5}c0k>|D_~-e)eO0#Nu`cYeeMR1qAm+u z+hws$g$Tjv*ikU4t{({G<_qe3QN&@Hd|w8U%iK7$ptrk7b)x-dXG!TQhw*4ixspK; zxYP}$3~0n1E#4mE5Cz{0{Wy}L6ARz5L+MfxILwbi5z&DAk;h}7?WEL;_FI+(sjGC# zqbaq60Bll$^c>grg@~1)3=Yx`B2Q}~NA9yD9nKYlv5Z4lazGpxk8npgVH{H)+syT1+jXPBQZ@}~4Eu&FWi+y47TBKfh2`2J z=8?k#?!cVvVya${L!w%yydF(vOhd<}QDi|Z1$C6g0YJDNDBp(Xi?sYo&f)DG>W8l9 z`xLii+|Uhq2rEo?Vkpe<5KpoDEVFNqCKUGGay_KnF)IXXU3hAko-ha)wYZFY3leH0 zC>AM;F`vT52Ev7P@->=b)=s#Di+s=ErE>L!$pk){OehbJtboN%2uv5aQ4~=RK-3c4 z2f&k{){?FVAm=!~#ce4afYM0vSO80iow@{edIwDDMpE+89pw zgzZI=!c#?2KtU}H-N?5s*sDkjuE#-e6nlU{*ktceeeclfZ=^>bO)ZGEcoc;aj*6?m zb)pE~)dlz$5pX+mu;RjXeCEKhMz$Z@D#Um02R?_?Ld z%gHInQUjY1vsvYv}0W_T_Kjv(4eYUBimEb1bc z{%A7c_$=zhusQITZV(0%_#}2kj1z1h@Cr;5fQU}SC3FhzKe9s`J3ZIN|3JLE)*g)x zp{aFR|Iw5}!7a~ITrauw9m^H20t5^lhk`Dqph_T8NU8k*98?fUsqKIZf5oKSiCb;Y zbq|qRxngiMtsHxHm~;khR@eZz9O?xzQxr4`v){?ecR!9N;$UyQ30W1Z0u>Qm41fsS7`tL* zl*3i*_EcVEA2O;>E?yi@EEO<~@KHf5Jcq>=;w3OuX8kEJ0DO{yt+hR1L`(rP%^}$^ zX|b<2_B(|Yig!q-)^|XTrd0Zt1<8wI$w8xt7)Knz6&AiwCW64Ru+B2%5L0YtgvVk) zLf9)i0y!XG540KW@!>-b2*C0>%F%Sf_Xoafhm<+kym~ewPjIJ!W5W%=a_QFxAc(*M z2Q}aY7`&`eWg}!?>HPZ()Y_E~gmc{Q zak`v0m&qo}*N-3tPhUShC-{GPeFN0IK}y(_62x}!@4}}Sp0Dqz1n9p0nSZs~;->2b zPO5Mt11FM5aMOdD-wW-QwRf=F2S$d@5p zCt^NSN5UgODQrQo9l^%R)Wn(c1zF~mP3Kd6g;KvJ!(@pqXv>?v-wat3djLD|rHjp( z2swxApl~B_ZWcT*^`g-66@VpN9qb!WFydhVK%N`N+e*G_Y!m=$<+JF1swpBft7p*`%3rz3xqfCPwoMU5KO6 zyn!q}RuuX+NBLLZY*41@l=+SgBy1jXY&Bac+tE7f4q7b=Oqa37C89ni`Aj%L{@RlT;r7o{i9San;t8EV3jn z$dAjB_))iVs`$?s>_-0ZRRIP%JAnVNrS~=dvk+o%v7ylK|3P^Fg#W(wI_&E2*T2%q z%S|NZJk)CcQYIkl}Tgd zRsUC*zOlsgUx%Cn`VTN)x&P&B{YN#3t%vQs5oFKL@p^F-{yfap1&(69-Nl zIC0>_ffEN#95`{{#DNnBP8>LK;KYFw2TmL~ap1&(69-NlIC0>_ffEN#95`{{#DNnB I_Hp3<0OFLXV*mgE diff --git a/tests/archives/fly-2.5.2-0.tar.bz2 b/tests/archives/fly-2.5.2-0.tar.bz2 index b302f1f5b044da8b03f7513d44015410f7361101..fc7ed60e2c0ce84f027183df163242f2a054f9f4 100644 GIT binary patch literal 6297 zcmV;K7-r`}T4*^jL0KkKStld+EC3^4|D^vg1b}|;|Nnpg|L*_)|NaO-AOQ*pU>)B2 z)ysU(4*&oGcV02!-tE@r_jgr~Hg%3|=J!^GO4$*-F5Im*diRNKi)`yC=ol1{RI>HN z?_<&EF}H0cc?O$=kpuxW&;nHao}}=NFqC>m%~ATNs2-S_k5JRp$n`xz?NBuJ27nMD z1k}?Ypc)2`Bxuk800000&;x;>Xa<3ypaVl8rhpn~27my_VgL;Unuw4JY$(Xo(UN$h zOqhYAY8n7C2AVw}0MGzLqCiCShL0(=G#UT^^$j%0WY7QsplAR9NQfX!0GSDSzy9>HrM@4FC`Kih6w8KDP$Vcj$gDvWQaEZPug|m&5?I#5iRjXl8rBI*<3?PWeK{63(;Uv$>P8-+`pK^0U zes%-tV^>x9F5TT)zssQVLCkJ*H#!z!nYLVR#X!(e3u0eU3ZjS~-pZ~U`|!^m$UrkPBYS8xku zqBTuHsslaBId%B+u+kE}?e6Bbdpj)2--b?v4Jvp1BHlNR=L8IB>zJ~0J)P<$SJzz) z)@}8Xuv@oKX${A+A2DgdVNGzw`Pxj0Lh5bHeKeHaJ((-j7st>m#G>TQ?nmr;e`!+d zb~&5IHa0ZI*z+7|w{O*5H^HotFN@xBX4SKn>~h(zOFcrXrEr*jw;4eheo{Vb*Q?PB zR@e%W#EM9Arc`*xRldC5Iqz-s7A5uBu>VeTj#U&S-skm^Bj0-6s31GG@eqtQx`$D2 z0wTb{(|nR@R?~Y59RiSgBMOP5lMFV6j~K*RY_$1=N|WdrUarF+OoK*Cjp(WHw&ALW zqDGohDRz8~i7FoUb#X*L$2z?6;tN~>tpP>>nVFw9LK>((cyp&b1Yc+qOQ!c^Y|W&= zWR*+|x@Zy!s9c>K8JI3ubrUzMz+?;T+GI1?PJJmtQbJ%S;Rx%l58(- zFG_KvF7dl-@!w-MP@9Y!Q2xkIgb##o2443^WHaVq{Gl{Q(iR#AFIjg8=LteZW0KuQ z9{h@CLP|mdqXoNU@s|rh3Yj{R!X2N!h(N4UNxQl1t7v->NMMSDm&8}Ys4mrbVi4RVsWQ)nRhLU3$*J^yYw319ufd`5cTid(oss8bIY>0h2-tG)3b0XFprV-= z8n_s=#>ccE#o_gUz)SL+T0(-9wdq_D$CrDd8I`M!Eo@&(W_xPlMWUVg-;_AEZsmbF zpC>4oQjkZ#oG$mw?hw?=efMftKvJkQsVK9!q(yTO9Qoik7CiS{Cg?N| 
zJ(9$G9>|%(SS=Se@C}adCA&915VM{Jv9ki=b=!rPo;%cP@+*VGDXV~&<*r-}e5gp@ z-*0R@Y52Q6#_b;s+xA5`b@0=5t*B!CH!-%}U}nVH4eXFLFg>zzK=CrDp-O3hpzES5 zLD2?C>1eci%WJ4^S|MC<$^GS$xob zD*K7zcMX-(ku z)c(G&%g%XnaQJnb$Ir@^V=U%v+0OjiD*M>xuE}TB6MZ=~wHcdz8BBXl+!c6deak@1 zHi-{5baKO;wJl6@hfEc!v8FJ$vqp^N(&nmZB?1^T2%%;iz-cx)fofG}Q8eJdh*>sR zw-hCl2CAB5*O`H8%PeU(_J_80c?ses(-Ua4_x0*537o#9a2#`*qV#>Jkg*n$7197sj%kXW{OR)kmxDbRQ$ifUYP`n}=A3vwZ`#JF6>8T-F zR^BLaoK6H3n@R^dTRbC6Q`0!@a&11}tGupx_9N%*?L9)ou5GljY8X3OL7~hHVNEiG z^iXiXnYlA(&>WbBW~WFVjyTJoy&I^o;ck79eS5jw%M+dG&HRTXjr3x@843j-JX}T& zUpyR{sg~onPUTMqo`?=OWI#i4_xW;JurSN&jIuPn0-Gs?0;#z(qJ)P?&z@XLDjy{CUm*9e zh_JgoLhhsYi)NhP8n#pwU`_Oz3|^4MZidP<+M-zTq=#CS-F-ahK zx*TWx@8V{GllHkvfSaP9+b9X~TLG=vSIux%WgNZWUFw{8a; ziDJOL0FZj#+DWlzH5Kh{GUi?^>^kQ})5xL~?kI;jwGS-Fq8gCx+@oSBLsHbUtGz=I zuSyv;sYohcw5#2LBg}qRL;7tqGJNd`Xo8kg!QnCDW%UX|_6JY6K{oENVZs{fC&8+u zFRW$f98>peJr`=m2)DhhJyhGZ5k!x`%4qjcGC2=a;kPWM;Mee&JP_Ah3lJwT8(wu#1|PALOu z8{L_0J8~rF*N3K?E?CDICO%w+m?R7+c3H(Qj>f2Ih=T^$pd24-BRqc}^wjp2>!@ztz{k}Ab2hUgcK z>W=(M-ahszy}i8sQiR2F1TwXdW*Bw?z>yM0rZs-OS#kOJ`WuTuTGq}QLn*Kbq&X`* zjB3x6p@Fy@9zvOJl;I^#Q~@dUTS8G*p4&zM(*;5xxUQD4NF2bL788%H>ZZHi#tLRm zsvXcYE0t?@&<+YQH5ZBO5tDaqa%Cj&fu%Bp;Lt!}`KT2EVkQ@uO@ubU$JL?rQUKe1 zl-ewBh`4G(8%$&wI8x35w%OktukoVl$xKzRDe2=42|~u$6psHUkIAfB!ll~kDc%F!VM0}-!C-iwW;0}dJ5&RU1Os3aT1 zRHD=;({!{%H`Yzt9W^Z*VmZY)1~PP#Is_XjfYkNbUi+R^H8UOQM3I$LFxa%PBQuCf zU`bO0qJYCKdu$5L0Am=sp}5J0V*;g`>1qgQMB&03vVxK%Q^|-#A+#jEST!KFm4Yz% zuERt?DS`x9(o>We4lc?p=ovkG2i!0?6DS-iW7D>`ZBn$5v4CVKaZSsO{aqH`P|bBk zh9K_7U$laPLJfhG(1#S`rgBrxLI<<-tokF~*|4~9_bCO8w}-os+jecxo|IlNDPydmQgox)xxOM%Sl`@e5(P`j0!l< z=hnDKOHiG1@h_N8P>D*nx!fSrODh9Pyrk{RcoUKLmq8^B1zX9-MrWK$_fH+yD&%TL z5~{QoBRy>0Z97#g2TPrZfdPO(0t8L_~k$B--BN_-kYhn0|%!A}Q3Do~?%zQ0i=GMeFqFR^adGz{NphInds)8!P z6z)7(2pKL6lM+Z6qNd%>fhU%YGlB|1V-gViC`hoNRahZ_MN|YaF@10&1T|)a*>0!K zVV)|z5v3He4YoQj67Ksd3R5MbV#M%SlGWoHaX4BPO)qezfe%-(tV-6*NvqjR>_m(~ z5osniHkgP5;#P;jmyEFHG-=(!N;Qdy%rJA9`~0yDx>^9mn>Q4eViF+E+&CT{zzQYB zfw`Wh1$P0&**Z1PQEShNsf*bl*rFhvuZHMR6)+^tZG{X%c?^3xfA`Dse2ar_-X?8(UJl_7kW)FCiVc{c|`Y$Bcgv8v;uFN(#Xr;I?` zFGPYw8ErOFoGcaM%Ad2BXdO*@rdbteLpQ=q(g+R=i`tYrfs4Le>r~*v7l=q1BC(8v zBc;{LS9(GUq#&S>f?FvNI+eFMxa^>>z2ve+{&|5hM3M~PAi2|I2|P@W0YS9evrtwC z4W(PNDJIi3Xf@xpt^1P2cV+r|LZG;s^ud_sC7wfr8%YAwvlw&1{!By-8qyGApvfo_ z5Juq?qf9d0wE0^bEyNSmJhHOQ0tQ_mqYbOOrZN!}anjjd(MxhKf-RqRQ2`A>qyj+< zZAUnPc+gB9n@y;2)xBgL;-n1adMhHScf6~6_!$*wMOs2kvPIf+_GOD|;np_O3!P!? 
[GIT binary patch data (base85) omitted]

diff --git a/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2 b/tests/archives/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2
index 99f04561bdb71f3181e95ac337e6ba9cf613b767..c60e4218247e870f2c79af5b9d1451103f6215df 100644
GIT binary patch
literal 4460
[GIT binary patch data (base85) omitted]

literal 40960
[GIT binary patch data (base85) omitted]

diff --git a/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2 b/tests/archives/test_index_of_removed_pkg-1.0-1.tar.bz2
index 11eac8a95929047eb2ba00164888fd100df0ae0e..e01aaaa3974ba18fefacde04254965625299e447 100644
GIT binary patch
literal 2236
[GIT binary patch data (base85) omitted]

literal 20480
[GIT binary patch data (base85) omitted]
zAZn;C%?3$|7|G$*l2+h*S^7FS8tgpd9_=J z=8)74F4ff&@9+UI!T(3>WVdBijilfUp?Q_;mDV+;4EKa+bo8>(^}Q6GgUi7Miur5X z$KygCarn*>Tz0kF#ga`P@uYURHIkQG!LgygEUc^Yl;Dd)i;hNBi9X;?rS0=|fdeLr zqH6NS-MMLTwt8iR-j1&s&55j;+GJ54_?F$SZWpESGw#6B0QWmEw&ffHRPGnSs1xr9HM4F0b<1YR^N~i*>60dnK%#WQ_|F`>RS5hNXE=J zNg8>Jr?%5L)Amnv7q0MW5tE=cjP+LfSx^r%pPZmR$lxAldNYnOD3d5^cJr6z&;OvZ z*|Z!Lj%*9_v5aQW)&7=^*ZgRj-0^Q2N5lDhD75wZYbZL;WjscHQ^cc(DtLMYy_U?- z@07Gt^WN9FZxj8e35+(MCC*Q;WsTuKVQ@!sN`E+Q@!F&}_P@q|_u2pd_}=0.7", ], "license": "BSD", - "md5": "f85925da2dc4f3cc2771be01fd644023", + "md5": "f53df88de4ba505aadbcf42ff310a18d", "name": "flask", "noarch": "python", - "sha256": "096466b5ff6c243fccbafe75951dc9b1456569f31235882ff29f30064219339c", - "size": 30720, + "sha256": "20bb13679a48679964cd84571c8dd1aa110f8366565f5d82a8f4efa8dd8b160c", + "size": 5334, "subdir": "noarch", "version": "0.11.1", } From 0237c1f28030109e824594fe7a4586fe1ac2b4fe Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 13:46:11 +0100 Subject: [PATCH 016/366] [pre-commit.ci] pre-commit autoupdate (#4668) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.2.2 → v3.3.1](https://github.com/asottile/pyupgrade/compare/v3.2.2...v3.3.1) - [github.com/akaihola/darker: 1.5.1 → 1.6.1](https://github.com/akaihola/darker/compare/1.5.1...1.6.1) - [github.com/PyCQA/pylint: v2.15.6 → v2.15.9](https://github.com/PyCQA/pylint/compare/v2.15.6...v2.15.9) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1fca0bd8c2..9ef7779e47 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -38,12 +38,12 @@ repos: # catch git merge/rebase problems - id: check-merge-conflict - repo: https://github.com/asottile/pyupgrade - rev: v3.2.2 + rev: v3.3.1 hooks: - id: pyupgrade args: ["--py37-plus"] - repo: https://github.com/akaihola/darker - rev: 1.5.1 + rev: 1.6.1 hooks: - id: darker additional_dependencies: [black==22.10.0] @@ -57,7 +57,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.15.6 + rev: v2.15.9 hooks: - id: pylint args: [--exit-zero] From ef2026a09fd5d8572e506937effb4eb88d3a08b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 12:47:17 +0000 Subject: [PATCH 017/366] Bump pillow from 9.0.1 to 9.3.0 in /docs (#4643) Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.0.1 to 9.3.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.0.1...9.3.0) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 735d2460cb..c30be920a9 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,6 +1,6 @@ linkify-it-py==1.0.1 myst-parser==0.15.2 -Pillow==9.0.1 +Pillow==9.3.0 PyYAML==5.4.1 requests==2.26.0 ruamel.yaml==0.17.16 From 52d2553f2464f6a3d6a66498d04752c77f3fa1e0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 6 Jan 2023 14:42:20 -0600 Subject: [PATCH 018/366] Simplify `test_subpackages.py::test_intradependencies` (#4695) Test would previously take ~1min to run on a dedicated macOS and Linux but 4+min to run on Windows. On shared machines (e.g., GitHub Actions) this could take 10+min. Simplified dependency logic by dropping m2-base dependency for Windows, replaced bad dummy names (abc and foo), and shortened verbose naming. --- .../conda_build_config.yaml | 4 +- ...b1intradependencies.sh => install-lib1.sh} | 0 ...b2intradependencies.sh => install-lib2.sh} | 0 ...y1-intradependencies.sh => install-py1.sh} | 0 ...y2-intradependencies.sh => install-py2.sh} | 0 ...-r1-intradependencies.sh => install-r1.sh} | 0 ...-r2-intradependencies.sh => install-r2.sh} | 0 .../_intradependencies/meta.yaml | 61 ++++++++----------- 8 files changed, 29 insertions(+), 36 deletions(-) rename tests/test-recipes/split-packages/_intradependencies/{install-lib1intradependencies.sh => install-lib1.sh} (100%) rename tests/test-recipes/split-packages/_intradependencies/{install-lib2intradependencies.sh => install-lib2.sh} (100%) rename tests/test-recipes/split-packages/_intradependencies/{install-py1-intradependencies.sh => install-py1.sh} (100%) rename tests/test-recipes/split-packages/_intradependencies/{install-py2-intradependencies.sh => install-py2.sh} (100%) rename tests/test-recipes/split-packages/_intradependencies/{install-r1-intradependencies.sh => install-r1.sh} (100%) rename tests/test-recipes/split-packages/_intradependencies/{install-r2-intradependencies.sh => install-r2.sh} (100%) diff --git a/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml b/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml index de88138e64..9c788bb2ba 100644 --- a/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/_intradependencies/conda_build_config.yaml @@ -1,5 +1,5 @@ -abc: +DEP1: - 2.7.16 - 3.7.3 -foo: +DEP2: - 3.5.1 diff --git a/tests/test-recipes/split-packages/_intradependencies/install-lib1intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-lib1.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-lib1intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-lib1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-lib2intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-lib2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-lib2intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-lib2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-py1-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-py1.sh 
similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-py1-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-py1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-py2-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-py2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-py2-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-py2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-r1-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-r1.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-r1-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-r1.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/install-r2-intradependencies.sh b/tests/test-recipes/split-packages/_intradependencies/install-r2.sh similarity index 100% rename from tests/test-recipes/split-packages/_intradependencies/install-r2-intradependencies.sh rename to tests/test-recipes/split-packages/_intradependencies/install-r2.sh diff --git a/tests/test-recipes/split-packages/_intradependencies/meta.yaml b/tests/test-recipes/split-packages/_intradependencies/meta.yaml index 25b2611c3b..644ff633d0 100644 --- a/tests/test-recipes/split-packages/_intradependencies/meta.yaml +++ b/tests/test-recipes/split-packages/_intradependencies/meta.yaml @@ -4,49 +4,42 @@ package: requirements: build: - # the purpose of these dependencies is to disallow empty requirements from making the tests - # pass when they would otherwise fail. Empty requirements hide lack of finalization. 
- - posix # [win] - - git # [not win] + # the purpose of this dependency is to disallow empty requirements from making the tests + # pass when they would otherwise fail, empty requirements hide lack of finalization + - ca-certificates outputs: - - name: abc - version: {{ abc }} - - name: foo - version: {{ foo }} + - name: dep1 + version: {{ DEP1 }} + - name: dep2 + version: {{ DEP2 }} - - name: lib1intradependencies - script: install-lib1intradependencies.sh + - name: lib1 + script: install-lib1.sh - - name: py1-intradependencies + - name: py1 requirements: - - {{ pin_subpackage('lib1intradependencies', exact=True) }} - - abc {{ abc }} - - m2-base # [win] - script: install-py1-intradependencies.sh + - {{ pin_subpackage('lib1', exact=True) }} + - dep1 {{ DEP1 }} + script: install-py1.sh - - name: r1-intradependencies + - name: r1 requirements: - - {{ pin_subpackage('lib1intradependencies', exact=True) }} - - foo {{ foo }} - - m2-base # [win] - script: install-r1-intradependencies.sh + - {{ pin_subpackage('lib1', exact=True) }} + - dep2 {{ DEP2 }} + script: install-r1.sh - - name: lib2intradependencies - requirements: - - m2-base # [win] - script: install-lib2intradependencies.sh + - name: lib2 + script: install-lib2.sh - - name: py2-intradependencies + - name: py2 requirements: - - {{ pin_subpackage('lib2intradependencies', exact=True) }} - - abc {{ abc }} - - m2-base # [win] - script: install-py2-intradependencies.sh + - {{ pin_subpackage('lib2', exact=True) }} + - dep1 {{ DEP1 }} + script: install-py2.sh - - name: r2-intradependencies + - name: r2 requirements: - - {{ pin_subpackage('lib2intradependencies', exact=True) }} - - foo {{ foo }} - - m2-base # [win] - script: install-r2-intradependencies.sh + - {{ pin_subpackage('lib2', exact=True) }} + - dep2 {{ DEP2 }} + script: install-r2.sh From 021ff09092995dffc96c7f4af43c8a63352ba975 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Sat, 7 Jan 2023 16:43:16 -0600 Subject: [PATCH 019/366] Cleanup replay/allure variables, use setup-miniconda w/ caching (#4648) * Switch replay & allure values into environment variables * Rework install_conda_build_test_deps into requirements.txt * Use setup-miniconda action for macOS & Windows, skip setup-miniconda post process since it is especially expensive on Windows taking ~10min to run * Remove unused CI helper scripts * Switch to macOSX 10.15 SDK * Rework cbc.yaml test to use dynamic variants * Remove timeout-minutes and max-parallel limits * Add package caching Co-authored-by: Marcel Bargull --- .github/workflows/tests.yml | 387 ++++++++---------- ci/github/.condarc | 1 + ci/github/activate_conda | 5 - ci/github/install_conda_build_test_deps | 51 --- ci/github/run_conda_forge_build_setup_osx | 55 --- ci/github/setup_pytest_replay | 5 - tests/conftest.py | 33 +- tests/requirements-linux.txt | 5 + tests/requirements-macos.txt | 4 + tests/requirements-windows.txt | 4 + tests/requirements.txt | 23 +- .../conda_build_config.yaml | 4 - .../conda_build_config.yaml | 10 - .../metadata/_macos_tbd_handling/meta.yaml | 5 - .../conda_build_config.yaml | 2 - .../conda_build_config.yaml | 2 - .../conda_build_config.yaml | 2 - .../metadata/_rpath/conda_build_config.yaml | 2 - .../conda_build_config.yaml | 4 - tests/test_api_build.py | 31 +- 20 files changed, 258 insertions(+), 377 deletions(-) delete mode 100644 ci/github/activate_conda delete mode 100755 ci/github/install_conda_build_test_deps delete mode 100644 ci/github/run_conda_forge_build_setup_osx delete mode 100644 ci/github/setup_pytest_replay create mode 
100644 tests/requirements-linux.txt create mode 100644 tests/requirements-macos.txt create mode 100644 tests/requirements-windows.txt delete mode 100644 tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml delete mode 100644 tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml delete mode 100644 tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml delete mode 100644 tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml delete mode 100644 tests/test-recipes/metadata/_rpath/conda_build_config.yaml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d9a0531138..72cf236073 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -58,6 +58,7 @@ jobs: defaults: run: shell: bash -l {0} + working-directory: ./src strategy: fail-fast: false matrix: @@ -73,69 +74,77 @@ jobs: conda-version: canary test-type: parallel env: - pytest-replay: --replay-record-dir=pytest-replay/ --replay-base-name=Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + REPLAY_DIR: ${{ github.workspace }}/pytest-replay + ALLURE_DIR: ${{ github.workspace }}/allure-results + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} + PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - name: Checkout repository uses: actions/checkout@v3 with: + path: ./src fetch-depth: 0 - - name: Setup Miniconda + - name: Checkout test recipe + uses: actions/checkout@v3 + with: + repository: conda/conda_build_test_recipe + path: ./conda_build_test_recipe + fetch-depth: 0 + + - name: Timestamp + run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV + shell: bash + + - name: Cache conda + uses: actions/cache@v2 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} + + - name: Setup miniconda uses: conda-incubator/setup-miniconda@v2 with: - condarc-file: ci/github/.condarc + condarc-file: ./src/ci/github/.condarc python-version: ${{ matrix.python-version }} + run-post: false # skip post cleanup - - name: Install Conda-Canary if canary version - if: matrix.conda-version == 'canary' - run: | - conda install -y -c conda-canary/label/dev conda - - - name: Install default Conda if release version - if: matrix.conda-version == 'release' + - name: Setup environment run: | - conda install -y conda + conda install -q -y -c defaults \ + --file ./tests/requirements.txt \ + --file ./tests/requirements-linux.txt \ + ${{ env.CONDA_CHANNEL_LABEL }}::conda + pip install allure-pytest + pip install -e . - - name: Source Scripts + - name: Show info run: | - source ci/github/install_conda_build_test_deps - pip install -e . 
- pip install allure-pytest conda info -a + conda list --show-channel-urls - - name: Run Serial tests - if: matrix.test-type == 'serial' - run: | - pytest \ - --color=yes \ - -vv \ - -n 0 \ - --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ - --cov conda_build \ - --cov-report xml \ - -m "serial" tests \ - ${{ env.pytest-replay }} \ - --alluredir=allure-results - - - name: Run Parallel tests - if: matrix.test-type == 'parallel' + - name: Run tests run: | pytest \ - --color=yes \ - -vv \ - -n auto \ - --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ - --cov conda_build \ - --cov-append \ - --cov-report xml \ - -m "not serial" tests \ - ${{ env.pytest-replay }} \ - --alluredir=allure-results + --color=yes \ + -v \ + -n "${{ env.PYTEST_NUMPROCESSES }}" \ + --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ + --cov conda_build \ + --cov-append \ + --cov-report xml \ + --replay-record-dir="${{ env.REPLAY_DIR }}" \ + --replay-base-name="${{ env.REPLAY_NAME }}" \ + --alluredir="${{ env.ALLURE_DIR }}" \ + -m "${{ env.PYTEST_MARKER }}" \ + ./tests - name: Tar Allure Results if: always() - run: tar -zcf allure-results.tar.gz allure-results + run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results if: always() @@ -145,11 +154,11 @@ jobs: path: allure-results.tar.gz - name: Upload Pytest Replay + if: always() uses: actions/upload-artifact@v3 with: - name: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - path: pytest-replay/ - if: always() + name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} + path: ${{ env.REPLAY_DIR }} # windows test suite windows: @@ -158,6 +167,9 @@ jobs: if: needs.changes.outputs.code == 'true' runs-on: windows-2019 + defaults: + run: + working-directory: .\src strategy: fail-fast: false matrix: @@ -165,141 +177,88 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.9' + - python-version: '3.10' conda-version: canary test-type: serial - - python-version: '3.9' + - python-version: '3.10' conda-version: canary test-type: parallel - timeout-minutes: 120 env: - serial_or_parallel: '' + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + REPLAY_NAME: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + REPLAY_DIR: ${{ github.workspace }}\pytest-replay + ALLURE_DIR: ${{ github.workspace }}\allure-results + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial and not slow' }} + PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - name: Checkout repository uses: actions/checkout@v3 with: + path: .\src fetch-depth: 0 - - name: Setup Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + - name: Checkout test recipe + uses: actions/checkout@v3 with: - python-version: ${{ matrix.python-version }} + repository: conda/conda_build_test_recipe + path: .\conda_build_test_recipe + fetch-depth: 0 - - name: Install vcpython27.msi - run: | - $wc = New-Object net.webclient - $wc.Downloadfile("https://github.com/GeoNode/geonode-win-installer/raw/ffb76c7cbf1d6b4970c6c25f79c3c7682a3aa035/VCForPython27.msi", "VCForPython27.msi") - Start-Process "VCForPython27.msi" /qn -Wait - shell: pwsh + - name: Timestamp + run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV + shell: bash - - name: Install miniconda - run: | - rmdir C:\Strawberry /s /q - choco install wget visualstudio2017-workload-vctools - wget 
https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe - start /wait "" Miniconda3-latest-Windows-x86_64.exe /InstallationType=JustMe /S /D=%UserProfile%\Miniconda3 - "%UserProfile%\Miniconda3\condabin\conda.bat" init - conda info -a - shell: cmd + - name: Cache conda + uses: actions/cache@v2 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} - - name: Patch vs2008 - run: | - Set-PSDebug -Trace 1 - $batchcontent = @" - ECHO ON - SET vcpython=C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0 - DIR "%vcpython%" - CALL "%vcpython%\vcvarsall.bat" %* - "@ - $batchDir = "C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC" - $batchPath = "$batchDir" + "\vcvarsall.bat" - New-Item -Path $batchPath -ItemType "file" -Force - Set-Content -Value $batchcontent -Path $batchPath - Get-ChildItem -Path $batchDir - Get-ChildItem -Path ($batchDir + '\..') - set LIB - shell: pwsh - - - name: conda init - run: | - echo on - set PATH - doskey conda="call %UserProfile%\Miniconda3\condabin\conda.bat" $* - doskey /macros - call "%UserProfile%\Miniconda3\condabin\conda.bat" init - set PATH - shell: cmd + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + condarc-file: .\src\ci\github\.condarc + python-version: ${{ matrix.python-version }} + run-post: false # skip post cleanup - - name: Configuration + - name: Setup environment run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - call conda install python="%PYTHON_VERSION%" -y||exit 1 - if "%CONDA_VERSION%" == "canary" (call conda update -c conda-canary/label/dev conda||exit 1) else (call conda update -q conda||exit 1) - call conda config --set always_yes yes - call conda config --set auto_update_conda no - call conda info - python -c "import sys; print(sys.version)" - python -c "import sys; print(sys.executable)" - python -c "import sys; print(sys.prefix)" - call conda update -q --all||exit 1 - call conda install -q pip python-libarchive-c pytest git pytest-cov jinja2 m2-patch flake8 mock requests contextlib2 chardet glob2 perl pyflakes pycrypto posix m2-git anaconda-client numpy beautifulsoup4 pytest-xdist pytest-mock filelock pkginfo psutil pytz tqdm conda-package-handling||exit 1 - call conda install pytest-replay pytest-rerunfailures -y||exit 1 - echo safety_checks: disabled >> %UserProfile%\.condarc - echo local_repodata_ttl: 1800 >> %UserProfile%\.condarc - call conda install -q py-lief||exit 1 - python --version - python -c "import struct; print(struct.calcsize('P') * 8)" - pip install --no-deps . + choco install visualstudio2017-workload-vctools + conda install -q -y -c defaults ` + --file .\tests\requirements.txt ` + --file .\tests\requirements-windows.txt ` + ${{ env.CONDA_CHANNEL_LABEL }}::conda pip install allure-pytest - conda-build --version - pushd .. && git clone https://github.com/conda/conda_build_test_recipe && popd - call conda create -n blarg -yq --download-only python=2.7||exit 1 - call conda create -n blarg -yq --download-only python=3.8||exit 1 - call conda create -n blarg -yq --download-only python cmake||exit 1 - set "PYTEST_REPLAY_OPTIONS=--replay-record-dir=${{ github.workspace }}\\pytest-replay --replay-base-name=Win-%CONDA_VERSION%-Py%PYTHON_VERSION%" - echo "##vso[task.setvariable variable=PYTEST_REPLAY_OPTIONS]%PYTEST_REPLAY_OPTIONS%" - - shell: cmd + pip install -e . 
- - name: Run Serial Tests - if: matrix.test-type == 'serial' + - name: Show info run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - call conda install -y conda-verify||exit 1 - set PERL= - set LUA= - set R= - pytest --color=yes -vv -n 0 --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" --cov conda_build --cov-report xml -m "serial" ${{ env.pytest-replay }} --alluredir=allure-results - shell: cmd + conda info -a + conda list --show-channel-urls - - name: Run Parallel Tests - if: matrix.test-type == 'parallel' - # coverage disabled to decrease runtime + - name: Run tests run: | - echo on - set PATH - call %UserProfile%\Miniconda3\condabin\activate.bat base||exit 1 - set PATH - :: call conda remove -y conda-verify||exit 1 - set PERL= - set LUA= - set R= - pytest --color=yes -vv -n auto --basetemp "${{ runner.temp }}\${{ matrix.test-type }}" -m "not serial and not slow" ${{ env.pytest-replay }} --alluredir=allure-results - shell: cmd - env: - VS90COMNTOOLS: C:\Program Files (x86)\Common Files\Microsoft\Visual C++ for Python\9.0\VC\bin - LIB: + pytest ` + --color=yes ` + -v ` + -n "${{ env.PYTEST_NUMPROCESSES }}" ` + --basetemp "${{ runner.temp }}\${{ matrix.test-type}}" ` + --cov conda_build ` + --cov-append ` + --cov-report xml ` + --replay-record-dir="${{ env.REPLAY_DIR }}" ` + --replay-base-name="${{ env.REPLAY_NAME }}" ` + --alluredir="${{ env.ALLURE_DIR }}" ` + -m "${{ env.PYTEST_MARKER }}" ` + .\tests - name: Tar Allure Results if: always() - run: tar -zcf allure-results.tar.gz allure-results + run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" + # windows-2019/powershell ships with GNU tar 1.28 which struggles with Windows paths + # window-2019/cmd ships with bsdtar 3.5.2 which doesn't have this problem + shell: cmd - name: Upload Allure Results if: always() @@ -309,11 +268,11 @@ jobs: path: allure-results.tar.gz - name: Upload Pytest Replay + if: always() uses: actions/upload-artifact@v3 with: - path: ${{ github.workspace }}/pytest-replay - name: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - if: always() + path: ${{ env.REPLAY_DIR }} + name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} # macos test suite macos: @@ -325,6 +284,7 @@ jobs: defaults: run: shell: bash -l {0} + working-directory: ./src strategy: fail-fast: false matrix: @@ -332,78 +292,79 @@ jobs: python-version: ['3.7', '3.10'] conda-version: [canary] test-type: [serial, parallel] - max-parallel: 10 - timeout-minutes: 120 env: - pytest-replay: --replay-record-dir=pytest-replay/ --replay-base-name=macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + REPLAY_NAME: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} + REPLAY_DIR: ${{ github.workspace }}/pytest-replay + ALLURE_DIR: ${{ github.workspace }}/allure-results + PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} + PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - name: Checkout repository uses: actions/checkout@v3 with: + path: ./src fetch-depth: 0 - - name: Install miniconda - run: | - set -x -e -u - curl -o ${{ github.workspace }}/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - chmod +x ${{ github.workspace }}/miniconda.sh - ${{ github.workspace }}/miniconda.sh -b -p ${{ github.workspace }}/miniconda - source 
ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" + - name: Checkout test recipe + uses: actions/checkout@v3 + with: + repository: conda/conda_build_test_recipe + path: ./conda_build_test_recipe + fetch-depth: 0 - - name: Setup CF MacOS Bits and Select Xcode - run: | - set -x -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - source ci/github/run_conda_forge_build_setup_osx + - name: Timestamp + run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV + shell: bash + + - name: Cache conda + uses: actions/cache@v2 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + condarc-file: ./src/ci/github/.condarc + python-version: ${{ matrix.python-version }} + run-post: false # skip post cleanup - - name: Prepare Test Environment + - name: Setup environment run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda info - conda list - grep '^#' "${CONDA_PREFIX}/conda-meta/history" - conda install python=${{ matrix.python-version }} -y - mkdir -p ${{ github.workspace }}/miniconda/locks - mkdir -p ${{ github.workspace }}/miniconda/bin - chmod -w ${{ github.workspace }}/miniconda/locks - conda install -y -c conda-canary/label/dev conda - conda config --set always_yes yes - conda config --set auto_update_conda False - conda info - source ci/github/install_conda_build_test_deps - pip install -e . - echo "Installing Support Libraries" + sudo xcode-select --switch /Applications/Xcode_11.7.app + conda install -q -y -c defaults \ + --file ./tests/requirements.txt \ + --file ./tests/requirements-macos.txt \ + ${{ env.CONDA_CHANNEL_LABEL }}::conda pip install allure-pytest - conda info -a - conda list --show-channel-urls + pip install -e . 
- - name: Run Serial Tests - if: matrix.test-type == 'serial' + - name: Show info run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda install conda-verify -y - pytest --color=yes -v -n 0 --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" --cov conda_build --cov-report xml -m "serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + conda info -a + conda list --show-channel-urls - - name: Run Parallel Tests - if: matrix.test-type == 'parallel' + - name: Run tests run: | - set -e -u - source ci/github/activate_conda "${{ github.workspace }}/miniconda/bin/python" - conda remove conda-verify -y - echo "safety_checks: disabled" >> ~/.condarc - echo "local_repodata_ttl: 1800" >> ~/.condarc - mkdir -p ~/.conda - conda create -n blarg1 -yq python=2.7 - conda create -n blarg3 -yq python=3.7 - conda create -n blarg4 -yq python nomkl numpy pandas svn - pytest --color=yes -v -n auto --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" --cov conda_build --cov-append --cov-report xml -m "not serial" tests "${PYTEST_REPLAY_OPTIONS[@]+"${PYTEST_REPLAY_OPTIONS[@]}"}" --alluredir=allure-results + pytest \ + --color=yes \ + -v \ + -n "${{ env.PYTEST_NUMPROCESSES }}" \ + --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ + --cov conda_build \ + --cov-append \ + --cov-report xml \ + --replay-record-dir="${{ env.REPLAY_DIR }}" \ + --replay-base-name="${{ env.REPLAY_NAME }}" \ + --alluredir="${{ env.ALLURE_DIR }}" \ + -m "${{ env.PYTEST_MARKER }}" \ + ./tests - name: Tar Allure Results if: always() - run: tar -zcf allure-results.tar.gz allure-results + run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results if: always() @@ -413,11 +374,11 @@ jobs: path: allure-results.tar.gz - name: Upload Pytest Replay + if: always() uses: actions/upload-artifact@v3 with: - path: ${{ github.workspace }}/pytest-replay - name: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - if: always() + name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} + path: ${{ env.REPLAY_DIR }} # aggregate and upload aggregate: diff --git a/ci/github/.condarc b/ci/github/.condarc index 01be5676eb..44a36fcc35 100644 --- a/ci/github/.condarc +++ b/ci/github/.condarc @@ -5,3 +5,4 @@ changeps1: False pkgs_dirs: - /usr/share/miniconda/envs/test/pkgs always_yes: True +local_repodata_ttl: 7200 diff --git a/ci/github/activate_conda b/ci/github/activate_conda deleted file mode 100644 index 381e7afc0b..0000000000 --- a/ci/github/activate_conda +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -unset CONDA_SHLVL -eval "$(${1} -m conda shell.bash hook)" -conda activate base diff --git a/ci/github/install_conda_build_test_deps b/ci/github/install_conda_build_test_deps deleted file mode 100755 index a7d03305b2..0000000000 --- a/ci/github/install_conda_build_test_deps +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -# Note, this can be used on Windows but is not used by our CI. 
-function install_conda_build_test_deps_fn() -{ - local MACOS_ARM64=no - local DEF_CHAN=defaults - if [[ $(uname) == Darwin ]] && [[ $(uname -m) == arm64 ]]; then - MACOS_ARM64=yes - DEF_CHAN=conda-forge - fi - # avoids a python 3.7 problem - local -a _PKGS=(cytoolz conda-verify) - _PKGS+=(${DEF_CHAN}::pytest ${DEF_CHAN}::pytest-cov ${DEF_CHAN}::pytest-forked ${DEF_CHAN}::pytest-xdist) - _PKGS+=(${DEF_CHAN}::py-lief ${DEF_CHAN}::pytest-mock) - _PKGS+=(${DEF_CHAN}::pytest-replay ${DEF_CHAN}::pytest-rerunfailures) - _PKGS+=(${DEF_CHAN}::anaconda-client ${DEF_CHAN}::git ${DEF_CHAN}::requests ${DEF_CHAN}::filelock ${DEF_CHAN}::contextlib2 ${DEF_CHAN}::jinja2 ${DEF_CHAN}::pytest-rerunfailures) - _PKGS+=(${DEF_CHAN}::ripgrep ${DEF_CHAN}::pyflakes ${DEF_CHAN}::beautifulsoup4 ${DEF_CHAN}::chardet ${DEF_CHAN}::pycrypto ${DEF_CHAN}::glob2 ${DEF_CHAN}::psutil ${DEF_CHAN}::pytz ${DEF_CHAN}::tqdm) - _PKGS+=(${DEF_CHAN}::conda-package-handling ${DEF_CHAN}::perl ${DEF_CHAN}::python-libarchive-c) - _PKGS+=(${DEF_CHAN}::pip ${DEF_CHAN}::numpy ${DEF_CHAN}::pkginfo) - if [[ $(uname) =~ .*inux.* ]] && [[ ! ${MACOS_ARM64} == yes ]] ; then - _PKGS+=(${DEF_CHAN}::patchelf) - fi - if [[ $(uname) =~ M.* ]]; then - _PKGS+=(${DEF_CHAN}::m2-patch) - _PKGS+=(${DEF_CHAN}::m2-gcc-libs) - else - _PKGS+=(${DEF_CHAN}::patch) - fi - echo -e "Asking conda to install:\n${_PKGS[@]}" - conda install -y --show-channel-urls "${_PKGS[@]}" "$@" - # If we install shellcheck from conda-forge and packages from defaults at the same time (via channel::package) - # then conda-forge used for other packages too. We could force it by forcing transitive deps to also be listed - # with their channel, but, well, yuck. - if [[ ${MACOS_ARM64} == yes ]]; then - echo "Not installing shellcheck as it is unavailable on macOS arm64 at present" - else - if [[ $(uname) =~ .*inux.* ]]; then - conda install -y --show-channel-urls shellcheck - else - conda install -y --show-channel-urls conda-forge::shellcheck - fi - fi - if [[ ! -d ../conda_build_test_recipe ]]; then - pushd .. - git clone "https://github.com/conda/conda_build_test_recipe" - popd - fi -} - -install_conda_build_test_deps_fn "$@" diff --git a/ci/github/run_conda_forge_build_setup_osx b/ci/github/run_conda_forge_build_setup_osx deleted file mode 100644 index a17357c6d6..0000000000 --- a/ci/github/run_conda_forge_build_setup_osx +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -export PYTHONUNBUFFERED=1 - -# deployment target should be set by conda_build_config.yaml (default in conda-forge-pinning). -# The default here will only be used when that is undefined, -# which should only be recipes still using conda-build 2. -export MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET:-10.15} -export CPU_COUNT=$(sysctl -n hw.ncpu) -export INSTALL_XCODE=${INSTALL_XCODE:-0} -echo "Intial \$(xcode-select -p) is $(xcode-select -p)" -echo "PATH is $PATH" -echo "INSTALL_XCODE is $INSTALL_XCODE" -if [[ ${INSTALL_XCODE} == 1 ]]; then - sudo xcode-select --switch /Applications/Xcode_12.5.app/Contents/Developer - echo "After selecting Xcode_12.5.app, \$(xcode-select -p) is $(xcode-select -p)" -fi -echo "PATH is $PATH" -export CONDA_BUILD_SYSROOT="$(xcode-select -p)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk" - -if [[ ! 
-d ${CONDA_BUILD_SYSROOT} || "$OSX_FORCE_SDK_DOWNLOAD" == "1" ]]; then - echo "Downloading ${MACOSX_DEPLOYMENT_TARGET} sdk" - curl -L -O https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk.tar.xz - tar -xf MacOSX${MACOSX_DEPLOYMENT_TARGET}.sdk.tar.xz -C "$(dirname "$CONDA_BUILD_SYSROOT")" - # set minimum sdk version to our target - if [[ ${INSTALL_XCODE} == 1 ]]; then - plutil -replace MinimumSDKVersion -string ${MACOSX_DEPLOYMENT_TARGET} $(xcode-select -p)/Platforms/MacOSX.platform/Info.plist - plutil -replace DTSDKName -string macosx${MACOSX_DEPLOYMENT_TARGET}internal $(xcode-select -p)/Platforms/MacOSX.platform/Info.plist - fi -fi - -if [ -d "${CONDA_BUILD_SYSROOT}" ] -then - echo "Found CONDA_BUILD_SYSROOT: ${CONDA_BUILD_SYSROOT}" -else - echo "Missing CONDA_BUILD_SYSROOT: ${CONDA_BUILD_SYSROOT}" - exit 1 -fi - -conda config --set show_channel_urls true -conda config --set auto_update_conda false -conda config --set add_pip_as_python_dependency false - -# CONDA_PREFIX might be unset -export CONDA_PREFIX="${CONDA_PREFIX:-$(conda info --json | jq -r .root_prefix)}" - -mkdir -p "${CONDA_PREFIX}/etc/conda/activate.d" -echo "export CONDA_BUILD_SYSROOT='${CONDA_BUILD_SYSROOT}'" > "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export CPU_COUNT='${CPU_COUNT}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export PYTHONUNBUFFERED='${PYTHONUNBUFFERED}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" -echo "export MACOSX_DEPLOYMENT_TARGET='${MACOSX_DEPLOYMENT_TARGET}'" >> "${CONDA_PREFIX}/etc/conda/activate.d/conda-forge-ci-setup-activate.sh" - -conda info -conda config --show-sources -conda list --show-channel-urls diff --git a/ci/github/setup_pytest_replay b/ci/github/setup_pytest_replay deleted file mode 100644 index f90955bd58..0000000000 --- a/ci/github/setup_pytest_replay +++ /dev/null @@ -1,5 +0,0 @@ -mkdir $BUILD_ARTIFACTSTAGINGDIRECTORY/pytest-replay -declare -a PYTEST_REPLAY_OPTIONS=() -PYTEST_REPLAY_OPTIONS+=("--replay-record-dir=$BUILD_ARTIFACTSTAGINGDIRECTORY/pytest-replay") -PYTEST_REPLAY_OPTIONS+=("--replay-base-name=Linux-$CONDA_VERSION-Py$PYTHON_VERSION") -echo "##vso[task.setvariable variable=PYTEST_REPLAY_OPTIONS]${PYTEST_REPLAY_OPTIONS[@]}" diff --git a/tests/conftest.py b/tests/conftest.py index 981a439e3a..415830efda 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,13 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from collections import defaultdict import os +import subprocess import sys -from collections import defaultdict import pytest +from conda.common.compat import on_mac import conda_build.config from conda_build.config import ( Config, @@ -205,3 +207,32 @@ def single_version(): @pytest.fixture(scope="function") def no_numpy_version(): return {"python": ["2.7.*", "3.5.*"]} + + +@pytest.fixture( + scope="function", + params=[{}, {"MACOSX_DEPLOYMENT_TARGET": ["10.9"]}] if on_mac else [{}], +) +def variants_conda_build_sysroot(monkeypatch, request): + if not on_mac: + return {} + + monkeypatch.setenv( + "CONDA_BUILD_SYSROOT", + subprocess.run( + ["xcrun", "--sdk", "macosx", "--show-sdk-path"], + check=True, + capture_output=True, + text=True, + ).stdout.strip(), + ) + monkeypatch.setenv( + "MACOSX_DEPLOYMENT_TARGET", + subprocess.run( + ["xcrun", "--sdk", "macosx", "--show-sdk-version"], + check=True, + capture_output=True, + text=True, + ).stdout.strip(), + ) + return 
request.param diff --git a/tests/requirements-linux.txt b/tests/requirements-linux.txt new file mode 100644 index 0000000000..cb3ae7e805 --- /dev/null +++ b/tests/requirements-linux.txt @@ -0,0 +1,5 @@ +# conda-build supplemental test dependencies +# run as 'conda install -c defaults --file tests/requirements-linux.txt' +patch +patchelf +shellcheck diff --git a/tests/requirements-macos.txt b/tests/requirements-macos.txt new file mode 100644 index 0000000000..dbe8e3dd81 --- /dev/null +++ b/tests/requirements-macos.txt @@ -0,0 +1,4 @@ +# conda-build supplemental test dependencies +# run as 'conda install -c defaults --file tests/requirements-macos.txt' +patch +shellcheck diff --git a/tests/requirements-windows.txt b/tests/requirements-windows.txt new file mode 100644 index 0000000000..8d40d2482b --- /dev/null +++ b/tests/requirements-windows.txt @@ -0,0 +1,4 @@ +# conda-build supplemental test dependencies +# run as 'conda install -c defaults --file tests/requirements-windows.txt' +m2-git +m2-patch diff --git a/tests/requirements.txt b/tests/requirements.txt index 98e4dbf048..45c537eec1 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,18 +1,35 @@ -# conda-build -# run as 'conda install --file tests/requirements.txt -c defaults' +# conda-build test dependencies +# run as 'conda install -c defaults --file tests/requirements.txt' anaconda-client -bs4 +beautifulsoup4 chardet conda conda-package-handling +conda-verify +contextlib2 +cytoolz filelock +git glob2 jinja2 +numpy +perl +pip pkginfo psutil +py-lief +pycrypto +pyflakes pytest pytest-cov +pytest-forked pytest-mock +pytest-replay pytest-rerunfailures +pytest-xdist python-libarchive-c +pytz +requests +ripgrep toml +tqdm diff --git a/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml index 97524573aa..80037b0ede 100644 --- a/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml +++ b/tests/test-recipes/metadata/_c_vendoring_detection/conda_build_config.yaml @@ -33,8 +33,6 @@ rust_compiler: - rust rust_compiler_version: - 1.29.0 -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] VERBOSE_AT: - V=1 VERBOSE_CM: @@ -155,8 +153,6 @@ macos_min_version: - 10.9 macos_machine: - x86_64-apple-darwin13.4.0 -MACOSX_DEPLOYMENT_TARGET: - - 10.9 mkl: - 2019 mpfr: diff --git a/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml b/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml deleted file mode 100644 index 12632709ff..0000000000 --- a/tests/test-recipes/metadata/_macos_tbd_handling/conda_build_config.yaml +++ /dev/null @@ -1,10 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk - - /opt/MacOSX10.11.sdk -CONDA_BUILD_SYSROOT_NAME: - - sdk_10_10 - - sdk_10_11 -zip_keys: - - - - CONDA_BUILD_SYSROOT - - CONDA_BUILD_SYSROOT_NAME diff --git a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml index 4d79df4009..709e24afd3 100644 --- a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml +++ b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml @@ -9,11 +9,6 @@ source: build: number: 0 - # .. this does not work .. - # string: {{ CONDA_BUILD_SYSROOT | replace('/', '_') | replace('.', '_') | lower() }} - # .. conda-build needs this as a hint, it does not detect the above CONDA_BUILD_SYSROOT | replace .. - # as jinja2-variant-fodder (I think). 
- string: {{ CONDA_BUILD_SYSROOT_NAME }} error_overlinking: True error_ocerdepending: True diff --git a/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overdepending_detection/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml b/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overlinking_detection/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml b/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_overlinking_detection_ignore_patterns/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_rpath/conda_build_config.yaml b/tests/test-recipes/metadata/_rpath/conda_build_config.yaml deleted file mode 100644 index 019f4210bf..0000000000 --- a/tests/test-recipes/metadata/_rpath/conda_build_config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] diff --git a/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml b/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml index 130ecdf296..19eaa35bd3 100644 --- a/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml +++ b/tests/test-recipes/metadata/_run_exports_in_outputs/conda_build_config.yaml @@ -34,8 +34,6 @@ rust_compiler: - rust rust_compiler_version: - 1.32.0 -CONDA_BUILD_SYSROOT: - - /opt/MacOSX10.10.sdk # [osx] VERBOSE_AT: - V=1 VERBOSE_CM: @@ -160,8 +158,6 @@ macos_min_version: - 10.9 macos_machine: - x86_64-apple-darwin13.4.0 -MACOSX_DEPLOYMENT_TARGET: - - 10.9 mkl: - 2019 mpfr: diff --git a/tests/test_api_build.py b/tests/test_api_build.py index dbb9e05012..413c705cda 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -689,9 +689,13 @@ def test_disable_pip(testing_config, testing_metadata): @pytest.mark.sanity @pytest.mark.skipif(sys.platform.startswith('win'), reason="rpath fixup not done on Windows.") -def test_rpath_unix(testing_config): +def test_rpath_unix(testing_config, variants_conda_build_sysroot): testing_config.activate = True - api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config) + api.build( + os.path.join(metadata_dir, "_rpath"), + config=testing_config, + variants=variants_conda_build_sysroot, + ) def test_noarch_none_value(testing_workdir, testing_config): @@ -1393,7 +1397,7 @@ def test_provides_features_metadata(testing_config): assert index['provides_features'] == {'test2': 'also_ok'} -def test_overlinking_detection(testing_config): +def test_overlinking_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False @@ -1402,16 +1406,18 @@ def test_overlinking_detection(testing_config): dest_bat = os.path.join(recipe, 'bld.bat') 
copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_sh, clobber=True) copy_into(os.path.join(recipe, 'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) with pytest.raises(OverLinkingError): - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) rm_rf(dest_bat) -def test_overlinking_detection_ignore_patterns(testing_config): +def test_overlinking_detection_ignore_patterns( + testing_config, variants_conda_build_sysroot +): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False @@ -1420,38 +1426,37 @@ def test_overlinking_detection_ignore_patterns(testing_config): dest_bat = os.path.join(recipe, 'bld.bat') copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_sh, clobber=True) copy_into(os.path.join(recipe, 'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) rm_rf(dest_bat) -def test_overdepending_detection(testing_config): +def test_overdepending_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False recipe = os.path.join(metadata_dir, '_overdepending_detection') with pytest.raises(OverDependingError): - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) @pytest.mark.skipif(sys.platform != "darwin", reason="macOS-only test (at present)") -def test_macos_tbd_handling(testing_config): +def test_macos_tbd_handling(testing_config, variants_conda_build_sysroot): """ Test path handling after installation... The test case uses a Hello World example in C/C++ for testing the installation of C libraries... """ - testing_config.activate = True testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False recipe = os.path.join(metadata_dir, '_macos_tbd_handling') - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) @pytest.mark.sanity From 27aabfc931e907a7019b85b6320e2308ce8b7c5f Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Tue, 10 Jan 2023 04:13:05 -0500 Subject: [PATCH 020/366] Add misc. 
fixes to conda-build documentation (#4702) * Add miscellaneous fixes to conda-build documentation * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/source/concepts/channels.rst | 2 +- docs/source/concepts/generating-index.rst | 81 ++++++++++--------- docs/source/concepts/index.rst | 4 +- docs/source/concepts/package-naming-conv.rst | 16 ++-- docs/source/concepts/recipe.rst | 68 ++++++++-------- docs/source/index.rst | 6 +- docs/source/user-guide/index.rst | 2 +- .../tutorials/building-conda-packages.rst | 4 +- 8 files changed, 96 insertions(+), 87 deletions(-) diff --git a/docs/source/concepts/channels.rst b/docs/source/concepts/channels.rst index b43680b2b3..ce17d67991 100644 --- a/docs/source/concepts/channels.rst +++ b/docs/source/concepts/channels.rst @@ -53,7 +53,7 @@ This happens because ``conda-build`` will consider the directory ``./example/`` There are several ways to resolve this issue. -#. Use the url of the desired channel: +#. Use the URL of the desired channel: .. code-block:: bash diff --git a/docs/source/concepts/generating-index.rst b/docs/source/concepts/generating-index.rst index 7b6a7db51a..1455325088 100644 --- a/docs/source/concepts/generating-index.rst +++ b/docs/source/concepts/generating-index.rst @@ -29,7 +29,7 @@ Channel layout Parts of a channel ------------------ -* Channeldata.json contains metadata about the channel, including: +* ``channeldata.json`` contains metadata about the channel, including: - What subdirs the channel contains. - What packages exist in the channel and what subdirs they are in. @@ -37,13 +37,13 @@ Parts of a channel * Subdirs are associated with platforms. For example, the linux-64 subdir contains packages for linux-64 systems. -* Repodata.json contains an index of the packages in a subdir. Each subdir will - have it's own repodata. +* ``repodata.json`` contains an index of the packages in a subdir. Each subdir will + have its own repodata. * Channels have packages as tarballs under corresponding subdirs. -channeldata.json ----------------- +``channeldata.json`` +-------------------- .. code-block:: bash @@ -74,8 +74,8 @@ channeldata.json ] } -repodata.json -------------- +``repodata.json`` +----------------- .. code-block:: bash @@ -113,18 +113,18 @@ For each subdir: * For all packages that need to be added/updated: * Extract the package to access metadata, including full package name, - mtime, size, and index.json. + mtime &&&&&& ???, size, and index.json. * Aggregate package metadata to repodata collection. * Apply repodata hotfixes (patches). -* Compute and save the reduced `current_index.json` index. +* Compute and save the reduced ``current_index.json`` index. Example: Building a channel --------------------------- -To build a local channel and put a package in it, follow the directions below. +To build a local channel and put a package in it, follow the directions below: #. Make the channel directory. @@ -133,7 +133,7 @@ To build a local channel and put a package in it, follow the directions below. $ mkdir local-channel $ cd local-channel -#. Now, download your favorite package. We'll use SciPy in our example. The next steps depend on your platform. +#. Now, download your favorite package. We'll use SciPy in our example. The next steps depend on your platform: #. Windows @@ -144,23 +144,21 @@ To build a local channel and put a package in it, follow the directions below. #. 
Linux - #. Confirm that you have cURL; if not then install it. + #. Most Linux systems come with ``curl`` pre-installed. Let's install it if you don't already have it. - Most Linux systems come with cURL pre-installed. Let's install it if you don't already have it. - - #. Check if you have cURL + #. Check if you have ``curl``: .. code-block:: bash $ which curl - #. if ``curl`` is not found, then install it: + #. If ``curl`` is not found, then install it: .. code-block:: bash $ conda install curl - #. Create a local copy of this package you want to include in your channel. + #. Create a local copy of the package you want to include in your channel: .. code-block:: bash @@ -183,16 +181,16 @@ To build a local channel and put a package in it, follow the directions below. #. Other - To find the latest SciPy on other platform, go to the `Anaconda Packages file list for SciPy `_. + To find the latest SciPy on a platform not included in the list above, go to the `Anaconda Packages file list for SciPy`_. -#. Run a conda index. This will generate both channeldata.json for the channel and - repodata.json for the linux-64 and osx-64 subdirs, along with some other files. +#. Run a conda index. This will generate both ``channeldata.json`` for the channel and + ``repodata.json`` for the linux-64 and osx-64 subdirs, along with some other files: .. code-block:: bash $ conda index . -#. Check your work by searching the channel. +#. Check your work by searching the channel: .. code-block:: bash @@ -207,29 +205,29 @@ More details behind the scenes Caching package metadata ~~~~~~~~~~~~~~~~~~~~~~~~ -Caching utilizes the existing repodata.json file if it exists. Indexing checks +Caching utilizes the existing ``repodata.json`` file if it exists. Indexing checks which files to update based on which files are new, removed, or changed since -the last repodata.json was created. When a package is new or changed, its +the last ``repodata.json`` was created. When a package is new or changed, its metadata is extracted and cached in the subdir to which the package belongs. The -subfolder is the `.cache` folder. This folder has one file of interest: -`stat.json`, which contains results from the `stat` command for each file. This +subfolder is the ``.cache`` folder. This folder has one file of interest: +``stat.json``, which contains results from the ``stat`` command for each file. This is used for understanding when a file has changed and needs to be updated. In each of the other subfolders, the extracted metadata file for each package is -saved as the original package name, plus a `.json` extension. Having these +saved as the original package name, plus a ``.json`` extension. Having these already extracted can save a lot of time in fully re-creating the index, should that be necessary. -An aside: one design goal of the `.conda` package format was to make indexing as +An aside: one design goal of the ``.conda`` package format was to make indexing as fast as possible. To achieve this, the .conda format separates metadata from the -actual package contents. Where the old `.tar.bz2` container required extracting +actual package contents. Where the old ``.tar.bz2`` container required extracting the entire package to obtain the metadata, the new package format allows extraction of metadata without touching the package contents. This allows -indexing speed to be independent of the package size. Large `.tar.bz2` packages +indexing speed to be independent of the package size. 
Large ``.tar.bz2`` packages can take a very long time to extract and index. It is generally never necessary to manually alter the cache. To force an update/rescan of all cached packages, you can delete the .cache folder, or you -can delete just the `.cache/stat.json` file. Ideally, you could remove only one +can delete just the ``.cache/stat.json`` file. Ideally, you could remove only one package of interest from the cache, but that functionality does not currently exist. @@ -252,7 +250,7 @@ generated on the fly every time the index is generated. That means that any new packages that have been added since the patch python file was last committed will be picked up and will have hotfixes applied to them where appropriate. -Anaconda applies hotfixes by providing a python file to `conda index` that has +Anaconda applies hotfixes by providing a python file to ``conda index`` that has logic on how to alter metadata. Anaconda's repository of hotfixes is at https://github.com/AnacondaRecipes/repodata-hotfixes @@ -262,16 +260,16 @@ Repodata patches applied from a JSON file Unfortunately, you can't always run your python code directly - other people who host your patches may not allow you to run code. What you can do instead is package the patches as .json files. These will clobber the entries in the -repodata.json when they are applied. +``repodata.json`` when they are applied. This is the approach that conda-forge has to take, for example. Their patch creation code is here: https://github.com/conda-forge/conda-forge-repodata-patches-feedstock/tree/main/recipe -What that code does is to download the current repodata.json, then runs their +What that code does is to download the current ``repodata.json``, then runs their python logic to generate the patch JSON file. Those patches are placed into a location where Anaconda's mirroring tools will find them and apply them to -conda-forge's repodata.json at mirroring time. +conda-forge's ``repodata.json`` at mirroring time. The downside here is that this JSON file is only as new as the last time that the repodata-patches feedstock last generated a package. Any new packages that @@ -284,20 +282,23 @@ Trimming to "current" repodata The number of packages available is always growing. That means conda is always having to do more and more work. To slow down this growth, in conda 4.7, we -added the ability to have alternate repodata.json files that may represent a -subset of the normal repodata.json. One in particular is -`current_repodata.json`, which represents: +added the ability to have alternate ``repodata.json`` files that may represent a +subset of the normal ``repodata.json``. One in particular is +``current_repodata.json``, which represents: 1. the latest version of each package 2. any earlier versions of dependencies needed to make the latest versions satisfiable -current_repodata.json also keeps only one file type: `.conda` where it is -available, and `.tar.bz2` where only `.tar.bz2` is available. +``current_repodata.json`` also keeps only one file type: ``.conda`` where it is +available, and ``.tar.bz2`` where only ``.tar.bz2`` is available. For Anaconda's defaults "main" channel, the current_repodata.json file is -approximately 1/7 the size of repodata.json. This makes downloading the repodata +approximately 1/7 the size of ``repodata.json``. This makes downloading the repodata faster, and it also makes loading the repodata into its python representation faster. 
For those interested in how this is achieved, please refer to the code at https://github.com/conda/conda-build/blob/90a6de55d8b9e36fc4a8c471b566d356e07436c7/conda_build/index.py#L695-L737 + + +.. _`Anaconda Packages file list for SciPy`: https://anaconda.org/anaconda/scipy/files diff --git a/docs/source/concepts/index.rst b/docs/source/concepts/index.rst index 57d53a490b..6dbaee7c90 100644 --- a/docs/source/concepts/index.rst +++ b/docs/source/concepts/index.rst @@ -23,8 +23,8 @@ What is a “package”? * You can use conda-build to build a conda package. -What about channels -------------------- +What about channels? +-------------------- * Channels contain packages. diff --git a/docs/source/concepts/package-naming-conv.rst b/docs/source/concepts/package-naming-conv.rst index 0c6e936f8f..89683e7b24 100644 --- a/docs/source/concepts/package-naming-conv.rst +++ b/docs/source/concepts/package-naming-conv.rst @@ -52,7 +52,7 @@ Canonical name ============== The package name, version, and build string joined together by -hyphens---name-version-buildstring. In usage documentation, these +hyphens: name-version-buildstring. In usage documentation, these are referred to by ``canonical_name``. .. _filename: @@ -65,7 +65,7 @@ Filename Conda package filenames are canonical names, plus the suffix ``.tar.bz2`` or ``.conda``. -The following figure compares a canonical name to a filename: +The following figure compares a canonical name to a file name: .. figure:: /img/conda_names.png :align: center @@ -76,8 +76,7 @@ The following figure compares a canonical name to a filename: Conda supports both ``.conda`` and ``.tar.bz2`` package extensions. The ``.conda`` format is generally smaller and more efficient than ``.tar.bz2`` packages. -Read our `blog post `_ -about it to learn more. +Read our `blog post`_ about it to learn more. The build string is created as the package is built. Things that contribute to it are the variants specified either by the command @@ -93,13 +92,16 @@ then the build string is the build number that is specified in the recipe. Package specification ===================== -A package name together with a package version---which may be -partial or absent---joined by an equal sign. +A package name together with a package version — which may be +partial or absent — joined by an equal sign. -EXAMPLES: +Examples: * ``python=2.7.3`` * ``python=2.7`` * ``python`` In usage documentation, these are referred to by ``package_spec``. + + +.. _`blog post`: https://www.anaconda.com/understanding-and-improving-condas-performance/ diff --git a/docs/source/concepts/recipe.rst b/docs/source/concepts/recipe.rst index 7410a0dc11..3f287182c5 100644 --- a/docs/source/concepts/recipe.rst +++ b/docs/source/concepts/recipe.rst @@ -6,24 +6,24 @@ Conda-build recipes :local: :depth: 2 -To enable building `conda packages `_, :ref:`install and update conda +To enable building `conda packages`_, :ref:`install and update conda and conda-build `. Building a conda package requires a recipe. A conda-build recipe is a flat directory that contains the following files: -* ``meta.yaml``---A file that contains all the metadata in the +* ``meta.yaml`` — A file that contains all the metadata in the recipe. Only ``package/name`` and ``package/version`` are required. -* ``build.sh``---The script that installs the files for the +* ``build.sh`` — The script that installs the files for the package on macOS and Linux. It is executed using the ``bash`` command. 
-* ``bld.bat``---The build script that installs the files for the +* ``bld.bat`` — The build script that installs the files for the package on Windows. It is executed using ``cmd``. -* ``run_test.[py,pl,sh,bat]``---An optional Python test file, a +* ``run_test.[py,pl,sh,bat]`` — An optional Python test file, a test script that runs automatically if it is part of the recipe. * Optional patches that are applied to the source. @@ -34,8 +34,8 @@ is a flat directory that contains the following files: .. tip:: When you use the :ref:`conda skeleton ` command, - the first 3 files---``meta.yaml``, ``build.sh``, and - ``bld.bat``---are automatically generated for you. + the first 3 files — ``meta.yaml``, ``build.sh``, and + ``bld.bat`` — are automatically generated for you. Conda-build process =================== @@ -60,24 +60,23 @@ Conda-build performs the following steps: source directory with environment variables set. The build script installs into the build environment. -#. Performs some necessary post-processing steps, such as shebang - and rpath. +#. Performs some necessary post-processing steps, such as adding a shebang + and ``rpath``. #. Creates a conda package containing all the files in the build environment that are new from step 5, along with the necessary conda package metadata. -#. Tests the new conda package if the recipe includes tests: +#. Tests the new conda package — if the recipe includes tests — by doing the following: - #. Deletes the build environment and source directory to ensure that the new conda package does not inadvertantly depend on artifacts not included in the package. + * Deletes the build environment and source directory to ensure that the new conda package does not inadvertantly depend on artifacts not included in the package. - #. Creates a test environment with the package and its + * Creates a test environment with the package and its dependencies. - #. Runs the test scripts. + * Runs the test scripts. -The `conda-recipes -`_ repo +The `conda-recipes`_ repo contains example recipes for many conda packages. .. caution:: @@ -87,8 +86,7 @@ contains example recipes for many conda packages. such as passwords into recipes where it could be made public. The ``conda skeleton`` command can help to make -skeleton recipes for common repositories, such as `PyPI -`_. +skeleton recipes for common repositories, such as PyPI_. Deep dive @@ -101,12 +99,12 @@ Templates --------- When you build a conda package, conda-build renders the package -by reading a template in the meta.yaml. See :ref:`jinja-templates`. +by reading a template in the ``meta.yaml``. See :ref:`jinja-templates`. -Templates are filled in using your conda-build config, +Templates are filled in using your ``conda build config``, which shows the matrix of things to build against. The ``conda build config`` determines how many builds it has to do. -For example, defining a conda_build_config.yaml of the form +For example, defining a ``conda_build_config.yaml`` of the form and filling it defines a matrix of 4 packages to build:: foo: @@ -131,7 +129,7 @@ The prefix will take the form:: /conda-bld//h_env_placeholder… -`Conda-forge `_ downloads your package source and then builds the conda +`Conda-forge`_ downloads your package source and then builds the conda package in the context of the build environment. For example, you may direct it to download from a Git repo or pull down a tarball from another source. See the :ref:`source-section` for more information. 
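For example, a minimal ``source`` section that pulls a tarball might look like the
following (the URL and checksum are placeholders, not a real package)::

    source:
      url: https://example.com/files/mypackage-1.0.tar.gz  # placeholder URL
      sha256: <sha256 checksum of the tarball>

A ``git_url``/``git_rev`` pair can be used instead when building from a Git repo.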
@@ -149,16 +147,16 @@ Building Once the content is downloaded, conda-build runs the build step. See the :ref:`meta-build` for more information. The build step runs a script. It can be one that you provided. -See the :ref:`build-script` section for more information. +See the :ref:`build-script` section for more information on this topic. If you do not define the script section, then you can create a -build.sh or a bld.bat file to be run. +``build.sh`` or a ``bld.bat`` file to be run. Prefix replacement ------------------ When the build environment is created, it is in a placeholder prefix. -When the package is all bundled up, the prefix is set to a dummy prefix. +When the package is all bundled up, the prefix is set to a "dummy" prefix. When conda is ready to install the package, it rewrites the dummy prefix with the correct one. @@ -172,17 +170,17 @@ of this prefix is:: /conda-bld//_test_env_placeholder… -At this point, conda-build has all of the info from the meta.yaml about +At this point, conda-build has all of the info from ``meta.yaml`` about what its runtime dependencies are, so those dependencies are installed as well. This generates a test runner script with a reference to the -testing meta.yaml that is created. See the :ref:`meta-test` for +testing ``meta.yaml`` that is created. See the :ref:`meta-test` for more information. That file is run for testing. Output metadata --------------- After the package is built and tested, conda-build cleans up the -environments created prior and outputs the metadata. The recipe for +environments created during prior steps and outputs the metadata. The recipe for the package is also added in the output metadata. The metadata directory is on the top level of the tarball in the ``info`` directory. The metadata contains information about the dependencies of the @@ -205,8 +203,14 @@ More information Review :doc:`../resources/define-metadata` to see a breakdown of the components of a recipe, including: - * Package name. - * Package version. - * Descriptive metadata. - * Where to obtain source code. - * How to test the package. + * Package name + * Package version + * Descriptive metadata + * Where to obtain source code + * How to test the package + + +.. _`conda packages`: https://conda.io/projects/conda/en/latest/user-guide/concepts/packages.html +.. _`conda-recipes`: https://github.com/continuumio/conda-recipes +.. _`Conda-forge`: https://anaconda.org/conda-forge +.. _PyPI: https://pypi.python.org/pypi diff --git a/docs/source/index.rst b/docs/source/index.rst index 6f80b10bb0..7fabd7b219 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -14,8 +14,7 @@ from the conda recipe. You can build conda packages from a variety of source code projects, most notably Python. For help packing a Python project, -see the `packaging.python.org tutorial - `_. +see the `packaging.python.org tutorial`_. OPTIONAL: If you are planning to upload your packages to Anaconda Cloud, you will need an @@ -30,3 +29,6 @@ Anaconda Cloud, you will need an resources/index release-notes contributing-guide + + +.. _`packaging.python.org tutorial`: https://packaging.python.org/en/latest/tutorials/packaging-projects diff --git a/docs/source/user-guide/index.rst b/docs/source/user-guide/index.rst index ff5787b5a7..86874e581d 100644 --- a/docs/source/user-guide/index.rst +++ b/docs/source/user-guide/index.rst @@ -20,7 +20,7 @@ variables and wheel files. 
**Tutorials** The :doc:`tutorials <../user-guide/tutorials/index>` will guide you through -how to build conda packages---whether you're creating a +how to build conda packages — whether you're creating a package with compilers, using conda skeleton, creating from scratch, or building R packages using skeleton CRAN. diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index 006b7dac97..f27d17a98f 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -17,9 +17,9 @@ SEP and GDAL. Additional Windows-specific instructions are provided in the The final built packages from this tutorial are available on `Anaconda Cloud `_: -* `SEP `_. +* `SEP `_ -* `GDAL `_. +* `GDAL `_ This tutorial also describes writing recipes. You can see the final `SEP recipe From 22d250e837f9b9e09746c37f7094f024c4516609 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 10:13:23 +0100 Subject: [PATCH 021/366] [pre-commit.ci] pre-commit autoupdate (#4699) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/pylint: v2.15.9 → v2.15.10](https://github.com/PyCQA/pylint/compare/v2.15.9...v2.15.10) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9ef7779e47..742565b9e2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -57,7 +57,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.15.9 + rev: v2.15.10 hooks: - id: pylint args: [--exit-zero] From ac64c08563f1d948ff13e90eef9782e38c523ffe Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 12 Jan 2023 05:40:51 +0100 Subject: [PATCH 022/366] Adjust testing matrix (#4691) Only run tests with all supported Python versions on Linux, where tests are faster and runners are plentiful. For Windows and macOS, only run tests on the lower and upper Python bounds with the stable and canary conda packages, respectively. 
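As a rough sketch (mirroring the macOS job below; the Windows job is analogous),
the per-OS matrix now looks like:

    strategy:
      fail-fast: false
      matrix:
        # lower bound, tested against the stable (release) conda package
        python-version: ['3.7']
        conda-version: [release]
        test-type: [serial, parallel]
        include:
          # upper bound, tested against the canary conda package
          - python-version: '3.10'
            conda-version: canary
            test-type: serial
          - python-version: '3.10'
            conda-version: canary
            test-type: parallel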
--- .github/workflows/tests.yml | 20 ++++++++++++++------ news/4691-update-test-matrix | 19 +++++++++++++++++++ 2 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 news/4691-update-test-matrix diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 72cf236073..210ed2e5f8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -62,8 +62,8 @@ jobs: strategy: fail-fast: false matrix: - # test lower and upper bounds - python-version: ['3.7', '3.10'] + # test all lower versions (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.7', '3.8', '3.9'] conda-version: [release] test-type: [serial, parallel] include: @@ -173,7 +173,8 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + # test lower version (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.7'] conda-version: [release] test-type: [serial, parallel] include: @@ -288,10 +289,17 @@ jobs: strategy: fail-fast: false matrix: - # test lower and upper bounds - python-version: ['3.7', '3.10'] - conda-version: [canary] + # test lower version (w/ stable conda) and upper version (w/ canary conda) + python-version: ['3.7'] + conda-version: [release] test-type: [serial, parallel] + include: + - python-version: '3.10' + conda-version: canary + test-type: serial + - python-version: '3.10' + conda-version: canary + test-type: parallel env: CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} REPLAY_NAME: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} diff --git a/news/4691-update-test-matrix b/news/4691-update-test-matrix new file mode 100644 index 0000000000..d8fd6fdba5 --- /dev/null +++ b/news/4691-update-test-matrix @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Update test matrix to run tests on all supported Python versions on Linux. Only run tests on lower & upper Python bounds for Windows and macOS. (#4691) From d8cabf06d1d6b9353a96a8bc4eb9db4491266fac Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Thu, 12 Jan 2023 09:28:11 -0500 Subject: [PATCH 023/366] Make further changes to conda-build docs (#4704) * Edit conda-build docs Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/source/concepts/channels.rst | 21 +- docs/source/concepts/generating-index.rst | 2 +- docs/source/concepts/index.rst | 8 - docs/source/index.rst | 4 +- .../tutorials/building-conda-packages.rst | 197 +++++++++--------- 5 files changed, 115 insertions(+), 117 deletions(-) diff --git a/docs/source/concepts/channels.rst b/docs/source/concepts/channels.rst index ce17d67991..275cc19986 100644 --- a/docs/source/concepts/channels.rst +++ b/docs/source/concepts/channels.rst @@ -2,17 +2,7 @@ Conda channels ============== -The ``conda-build`` options ``-c CHANNEL`` or ``--channel CHANNEL`` configure additional channels to search for packages. - -These are URLs searched in the order they are given (including file:// for local directories). - -Then, the defaults or channels from ``.condarc`` are searched (unless ``--override-channels`` is given). - -You can use 'defaults' to get the default packages for conda, and 'system' to get the system packages, which also takes ``.condarc`` into account. - -You can also use any name and the ``.condarc`` ``channel_alias`` value will be prepended. The default ``channel_alias`` is http://conda.anaconda.org/. 
- -The option ``--override-channels`` tells to not search default or ``.condarc`` channels. Requires the ``--channel`` or ``-c`` option. +Conda-build supports standard `conda channel`_ behavior. Identical channel and package name problem @@ -49,7 +39,9 @@ will fail with the following error message (the path will be different): is empty. please request that the channel administrator create `noarch/repodata.json` and associated `noarch/repodata.json.bz2` files. -This happens because ``conda-build`` will consider the directory ``./example/`` in your project as a channel. This is by design due to conda's CI servers, where the build path can be long, complicated, and not predictable prior to build. +This happens because conda-build will consider the directory ``./example/`` in your project +as a channel. This is by design due to conda's CI servers, where the build path can be long, +complicated, and not predictable prior to build. There are several ways to resolve this issue. @@ -72,4 +64,7 @@ There are several ways to resolve this issue. conda-build ./conda/ -c example/label/main - which technically is the same as ``-c example``, since ``main`` is the default label, but now it won't by mistake find a channel ``example/label/main`` on the local filesystem. + which technically is the same as `-c example`, since main is the default label, + but now it won't mistakenly find a channel ``example/label/main`` on the local filesystem. + +.. _`conda channel`: https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/channels.html diff --git a/docs/source/concepts/generating-index.rst b/docs/source/concepts/generating-index.rst index 1455325088..b512d9c7e5 100644 --- a/docs/source/concepts/generating-index.rst +++ b/docs/source/concepts/generating-index.rst @@ -113,7 +113,7 @@ For each subdir: * For all packages that need to be added/updated: * Extract the package to access metadata, including full package name, - mtime &&&&&& ???, size, and index.json. + file modification time (``mtime``), size, and ``index.json``. * Aggregate package metadata to repodata collection. diff --git a/docs/source/concepts/index.rst b/docs/source/concepts/index.rst index 6dbaee7c90..f2934b5d21 100644 --- a/docs/source/concepts/index.rst +++ b/docs/source/concepts/index.rst @@ -38,11 +38,3 @@ What about channels? * conda is able to install from channels and uses the indexes in the channel to solve for requirements and dependencies. - -Building Anaconda installers ----------------------------- - -* Anaconda(/Miniconda) installers are built with a modified version of constructor. - -* The idea is to build an Anaconda metapackage and bundle it together with some - other packages to build an Anaconda installer. diff --git a/docs/source/index.rst b/docs/source/index.rst index 7fabd7b219..e4033b4580 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,8 +3,8 @@ Conda-build documentation ========================= -Conda-build contains commands and tools to use conda to build your -own packages. +Conda-build contains commands and tools to build your own +conda packages. It also provides helpful tools to constrain or pin versions in recipes. 
Building a conda package requires :doc:`installing conda-build ` and diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index f27d17a98f..2e4015790f 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -15,19 +15,16 @@ SEP and GDAL. Additional Windows-specific instructions are provided in the :ref:`toolkit` section. The final built packages from this tutorial are available on -`Anaconda Cloud `_: +`Anaconda Cloud`_: -* `SEP `_ +* `SEP`_ -* `GDAL `_ +* `GDAL`_ This tutorial also describes writing recipes. You can see the -final `SEP recipe -`_ -and the `GDAL recipe -`_ -on GitHub in the `conda-build documentation repository -`_. +final `SEP recipe`_ +and the `GDAL recipe`_ +on GitHub in the `conda-build documentation repository`_. Who is this for? ================ @@ -45,7 +42,7 @@ Before you start Before you start, make sure you have installed: - * `Conda `_. + * `Conda`_. * :ref:`Conda-build `. * Any compilers you want. @@ -82,24 +79,19 @@ There are free alternatives available for each version of the VS compilers: * Instead of VS 2008, it is often possible to substitute the - `free Microsoft Visual C++ Compiler for Python 2.7 - `_. + `free Microsoft Visual C++ Compiler for Python 2.7`_. * Instead of VS 2010, it is often possible to substitute the - `free Microsoft Windows SDK for Windows 7 and .NET Framework 4 - `_. + `free Microsoft Windows SDK for Windows 7 and .NET Framework 4`_. -* Make sure that you also install `VS 2010 Service Pack 1 (SP1) - `_. +* Make sure that you also install `VS 2010 Service Pack 1 (SP1)`_. * Due to a bug in the VS 2010 SP1 installer, the compiler tools may be removed during installation of VS 2010 SP1. They can be - restored as described in `Microsoft Visual C++ 2010 Service - Pack 1 Compiler Update for the Windows SDK 7.1 - `_. + restored as described in `Microsoft Visual C++ 2010 Service Pack + 1 Compiler Update for the Windows SDK 7.1`_. -* Visual Studio 2015 has a full-featured, free `Community edition - `_ +* Visual Studio 2015 has a full-featured, free `Community edition`_ for academic research, open source projects, and certain other use cases. @@ -122,22 +114,22 @@ built on Windows 10. Other tools ------------ -Some environments initially lack tools such as patch or Git +Some environments initially lack tools such as ``patch`` or Git that may be needed for some build workflows. -On Windows, these can be installed with conda: +On Windows, these can be installed with conda using the following command: -``conda install git m2-patch`` +``$ conda install git m2-patch`` -On macOS and Linux, replace ``m2-patch`` with patch. +On macOS and Linux, replace ``m2-patch`` with ``patch``. Developing a build strategy ============================ Conda recipes are typically built with a trial-and-error method. -Often the first attempt to build a package fails with compiler +Sometimes, the first attempt to build a package will fail with compiler or linker errors, often caused by missing dependencies. The person writing the recipe then examines these errors and modifies the recipe to include the missing dependencies, usually as part of the @@ -157,18 +149,18 @@ Python 3. Installing only one makes it easier to keep track of the builds, but it is possible to have both installed on the same system at the same time. 
If you have both installed, use the -``where`` command on Windows, or ``which`` command on Linux to -see which version comes first on PATH since this is the one you will be using:: +``where`` command on Windows, or the ``which`` command on macOS or +Linux, to see which version comes first on ``PATH``, since this is +the one you will be using:: - where python + $ where python To build a package for a Python version other than the one in your Miniconda installation, use the ``--python`` option in the -``conda-build`` command. +``conda-build`` command. For example, to build a Python 3.5 package +with Miniconda2:: -EXAMPLE: To build a Python 3.5 package with Miniconda2:: - - conda-build recipeDirectory --python=3.5 + $ conda-build recipeDirectory --python=3.5 .. note:: Replace ``recipeDirectory`` with the name and path of your @@ -180,29 +172,29 @@ Automated testing After the build, if the recipe directory contains a test file. This test file is named ``run_test.bat`` on Windows, ``run_test.sh`` on macOS or Linux, or ``run_test.py`` on any platform. The file runs to test the package -and any errors are reported. After seeing "check the output," you can -also test if this package was built by using the command:: +and any errors that surface are reported. After seeing "check the output," +you can also test if this package was built by using the command:: $ conda build --test .tar.bz2 .. note:: - Use the :ref:`Test section of the meta.yaml file - ` to move data files from the recipe directory to the - test directory when the test is run. + Use the :ref:`"test" section ` of the ``meta.yaml`` file + to move data files from the recipe directory to the test directory when + the test is run. Building a SEP package with conda and Python 2 or 3 ===================================================== -The `SEP documentation `_ states +The `SEP documentation`_ states that SEP runs on Python 2 and 3, and it depends only on NumPy. -Searching for SEP and PyPI shows that there is already `a PyPI -package for SEP `_. +Searching for SEP on PyPI shows that there is already `a PyPI +package for SEP`_. Because a PyPI package for SEP already exists, the ``conda skeleton`` command can make a skeleton or outline of a -conda recipe based on the PyPI package. Then the recipe outline -can be completed manually and conda can build a conda package +conda recipe based on the PyPI package. The recipe outline +can then be completed manually and conda can build a conda package from the completed recipe. @@ -212,15 +204,15 @@ Install Visual Studio If you have not already done so, install the appropriate version of Visual Studio: -* For Python 3---Visual Studio 2017: +* For Python 3 — Visual Studio 2017: - #. Choose Custom install. + #. Choose the "Custom install" option. #. Under Programming Languages, choose to install Visual C++. -* For Python 2---Visual Studio 2008: +* For Python 2 — Visual Studio 2008: - #. Choose Custom install. + #. Choose the "Custom install" option. #. Choose to install X64 Compilers and Tools. Install Service Pack 1. @@ -230,16 +222,16 @@ Make a conda skeleton recipe #. Run the skeleton command:: - conda skeleton pypi sep + $ conda skeleton pypi sep - The ``skeleton`` command installs into a newly created + The ``skeleton`` command installs into a newly-created directory called ``sep``. #. 
Go to the ``sep`` directory to view the files:: - cd sep + $ cd sep - One skeleton file has been created: ``meta.yaml`` + You will see that one ``skeleton`` file has been created: ``meta.yaml`` Edit the skeleton files @@ -275,10 +267,11 @@ EXAMPLE: - python - numpy x.x -Notice that there are two types of requirements, host and run. -Host represents packages that need to be specific to the target +Notice that there are two types of requirements, ``host`` and ``run`` +(``build`` is another valid parameter, but is not shown in this example). +``host`` represents packages that need to be specific to the target platform when the target platform is not necessarily the same as -the native build platform. Run represents the dependencies that +the native build platform. ``run`` represents the dependencies that should be installed when the package is installed. .. note:: @@ -292,7 +285,7 @@ should be installed when the package is installed. at build time. -OPTIONAL: Add a test for the built package +Optional: Add a test for the built package ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Adding this optional test will test the package at the end of the @@ -300,9 +293,7 @@ build by making sure that the Python statement ``import sep`` runs successfully: #. Add ``- sep``, checking to be sure that the indentation is - consistent with the rest of the file. - - EXAMPLE: + consistent with the rest of the file: .. code-block:: yaml @@ -317,7 +308,7 @@ Build the package Build the package using the recipe you just created:: - conda build sep + $ conda build sep Check the output @@ -354,16 +345,13 @@ Check the output Building a GDAL package with conda and Python 2 or 3 ==================================================== -This procedure describes how to build a package with Python 2 or Python 3. -Follow the instructions for your preferred version. - To begin, install Anaconda or Miniconda and conda-build. If you are using a -Windows machine, also use conda to install Git and the m2-patch. +Windows machine, also use conda to install Git and ``m2-patch``. .. code-block:: bash - conda install git - conda install m2-patch + $ conda install git + $ conda install m2-patch Because GDAL includes C and C++, building it on Windows requires Visual Studio. This procedure describes how to build a package with Python 2 or @@ -375,42 +363,46 @@ To build a GDAL package: #. Install Visual Studio: - * For Python 3, install `Visual Studio 2017 `_. - Choose Custom install. Under Programming Languages, select workloads that - come from Visual Studio so you choose the Desktop Development with C++ and - Universal Platform C. + * For Python 3, install `Visual Studio 2017`_: + + * Choose "Custom install". + * Under "Programming Languages", select workloads that come from Visual + Studio so that you can choose the Desktop Development with C++ and + Universal Platform C. - * For Python 2, install `Visual Studio 2008 `_. - Choose Custom install. Choose to install X64 Compilers and Tools. - Install Visual Studio 2008 Service Pack 1. + * For Python 2, install `Visual Studio 2008`_: + + * Choose "Custom install". + * Choose to install X64 Compilers and Tools. + * Install Visual Studio 2008 Service Pack 1. #. Install Git. Because the GDAL package sources are retrieved from GitHub for the build, you must install Git:: - conda install git m2-patch conda-build + $ conda install git m2-patch conda-build -#. Get gdal-feedstock. For the purpose of this tutorial, we will be using a recipe from Anaconda:: +#. Get ``gdal-feedstock``. 
For the purpose of this tutorial, we will be using a recipe from Anaconda:: - git clone https://github.com/AnacondaRecipes/gdal-feedstock.git + $ git clone https://github.com/AnacondaRecipes/gdal-feedstock.git -#. Use conda-build to build the gdal-feedstock:: +#. Use conda-build to build the ``gdal-feedstock``:: - conda build gdal-feedstock + $ conda build gdal-feedstock -#. Check the output to make sure the build completed +#. Check the output to make sure that the build completed successfully. The output also contains the location of the final package file and a command to upload the package to Cloud. For this package in particular, there should be two - packages outputted: libgdal and GDAL. + packages outputted: ``libgdal`` and ``GDAL``. #. In case of any linker or compiler errors, modify the recipe and run it again. -Let’s take a better look at what’s happening inside the gdal-feedstock. -In particular, what is happening in the ``meta.yaml``. +Let's take a closer look at what's happening inside the ``gdal-feedstock``, +specifically in the ``meta.yaml`` file. -The first interesting bit happens under ``source`` in the patches +The first interesting bit happens under ``source`` in the ``patches`` section: :: @@ -422,22 +414,23 @@ section: # disable 12 bit jpeg on Windows as we aren't using internal jpeg - 0003-disable_jpeg12.patch -This section says that when this package is being built on a Windows -platform, apply the following patch files. Notice that the patch files -are in the `patches` directory of the recipe. These patches will only +This section is basically saying "when this package is being built on a Windows +platform, apply the following patch files". Notice that the patch files +are in the ``patches`` directory of the recipe. These patches will only be applied to Windows since the ``# [win]`` selector is applied to each of the patch entries. For more about selectors, see :ref:`preprocess-selectors`. -In the requirements section, notice how there are both a build and -host set of requirements. For this recipe, all the compilers required to -build the package are listed in the build requirements. +In the ``requirements`` section, notice how there are both a ``build`` and +``host`` set of requirements (``run`` is another valid parameter, but is +not shown in this example). For this recipe, all the compilers required to +build the package are listed in the ``build`` requirements. Normally, this section will list out packages required to build the package. GDAL requires CMake on Windows, as well as C compilers. Notice that the C compilers are pulled into the recipe using the syntax ``{{ compiler('c') }}``. Since conda-build 3, conda-build defines a jinja2 function ``compiler()`` to specify compiler packages dynamically. So, using -the ``compiler(‘c’)`` function in a conda recipe will pull in the correct +the ``compiler('c')`` function in a conda recipe will pull in the correct compiler for any build platform. For more information about compilers with conda-build see :ref:`compiler-tools`. @@ -447,9 +440,9 @@ see :ref:`using-your-customized-compiler-package-with-conda-build-3`. Notice that this package has an ``outputs`` section. This section is a list of packages to output as a result of building -this package. In this case, the packages libgdal and GDAL will be built. +this package. In this case, the packages ``libgdal`` and ``GDAL`` will be built. Similar to a normal recipe, the outputs can have build scripts, -tests scripts and requirements specified. 
+tests scripts, and requirements specified. For more information on how outputs work, see the :ref:`package-outputs`. Now, let's try to build GDAL against some build matrix. @@ -465,12 +458,30 @@ Add the following to your ``conda_build_config.yaml``: Now you can build GDAL using conda-build with the command:: - conda build gdal-feedstock + $ conda build gdal-feedstock Or explicitly set the location of the conda-build variant matrix:: - conda build gdal-feedstock --variant-config-file conda_build_config.yaml + $ conda build gdal-feedstock --variant-config-file conda_build_config.yaml If you want to know more about build variants and ``conda_build_config.yaml``, including how to specify a config file and what can go into it, take a look at :ref:`conda-build-variant-config-files`. + + +.. _`Anaconda Cloud`: https://anaconda.org +.. _SEP: https://anaconda.org/wwarner/sep/files +.. _GDAL: https://anaconda.org/conda-forge/gdal/files +.. _`SEP recipe`: https://github.com/conda-forge/sep-feedstock +.. _`GDAL recipe`: https://github.com/conda-forge/gdal-feedstock +.. _`conda-build documentation repository`: https://github.com/conda/conda-build/tree/main/docs +.. _Conda: https://conda.io/projects/conda/en/latest/user-guide/install/index.html +.. _`free Microsoft Visual C++ Compiler for Python 2.7`: https://www.microsoft.com/en-us/download/details.aspx?id=44266 +.. _`free Microsoft Windows SDK for Windows 7 and .NET Framework 4`: https://www.microsoft.com/en-us/download/details.aspx?id=8279 +.. _`VS 2010 Service Pack 1 (SP1)`: https://www.microsoft.com/en-us/download/details.aspx?id=34677 +.. _`Microsoft Visual C++ 2010 Service Pack 1 Compiler Update for the Windows SDK 7.1`: https://www.microsoft.com/en-us/download/details.aspx?id=4422 +.. _`Community edition`: https://www.visualstudio.com/en-us/products/visual-studio-community-vs.aspx +.. _`SEP documentation`: https://sep.readthedocs.io +.. _`a PyPI package for SEP`: https://pypi.python.org/pypi/sep +.. _`Visual Studio 2017`: https://docs.microsoft.com/en-us/visualstudio/install/install-visual-studio?view=vs-2017 +.. 
_`Visual Studio 2008`: http://download.microsoft.com/download/E/8/E/E8EEB394-7F42-4963-A2D8-29559B738298/VS2008ExpressWithSP1ENUX1504728.iso From 6c0377f5d11a77dbe941b91ef8029eaa0c313c2f Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 12 Jan 2023 17:27:20 +0100 Subject: [PATCH 024/366] Mark flaky test as serial & flaky to avoid macOS SDK race condition (#4709) --- tests/test_api_build.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 413c705cda..4948bb2bc0 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -1163,7 +1163,7 @@ def test_unknown_selectors(testing_config): # the locks can be very flaky on GitHub Windows Runners # https://github.com/conda/conda-build/issues/4685 -@pytest.mark.flaky(rerun=5, reruns_delay=2) +@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_failed_recipe_leaves_folders(testing_config, testing_workdir): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] @@ -1397,6 +1397,10 @@ def test_provides_features_metadata(testing_config): assert index['provides_features'] == {'test2': 'also_ok'} +# using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition +# https://github.com/conda/conda-build/issues/4708 +@pytest.mark.serial +@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_overlinking_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True @@ -1415,6 +1419,10 @@ def test_overlinking_detection(testing_config, variants_conda_build_sysroot): rm_rf(dest_bat) +# using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition +# https://github.com/conda/conda-build/issues/4708 +@pytest.mark.serial +@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_overlinking_detection_ignore_patterns( testing_config, variants_conda_build_sysroot ): From 711a6f5bd9675b2400c75f34da592216019174ab Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 12 Jan 2023 22:35:45 +0100 Subject: [PATCH 025/366] Speedup `test_variants.py::test_get_package_variants*` (#4696) Previously both test_get_package_variants_from_file and test_get_package_variants_from_dictionary_of_lists tests would take 3+min to run on all OSes using dedicated resources. We remove the unused build_config.yaml and drop the unnecessary numpy dependency resulting in the tests now completing in less than 30s. 
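For reference, the relocated recipe at
tests/test-recipes/variants/variant_recipe/meta.yaml now only needs the following
(sketch of its requirements section):

    requirements:
      build:
        - python   # numpy and the unix-only nomkl entry were dropped
      run:
        - python   # numpy was dropped here as well

so rendering it no longer has to solve for numpy at all.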
--- .pre-commit-config.yaml | 3 +- .../variants}/variant_recipe/meta.yaml | 3 - tests/test_variants.py | 282 ++++++++++++------ tests/utils.py | 10 +- tests/variant_recipe/build_config.yaml | 4 - 5 files changed, 199 insertions(+), 103 deletions(-) rename tests/{ => test-recipes/variants}/variant_recipe/meta.yaml (73%) delete mode 100644 tests/variant_recipe/build_config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 742565b9e2..b81c2d65ea 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,8 +10,7 @@ exclude: | index_data | test-cran-skeleton | test-recipes | - test-skeleton | - variant_recipe + test-skeleton )/ | .*\.(patch|diff) | versioneer.py | diff --git a/tests/variant_recipe/meta.yaml b/tests/test-recipes/variants/variant_recipe/meta.yaml similarity index 73% rename from tests/variant_recipe/meta.yaml rename to tests/test-recipes/variants/variant_recipe/meta.yaml index 4bf58dceef..5645afd47f 100644 --- a/tests/variant_recipe/meta.yaml +++ b/tests/test-recipes/variants/variant_recipe/meta.yaml @@ -5,11 +5,8 @@ package: requirements: build: - python - - numpy - - nomkl # [unix] run: - python - - numpy about: summary: {{ python }} diff --git a/tests/test_variants.py b/tests/test_variants.py index e8ace350a7..4a6a132862 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -1,8 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from collections import OrderedDict -import os import json +import os +from pathlib import Path import re import sys @@ -12,8 +13,7 @@ from conda_build import api, exceptions, variants from conda_build.utils import package_has_file -thisdir = os.path.dirname(__file__) -recipe_dir = os.path.join(thisdir, 'test-recipes', 'variants') +from .utils import variants_dir def test_later_spec_priority(single_version, no_numpy_version): @@ -35,38 +35,76 @@ def test_later_spec_priority(single_version, no_numpy_version): assert len(combined_spec["python"]) == 2 -def test_get_package_variants_from_file(testing_workdir, testing_config, no_numpy_version): - with open('variant_example.yaml', 'w') as f: - yaml.dump(no_numpy_version, f, default_flow_style=False) - testing_config.variant_config_files = [os.path.join(testing_workdir, 'variant_example.yaml')] +def test_get_package_variants_from_file( + testing_workdir, testing_config, no_numpy_version +): + variants_path = Path(testing_workdir, "variant_example.yaml") + variants_path.write_text(yaml.dump(no_numpy_version, default_flow_style=False)) + + testing_config.variant_config_files = [str(variants_path)] testing_config.ignore_system_config = True - metadata = api.render(os.path.join(thisdir, "variant_recipe"), - no_download_source=False, config=testing_config) + + metadata = api.render( + os.path.join(variants_dir, "variant_recipe"), + no_download_source=False, + config=testing_config, + ) + # one for each Python version. 
Numpy is not strictly pinned and should present only 1 dimension assert len(metadata) == 2 - assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 - assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 + assert ( + sum( + "python >=2.7,<2.8" in req + for (m, _, _) in metadata + for req in m.meta["requirements"]["run"] + ) + == 1 + ) + assert ( + sum( + "python >=3.5,<3.6" in req + for (m, _, _) in metadata + for req in m.meta["requirements"]["run"] + ) + == 1 + ) def test_use_selectors_in_variants(testing_workdir, testing_config): - testing_config.variant_config_files = [os.path.join(recipe_dir, - 'selector_conda_build_config.yaml')] + testing_config.variant_config_files = [ + os.path.join(variants_dir, "selector_conda_build_config.yaml") + ] variants.get_package_variants(testing_workdir, testing_config) def test_get_package_variants_from_dictionary_of_lists(testing_config, no_numpy_version): testing_config.ignore_system_config = True - metadata = api.render(os.path.join(thisdir, "variant_recipe"), - no_download_source=False, config=testing_config, - variants=no_numpy_version) + + metadata = api.render( + os.path.join(variants_dir, "variant_recipe"), + no_download_source=False, + config=testing_config, + variants=no_numpy_version, + ) + # one for each Python version. Numpy is not strictly pinned and should present only 1 dimension assert len(metadata) == 2, metadata - assert sum('python >=2.7,<2.8' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 - assert sum('python >=3.5,<3.6' in req for (m, _, _) in metadata - for req in m.meta['requirements']['run']) == 1 + assert ( + sum( + "python >=2.7,<2.8" in req + for (m, _, _) in metadata + for req in m.meta["requirements"]["run"] + ) + == 1 + ) + assert ( + sum( + "python >=3.5,<3.6" in req + for (m, _, _) in metadata + for req in m.meta["requirements"]["run"] + ) + == 1 + ) @pytest.mark.xfail(reason="Strange failure 7/19/2017. Can't reproduce locally. Test runs fine " @@ -76,19 +114,19 @@ def test_variant_with_ignore_numpy_version_reduces_matrix(numpy_version_ignored) # variants are defined in yaml file in this folder # there are two python versions and two numpy versions. However, because numpy is not pinned, # the numpy dimensions should get collapsed. 
- recipe = os.path.join(recipe_dir, '03_numpy_matrix') + recipe = os.path.join(variants_dir, "03_numpy_matrix") metadata = api.render(recipe, variants=numpy_version_ignored, finalize=False) assert len(metadata) == 2, metadata def test_variant_with_numpy_pinned_has_matrix(): - recipe = os.path.join(recipe_dir, '04_numpy_matrix_pinned') + recipe = os.path.join(variants_dir, "04_numpy_matrix_pinned") metadata = api.render(recipe, finalize=False) assert len(metadata) == 4 def test_pinning_in_build_requirements(): - recipe = os.path.join(recipe_dir, '05_compatible') + recipe = os.path.join(variants_dir, "05_compatible") metadata = api.render(recipe)[0][0] build_requirements = metadata.meta['requirements']['build'] # make sure that everything in the build deps is exactly pinned @@ -97,7 +135,7 @@ def test_pinning_in_build_requirements(): @pytest.mark.sanity def test_no_satisfiable_variants_raises_error(): - recipe = os.path.join(recipe_dir, '01_basic_templating') + recipe = os.path.join(variants_dir, "01_basic_templating") with pytest.raises(exceptions.DependencyNeedsBuildingError): api.render(recipe, permit_unsatisfiable_variants=False) @@ -181,19 +219,24 @@ def test_validate_spec(): def test_cross_compilers(): - recipe = os.path.join(recipe_dir, '09_cross') - ms = api.render(recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True) + recipe = os.path.join(variants_dir, "09_cross") + ms = api.render( + recipe, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + ) assert len(ms) == 3 def test_variants_in_output_names(): - recipe = os.path.join(recipe_dir, '11_variant_output_names') + recipe = os.path.join(variants_dir, "11_variant_output_names") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 4 def test_variants_in_versions_with_setup_py_data(testing_workdir): - recipe = os.path.join(recipe_dir, '12_variant_versions') + recipe = os.path.join(variants_dir, "12_variant_versions") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 2 assert any(os.path.basename(pkg).startswith('my_package-470.470') for pkg in outputs) @@ -201,8 +244,10 @@ def test_variants_in_versions_with_setup_py_data(testing_workdir): def test_git_variables_with_variants(testing_workdir, testing_config): - recipe = os.path.join(recipe_dir, '13_git_vars') - m = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True)[0][0] + recipe = os.path.join(variants_dir, "13_git_vars") + m = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + )[0][0] assert m.version() == "1.20.2" assert m.build_number() == 0 @@ -226,7 +271,7 @@ def test_variant_input_with_zip_keys_keeps_zip_keys_list(): def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, caplog): testing_config.debug = True testing_config.verbose = True - recipe = os.path.join(recipe_dir, '14_variant_in_run_and_test') + recipe = os.path.join(variants_dir, "14_variant_in_run_and_test") api.render(recipe, config=testing_config) text = caplog.text @@ -237,8 +282,8 @@ def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, capl def test_serial_builds_have_independent_configs(testing_config): - recipe = os.path.join(recipe_dir, '17_multiple_recipes_independent_config') - recipes = [os.path.join(recipe, dirname) for dirname in ('a', 'b')] + recipe = os.path.join(variants_dir, "17_multiple_recipes_independent_config") + recipes = [os.path.join(recipe, dirname) for dirname in ("a", "b")] outputs = 
api.build(recipes, config=testing_config) index_json = json.loads(package_has_file(outputs[0], 'info/index.json')) assert 'bzip2 >=1,<1.0.7.0a0' in index_json['depends'] @@ -247,9 +292,11 @@ def test_serial_builds_have_independent_configs(testing_config): def test_subspace_selection(testing_config): - recipe = os.path.join(recipe_dir, '18_subspace_selection') - testing_config.variant = {'a': 'coffee'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + recipe = os.path.join(variants_dir, "18_subspace_selection") + testing_config.variant = {"a": "coffee"} + ms = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) # there are two entries with a==coffee, so we should end up with 2 variants assert len(ms) == 2 # ensure that the zipped keys still agree @@ -291,7 +338,11 @@ def test_subspace_selection(testing_config): def test_get_used_loop_vars(testing_config): - m = api.render(os.path.join(recipe_dir, '19_used_variables'), finalize=False, bypass_env_check=True)[0][0] + m = api.render( + os.path.join(variants_dir, "19_used_variables"), + finalize=False, + bypass_env_check=True, + )[0][0] # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable @@ -301,36 +352,51 @@ def test_get_used_loop_vars(testing_config): def test_reprovisioning_source(testing_config): - api.render(os.path.join(recipe_dir, "20_reprovision_source")) + api.render(os.path.join(variants_dir, "20_reprovision_source")) def test_reduced_hashing_behavior(testing_config): # recipes using any compiler jinja2 function need a hash - m = api.render(os.path.join(recipe_dir, '26_reduced_hashing', 'hash_yes_compiler'), - finalize=False, bypass_env_check=True)[0][0] - assert 'c_compiler' in m.get_hash_contents(), "hash contents should contain c_compiler" - assert re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()), \ - "hash should be present when compiler jinja2 function is used" + m = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_compiler"), + finalize=False, + bypass_env_check=True, + )[0][0] + assert ( + "c_compiler" in m.get_hash_contents() + ), "hash contents should contain c_compiler" + assert re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id() + ), "hash should be present when compiler jinja2 function is used" # recipes that use some variable in conda_build_config.yaml to control what # versions are present at build time also must have a hash (except # python, r_base, and the other stuff covered by legacy build string # behavior) - m = api.render(os.path.join(recipe_dir, '26_reduced_hashing', 'hash_yes_pinned'), - finalize=False, bypass_env_check=True)[0][0] - assert 'zlib' in m.get_hash_contents() - assert re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) + m = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_pinned"), + finalize=False, + bypass_env_check=True, + )[0][0] + assert "zlib" in m.get_hash_contents() + assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) # anything else does not get a hash - m = api.render(os.path.join(recipe_dir, '26_reduced_hashing', 'hash_no_python'), - finalize=False, bypass_env_check=True)[0][0] + m = api.render( + os.path.join(variants_dir, "26_reduced_hashing", "hash_no_python"), + finalize=False, + bypass_env_check=True, + )[0][0] assert not 
m.get_hash_contents() assert not re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) def test_variants_used_in_jinja2_conditionals(testing_config): - ms = api.render(os.path.join(recipe_dir, '21_conditional_sections'), - finalize=False, bypass_env_check=True) + ms = api.render( + os.path.join(variants_dir, "21_conditional_sections"), + finalize=False, + bypass_env_check=True, + ) assert len(ms) == 2 assert sum(m.config.variant['blas_impl'] == 'mkl' for m, _, _ in ms) == 1 assert sum(m.config.variant['blas_impl'] == 'openblas' for m, _, _ in ms) == 1 @@ -338,14 +404,20 @@ def test_variants_used_in_jinja2_conditionals(testing_config): def test_build_run_exports_act_on_host(testing_config, caplog): """Regression test for https://github.com/conda/conda-build/issues/2559""" - api.render(os.path.join(recipe_dir, '22_run_exports_rerendered_for_other_variants'), - platform='win', arch='64') + api.render( + os.path.join(variants_dir, "22_run_exports_rerendered_for_other_variants"), + platform="win", + arch="64", + ) assert "failed to get install actions, retrying" not in caplog.text def test_detect_variables_in_build_and_output_scripts(testing_config): - ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'), - platform='linux', arch='64') + ms = api.render( + os.path.join(variants_dir, "24_test_used_vars_in_scripts"), + platform="linux", + arch="64", + ) for m, _, _ in ms: if m.name() == 'test_find_used_variables_in_scripts': used_vars = m.get_used_vars() @@ -366,8 +438,11 @@ def test_detect_variables_in_build_and_output_scripts(testing_config): assert 'BAT_VAR' not in used_vars assert 'OUTPUT_VAR' in used_vars # on windows, we find variables in bat scripts as well as shell scripts - ms = api.render(os.path.join(recipe_dir, '24_test_used_vars_in_scripts'), - platform='win', arch='64') + ms = api.render( + os.path.join(variants_dir, "24_test_used_vars_in_scripts"), + platform="win", + arch="64", + ) for m, _, _ in ms: if m.name() == 'test_find_used_variables_in_scripts': used_vars = m.get_used_vars() @@ -391,13 +466,16 @@ def test_detect_variables_in_build_and_output_scripts(testing_config): def test_target_platform_looping(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, '25_target_platform_looping'), - platform='win', arch='64') + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "25_target_platform_looping"), + platform="win", + arch="64", + ) assert len(outputs) == 2 def test_numpy_used_variable_looping(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'numpy_used')) + outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4 @@ -415,8 +493,10 @@ def test_exclusive_config_files(testing_workdir): os.path.join('config_dir', 'config-0.yaml'), os.path.join('config_dir', 'config-1.yaml'), ) - output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'), - exclusive_config_files=exclusive_config_files)[0][0] + output = api.render( + os.path.join(variants_dir, "exclusive_config_file"), + exclusive_config_files=exclusive_config_files, + )[0][0] variant = output.config.variant # is cwd ignored? 
assert 'cwd' not in variant @@ -431,13 +511,17 @@ def test_exclusive_config_files(testing_workdir): def test_exclusive_config_file(testing_workdir): - with open('conda_build_config.yaml', 'w') as f: - yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False) - os.makedirs('config_dir') - with open(os.path.join('config_dir', 'config.yaml'), 'w') as f: - yaml.dump({'abc': ['super'], 'exclusive': ['someval']}, f, default_flow_style=False) - output = api.render(os.path.join(recipe_dir, 'exclusive_config_file'), - exclusive_config_file=os.path.join('config_dir', 'config.yaml'))[0][0] + with open("conda_build_config.yaml", "w") as f: + yaml.dump({"abc": ["someval"], "cwd": ["someval"]}, f, default_flow_style=False) + os.makedirs("config_dir") + with open(os.path.join("config_dir", "config.yaml"), "w") as f: + yaml.dump( + {"abc": ["super"], "exclusive": ["someval"]}, f, default_flow_style=False + ) + output = api.render( + os.path.join(variants_dir, "exclusive_config_file"), + exclusive_config_file=os.path.join("config_dir", "config.yaml"), + )[0][0] variant = output.config.variant # is cwd ignored? assert 'cwd' not in variant @@ -449,26 +533,42 @@ def test_exclusive_config_file(testing_workdir): def test_inner_python_loop_with_output(testing_config): - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config) + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 - testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')] - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config) + testing_config.variant_config_files = [ + os.path.join( + variants_dir, "test_python_as_subpackage_loop", "config_with_zip.yaml" + ) + ] + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 - testing_config.variant_config_files = [os.path.join(recipe_dir, 'test_python_as_subpackage_loop', 'config_with_zip.yaml')] - outputs = api.get_output_file_paths(os.path.join(recipe_dir, 'test_python_as_subpackage_loop'), - config=testing_config, platform='win', arch=64) + testing_config.variant_config_files = [ + os.path.join( + variants_dir, "test_python_as_subpackage_loop", "config_with_zip.yaml" + ) + ] + outputs = api.get_output_file_paths( + os.path.join(variants_dir, "test_python_as_subpackage_loop"), + config=testing_config, + platform="win", + arch=64, + ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 @@ -477,19 +577,25 @@ def test_inner_python_loop_with_output(testing_config): def test_variant_as_dependency_name(testing_config): - outputs = 
api.render(os.path.join(recipe_dir, '27_requirements_host'), - config=testing_config) + outputs = api.render( + os.path.join(variants_dir, "27_requirements_host"), config=testing_config + ) assert len(outputs) == 2 def test_custom_compiler(): - recipe = os.path.join(recipe_dir, '28_custom_compiler') - ms = api.render(recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True) + recipe = os.path.join(variants_dir, "28_custom_compiler") + ms = api.render( + recipe, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + ) assert len(ms) == 3 def test_different_git_vars(): - recipe = os.path.join(recipe_dir, '29_different_git_vars') + recipe = os.path.join(variants_dir, "29_different_git_vars") ms = api.render(recipe) versions = [m[0].version() for m in ms] assert "1.20.0" in versions @@ -499,14 +605,18 @@ def test_different_git_vars(): @pytest.mark.skipif(sys.platform != "linux", reason="recipe uses a unix specific script") def test_top_level_finalized(testing_config): # see https://github.com/conda/conda-build/issues/3618 - recipe = os.path.join(recipe_dir, '30_top_level_finalized') + recipe = os.path.join(variants_dir, "30_top_level_finalized") outputs = api.build(recipe, config=testing_config) xzcat_output = package_has_file(outputs[0], 'xzcat_output') assert '5.2.3' in xzcat_output def test_variant_subkeys_retained(testing_config): - m = api.render(os.path.join(recipe_dir, '31_variant_subkeys'), finalize=False, bypass_env_check=True)[0][0] + m = api.render( + os.path.join(variants_dir, "31_variant_subkeys"), + finalize=False, + bypass_env_check=True, + )[0][0] found_replacements = False from conda_build.build import get_all_replacements for variant in m.config.variants: diff --git a/tests/utils.py b/tests/utils.py index c3425f9cec..713917843c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -15,17 +15,11 @@ def numpy_installed(): return any([True for dist in linked(sys.prefix) if dist.name == "numpy"]) -def get_root_dir(): - import conda_build - - conda_build_dir = os.path.realpath(os.path.dirname(conda_build.__file__)) - return os.path.abspath(os.path.join(conda_build_dir, "..")) - - -thisdir = os.path.join(get_root_dir(), "tests") +thisdir = os.path.dirname(__file__) metadata_dir = os.path.join(thisdir, "test-recipes", "metadata") subpackage_dir = os.path.join(thisdir, "test-recipes", "split-packages") fail_dir = os.path.join(thisdir, "test-recipes", "fail") +variants_dir = os.path.join(thisdir, "test-recipes", "variants") archive_dir = os.path.join(thisdir, "archives") diff --git a/tests/variant_recipe/build_config.yaml b/tests/variant_recipe/build_config.yaml deleted file mode 100644 index 10feaf73db..0000000000 --- a/tests/variant_recipe/build_config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -requirements: - build: - # git chosen here because it is independent of compiler on win - - git From e19360008c0751cd0b8cd9493274d49a05f98555 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Jan 2023 11:34:57 -0600 Subject: [PATCH 026/366] [pre-commit.ci] pre-commit autoupdate (#4716) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/blacken-docs: v1.12.1 → 1.13.0](https://github.com/asottile/blacken-docs/compare/v1.12.1...1.13.0) - [github.com/PyCQA/pylint: v2.15.10 → v2.16.0b0](https://github.com/PyCQA/pylint/compare/v2.15.10...v2.16.0b0) Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b81c2d65ea..8ef6c925dd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: - id: darker additional_dependencies: [black==22.10.0] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.1 + rev: 1.13.0 hooks: - id: blacken-docs additional_dependencies: [black] @@ -56,7 +56,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.15.10 + rev: v2.16.0b0 hooks: - id: pylint args: [--exit-zero] From 1b1032520b53cbb34ab531037ac529e340096937 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 09:48:13 +0100 Subject: [PATCH 027/366] Refactor test_api_update_index.py to use pathlib (#4721) --- tests/test_api_update_index.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_api_update_index.py b/tests/test_api_update_index.py index cf1f06c44d..6573d5533a 100644 --- a/tests/test_api_update_index.py +++ b/tests/test_api_update_index.py @@ -1,12 +1,12 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os +from pathlib import Path -from conda_build import api +from conda_build.api import update_index def test_update_index(testing_workdir): - api.update_index(testing_workdir) - files = "repodata.json", "repodata.json.bz2" - for f in files: - assert os.path.isfile(os.path.join(testing_workdir, 'noarch', f)) + update_index(testing_workdir) + + for name in ("repodata.json", "repodata.json.bz2"): + assert Path(testing_workdir, "noarch", name).is_file() From 3cf5cfbc199db675e39286a61bf098e7dc1e1c5f Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 09:59:28 +0100 Subject: [PATCH 028/366] Remove unused test_api_inspect.py tests & global variable (#4724) --- tests/test_api_inspect.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/tests/test_api_inspect.py b/tests/test_api_inspect.py index f3fc6411a2..7e6e72d388 100644 --- a/tests/test_api_inspect.py +++ b/tests/test_api_inspect.py @@ -7,23 +7,8 @@ from conda_build import api from .utils import metadata_dir -thisdir = os.path.dirname(os.path.abspath(__file__)) - @pytest.mark.sanity def test_check_recipe(): """Technically not inspect, but close enough to belong here""" assert api.check(os.path.join(metadata_dir, "source_git_jinja2")) - - -# These tests are already being done in test_cli.py. If we have a better way to test, move here. 
-def test_inpect_linkages(): - pass - - -def test_inspect_objects(): - pass - - -def test_installable(): - pass From 569ef312a6765a11bba7e34907afbc7f228babe3 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 10:27:12 +0100 Subject: [PATCH 029/366] Combine `test_render.py` duplicate tests with pytest parametrize (#4726) * Combine duplicate tests as parametrized tests * Cleanup test_reduce_duplicate_specs --- tests/test_render.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/tests/test_render.py b/tests/test_render.py index 140d75001f..2fca77ad25 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -2,31 +2,36 @@ # SPDX-License-Identifier: BSD-3-Clause import os +import pytest + from conda_build import api from conda_build import render -def test_output_with_noarch_says_noarch(testing_metadata): - testing_metadata.meta['build']['noarch'] = 'python' - output = api.get_output_file_path(testing_metadata) - assert os.path.sep + "noarch" + os.path.sep in output[0] - - -def test_output_with_noarch_python_says_noarch(testing_metadata): - testing_metadata.meta['build']['noarch_python'] = True +@pytest.mark.parametrize( + "build", + [ + pytest.param({"noarch": "python"}, id="noarch"), + pytest.param({"noarch_python": True}, id="noarch_python"), + ], +) +def test_noarch_output(build, testing_metadata): + testing_metadata.meta["build"].update(build) output = api.get_output_file_path(testing_metadata) assert os.path.sep + "noarch" + os.path.sep in output[0] def test_reduce_duplicate_specs(testing_metadata): - reqs = {"build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"], "host": ["exact", "exact 1.2.3 1"]} - testing_metadata.meta["requirements"] = reqs + testing_metadata.meta["requirements"] = { + "build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"], + "host": ["exact", "exact 1.2.3 1"], + } render._simplify_to_exact_constraints(testing_metadata) - assert (testing_metadata.meta['requirements']['build'] == - testing_metadata.meta['requirements']['host']) - simplified_deps = testing_metadata.meta['requirements'] - assert len(simplified_deps['build']) == 1 - assert 'exact 1.2.3 1' in simplified_deps['build'] + simplified = testing_metadata.meta["requirements"] + + assert simplified["build"] == simplified["host"] + assert len(simplified["build"]) == 1 + assert "exact 1.2.3 1" in simplified["build"] def test_pin_run_as_build_preserve_string(testing_metadata): From 47af17ae7a22668c9c247d4b61d6440165badff9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 10:30:52 +0100 Subject: [PATCH 030/366] Minimize test_find_executable test and add Windows support (#4727) --- tests/test_os_utils_external.py | 83 ++++++++++++++------------------- 1 file changed, 34 insertions(+), 49 deletions(-) diff --git a/tests/test_os_utils_external.py b/tests/test_os_utils_external.py index caf8fd49bd..f964b13d38 100644 --- a/tests/test_os_utils_external.py +++ b/tests/test_os_utils_external.py @@ -1,57 +1,42 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import sys import os -import os.path +from pathlib import Path +from conda.common.compat import on_win from conda_build.os_utils.external import find_executable def test_find_executable(testing_workdir, monkeypatch): - if sys.platform != "win32": - import stat - - path_components = [] - - def create_file(unix_path, put_on_path, executable): - localized_path = os.path.join(testing_workdir, *unix_path.split('/')) - # empty prefix by default - extra bit at 
beginning of file - if sys.platform == "win32": - localized_path = localized_path + ".bat" - - dirname = os.path.split(localized_path)[0] - if not os.path.isdir(dirname): - os.makedirs(dirname) - - if sys.platform == "win32": - prefix = "@echo off\n" - else: - prefix = "#!/bin/bash\nexec 1>&2\n" - with open(localized_path, 'w') as f: - f.write(prefix + """ - echo ******* You have reached the dummy {}. It is likely there is a bug in - echo ******* conda that makes it not add the _build/bin directory onto the - echo ******* PATH before running the source checkout tool - exit -1 - """.format(localized_path)) - - if put_on_path: - path_components.append(dirname) - - if executable: - st = os.stat(localized_path) - os.chmod(localized_path, st.st_mode | stat.S_IEXEC) - - return localized_path - - create_file('executable/not/on/path/with/target_name', put_on_path=False, executable=True) - create_file('non_executable/on/path/with/target_name', put_on_path=True, executable=False) - create_file('executable/on/path/with/non_target_name', put_on_path=True, executable=True) - target_path = create_file('executable/on/path/with/target_name', put_on_path=True, executable=True) - create_file('another/executable/later/on/path/with/target_name', put_on_path=True, executable=True) - - monkeypatch.setenv('PATH', os.pathsep.join(path_components)) - - find = find_executable('target_name') - - assert find == target_path, f"Expected to find 'target_name' in '{target_path}', but found it in '{find}'" + search_path = [] + + def touch(target, searchable=True, executable=True, alternative=False): + path = Path( + testing_workdir, + "alt" if alternative else "not", + "exec" if executable else "not", + "search" if searchable else "not", + target, + ) + if on_win: + path = path.with_suffix(".bat") + path.parent.mkdir(parents=True, exist_ok=True) + + path.touch(0o100 if executable else 0o666) + + if searchable: + search_path.append(str(path.parent)) + + return str(path) + + touch("target", searchable=False) + # Windows doesn't have an execute bit so this is the path found + win_expected = touch("target", executable=False) + touch("not_target") + nix_expected = touch("target") + touch("target", alternative=True) + expected = win_expected if on_win else nix_expected + + monkeypatch.setenv("PATH", os.pathsep.join(search_path)) + + assert find_executable("target") == expected From 7bbb3d19be92fc0385413c427551fe19801e8c43 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 10:31:50 +0100 Subject: [PATCH 031/366] Inline test_api_build_go_package.py fixture (#4723) --- tests/test_api_build_go_package.py | 14 ++++---------- tests/utils.py | 1 + 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/test_api_build_go_package.py b/tests/test_api_build_go_package.py index 16e7a27385..4c752b780d 100644 --- a/tests/test_api_build_go_package.py +++ b/tests/test_api_build_go_package.py @@ -1,24 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os import pytest -from conda_build import api +from conda_build.api import build -from .utils import thisdir - - -@pytest.fixture() -def recipe(): - return os.path.join(thisdir, 'test-recipes', 'go-package') +from .utils import go_dir @pytest.mark.sanity @pytest.mark.serial -def test_recipe_build(recipe, testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, testing_workdir, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts 
testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - api.build(recipe, config=testing_config) + build(go_dir, config=testing_config) diff --git a/tests/utils.py b/tests/utils.py index 713917843c..05082eeb55 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -20,6 +20,7 @@ def numpy_installed(): subpackage_dir = os.path.join(thisdir, "test-recipes", "split-packages") fail_dir = os.path.join(thisdir, "test-recipes", "fail") variants_dir = os.path.join(thisdir, "test-recipes", "variants") +go_dir = os.path.join(thisdir, "test-recipes", "go-package") archive_dir = os.path.join(thisdir, "archives") From 4b6a7beb06d4254f7269b3c33df6ab03fa0ea39c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 11:01:58 +0100 Subject: [PATCH 032/366] Label variants_conda_build_sysroot fixture (#4712) --- tests/conftest.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 415830efda..92c3d4aab0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -211,7 +211,17 @@ def no_numpy_version(): @pytest.fixture( scope="function", - params=[{}, {"MACOSX_DEPLOYMENT_TARGET": ["10.9"]}] if on_mac else [{}], + params=[ + pytest.param({}, id="default MACOSX_DEPLOYMENT_TARGET"), + pytest.param( + {"MACOSX_DEPLOYMENT_TARGET": ["10.9"]}, + id="override MACOSX_DEPLOYMENT_TARGET", + ), + ] + if on_mac + else [ + pytest.param({}, id="no MACOSX_DEPLOYMENT_TARGET"), + ], ) def variants_conda_build_sysroot(monkeypatch, request): if not on_mac: From ce48a92180389aa6b4eceb695ad9bd32c77f0bc1 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 11:02:16 +0100 Subject: [PATCH 033/366] Bump actions/cache version (#4713) --- .github/workflows/tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 210ed2e5f8..4c10e922f0 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -100,7 +100,7 @@ jobs: shell: bash - name: Cache conda - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} @@ -211,7 +211,7 @@ jobs: shell: bash - name: Cache conda - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} @@ -327,7 +327,7 @@ jobs: shell: bash - name: Cache conda - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/conda_pkgs_dir key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} From d73b58e0c31dcc1390e20c9fc521bef9028e6489 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 18 Jan 2023 23:03:17 +0100 Subject: [PATCH 034/366] Inline fixtures in `tests/test_variants.py` (#4714) * Abstract single_version fixture The python specific variant is an insignificant detail that adds confusion to what the test is targeting. * Inline numpy_version_ignored fixture This fixture is only used once, bring it inline for simpler code. Furthermore, the test using this fixture is broken and needs to be investigated separately. * Inline no_numpy_version fixture and combine two near duplicate tests Combines test_get_package_variants_from_file and test_get_package_variants_from_dictionary_of_lists and parameterize them since they contain the same assertions for different inputs (yaml or dict). 
--- tests/conftest.py | 21 -- .../meta.yaml | 11 ++ .../03_numpy_matrix/conda_build_config.yaml | 8 - .../variants/03_numpy_matrix/meta.yaml | 12 -- tests/test_variants.py | 183 +++++++++--------- 5 files changed, 101 insertions(+), 134 deletions(-) create mode 100644 tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml delete mode 100644 tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml delete mode 100644 tests/test-recipes/variants/03_numpy_matrix/meta.yaml diff --git a/tests/conftest.py b/tests/conftest.py index 92c3d4aab0..d29f92ef5e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -188,27 +188,6 @@ def testing_env(testing_workdir, request, monkeypatch): return env_path -# these are functions so that they get regenerated each time we use them. -# They could be fixtures, I guess. -@pytest.fixture(scope="function") -def numpy_version_ignored(): - return { - "python": ["2.7.*", "3.5.*"], - "numpy": ["1.10.*", "1.11.*"], - "ignore_version": ["numpy"], - } - - -@pytest.fixture(scope="function") -def single_version(): - return {"python": "2.7.*", "numpy": "1.11.*"} - - -@pytest.fixture(scope="function") -def no_numpy_version(): - return {"python": ["2.7.*", "3.5.*"]} - - @pytest.fixture( scope="function", params=[ diff --git a/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml b/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml new file mode 100644 index 0000000000..141ac077e4 --- /dev/null +++ b/tests/test-recipes/variants/03_ignore_version_reduces_matrix/meta.yaml @@ -0,0 +1,11 @@ +package: + name: ignore_version_reduces_matrix + version: 1.0 + +requirements: + build: + - packageA + - packageB { packageB } + run: + - packageA + - packageB diff --git a/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml b/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml deleted file mode 100644 index d8481eabec..0000000000 --- a/tests/test-recipes/variants/03_numpy_matrix/conda_build_config.yaml +++ /dev/null @@ -1,8 +0,0 @@ -python: - - 2.7 - - 3.9 -numpy: - - 1.10 - - 1.16 -ignore_version: - - numpy diff --git a/tests/test-recipes/variants/03_numpy_matrix/meta.yaml b/tests/test-recipes/variants/03_numpy_matrix/meta.yaml deleted file mode 100644 index 0f5a69f2a9..0000000000 --- a/tests/test-recipes/variants/03_numpy_matrix/meta.yaml +++ /dev/null @@ -1,12 +0,0 @@ -package: - name: numpy_matrix - version: 1.0 - -requirements: - build: - - python - - numpy - - nomkl # [unix] - run: - - python - - numpy diff --git a/tests/test_variants.py b/tests/test_variants.py index 4a6a132862..731db493c9 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict import json import os from pathlib import Path @@ -10,113 +9,111 @@ import pytest import yaml -from conda_build import api, exceptions, variants -from conda_build.utils import package_has_file +from conda_build import api, exceptions +from conda_build.variants import ( + combine_specs, + dict_of_lists_to_list_of_dicts, + get_package_variants, + validate_spec, +) +from conda_build.utils import ensure_list, package_has_file from .utils import variants_dir -def test_later_spec_priority(single_version, no_numpy_version): - # override a single key - specs = OrderedDict() - specs['no_numpy'] = no_numpy_version - specs['single_ver'] = single_version - - combined_spec = variants.combine_specs(specs) - assert 
len(combined_spec) == 2 - assert combined_spec["python"] == ["2.7.*"] - - # keep keys that are not overwritten - specs = OrderedDict() - specs['single_ver'] = single_version - specs['no_numpy'] = no_numpy_version - combined_spec = variants.combine_specs(specs) - assert len(combined_spec) == 2 - assert len(combined_spec["python"]) == 2 - - -def test_get_package_variants_from_file( - testing_workdir, testing_config, no_numpy_version -): - variants_path = Path(testing_workdir, "variant_example.yaml") - variants_path.write_text(yaml.dump(no_numpy_version, default_flow_style=False)) +@pytest.mark.parametrize( + "variants", + [ + (["1.2", "3.4"], "5.6"), + ("1.2", ["3.4", "5.6"]), + ], +) +def test_spec_priority_overriding(variants): + name = "package" + + first, second = variants + ordered_specs = { + "first": {name: first}, + "second": {name: second}, + } - testing_config.variant_config_files = [str(variants_path)] + combined = combine_specs(ordered_specs)[name] + expected = ensure_list(second) + assert len(combined) == len(expected) + assert combined == expected + + +@pytest.mark.parametrize( + "as_yaml", + [ + pytest.param(True, id="yaml"), + pytest.param(False, id="dict"), + ], +) +def test_python_variants(testing_workdir, testing_config, as_yaml): + """Python variants are treated differently in conda recipes. Instead of being pinned against a + specific version they are converted into version ranges. E.g.: + + python 3.5 -> python >=3.5,<3.6.0a0 + otherPackages 3.5 -> otherPackages 3.5 + """ + variants = {"python": ["3.9", "3.10"]} testing_config.ignore_system_config = True + # write variants to disk + if as_yaml: + variants_path = Path(testing_workdir, "variant_example.yaml") + variants_path.write_text(yaml.dump(variants, default_flow_style=False)) + testing_config.variant_config_files = [str(variants_path)] + + # render the metadata metadata = api.render( os.path.join(variants_dir, "variant_recipe"), no_download_source=False, config=testing_config, + # if variants were written to disk then don't pass it along + variants=None if as_yaml else variants, ) - # one for each Python version. 
Numpy is not strictly pinned and should present only 1 dimension + # we should have one package/metadata per python version assert len(metadata) == 2 - assert ( - sum( - "python >=2.7,<2.8" in req - for (m, _, _) in metadata - for req in m.meta["requirements"]["run"] - ) - == 1 - ) - assert ( - sum( - "python >=3.5,<3.6" in req - for (m, _, _) in metadata - for req in m.meta["requirements"]["run"] - ) - == 1 - ) + # there should only be one run requirement for each package/metadata + assert len(metadata[0][0].meta["requirements"]["run"]) == 1 + assert len(metadata[1][0].meta["requirements"]["run"]) == 1 + # the run requirements should be python ranges + assert { + *metadata[0][0].meta["requirements"]["run"], + *metadata[1][0].meta["requirements"]["run"], + } == {"python >=3.9,<3.10.0a0", "python >=3.10,<3.11.0a0"} def test_use_selectors_in_variants(testing_workdir, testing_config): testing_config.variant_config_files = [ os.path.join(variants_dir, "selector_conda_build_config.yaml") ] - variants.get_package_variants(testing_workdir, testing_config) - - -def test_get_package_variants_from_dictionary_of_lists(testing_config, no_numpy_version): - testing_config.ignore_system_config = True + get_package_variants(testing_workdir, testing_config) - metadata = api.render( - os.path.join(variants_dir, "variant_recipe"), - no_download_source=False, - config=testing_config, - variants=no_numpy_version, - ) - # one for each Python version. Numpy is not strictly pinned and should present only 1 dimension - assert len(metadata) == 2, metadata - assert ( - sum( - "python >=2.7,<2.8" in req - for (m, _, _) in metadata - for req in m.meta["requirements"]["run"] - ) - == 1 +@pytest.mark.xfail( + reason=( + "7/19/2017 Strange failure. Can't reproduce locally. Test runs fine " + "with parallelism and everything. Test fails reproducibly on CI, but logging " + "into appveyor after failed run, test passes." + "1/9/2023 ignore_version doesn't work as advertised." ) - assert ( - sum( - "python >=3.5,<3.6" in req - for (m, _, _) in metadata - for req in m.meta["requirements"]["run"] - ) - == 1 +) +def test_variant_with_ignore_version_reduces_matrix(): + metadata = api.render( + os.path.join(variants_dir, "03_ignore_version_reduces_matrix"), + variants={ + "packageA": ["1.2", "3.4"], + "packageB": ["5.6", "7.8"], + # packageB is ignored so that dimension should get collapsed + "ignore_version": "packageB", + }, + finalize=False, ) - - -@pytest.mark.xfail(reason="Strange failure 7/19/2017. Can't reproduce locally. Test runs fine " - "with parallelism and everything. Test fails reproducibly on CI, but logging " - "into appveyor after failed run, test passes. =(") -def test_variant_with_ignore_numpy_version_reduces_matrix(numpy_version_ignored): - # variants are defined in yaml file in this folder - # there are two python versions and two numpy versions. However, because numpy is not pinned, - # the numpy dimensions should get collapsed. 
- recipe = os.path.join(variants_dir, "03_numpy_matrix") - metadata = api.render(recipe, variants=numpy_version_ignored, finalize=False) - assert len(metadata) == 2, metadata + assert len(metadata) == 2 def test_variant_with_numpy_pinned_has_matrix(): @@ -152,7 +149,7 @@ def test_no_satisfiable_variants_raises_error(): def test_zip_fields(): """Zipping keys together allows people to tie different versions as sets of combinations.""" v = {'python': ['2.7', '3.5'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = variants.dict_of_lists_to_list_of_dicts(v) + ld = dict_of_lists_to_list_of_dicts(v) assert len(ld) == 2 assert ld[0]['python'] == '2.7' assert ld[0]['vc'] == '9' @@ -161,7 +158,7 @@ def test_zip_fields(): # allow duplication of values, but lengths of lists must always match v = {'python': ['2.7', '2.7'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = variants.dict_of_lists_to_list_of_dicts(v) + ld = dict_of_lists_to_list_of_dicts(v) assert len(ld) == 2 assert ld[0]['python'] == '2.7' assert ld[0]['vc'] == '9' @@ -189,33 +186,33 @@ def test_validate_spec(): "corge": 42, } # valid spec - variants.validate_spec("spec", spec) + validate_spec("spec", spec) spec2 = dict(spec) spec2["bad-char"] = "bad-char" # invalid characters with pytest.raises(ValueError): - variants.validate_spec("spec[bad_char]", spec2) + validate_spec("spec[bad_char]", spec2) spec3 = dict(spec, zip_keys="bad_zip_keys") # bad zip_keys with pytest.raises(ValueError): - variants.validate_spec("spec[bad_zip_keys]", spec3) + validate_spec("spec[bad_zip_keys]", spec3) spec4 = dict(spec, zip_keys=[["bar", "baz"], ["qux", "quux"], ["quuz", "missing"]]) # zip_keys' zip_group has key missing from spec with pytest.raises(ValueError): - variants.validate_spec("spec[missing_key]", spec4) + validate_spec("spec[missing_key]", spec4) spec5 = dict(spec, zip_keys=[["bar", "baz"], ["qux", "quux", "quuz"], ["quuz"]]) # zip_keys' zip_group has duplicate key with pytest.raises(ValueError): - variants.validate_spec("spec[duplicate_key]", spec5) + validate_spec("spec[duplicate_key]", spec5) spec6 = dict(spec, baz=[4, 6]) # zip_keys' zip_group key fields have same length with pytest.raises(ValueError): - variants.validate_spec("spec[duplicate_key]", spec6) + validate_spec("spec[duplicate_key]", spec6) def test_cross_compilers(): @@ -261,7 +258,7 @@ def test_variant_input_with_zip_keys_keeps_zip_keys_list(): 'zip_keys': ['sqlite', 'zlib', 'xz'], 'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}} } - vrnts = variants.dict_of_lists_to_list_of_dicts(spec) + vrnts = dict_of_lists_to_list_of_dicts(spec) assert len(vrnts) == 2 assert vrnts[0].get("zip_keys") == spec["zip_keys"] From 9264757032eda0415226d1ab9a4fb12c42444c3a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 19 Jan 2023 19:06:52 +0100 Subject: [PATCH 035/366] Inline test_api_build_dll_package.py fixture (#4722) Co-authored-by: Travis Hathaway --- tests/test_api_build_dll_package.py | 14 ++++---------- tests/utils.py | 1 + 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/test_api_build_dll_package.py b/tests/test_api_build_dll_package.py index 32adb88cc4..80fc0267e6 100644 --- a/tests/test_api_build_dll_package.py +++ b/tests/test_api_build_dll_package.py @@ -1,23 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os import pytest -from conda_build import api +from conda_build.api import build -from .utils import thisdir - - -@pytest.fixture() -def recipe(): - return 
os.path.join(thisdir, 'test-recipes', 'dll-package') +from .utils import dll_dir @pytest.mark.sanity -def test_recipe_build(recipe, testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, testing_workdir, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - api.build(recipe, config=testing_config) + build(dll_dir, config=testing_config) diff --git a/tests/utils.py b/tests/utils.py index 05082eeb55..3872dd8b1c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -20,6 +20,7 @@ def numpy_installed(): subpackage_dir = os.path.join(thisdir, "test-recipes", "split-packages") fail_dir = os.path.join(thisdir, "test-recipes", "fail") variants_dir = os.path.join(thisdir, "test-recipes", "variants") +dll_dir = os.path.join(thisdir, "test-recipes", "dll-package") go_dir = os.path.join(thisdir, "test-recipes", "go-package") archive_dir = os.path.join(thisdir, "archives") From 60ca380a6206c49fe48432c0119a5fcfd238b1e9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 19 Jan 2023 19:08:33 +0100 Subject: [PATCH 036/366] Refactor `test_misc.py` and deprecate `CrossPlatformStLink` (#4728) * Combine pyc_f tests into parameterized test * Deprecate CrossPlatformStLink os.stat().st_nlink has been cross-platform since Python 3.2 --- conda_build/_link.py | 20 +++-- conda_build/build.py | 7 +- conda_build/conda_interface.py | 98 +++---------------------- news/4728-deprecate-CrossPlatformStLink | 19 +++++ tests/test_misc.py | 63 +++++++--------- 5 files changed, 74 insertions(+), 133 deletions(-) create mode 100644 news/4728-deprecate-CrossPlatformStLink diff --git a/conda_build/_link.py b/conda_build/_link.py index 50c0da3641..5f6d2a4c5a 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -3,10 +3,12 @@ """ This is code that is added to noarch Python packages. See conda_build/noarch_python.py. - """ +from __future__ import annotations + import os from os.path import dirname, exists, isdir, join, normpath +from pathlib import Path import re import sys import shutil @@ -52,12 +54,18 @@ def _unlink(path): pass -def pyc_f(f, version_info=sys.version_info): +def pyc_f( + path: str | os.PathLike, + version_info: tuple[int, ...] 
= sys.version_info, +) -> str: + path = Path(path) if version_info[0] == 2: - return f + 'c' - dn, fn = f.rsplit('/', 1) - return '%s/__pycache__/%s.cpython-%d%d.pyc' % ( - dn, fn[:-3], version_info[0], version_info[1]) + return str(path.with_suffix(".pyc")) + return str( + path.parent + / "__pycache__" + / f"{path.stem}.cpython-{version_info[0]}{version_info[1]}.pyc" + ) def link_files(src_root, dst_root, files): diff --git a/conda_build/build.py b/conda_build/build.py index 2722c470c5..0a5cf08c65 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -36,7 +36,6 @@ from .conda_interface import prefix_placeholder from .conda_interface import TemporaryDirectory from .conda_interface import VersionOrder -from .conda_interface import CrossPlatformStLink from .conda_interface import PathType, FileMode from .conda_interface import EntityEncoder from .conda_interface import get_rc_urls @@ -1412,8 +1411,10 @@ def build_info_files_json_v1(m, prefix, files, files_with_prefix): if prefix_placeholder and file_mode: file_info["prefix_placeholder"] = prefix_placeholder file_info["file_mode"] = file_mode - if file_info.get("path_type") == PathType.hardlink and CrossPlatformStLink.st_nlink( - path) > 1: + if ( + file_info.get("path_type") == PathType.hardlink + and os.stat(path).st_nlink > 1 + ): target_short_path_inode = get_inode(path) inode_paths = [files[index] for index, ino in enumerate(files_inodes) if ino == target_short_path_inode] file_info["inode_paths"] = inode_paths diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 9a63f9b3ef..733dc31090 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -1,9 +1,11 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + from functools import partial import os -from os import lstat from importlib import import_module +import warnings from conda import __version__ as CONDA_VERSION @@ -148,96 +150,18 @@ def try_exports(module, attr): PaddingError, UnsatisfiableError = PaddingError, UnsatisfiableError -# work-around for python bug on Windows prior to python 3.2 -# https://bugs.python.org/issue10027 -# Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation class CrossPlatformStLink: - _st_nlink = None - - def __call__(self, path): + def __call__(self, path: str | os.PathLike) -> int: return self.st_nlink(path) @classmethod - def st_nlink(cls, path): - if cls._st_nlink is None: - cls._initialize() - return cls._st_nlink(path) - - @classmethod - def _standard_st_nlink(cls, path): - return lstat(path).st_nlink - - @classmethod - def _windows_st_nlink(cls, path): - st_nlink = cls._standard_st_nlink(path) - if st_nlink != 0: - return st_nlink - else: - # cannot trust python on Windows when st_nlink == 0 - # get value using windows libraries to be sure of its true value - # Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation - GENERIC_READ = 0x80000000 - FILE_SHARE_READ = 0x00000001 - OPEN_EXISTING = 3 - hfile = cls.CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None, - OPEN_EXISTING, 0, None) - if hfile is None: - from ctypes import WinError - raise WinError( - "Could not determine determine number of hardlinks for %s" % path) - info = cls.BY_HANDLE_FILE_INFORMATION() - rv = cls.GetFileInformationByHandle(hfile, info) - cls.CloseHandle(hfile) - if rv == 0: - from ctypes import WinError - raise WinError("Could not determine file information for %s" % path) - return info.nNumberOfLinks 
- - @classmethod - def _initialize(cls): - if os.name != 'nt': - cls._st_nlink = cls._standard_st_nlink - else: - # http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858 - import ctypes - from ctypes import POINTER - from ctypes.wintypes import DWORD, HANDLE, BOOL - - cls.CreateFile = ctypes.windll.kernel32.CreateFileW - cls.CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, - DWORD, DWORD, HANDLE] - cls.CreateFile.restype = HANDLE - - # http://msdn.microsoft.com/en-us/library/windows/desktop/ms724211 - cls.CloseHandle = ctypes.windll.kernel32.CloseHandle - cls.CloseHandle.argtypes = [HANDLE] - cls.CloseHandle.restype = BOOL - - class FILETIME(ctypes.Structure): - _fields_ = [("dwLowDateTime", DWORD), - ("dwHighDateTime", DWORD)] - - class BY_HANDLE_FILE_INFORMATION(ctypes.Structure): - _fields_ = [("dwFileAttributes", DWORD), - ("ftCreationTime", FILETIME), - ("ftLastAccessTime", FILETIME), - ("ftLastWriteTime", FILETIME), - ("dwVolumeSerialNumber", DWORD), - ("nFileSizeHigh", DWORD), - ("nFileSizeLow", DWORD), - ("nNumberOfLinks", DWORD), - ("nFileIndexHigh", DWORD), - ("nFileIndexLow", DWORD)] - - cls.BY_HANDLE_FILE_INFORMATION = BY_HANDLE_FILE_INFORMATION - - # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952 - cls.GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle - cls.GetFileInformationByHandle.argtypes = [HANDLE, - POINTER(BY_HANDLE_FILE_INFORMATION)] - cls.GetFileInformationByHandle.restype = BOOL - - cls._st_nlink = cls._windows_st_nlink + def st_nlink(cls, path: str | os.PathLike) -> int: + warnings.warn( + "`conda_build.conda_interface.CrossPlatformStLink` is pending deprecation and will be removed in a " + "future release. Please use `os.stat().st_nlink` instead.", + PendingDeprecationWarning, + ) + return os.stat(path).st_nlink class SignatureError(Exception): diff --git a/news/4728-deprecate-CrossPlatformStLink b/news/4728-deprecate-CrossPlatformStLink new file mode 100644 index 0000000000..995757be81 --- /dev/null +++ b/news/4728-deprecate-CrossPlatformStLink @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* `conda_build.conda_interface.CrossPlatformStLink` is pending deprecation in favor of using `os.stat().st_nlink`. 
(#4728) + +### Docs + +* + +### Other + +* diff --git a/tests/test_misc.py b/tests/test_misc.py index fc9a415243..27cd9b430c 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -1,26 +1,25 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from os.path import join +import os +from pathlib import Path import pytest -from conda_build.utils import on_win -import conda_build._link as _link +from conda_build._link import pyc_f from conda_build.conda_interface import PathType, EntityEncoder, CrossPlatformStLink -def test_pyc_f_2(): - assert _link.pyc_f('sp/utils.py', (2, 7, 9)) == 'sp/utils.pyc' - - -def test_pyc_f_3(): - for f, r in [ - ('sp/utils.py', - 'sp/__pycache__/utils.cpython-34.pyc'), - ('sp/foo/utils.py', - 'sp/foo/__pycache__/utils.cpython-34.pyc'), - ]: - assert _link.pyc_f(f, (3, 4, 2)) == r +@pytest.mark.parametrize( + "source,python,compiled", + [ + ("path/utils.py", (2, 7), "path/utils.pyc"), + ("pa/th/utils.py", (2, 7), "pa/th/utils.pyc"), + ("path/utils.py", (3, 10), "path/__pycache__/utils.cpython-310.pyc"), + ("pa/th/utils.py", (3, 10), "pa/th/__pycache__/utils.cpython-310.pyc"), + ], +) +def test_pyc_f(source, python, compiled): + assert Path(pyc_f(source, python)) == Path(compiled) def test_pathtype(): @@ -33,34 +32,24 @@ def test_pathtype(): assert softlink.__json__() == "softlink" -def test_entity_encoder(tmpdir): - test_file = join(str(tmpdir), "test-file") +def test_entity_encoder(tmp_path): + test_file = tmp_path / "test-file" test_json = {"a": PathType("hardlink"), "b": 1} - with open(test_file, "w") as f: - json.dump(test_json, f, cls=EntityEncoder) + test_file.write_text(json.dumps(test_json, cls=EntityEncoder)) - with open(test_file) as f: - json_file = json.load(f) + json_file = json.loads(test_file.read_text()) assert json_file == {"a": "hardlink", "b": 1} -@pytest.mark.skipif(on_win, reason="link not available on win/py2.7") -def test_crossplatform_st_link(tmpdir): - from os import link - test_file = join(str(tmpdir), "test-file") - test_file_linked = join(str(tmpdir), "test-file-linked") - test_file_link = join(str(tmpdir), "test-file-link") +def test_crossplatform_st_link(tmp_path): + test_file = tmp_path / "test-file" + test_file_linked = tmp_path / "test-file-linked" + test_file_link = tmp_path / "test-file-link" + + test_file.touch() + test_file_link.touch() + os.link(test_file_link, test_file_linked) - open(test_file, "a").close() - open(test_file_link, "a").close() - link(test_file_link, test_file_linked) assert 1 == CrossPlatformStLink.st_nlink(test_file) assert 2 == CrossPlatformStLink.st_nlink(test_file_link) assert 2 == CrossPlatformStLink.st_nlink(test_file_linked) - - -@pytest.mark.skipif(not on_win, reason="already tested") -def test_crossplatform_st_link_on_win(tmpdir): - test_file = join(str(tmpdir), "test-file") - open(test_file, "a").close() - assert 1 == CrossPlatformStLink.st_nlink(test_file) From 5a3dec494809aaa07ee89b11c55a70c6cababc6d Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 19 Jan 2023 19:09:42 +0100 Subject: [PATCH 037/366] Refactor `test_pypi_skeleton.py` to use parametrize and convert `OrderedDict` to `dict` (#4731) * Refactored tests into parametrized tests * Switch from OrderedDict to dict --- tests/test_pypi_skeleton.py | 142 ++++++++++++++++++------------------ 1 file changed, 69 insertions(+), 73 deletions(-) diff --git a/tests/test_pypi_skeleton.py b/tests/test_pypi_skeleton.py index d86a81dbeb..5d4a347d42 100644 --- a/tests/test_pypi_skeleton.py +++ 
b/tests/test_pypi_skeleton.py @@ -1,92 +1,88 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict +import pytest +from conda.auxlib.ish import dals from conda_build.skeletons import pypi from conda_build.skeletons.pypi import _print_dict, _formating_value -def test_version_compare(): - short_version = '2.2' - long_version = '1.4.5' - post_version = '2.2.post3' - pre_version = '2.2.pre3' - alpha_version = '1.4.5a4' - beta_version = '1.4.5b4' - rc_version = '1.4.5rc4' - padding_version_short = '2.2.0' - padding_version_long = '1.4.5.0' +@pytest.mark.parametrize( + "version,version_range", + [ + ("2.2", " >=2.2,<3"), + ("1.4.5", " >=1.4.5,<1.5"), + ("2.2.post3", " >=2.2.post3,<3"), + ("2.2.pre3", " >=2.2.pre3,<3"), + ("1.4.5a4", " >=1.4.5a4,<1.5"), + ("1.4.5b4", " >=1.4.5b4,<1.5"), + ("1.4.5rc4", " >=1.4.5rc4,<1.5"), + ("2.2.0", " >=2.2.0,<2.3"), + ("1.4.5.0", " >=1.4.5.0,<1.4.6"), + ], +) +def test_version_compare(version, version_range): + assert pypi.convert_version(version) == version_range - assert pypi.convert_version(short_version) == ' >=2.2,<3' - assert pypi.convert_version(long_version) == ' >=1.4.5,<1.5' - assert pypi.convert_version(post_version) == ' >=2.2.post3,<3' - assert pypi.convert_version(pre_version) == ' >=2.2.pre3,<3' - assert pypi.convert_version(alpha_version) == ' >=1.4.5a4,<1.5' - assert pypi.convert_version(beta_version) == ' >=1.4.5b4,<1.5' - assert pypi.convert_version(rc_version) == ' >=1.4.5rc4,<1.5' - assert pypi.convert_version(padding_version_short) == ' >=2.2.0,<2.3' - assert pypi.convert_version(padding_version_long) == ' >=1.4.5.0,<1.4.6' - -def test_formating_value(): - assert _formating_value("summary", "SUMMARY SUMMARY") == " \"SUMMARY SUMMARY\"\n" - assert _formating_value("description", "DESCRIPTION DESCRIPTION") == " \"DESCRIPTION DESCRIPTION\"\n" - assert _formating_value("script", "SCRIPT VALUE") == " \"SCRIPT VALUE\"\n" - assert _formating_value("name", "{{name|lower}}") == " \"{{name|lower}}\"\n" - assert _formating_value("name", "NORMAL NAME") == " NORMAL NAME\n" +@pytest.mark.parametrize( + "name,value,result", + [ + ("summary", "SUMMARY SUMMARY", ' "SUMMARY SUMMARY"\n'), + ("description", "DESCRIPTION DESCRIPTION", ' "DESCRIPTION DESCRIPTION"\n'), + ("script", "SCRIPT VALUE", ' "SCRIPT VALUE"\n'), + ("name", "{{name|lower}}", ' "{{name|lower}}"\n'), + ("name", "NORMAL NAME", " NORMAL NAME\n"), + ], +) +def test_formating_value(name, value, result): + assert _formating_value(name, value) == result def test_print_dict(): recipe_metadata = { - "about": OrderedDict( - [ - ("home", "https://conda.io"), - ("license", "MIT"), - ("license_family", "MIT"), - ("summary", "SUMMARY SUMMARY SUMMARY"), - ("description", "DESCRIPTION DESCRIPTION DESCRIPTION"), - ] - ), - "source": OrderedDict( - [ - ("sha256", "4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732"), - ("url", "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz"), - ] - ), - "package": OrderedDict( - [("name", "{{ name|lower }}"), ("version", "{{ version }}")] - ), - "build": OrderedDict( - [ - ("number", 0), - ("script", "{{ PYTHON }} -m pip install . 
-vv"), - ] - ), + "about": { + "home": "https://conda.io", + "license": "MIT", + "license_family": "MIT", + "summary": "SUMMARY SUMMARY SUMMARY", + "description": "DESCRIPTION DESCRIPTION DESCRIPTION", + }, + "source": { + "sha256": "4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732", + "url": "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz", + }, + "package": { + "name": "{{ name|lower }}", + "version": "{{ version }}", + }, + "build": { + "number": 0, + "script": "{{ PYTHON }} -m pip install . -vv", + }, } + recipe_order = ["package", "source", "build", "about"] + recipe_yaml = dals( + """ + package: + name: "{{ name|lower }}" + version: "{{ version }}" - assert ( - _print_dict( - recipe_metadata, - order=["package", "source", "build", "about"], - ) - == """package: - name: "{{ name|lower }}" - version: "{{ version }}" - -source: - sha256: 4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732 - url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz" + source: + sha256: 4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732 + url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz" -build: - number: 0 - script: "{{ PYTHON }} -m pip install . -vv" + build: + number: 0 + script: "{{ PYTHON }} -m pip install . -vv" -about: - home: "https://conda.io" - license: MIT - license_family: MIT - summary: "SUMMARY SUMMARY SUMMARY" - description: "DESCRIPTION DESCRIPTION DESCRIPTION" + about: + home: "https://conda.io" + license: MIT + license_family: MIT + summary: "SUMMARY SUMMARY SUMMARY" + description: "DESCRIPTION DESCRIPTION DESCRIPTION" -""" + """ # yes, the trailing extra newline is necessary ) + assert _print_dict(recipe_metadata, order=recipe_order) == recipe_yaml From c752fc43b88928321eb7632a9840476b1b93abd0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 19 Jan 2023 19:10:04 +0100 Subject: [PATCH 038/366] Linking is possible on Windows (#4730) --- tests/test_post.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_post.py b/tests/test_post.py index 75c6bfc796..3ca9174448 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -29,7 +29,6 @@ def test_compile_missing_pyc(testing_workdir): assert not os.path.isfile(os.path.join(tmp, add_mangling(bad_file))) -@pytest.mark.skipif(on_win, reason="no linking on win") def test_hardlinks_to_copies(testing_workdir): with open('test1', 'w') as f: f.write("\n") From 84df3371595b25da8ad8165119b69908168a7a6b Mon Sep 17 00:00:00 2001 From: Katherine Kinnaman Date: Fri, 20 Jan 2023 11:31:36 -0600 Subject: [PATCH 039/366] Replaced instances of Anaconda Cloud with anaconda.org. (#4719) --- docs/source/index.rst | 6 ++++-- .../tutorials/building-conda-packages.rst | 4 ++-- ...changed-to-anacondaorg-in-conda-build-docs | 19 +++++++++++++++++++ 3 files changed, 25 insertions(+), 4 deletions(-) create mode 100644 news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs diff --git a/docs/source/index.rst b/docs/source/index.rst index e4033b4580..f544933cce 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -17,8 +17,8 @@ projects, most notably Python. For help packing a Python project, see the `packaging.python.org tutorial`_. OPTIONAL: If you are planning to upload your packages to -Anaconda Cloud, you will need an -`Anaconda Cloud `_ account and client. +`anaconda.org`_, you will need to make an anaconda.org account and +`install the Anaconda client`_. 
.. toctree:: :maxdepth: 1 @@ -32,3 +32,5 @@ Anaconda Cloud, you will need an .. _`packaging.python.org tutorial`: https://packaging.python.org/en/latest/tutorials/packaging-projects +.. _`anaconda.org`: https://anaconda.org +.. _`install the Anaconda client`: https://docs.anaconda.com/anaconda/install/ diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index 2e4015790f..c0d7093422 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -15,7 +15,7 @@ SEP and GDAL. Additional Windows-specific instructions are provided in the :ref:`toolkit` section. The final built packages from this tutorial are available on -`Anaconda Cloud`_: +`anaconda.org`_ (formerly known as Anaconda Cloud): * `SEP`_ @@ -469,7 +469,7 @@ including how to specify a config file and what can go into it, take a look at :ref:`conda-build-variant-config-files`. -.. _`Anaconda Cloud`: https://anaconda.org +.. _`anaconda.org`: https://anaconda.org .. _SEP: https://anaconda.org/wwarner/sep/files .. _GDAL: https://anaconda.org/conda-forge/gdal/files .. _`SEP recipe`: https://github.com/conda-forge/sep-feedstock diff --git a/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs b/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs new file mode 100644 index 0000000000..e46a165c11 --- /dev/null +++ b/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Replaced two instances of "Anaconda Cloud" with "anaconda.org". (#4719) + +### Other + +* From a4491480f75cd0aa33debab2beb37a6084ab5380 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Fri, 20 Jan 2023 14:57:04 -0500 Subject: [PATCH 040/366] Prune/update `test_variant.py` file (#4720) * Abstracting test spec * Change skip decorator to only target M1 Macs * Update test skip markers along with relevant imports * Abstract package info for test_zip_fields Co-authored-by: Ken Odegard --- tests/test_variants.py | 44 ++++++++++++++++++++---------------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/tests/test_variants.py b/tests/test_variants.py index 731db493c9..5e9d8419cc 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -3,12 +3,14 @@ import json import os from pathlib import Path +import platform import re import sys import pytest import yaml +from conda.common.compat import on_mac from conda_build import api, exceptions from conda_build.variants import ( combine_specs, @@ -135,35 +137,27 @@ def test_no_satisfiable_variants_raises_error(): recipe = os.path.join(variants_dir, "01_basic_templating") with pytest.raises(exceptions.DependencyNeedsBuildingError): api.render(recipe, permit_unsatisfiable_variants=False) - - # the packages are not installable anyway, so this should show a warning that recipe can't - # be finalized api.render(recipe, permit_unsatisfiable_variants=True) - # out, err = capsys.readouterr() - # print(out) - # print(err) - # print(caplog.text) - # assert "Returning non-final recipe; one or more dependencies was unsatisfiable" in err def test_zip_fields(): """Zipping keys together allows people to tie different versions as sets of combinations.""" - v = {'python': ['2.7', '3.5'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = dict_of_lists_to_list_of_dicts(v) - assert len(ld) == 
2 - assert ld[0]['python'] == '2.7' - assert ld[0]['vc'] == '9' - assert ld[1]['python'] == '3.5' - assert ld[1]['vc'] == '14' + variants = {'packageA': ['1.2', '3.4'], 'packageB': ['5', '6'], 'zip_keys': [('packageA', 'packageB')]} + zipped = dict_of_lists_to_list_of_dicts(variants) + assert len(zipped) == 2 + assert zipped[0]['packageA'] == '1.2' + assert zipped[0]['packageB'] == '5' + assert zipped[1]['packageA'] == '3.4' + assert zipped[1]['packageB'] == '6' # allow duplication of values, but lengths of lists must always match - v = {'python': ['2.7', '2.7'], 'vc': ['9', '14'], 'zip_keys': [('python', 'vc')]} - ld = dict_of_lists_to_list_of_dicts(v) - assert len(ld) == 2 - assert ld[0]['python'] == '2.7' - assert ld[0]['vc'] == '9' - assert ld[1]['python'] == '2.7' - assert ld[1]['vc'] == '14' + variants = {'packageA': ['1.2', '1.2'], 'packageB': ['5', '6'], 'zip_keys': [('packageA', 'packageB')]} + zipped = dict_of_lists_to_list_of_dicts(variants) + assert len(zipped) == 2 + assert zipped[0]['packageA'] == '1.2' + assert zipped[0]['packageB'] == '5' + assert zipped[1]['packageA'] == '1.2' + assert zipped[1]['packageB'] == '6' def test_validate_spec(): @@ -173,7 +167,7 @@ def test_validate_spec(): """ spec = { # normal expansions - "foo": [2.7, 3.7, 3.8], + "foo": [1.2, 3.4], # zip_keys are the values that need to be expanded as a set "zip_keys": [["bar", "baz"], ["qux", "quux", "quuz"]], "bar": [1, 2, 3], @@ -278,6 +272,7 @@ def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, capl assert "Adding .* to spec 'pytest-mock 1.6'" not in text +@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="Unsatisfiable dependencies for M1 MacOS: {'bzip2=1.0.6'}") def test_serial_builds_have_independent_configs(testing_config): recipe = os.path.join(variants_dir, "17_multiple_recipes_independent_config") recipes = [os.path.join(recipe, dirname) for dirname in ("a", "b")] @@ -471,6 +466,8 @@ def test_target_platform_looping(testing_config): assert len(outputs) == 2 +@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}") +# TODO Remove the above skip decorator once https://github.com/conda/conda-build/issues/4717 is resolved def test_numpy_used_variable_looping(testing_config): outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4 @@ -529,6 +526,7 @@ def test_exclusive_config_file(testing_workdir): assert variant['abc'] == '123' +@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="M1 Mac-specific file system error related to this test") def test_inner_python_loop_with_output(testing_config): outputs = api.get_output_file_paths( os.path.join(variants_dir, "test_python_as_subpackage_loop"), From 5a5db0a40afa7b7600919e90177a988afc2940b5 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 23 Jan 2023 10:29:18 -0500 Subject: [PATCH 041/366] Update test_utils file to use more up-to-date Python versions (#4735) --- tests/test_utils.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 5aa6469b72..98840c2158 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -287,21 +287,21 @@ def test_logger_config_from_file(testing_workdir, caplog, capfd, mocker): def test_ensure_valid_spec(): assert utils.ensure_valid_spec('python') == 'python' - assert utils.ensure_valid_spec('python 2.7') == 'python 2.7.*' - assert 
utils.ensure_valid_spec('python 2.7.2') == 'python 2.7.2.*' - assert utils.ensure_valid_spec('python 2.7.12 0') == 'python 2.7.12 0' - assert utils.ensure_valid_spec('python >=2.7,<2.8') == 'python >=2.7,<2.8' + assert utils.ensure_valid_spec('python 3.8') == 'python 3.8.*' + assert utils.ensure_valid_spec('python 3.8.2') == 'python 3.8.2.*' + assert utils.ensure_valid_spec('python 3.8.10 0') == 'python 3.8.10 0' + assert utils.ensure_valid_spec('python >=3.8,<3.9') == 'python >=3.8,<3.9' assert utils.ensure_valid_spec('numpy x.x') == 'numpy x.x' assert utils.ensure_valid_spec(utils.MatchSpec('numpy x.x')) == utils.MatchSpec('numpy x.x') def test_insert_variant_versions(testing_metadata): testing_metadata.meta['requirements']['build'] = ['python', 'numpy 1.13'] - testing_metadata.config.variant = {'python': '2.7', 'numpy': '1.11'} + testing_metadata.config.variant = {'python': '3.8', 'numpy': '1.11'} utils.insert_variant_versions(testing_metadata.meta.get('requirements', {}), testing_metadata.config.variant, 'build') # this one gets inserted - assert 'python 2.7.*' in testing_metadata.meta['requirements']['build'] + assert 'python 3.8.*' in testing_metadata.meta['requirements']['build'] # this one should not be altered assert 'numpy 1.13' in testing_metadata.meta['requirements']['build'] # the overall length does not change From a7e214c12faaeaafaddcfe3dd10d29e1ff0a4d19 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 23 Jan 2023 10:29:29 -0500 Subject: [PATCH 042/366] Update test_subpackages.py (#4737) --- .../_variant_override/conda_build_config.yaml | 2 +- .../python_test_dep/conda_build_config.yaml | 2 +- tests/test_subpackages.py | 13 +------------ 3 files changed, 3 insertions(+), 14 deletions(-) diff --git a/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml b/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml index f392b6d9b2..36dddf6134 100644 --- a/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/_variant_override/conda_build_config.yaml @@ -1,3 +1,3 @@ python: - - 2.7 + - 3.8 - 3.9 diff --git a/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml b/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml index f392b6d9b2..36dddf6134 100644 --- a/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml +++ b/tests/test-recipes/split-packages/python_test_dep/conda_build_config.yaml @@ -1,3 +1,3 @@ python: - - 2.7 + - 3.8 - 3.9 diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index e2b0b59855..f42e21ab45 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -91,7 +91,7 @@ def test_subpackage_variant_override(testing_config): assert len(outputs) == 3 -def test_intradependencies(testing_workdir, testing_config): +def test_intradependencies(testing_config): recipe = os.path.join(subpackage_dir, '_intradependencies') outputs1 = api.get_output_file_paths(recipe, config=testing_config) outputs1_set = {os.path.basename(p) for p in outputs1} @@ -372,14 +372,3 @@ def test_build_string_does_not_incorrectly_add_hash(testing_config): assert len(output_files) == 4 assert any("clang_variant-1.0-cling.tar.bz2" in f for f in output_files) assert any("clang_variant-1.0-default.tar.bz2" in f for f in output_files) - - -# def test_conda_pkg_v2_format(testing_config): -# recipe = os.path.join(subpackage_dir, '_alternate_type_conda2') -# output_files = api.get_output_file_paths(recipe, 
config=testing_config) -# assert len(output_files) == 1 -# assert output_files[0].endswith('.conda'), output_files[0] - -# out_files = api.build(recipe, config=testing_config) -# assert len(out_files) == 1 -# assert out_files[0].endswith('.conda'), out_files[0] From 39805450d1be4dab66e91381b2b2188a7a0d5d89 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 23 Jan 2023 10:29:37 -0500 Subject: [PATCH 043/366] Update test_source.py file (#4738) --- tests/test_source.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/test_source.py b/tests/test_source.py index 6776a3815d..8c33062d3d 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -100,7 +100,7 @@ def test_git_repo_with_single_subdir_does_not_enter_subdir(testing_metadata): @pytest.mark.sanity -def test_source_user_expand(testing_workdir): +def test_source_user_expand(): with TemporaryDirectory(dir=os.path.expanduser('~')) as tmp: with TemporaryDirectory() as tbz_srcdir: file_txt = os.path.join(tbz_srcdir, "file.txt") @@ -138,7 +138,7 @@ def test_hoist_different_name(testing_workdir): assert not os.path.isdir(nesteddir) -def test_append_hash_to_fn(testing_metadata, caplog): +def test_append_hash_to_fn(testing_metadata): relative_zip = 'testfn.zip' assert source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip' relative_tar_gz = 'testfn.tar.gz' @@ -158,7 +158,3 @@ def test_append_hash_to_fn(testing_metadata, caplog): {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}] reset_deduplicator() source.provide(testing_metadata) - # TODO: Can't seem to get this to work. Test passes when run by itself, but fails when run in whole - # serial suite. Some residual state, somehow. I suspect the deduplicator logic with the logger, - # but attempts to reset it have not been successful. - # assert any("No hash (md5, sha1, sha256) provided." 
in rec.message for rec in caplog.records) From 0cb645c8b7a4b7d405a56f71009b568751a0be5a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 24 Jan 2023 00:18:44 +0100 Subject: [PATCH 044/366] Refactor `test_published_examples.py` to use test parameterization (#4725) * Refactor unnecessary string splitting * Update is_valid_dir to support pathlib * Create get_valid_recipes helper function Co-authored-by: Travis Hathaway --- tests/test_published_examples.py | 27 ++++++---------- tests/utils.py | 53 +++++++++++++++++++++++--------- 2 files changed, 48 insertions(+), 32 deletions(-) diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index 2d76a08e6c..ae763c6f04 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -1,37 +1,28 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os +import sys import pytest -import sys - -from conda_build import api +from conda.testing.integration import BIN_DIRECTORY +from conda_build.api import build from conda_build.utils import check_call_env -from .utils import metadata_dir, is_valid_dir - -published_examples = os.path.join(os.path.dirname(metadata_dir), 'published_code') +from .utils import published_path, get_valid_recipes @pytest.mark.sanity def test_skeleton_pypi(testing_workdir): """published in docs at https://docs.conda.io/projects/conda-build/en/latest/user-guide/tutorials/build-pkgs-skeleton.html""" - conda_path = os.path.join(sys.prefix, 'Scripts' if sys.platform == 'win32' else 'bin', 'conda') - cmd = conda_path + ' skeleton pypi click' - check_call_env(cmd.split()) - cmd = conda_path + ' build click' - check_call_env(cmd.split()) - + conda_path = os.path.join(sys.prefix, BIN_DIRECTORY, "conda") -@pytest.fixture(params=[dirname for dirname in os.listdir(published_examples) - if is_valid_dir(published_examples, dirname)]) -def recipe(request): - return os.path.join(published_examples, request.param) + check_call_env([conda_path, "skeleton", "pypi", "click"]) + check_call_env([conda_path, "build", "click"]) -# This tests any of the folders in the test-recipes/published_code folder that don't start with _ @pytest.mark.sanity +@pytest.mark.parametrize("recipe", get_valid_recipes(published_path)) def test_recipe_builds(recipe, testing_config, testing_workdir): # These variables are defined solely for testing purposes, # so they can be checked within build scripts - api.build(recipe, config=testing_config) + build(str(recipe), config=testing_config) diff --git a/tests/utils.py b/tests/utils.py index 3872dd8b1c..70279188f2 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,13 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import os +from pathlib import Path import shlex import sys +from typing import Generator import pytest +from conda.common.compat import on_mac, on_win from conda_build.metadata import MetaData -from conda_build.utils import on_win from conda_build.conda_interface import linked @@ -15,21 +19,42 @@ def numpy_installed(): return any([True for dist in linked(sys.prefix) if dist.name == "numpy"]) -thisdir = os.path.dirname(__file__) -metadata_dir = os.path.join(thisdir, "test-recipes", "metadata") -subpackage_dir = os.path.join(thisdir, "test-recipes", "split-packages") -fail_dir = os.path.join(thisdir, "test-recipes", "fail") -variants_dir = os.path.join(thisdir, "test-recipes", "variants") -dll_dir = os.path.join(thisdir, "test-recipes", 
"dll-package") -go_dir = os.path.join(thisdir, "test-recipes", "go-package") -archive_dir = os.path.join(thisdir, "archives") +tests_path = Path(__file__).parent +metadata_path = tests_path / "test-recipes" / "metadata" +subpackage_path = tests_path / "test-recipes" / "split-packages" +fail_path = tests_path / "test-recipes" / "fail" +variants_path = tests_path / "test-recipes" / "variants" +dll_path = tests_path / "test-recipes" / "dll-package" +go_path = tests_path / "test-recipes" / "go-package" +published_path = tests_path / "test-recipes" / "published_code" +archive_path = tests_path / "archives" + +# backport +thisdir = str(tests_path) +metadata_dir = str(metadata_path) +subpackage_dir = str(subpackage_path) +fail_dir = str(fail_path) +variants_dir = str(variants_path) +dll_dir = str(dll_path) +go_dir = str(go_path) +published_dir = str(published_path) +archive_dir = str(archive_path) + + +def is_valid_dir(*parts: Path | str) -> bool: + path = Path(*parts) + return ( + # only directories are valid recipes + path.is_dir() + # recipes prefixed with _ are special and shouldn't be run as part of bulk tests + and not path.name.startswith("_") + # exclude macOS-only recipes + and (path.name not in ["osx_is_app"] or on_mac) + ) -def is_valid_dir(parent_dir, dirname): - valid = os.path.isdir(os.path.join(parent_dir, dirname)) - valid &= not dirname.startswith("_") - valid &= "osx_is_app" != dirname or sys.platform == "darwin" - return valid +def get_valid_recipes(*parts: Path | str) -> Generator[Path, None, None]: + yield from filter(is_valid_dir, Path(*parts).iterdir()) def add_mangling(filename): From 9a7ea907c42c1b642ba436548752148791abcc3d Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 24 Jan 2023 16:37:16 +0100 Subject: [PATCH 045/366] Convert tests into parametrized test (#4741) --- tests/test_create_test.py | 270 +++++++++++++++++++++----------------- 1 file changed, 146 insertions(+), 124 deletions(-) diff --git a/tests/test_create_test.py b/tests/test_create_test.py index 69877a72e8..0ebf6816df 100644 --- a/tests/test_create_test.py +++ b/tests/test_create_test.py @@ -1,126 +1,148 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os - -from conda_build import create_test as ct - - -def test_create_py_files_with_py_imports(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['time', 'datetime'] - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - - -def test_create_py_files_in_other_language(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'python', 'imports': ['time', 'datetime']}] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - - -def test_create_py_files_in_other_language_multiple_python_dicts(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'python', 'imports': ['time', 'datetime']}] - testing_metadata.meta['test']['imports'].append({'lang': 'python', - 'imports': ['bokeh', 'holoviews']}) - 
testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'import time\n' in data - assert 'import datetime\n' in data - assert 'import bokeh\n' in data - assert 'import holoviews\n' in data - - -def test_create_r_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['r-base', 'r-matrix'] - testing_metadata.meta['package']['name'] = 'r-conda-test' - ct.create_r_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.r') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'library(r-base)\n' in data - assert 'library(r-matrix)\n' in data - - -def test_create_r_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'r', 'imports': ['r-base', 'r-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-r' - ct.create_r_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.r') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'library(r-base)\n' in data - assert 'library(r-matrix)\n' in data - - -def test_create_pl_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['perl-base', 'perl-matrix'] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_pl_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.pl') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'use perl-base;\n' in data - assert 'use perl-matrix;\n' in data - - -def test_non_py_does_not_create_py_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['perl-base', 'perl-matrix'] - testing_metadata.meta['package']['name'] = 'perl-conda-test' - ct.create_py_files(testing_metadata) - py_test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.py') - assert not os.path.isfile(py_test_file), "non-python package should not create run_test.py" - - -def test_create_pl_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'perl', 'imports': ['perl-base', - 'perl-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-perl' - ct.create_pl_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.pl') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'use perl-base;\n' in data - assert 'use perl-matrix;\n' in data - - -def test_create_lua_files(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = ['lua-base', 'lua-matrix'] - testing_metadata.meta['package']['name'] = 'lua-conda-test' - ct.create_lua_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.lua') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'require "lua-base"\n' in data - assert 'require "lua-matrix"\n' in data - - -def test_create_lua_files_lang_spec(testing_workdir, testing_metadata): - testing_metadata.meta['test']['imports'] = [{'lang': 'lua', 'imports': ['lua-base', - 'lua-matrix']}] - testing_metadata.meta['package']['name'] = 'conda-test-lua' - 
ct.create_lua_files(testing_metadata) - test_file = os.path.join(testing_metadata.config.test_dir, 'run_test.lua') - assert os.path.isfile(test_file) - with open(test_file) as f: - data = f.readlines() - assert 'require "lua-base"\n' in data - assert 'require "lua-matrix"\n' in data +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import pytest + +from conda_build.create_test import ( + create_py_files, + create_lua_files, + create_pl_files, + create_r_files, +) + + +@pytest.mark.parametrize( + "name,imports,expected,unexpected", + [ + pytest.param( + "name", + ["time", "datetime"], + {".py": {"import time", "import datetime"}}, + {".r", ".pl", ".lua"}, + id="implicit Python imports", + ), + pytest.param( + "r-name", + [{"lang": "python", "imports": ["time", "datetime"]}], + {".r": set(), ".py": {"import time", "import datetime"}}, + {".pl", ".lua"}, + id="explicit Python imports", + ), + pytest.param( + "r-name", + [ + {"lang": "python", "imports": ["time"]}, + {"lang": "python", "imports": ["datetime"]}, + ], + {".r": set(), ".py": {"import time", "import datetime"}}, + {".pl", ".lua"}, + id="multiple explicit Python imports", + ), + pytest.param( + "r-name", + ["r-time", "r-datetime"], + {".r": {"library(r-time)", "library(r-datetime)"}}, + {".py", ".pl", ".lua"}, + id="implicit R imports", + ), + pytest.param( + "perl-name", + [{"lang": "r", "imports": ["r-time", "r-datetime"]}], + {".pl": set(), ".r": {"library(r-time)", "library(r-datetime)"}}, + {".py", ".lua"}, + id="explicit R imports", + ), + # unsupported syntax, why? + # pytest.param( + # "perl-name", + # [ + # {"lang": "r", "imports": ["r-time"]}, + # {"lang": "r", "imports": ["r-datetime"]}, + # ], + # {".r": {"library(r-time)", "library(r-datetime)"}}, + # {".py", ".pl", ".lua"}, + # id="multiple explicit R imports", + # ), + pytest.param( + "perl-name", + ["perl-time", "perl-datetime"], + {".pl": {"use perl-time;", "use perl-datetime;"}}, + {".py", ".r", ".lua"}, + id="implicit Perl imports", + ), + pytest.param( + "lua-name", + [{"lang": "perl", "imports": ["perl-time", "perl-datetime"]}], + {".lua": set(), ".pl": {"use perl-time;", "use perl-datetime;"}}, + {".py", ".r"}, + id="explicit Perl imports", + ), + # unsupported syntax, why? + # pytest.param( + # "lua-name", + # [ + # {"lang": "perl", "imports": ["perl-time"]}, + # {"lang": "perl", "imports": ["perl-datetime"]}, + # ], + # {".pl": {"use perl-time;", "use perl-datetime;"}}, + # {".py", ".r", ".lua"}, + # id="multiple explicit Perl imports", + # ), + pytest.param( + "lua-name", + ["lua-time", "lua-datetime"], + {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + {".py", ".r", ".pl"}, + id="implicit Lua imports", + ), + # why is this test different from the other explicit imports? + pytest.param( + "name", + [{"lang": "lua", "imports": ["lua-time", "lua-datetime"]}], + {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + {".py", ".r", ".pl"}, + id="explicit Lua imports", + ), + # unsupported syntax, why? 
+ # pytest.param( + # "name", + # [ + # {"lang": "lua", "imports": ["lua-time"]}, + # {"lang": "lua", "imports": ["lua-datetime"]}, + # ], + # {".lua": {'require "lua-time"', 'require "lua-datetime"'}}, + # {".py", ".r", ".pl"}, + # id="multiple explicit Lua imports", + # ), + ], +) +def test_create_run_test( + name: str, + imports: Any, + expected: dict[str, set[str]], + unexpected: set[str], + testing_metadata, +): + testing_metadata.meta["package"]["name"] = name + testing_metadata.meta["test"]["imports"] = imports + create_py_files(testing_metadata) + create_r_files(testing_metadata) + create_pl_files(testing_metadata) + create_lua_files(testing_metadata) + + # assert expected test file exists + for ext, tests in expected.items(): + test_file = Path(testing_metadata.config.test_dir, "run_test").with_suffix(ext) + assert test_file.is_file() + + # ensure all tests (for this language/ext) are present in the test file + assert tests <= set(filter(None, test_file.read_text().split("\n"))) + + # assert unexpected test files do not exist + for ext in unexpected: + test_file = Path(testing_metadata.config.test_dir, "run_test").with_suffix(ext) + assert not test_file.exists() From 1a7848cf50190c8d1f587df2abf922f70cea67c9 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Wed, 25 Jan 2023 12:21:40 -0500 Subject: [PATCH 046/366] Remove unused test fixtures (#4742) * Remove unused test fixtures in misc. conda-build tests --- tests/test_api_build_conda_v2.py | 2 +- tests/test_api_build_dll_package.py | 2 +- tests/test_api_build_go_package.py | 2 +- tests/test_api_convert.py | 20 ++++++------- tests/test_api_render.py | 12 ++++---- tests/test_api_skeleton.py | 30 +++++++++---------- tests/test_api_test.py | 8 ++--- tests/test_build.py | 4 +-- tests/test_cli.py | 46 ++++++++++++++--------------- tests/test_develop.py | 2 +- tests/test_inspect.py | 10 ------- tests/test_metadata.py | 4 +-- tests/test_post.py | 6 ++-- tests/test_published_examples.py | 4 +-- tests/test_utils.py | 6 ++-- tests/test_variants.py | 26 ++++++++-------- 16 files changed, 86 insertions(+), 98 deletions(-) diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index b7c38a96e8..328c340ea7 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -11,7 +11,7 @@ @pytest.mark.parametrize("pkg_format,pkg_ext", [(None, ".tar.bz2"), ("2", ".conda")]) def test_conda_pkg_format( - pkg_format, pkg_ext, testing_config, testing_workdir, monkeypatch, capfd + pkg_format, pkg_ext, testing_config, monkeypatch, capfd ): """Conda package format "2" builds .conda packages.""" diff --git a/tests/test_api_build_dll_package.py b/tests/test_api_build_dll_package.py index 80fc0267e6..8c2ede1e7b 100644 --- a/tests/test_api_build_dll_package.py +++ b/tests/test_api_build_dll_package.py @@ -8,7 +8,7 @@ @pytest.mark.sanity -def test_recipe_build(testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True diff --git a/tests/test_api_build_go_package.py b/tests/test_api_build_go_package.py index 4c752b780d..35cc12a965 100644 --- a/tests/test_api_build_go_package.py +++ b/tests/test_api_build_go_package.py @@ -9,7 +9,7 @@ @pytest.mark.sanity @pytest.mark.serial -def test_recipe_build(testing_config, testing_workdir, monkeypatch): +def test_recipe_build(testing_config, monkeypatch): # These variables are defined solely for 
testing purposes, # so they can be checked within build scripts testing_config.activate = True diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index dc973467da..de41fc9ca7 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -43,7 +43,7 @@ def assert_package_paths_matches_files(package_path): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_show_imports(testing_workdir, base_platform, package, capfd): +def test_show_imports(base_platform, package, capfd): package_name, example_file = package platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] @@ -72,7 +72,7 @@ def test_show_imports(testing_workdir, base_platform, package, capfd): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py')]) -def test_no_imports_found(testing_workdir, base_platform, package, capfd): +def test_no_imports_found(base_platform, package, capfd): package_name, example_file = package f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, @@ -89,7 +89,7 @@ def test_no_imports_found(testing_workdir, base_platform, package, capfd): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_no_platform(testing_workdir, base_platform, package): +def test_no_platform(base_platform, package): package_name, example_file = package f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, @@ -105,7 +105,7 @@ def test_no_platform(testing_workdir, base_platform, package): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_c_extension_error(testing_workdir, base_platform, package): +def test_c_extension_error(base_platform, package): package_name, example_file = package platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] @@ -130,7 +130,7 @@ def test_c_extension_error(testing_workdir, base_platform, package): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) -def test_c_extension_conversion(testing_workdir, base_platform, package): +def test_c_extension_conversion(base_platform, package): package_name, example_file = package platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] @@ -154,7 +154,7 @@ def test_c_extension_conversion(testing_workdir, base_platform, package): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py'), ('py-1.4.32', 'py/__init__.py')]) -def test_convert_platform_to_others(testing_workdir, base_platform, package): +def test_convert_platform_to_others(base_platform, package): package_name, example_file = package subdir = f'{base_platform}-64' f = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir, @@ -226,7 +226,7 @@ def test_convert_from_unix_to_win_creates_entry_points(testing_config): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_convert_dependencies(testing_workdir, base_platform, package): +def test_convert_dependencies(base_platform, package): package_name, example_file = 
package subdir = f'{base_platform}-64' f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, @@ -259,7 +259,7 @@ def test_convert_dependencies(testing_workdir, base_platform, package): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_convert_no_dependencies(testing_workdir, base_platform, package): +def test_convert_no_dependencies(base_platform, package): package_name, example_file = package subdir = f'{base_platform}-64' f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, @@ -289,7 +289,7 @@ def test_convert_no_dependencies(testing_workdir, base_platform, package): @pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) @pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) -def test_skip_conversion(testing_workdir, base_platform, package, capfd): +def test_skip_conversion(base_platform, package, capfd): package_name, example_file = package source_plat_arch = '{}-64' .format(base_platform) @@ -314,7 +314,7 @@ def test_skip_conversion(testing_workdir, base_platform, package, capfd): @pytest.mark.parametrize('base_platform', ['linux', 'osx']) @pytest.mark.parametrize('package', [('sparkmagic-0.12.1', '')]) -def test_renaming_executables(testing_workdir, base_platform, package): +def test_renaming_executables(base_platform, package): """Test that the files in /bin are properly renamed. When converting the bin/ directory to Scripts/, only scripts diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 452a642e43..eea75cb445 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -20,7 +20,7 @@ from .utils import metadata_dir, thisdir -def test_render_need_download(testing_workdir, testing_config): +def test_render_need_download(testing_config): # first, test that the download/render system renders all it can, # and accurately returns its needs @@ -74,7 +74,7 @@ def test_get_output_file_path_metadata_object(testing_metadata): "test_get_output_file_path_metadata_object-1.0-1.tar.bz2") -def test_get_output_file_path_jinja2(testing_workdir, testing_config): +def test_get_output_file_path_jinja2(testing_config): # If this test does not raise, it's an indicator that the workdir is not # being cleaned as it should. 
recipe = os.path.join(metadata_dir, "source_git_jinja2") @@ -98,7 +98,7 @@ def test_get_output_file_path_jinja2(testing_workdir, testing_config): @mock.patch('conda_build.source') -def test_output_without_jinja_does_not_download(mock_source, testing_workdir, testing_config): +def test_output_without_jinja_does_not_download(mock_source, testing_config): api.get_output_file_path(os.path.join(metadata_dir, "source_git"), config=testing_config)[0] mock_source.provide.assert_not_called() @@ -225,18 +225,18 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): assert all(len(export.split()) > 1 for export in run_exports), run_exports -def test_ignore_build_only_deps(testing_config): +def test_ignore_build_only_deps(): ms = api.render(os.path.join(thisdir, 'test-recipes', 'variants', 'python_in_build_only'), bypass_env_check=True, finalize=False) assert len(ms) == 1 -def test_merge_build_host_build_key(testing_workdir, testing_metadata): +def test_merge_build_host_build_key(): m = api.render(os.path.join(metadata_dir, '_no_merge_build_host'))[0][0] assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) -def test_merge_build_host_empty_host_section(testing_config): +def test_merge_build_host_empty_host_section(): m = api.render(os.path.join(metadata_dir, '_empty_host_avoids_merge'))[0][0] assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 18cb57e6d9..2233a9dda8 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -59,7 +59,7 @@ def test_repo(prefix, repo, package, version, testing_workdir, testing_config): @pytest.mark.slow -def test_name_with_version_specified(testing_workdir, testing_config): +def test_name_with_version_specified(testing_config): api.skeletonize( packages="sympy", repo="pypi", @@ -70,7 +70,7 @@ def test_name_with_version_specified(testing_workdir, testing_config): assert m.version() == "1.10" -def test_pypi_url(testing_workdir, testing_config): +def test_pypi_url(testing_config): api.skeletonize( packages="https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29", repo="pypi", @@ -110,7 +110,7 @@ def mock_metada_pylint(url_pylint_package): @pytest.fixture -def pkginfo_pylint(url_pylint_package): +def pkginfo_pylint(): # Hardcoding it to avoid to use the get_pkginfo because it takes too much time return { 'classifiers': [ @@ -158,8 +158,7 @@ def pkginfo_pylint(url_pylint_package): } -def test_get_entry_points(testing_workdir, pkginfo_pylint, - result_metadata_pylint): +def test_get_entry_points(pkginfo_pylint, result_metadata_pylint): pkginfo = pkginfo_pylint entry_points = get_entry_points(pkginfo) @@ -284,7 +283,6 @@ def test_get_tests_require(pkginfo_pylint, result_metadata_pylint): def test_get_package_metadata( - testing_workdir, testing_config, url_pylint_package, mock_metada_pylint, @@ -310,7 +308,7 @@ def test_get_package_metadata( @pytest.mark.slow -def test_pypi_with_setup_options(testing_workdir, testing_config): +def test_pypi_with_setup_options(testing_config): # Use photutils package below because skeleton will fail unless the setup.py is given # the flag --offline because of a bootstrapping a helper file that # occurs by default. 
@@ -325,7 +323,7 @@ def test_pypi_with_setup_options(testing_workdir, testing_config): assert '--offline' in m.meta['build']['script'] -def test_pypi_pin_numpy(testing_workdir, testing_config): +def test_pypi_pin_numpy(testing_config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. api.skeletonize(packages='msumastro', repo='pypi', version='0.9.0', @@ -337,7 +335,7 @@ def test_pypi_pin_numpy(testing_workdir, testing_config): api.build('msumastro') -def test_pypi_version_sorting(testing_workdir, testing_config): +def test_pypi_version_sorting(testing_config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. api.skeletonize(packages='impyla', repo='pypi', config=testing_config) @@ -350,12 +348,12 @@ def test_list_skeletons(): assert set(skeletons) == {'pypi', 'cran', 'cpan', 'luarocks', 'rpm'} -def test_pypi_with_entry_points(testing_workdir): +def test_pypi_with_entry_points(): api.skeletonize('planemo', repo='pypi', python_version="3.7") assert os.path.isdir('planemo') -def test_pypi_with_version_arg(testing_workdir): +def test_pypi_with_version_arg(): # regression test for https://github.com/conda/conda-build/issues/1442 api.skeletonize('PrettyTable', 'pypi', version='0.7.2') m = api.render('prettytable')[0][0] @@ -363,7 +361,7 @@ def test_pypi_with_version_arg(testing_workdir): @pytest.mark.slow -def test_pypi_with_extra_specs(testing_workdir, testing_config): +def test_pypi_with_extra_specs(testing_config): # regression test for https://github.com/conda/conda-build/issues/1697 # For mpi4py: testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') @@ -379,7 +377,7 @@ def test_pypi_with_extra_specs(testing_workdir, testing_config): @pytest.mark.slow -def test_pypi_with_version_inconsistency(testing_workdir, testing_config): +def test_pypi_with_version_inconsistency(testing_config): # regression test for https://github.com/conda/conda-build/issues/189 # For mpi4py: extra_specs = ['mpi4py'] @@ -392,7 +390,7 @@ def test_pypi_with_version_inconsistency(testing_workdir, testing_config): assert parse_version(m.version()) == parse_version("0.0.10") -def test_pypi_with_basic_environment_markers(testing_workdir): +def test_pypi_with_basic_environment_markers(): # regression test for https://github.com/conda/conda-build/issues/1974 api.skeletonize('coconut', 'pypi', version='1.2.2') m = api.render('coconut')[0][0] @@ -410,14 +408,14 @@ def test_pypi_with_basic_environment_markers(testing_workdir): assert "pygments" not in run_reqs -def test_setuptools_test_requirements(testing_workdir): +def test_setuptools_test_requirements(): api.skeletonize(packages='hdf5storage', repo='pypi') m = api.render('hdf5storage')[0][0] assert m.meta['test']['requires'] == ['nose >=1.0'] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") -def test_pypi_section_order_preserved(testing_workdir): +def test_pypi_section_order_preserved(): """ Test whether sections have been written in the correct order. 
""" diff --git a/tests/test_api_test.py b/tests/test_api_test.py index 4bb70a220c..1bce60a224 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -13,7 +13,7 @@ @pytest.mark.sanity -def test_recipe_test(testing_workdir, testing_config): +def test_recipe_test(testing_config): """Test calling conda build -t """ recipe = os.path.join(metadata_dir, 'has_prefix_files') metadata = api.render(recipe, config=testing_config)[0][0] @@ -22,7 +22,7 @@ def test_recipe_test(testing_workdir, testing_config): @pytest.mark.sanity -def test_package_test(testing_workdir, testing_config): +def test_package_test(testing_config): """Test calling conda build -t - rather than """ recipe = os.path.join(metadata_dir, 'has_prefix_files') metadata = api.render(recipe, config=testing_config)[0][0] @@ -30,14 +30,14 @@ def test_package_test(testing_workdir, testing_config): api.test(outputs[0], config=metadata.config) -def test_package_test_without_recipe_in_package(testing_workdir, testing_metadata): +def test_package_test_without_recipe_in_package(testing_metadata): """Can't test packages after building if recipe is not included. Not enough info to go on.""" testing_metadata.config.include_recipe = False output = api.build(testing_metadata, notest=True, copy_test_source_files=True)[0] api.test(output, config=testing_metadata.config) -def test_package_with_jinja2_does_not_redownload_source(testing_workdir, testing_config, mocker): +def test_package_with_jinja2_does_not_redownload_source(testing_config, mocker): recipe = os.path.join(metadata_dir, 'jinja2_build_str') metadata = api.render(recipe, config=testing_config, dirty=True)[0][0] outputs = api.build(metadata, notest=True, anaconda_upload=False) diff --git a/tests/test_build.py b/tests/test_build.py index e7e086f8e6..6e0296391c 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -43,7 +43,7 @@ def test_find_prefix_files(testing_workdir): assert len(list(build.have_prefix_files(files, testing_workdir))) == len(files) -def test_build_preserves_PATH(testing_workdir, testing_config): +def test_build_preserves_PATH(testing_config): m = api.render(os.path.join(metadata_dir, 'source_git'), config=testing_config)[0][0] ref_path = os.environ['PATH'] build.build(m, stats=None) @@ -233,7 +233,7 @@ def test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): assert output == expected_output -def test_rewrite_output(testing_workdir, testing_config, capsys): +def test_rewrite_output(testing_config, capsys): api.build(os.path.join(metadata_dir, "_rewrite_env"), config=testing_config) captured = capsys.readouterr() stdout = captured.out diff --git a/tests/test_cli.py b/tests/test_cli.py index 66d51fbbe3..a365b5cff7 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -60,7 +60,7 @@ def test_build_add_channel(): main_build.execute(args) -def test_build_without_channel_fails(testing_workdir): +def test_build_without_channel_fails(): # remove the conda forge channel from the arguments and make sure that we fail. If we don't, # we probably have channels in condarc, and this is not a good test. 
args = ['--no-anaconda-upload', '--no-activate', @@ -117,7 +117,7 @@ def test_no_filename_hash(testing_workdir, testing_metadata, capfd): assert not re.search('test_no_filename_hash.*h[0-9a-f]{%d}' % testing_metadata.config.hash_length, error) -def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): +def test_render_output_build_path(testing_workdir, testing_metadata, capfd): api.output_yaml(testing_metadata, 'meta.yaml') args = ['--output', testing_workdir] main_render.execute(args) @@ -128,7 +128,7 @@ def test_render_output_build_path(testing_workdir, testing_metadata, capfd, capl assert error == "" -def test_render_output_build_path_and_file(testing_workdir, testing_metadata, capfd, caplog): +def test_render_output_build_path_and_file(testing_workdir, testing_metadata, capfd): api.output_yaml(testing_metadata, 'meta.yaml') rendered_filename = 'out.yaml' args = ['--output', '--file', rendered_filename, testing_workdir] @@ -173,7 +173,7 @@ def test_build_output_build_path_multiple_recipes(testing_workdir, testing_metad assert output.rstrip().splitlines() == test_paths, error -def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config): +def test_slash_in_recipe_arg_keeps_build_id(testing_config): args = [os.path.join(metadata_dir, "has_prefix_files"), '--croot', testing_config.croot, '--no-anaconda-upload'] outputs = main_build.execute(args) @@ -186,7 +186,7 @@ def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config): @pytest.mark.sanity @pytest.mark.skipif(on_win, reason="prefix is always short on win.") -def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): +def test_build_long_test_prefix_default_enabled(): recipe_path = os.path.join(metadata_dir, '_test_long_test_prefix') args = [recipe_path, '--no-anaconda-upload'] main_build.execute(args) @@ -196,7 +196,7 @@ def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): main_build.execute(args) -def test_build_no_build_id(testing_workdir, testing_config): +def test_build_no_build_id(testing_config): args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id', '--croot', testing_config.croot, '--no-activate', '--no-anaconda-upload'] outputs = main_build.execute(args) @@ -207,7 +207,7 @@ def test_build_no_build_id(testing_workdir, testing_config): assert 'has_prefix_files_1' not in data -def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_config): +def test_build_multiple_recipes(testing_metadata): """Test that building two recipes in one CLI call separates the build environment for each""" os.makedirs('recipe1') os.makedirs('recipe2') @@ -222,7 +222,7 @@ def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_confi main_build.execute(args) -def test_build_output_folder(testing_workdir, testing_metadata, capfd): +def test_build_output_folder(testing_workdir, testing_metadata): api.output_yaml(testing_metadata, 'meta.yaml') with TemporaryDirectory() as tmp: out = os.path.join(tmp, 'out') @@ -234,7 +234,7 @@ def test_build_output_folder(testing_workdir, testing_metadata, capfd): os.path.basename(output))) -def test_build_source(testing_workdir): +def test_build_source(): with TemporaryDirectory() as tmp: args = [os.path.join(metadata_dir, '_pyyaml_find_header'), '--source', '--no-build-id', '--croot', tmp, '--no-activate', '--no-anaconda-upload', ] @@ -266,7 +266,7 @@ def test_render_output_build_path_set_python(testing_workdir, testing_metadata, @pytest.mark.sanity -def 
test_skeleton_pypi(testing_workdir, testing_config): +def test_skeleton_pypi(): args = ['pypi', 'peppercorn'] main_skeleton.execute(args) assert os.path.isdir('peppercorn') @@ -276,14 +276,14 @@ def test_skeleton_pypi(testing_workdir, testing_config): @pytest.mark.sanity -def test_skeleton_pypi_compatible_versions(testing_workdir, testing_config): +def test_skeleton_pypi_compatible_versions(): args = ['pypi', 'openshift'] main_skeleton.execute(args) assert os.path.isdir('openshift') @pytest.mark.slow -def test_skeleton_pypi_arguments_work(testing_workdir): +def test_skeleton_pypi_arguments_work(): """ These checks whether skeleton executes without error when these options are specified on the command line AND whether the underlying @@ -310,7 +310,7 @@ def test_skeleton_pypi_arguments_work(testing_workdir): assert m.version() == '0.2.2' -def test_metapackage(testing_config, testing_workdir): +def test_metapackage(testing_config): """the metapackage command creates a package with runtime dependencies specified on the CLI""" args = ['metapackage_test', '1.0', '-d', 'bzip2', '--no-anaconda-upload'] main_metapackage.execute(args) @@ -319,7 +319,7 @@ def test_metapackage(testing_config, testing_workdir): assert os.path.isfile(test_path) -def test_metapackage_build_number(testing_config, testing_workdir): +def test_metapackage_build_number(testing_config): """the metapackage command creates a package with runtime dependencies specified on the CLI""" args = ['metapackage_test_build_number', '1.0', '-d', 'bzip2', '--build-number', '1', '--no-anaconda-upload'] @@ -329,7 +329,7 @@ def test_metapackage_build_number(testing_config, testing_workdir): assert os.path.isfile(test_path) -def test_metapackage_build_string(testing_config, testing_workdir): +def test_metapackage_build_string(testing_config): """the metapackage command creates a package with runtime dependencies specified on the CLI""" args = ['metapackage_test_build_string', '1.0', '-d', 'bzip2', '--build-string', 'frank', '--no-anaconda-upload'] @@ -339,7 +339,7 @@ def test_metapackage_build_string(testing_config, testing_workdir): assert os.path.isfile(test_path) -def test_metapackage_metadata(testing_config, testing_workdir): +def test_metapackage_metadata(testing_config): args = ['metapackage_testing_metadata', '1.0', '-d', 'bzip2', "--home", "http://abc.com", "--summary", "wee", "--license", "BSD", '--no-anaconda-upload'] main_metapackage.execute(args) @@ -354,18 +354,18 @@ def test_metapackage_metadata(testing_config, testing_workdir): assert info['summary'] == 'wee' -def testing_index(testing_workdir): +def testing_index(): args = ['.'] main_index.execute(args) assert os.path.isfile('noarch/repodata.json') -def test_inspect_installable(testing_workdir): +def test_inspect_installable(): args = ['channels', '--test-installable', 'conda-team'] main_inspect.execute(args) -def test_inspect_linkages(testing_workdir, capfd): +def test_inspect_linkages(capfd): # get a package that has known object output args = ['linkages', 'python'] if sys.platform == 'win32': @@ -378,7 +378,7 @@ def test_inspect_linkages(testing_workdir, capfd): assert 'libncursesw' in output -def test_inspect_objects(testing_workdir, capfd): +def test_inspect_objects(capfd): # get a package that has known object output args = ['objects', 'python'] if sys.platform != 'darwin': @@ -392,7 +392,7 @@ def test_inspect_objects(testing_workdir, capfd): @pytest.mark.skipif(on_win, reason="Windows prefix length doesn't matter (yet?)") -def test_inspect_prefix_length(testing_workdir, 
capfd): +def test_inspect_prefix_length(capfd): from conda_build import api # build our own known-length package here test_base = os.path.expanduser("~/cbtmp") @@ -447,7 +447,7 @@ def test_develop(testing_env): @pytest.mark.xfail(on_win, reason="This is a flaky test that doesn't seem to be working well on Windows.") -def test_convert(testing_workdir, testing_config): +def test_convert(testing_config): # download a sample py2.7 package f = 'https://repo.anaconda.com/pkgs/free/win-64/affine-2.0.0-py27_0.tar.bz2' pkg_name = "affine-2.0.0-py27_0.tar.bz2" @@ -532,7 +532,7 @@ def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): assert any('py36' in output for output in outputs) -def test_build_skip_existing(testing_workdir, capfd, mocker): +def test_build_skip_existing(capfd, mocker): # build the recipe first empty_sections = os.path.join(metadata_dir, "empty_sections") args = ['--no-anaconda-upload', empty_sections] diff --git a/tests/test_develop.py b/tests/test_develop.py index f10d19e9a1..e70708de88 100644 --- a/tests/test_develop.py +++ b/tests/test_develop.py @@ -86,7 +86,7 @@ def test_write_to_conda_pth(sp_dir, conda_pth): assert len(lines) == exp_num_pths -def test_uninstall(sp_dir, conda_pth, request): +def test_uninstall(sp_dir, conda_pth): ''' `conda develop --uninstall pkg_path` invokes uninstall() to remove path from conda.pth - this is a unit test for uninstall diff --git a/tests/test_inspect.py b/tests/test_inspect.py index d2c4d812fd..238f09240a 100644 --- a/tests/test_inspect.py +++ b/tests/test_inspect.py @@ -31,13 +31,3 @@ def test_inspect_objects(): def test_channel_installable(): # make sure the default channel is installable as a reference assert api.test_installable('conda-team') - -# # create a channel that is not installable to validate function - -# platform = os.path.join(testing_workdir, subdir) -# output_file = os.path.join(platform, "empty_sections-0.0-0.tar.bz2") - -# # create the index so conda can find the file -# api.update_index(platform) - -# assert not api.test_installable(channel=to_url(testing_workdir)) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 2bf9b0c045..9eed828f3c 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -158,7 +158,7 @@ def test_native_compiler_metadata_win(testing_config, py_ver, mocker): assert any(dep.startswith(py_ver[1]) for dep in metadata.meta['requirements']['build']) -def test_native_compiler_metadata_linux(testing_config, mocker): +def test_native_compiler_metadata_linux(testing_config): testing_config.platform = 'linux' metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), config=testing_config, permit_unsatisfiable_variants=True, @@ -169,7 +169,7 @@ def test_native_compiler_metadata_linux(testing_config, mocker): assert any(dep.startswith('gfortran_linux-' + _64) for dep in metadata.meta['requirements']['build']) -def test_native_compiler_metadata_osx(testing_config, mocker): +def test_native_compiler_metadata_osx(testing_config): testing_config.platform = 'osx' metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), config=testing_config, permit_unsatisfiable_variants=True, diff --git a/tests/test_post.py b/tests/test_post.py index 3ca9174448..ed50b9dde9 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -29,7 +29,7 @@ def test_compile_missing_pyc(testing_workdir): assert not os.path.isfile(os.path.join(tmp, add_mangling(bad_file))) -def test_hardlinks_to_copies(testing_workdir): +def test_hardlinks_to_copies(): with 
open('test1', 'w') as f: f.write("\n") @@ -44,7 +44,7 @@ def test_hardlinks_to_copies(testing_workdir): assert os.lstat('test2').st_nlink == 1 -def test_postbuild_files_raise(testing_metadata, testing_workdir): +def test_postbuild_files_raise(testing_metadata): fn = 'buildstr', 'buildnum', 'version' for f in fn: with open(os.path.join(testing_metadata.config.work_dir, @@ -55,7 +55,7 @@ def test_postbuild_files_raise(testing_metadata, testing_workdir): @pytest.mark.skipif(on_win, reason="fix_shebang is not executed on win32") -def test_fix_shebang(testing_config): +def test_fix_shebang(): fname = 'test1' with open(fname, 'w') as f: f.write("\n") diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index ae763c6f04..45c17fbd31 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -12,7 +12,7 @@ @pytest.mark.sanity -def test_skeleton_pypi(testing_workdir): +def test_skeleton_pypi(): """published in docs at https://docs.conda.io/projects/conda-build/en/latest/user-guide/tutorials/build-pkgs-skeleton.html""" conda_path = os.path.join(sys.prefix, BIN_DIRECTORY, "conda") @@ -22,7 +22,7 @@ def test_skeleton_pypi(testing_workdir): @pytest.mark.sanity @pytest.mark.parametrize("recipe", get_valid_recipes(published_path)) -def test_recipe_builds(recipe, testing_config, testing_workdir): +def test_recipe_builds(recipe, testing_config): # These variables are defined solely for testing purposes, # so they can be checked within build scripts build(str(recipe), config=testing_config) diff --git a/tests/test_utils.py b/tests/test_utils.py index 98840c2158..1677f0f833 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -57,7 +57,7 @@ def test_merge_namespace_trees(namespace_setup): @pytest.fixture(scope='function') -def namespace_setup(testing_workdir, request): +def namespace_setup(testing_workdir): namespace = os.path.join(testing_workdir, 'namespace') package = os.path.join(namespace, 'package') makefile(os.path.join(package, "module.py")) @@ -65,7 +65,7 @@ def namespace_setup(testing_workdir, request): @pytest.mark.sanity -def test_disallow_merge_conflicts(namespace_setup, testing_config): +def test_disallow_merge_conflicts(namespace_setup): duplicate = os.path.join(namespace_setup, 'dupe', 'namespace', 'package', 'module.py') makefile(duplicate) with pytest.raises(IOError): @@ -250,7 +250,7 @@ def test_logger_filtering(caplog, capfd): log.removeHandler(logging.StreamHandler(sys.stderr)) -def test_logger_config_from_file(testing_workdir, caplog, capfd, mocker): +def test_logger_config_from_file(testing_workdir, capfd, mocker): test_file = os.path.join(testing_workdir, 'build_log_config.yaml') with open(test_file, 'w') as f: f.write(""" diff --git a/tests/test_variants.py b/tests/test_variants.py index 5e9d8419cc..e590c8a476 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -226,7 +226,7 @@ def test_variants_in_output_names(): assert len(outputs) == 4 -def test_variants_in_versions_with_setup_py_data(testing_workdir): +def test_variants_in_versions_with_setup_py_data(): recipe = os.path.join(variants_dir, "12_variant_versions") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 2 @@ -234,7 +234,7 @@ def test_variants_in_versions_with_setup_py_data(testing_workdir): assert any(os.path.basename(pkg).startswith('my_package-480.480') for pkg in outputs) -def test_git_variables_with_variants(testing_workdir, testing_config): +def test_git_variables_with_variants(testing_config): recipe = 
os.path.join(variants_dir, "13_git_vars") m = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True @@ -259,7 +259,7 @@ def test_variant_input_with_zip_keys_keeps_zip_keys_list(): @pytest.mark.serial @pytest.mark.xfail(sys.platform == "win32", reason="console readout issues on appveyor") -def test_ensure_valid_spec_on_run_and_test(testing_workdir, testing_config, caplog): +def test_ensure_valid_spec_on_run_and_test(testing_config, caplog): testing_config.debug = True testing_config.verbose = True recipe = os.path.join(variants_dir, "14_variant_in_run_and_test") @@ -329,7 +329,7 @@ def test_subspace_selection(testing_config): assert ms[0][0].config.variant['c'] == 'animal' -def test_get_used_loop_vars(testing_config): +def test_get_used_loop_vars(): m = api.render( os.path.join(variants_dir, "19_used_variables"), finalize=False, @@ -343,7 +343,7 @@ def test_get_used_loop_vars(testing_config): assert m.get_used_vars() == {'python', 'some_package', 'zlib', 'pthread_stubs', 'target_platform'} -def test_reprovisioning_source(testing_config): +def test_reprovisioning_source(): api.render(os.path.join(variants_dir, "20_reprovision_source")) @@ -383,7 +383,7 @@ def test_reduced_hashing_behavior(testing_config): assert not re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) -def test_variants_used_in_jinja2_conditionals(testing_config): +def test_variants_used_in_jinja2_conditionals(): ms = api.render( os.path.join(variants_dir, "21_conditional_sections"), finalize=False, @@ -394,7 +394,7 @@ def test_variants_used_in_jinja2_conditionals(testing_config): assert sum(m.config.variant['blas_impl'] == 'openblas' for m, _, _ in ms) == 1 -def test_build_run_exports_act_on_host(testing_config, caplog): +def test_build_run_exports_act_on_host(caplog): """Regression test for https://github.com/conda/conda-build/issues/2559""" api.render( os.path.join(variants_dir, "22_run_exports_rerendered_for_other_variants"), @@ -404,7 +404,7 @@ def test_build_run_exports_act_on_host(testing_config, caplog): assert "failed to get install actions, retrying" not in caplog.text -def test_detect_variables_in_build_and_output_scripts(testing_config): +def test_detect_variables_in_build_and_output_scripts(): ms = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="linux", @@ -457,7 +457,7 @@ def test_detect_variables_in_build_and_output_scripts(testing_config): assert 'OUTPUT_VAR' in used_vars -def test_target_platform_looping(testing_config): +def test_target_platform_looping(): outputs = api.get_output_file_paths( os.path.join(variants_dir, "25_target_platform_looping"), platform="win", @@ -468,12 +468,12 @@ def test_target_platform_looping(testing_config): @pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}") # TODO Remove the above skip decorator once https://github.com/conda/conda-build/issues/4717 is resolved -def test_numpy_used_variable_looping(testing_config): +def test_numpy_used_variable_looping(): outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4 -def test_exclusive_config_files(testing_workdir): +def test_exclusive_config_files(): with open('conda_build_config.yaml', 'w') as f: yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False) os.makedirs('config_dir') @@ -504,7 +504,7 @@ def test_exclusive_config_files(testing_workdir): assert variant['abc'] == '123' -def 
test_exclusive_config_file(testing_workdir): +def test_exclusive_config_file(): with open("conda_build_config.yaml", "w") as f: yaml.dump({"abc": ["someval"], "cwd": ["someval"]}, f, default_flow_style=False) os.makedirs("config_dir") @@ -606,7 +606,7 @@ def test_top_level_finalized(testing_config): assert '5.2.3' in xzcat_output -def test_variant_subkeys_retained(testing_config): +def test_variant_subkeys_retained(): m = api.render( os.path.join(variants_dir, "31_variant_subkeys"), finalize=False, From bd4e79263982a2a22beffa36174c70fa11b5c02a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 17:32:52 -0600 Subject: [PATCH 047/366] [pre-commit.ci] pre-commit autoupdate (#4740) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/pylint: v2.16.0b0 → v2.16.0b1](https://github.com/PyCQA/pylint/compare/v2.16.0b0...v2.16.0b1) - [github.com/Lucas-C/pre-commit-hooks: v1.3.1 → v1.4.1](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.3.1...v1.4.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ef6c925dd..527258861b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.16.0b0 + rev: v2.16.0b1 hooks: - id: pylint args: [--exit-zero] @@ -68,7 +68,7 @@ repos: # ignore all tests, not just tests data exclude: ^tests/ - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.3.1 + rev: v1.4.1 hooks: - id: insert-license files: \.py$ From 7b25006466d1fdcc19b6963d71bca227b421750e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 14:55:06 +0100 Subject: [PATCH 048/366] `test_api_skeleton_cran.py`: Inline parametrize, use pathlib, update CRAN packages (#4736) * Inline parametrize, use pathlib, update CRAN packages Use smaller CRAN packages for faster downloads. * Additional type hinting --- tests/test_api_skeleton_cran.py | 132 +++++++++++++++----------------- 1 file changed, 61 insertions(+), 71 deletions(-) diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 52babe2098..6a487c3ad3 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -4,80 +4,68 @@ Integrative tests of the CRAN skeleton that start from conda_build.api.skeletonize and check the output files ''' +from pathlib import Path +from typing import Sequence - -import os import pytest from conda_build import api from conda_build.skeletons.cran import CRAN_BUILD_SH_SOURCE, CRAN_META -from conda_build.utils import ensure_list - - -# CRAN packages to test license_file entry. 
-# (package, license_id, license_family, license_files) -cran_packages = [ - ("r-rmarkdown", "GPL-3", "GPL3", "GPL-3"), # cran: 'GPL-3' - ( - # cran: 'Artistic License 2.0' - "r-badger", - "Artistic-2.0", - "OTHER", - "Artistic-2.0", - ), - ("r-udpipe", "MPL-2.0", "OTHER", ""), # cran: 'MPL-2.0' - ("r-broom", "MIT", "MIT", ["MIT", "LICENSE"]), # cran: 'MIT + file LICENSE' - ( - # cran: 'BSD 2-clause License + file LICENSE' - "r-meanr", - "BSD_2_clause", - "BSD", - ["BSD_2_clause", "LICENSE"], - ), - ("r-zoo", "GPL-2 | GPL-3", "GPL3", ["GPL-2", "GPL-3"]), # cran: 'GPL-2 | GPL-3' - ("r-magree", "GPL-3 | GPL-2", "GPL3", ["GPL-3", "GPL-2"]), # cran: 'GPL-3 | GPL-2' - ("r-mglm", "GPL-2", "GPL2", "GPL-2"), # cran: 'GPL (>= 2)' -] @pytest.mark.slow -@pytest.mark.parametrize("package, license_id, license_family, license_files", cran_packages) -@pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_license(package, license_id, license_family, license_files, testing_workdir, testing_config): - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config) - m = api.render(os.path.join(package, 'meta.yaml'))[0][0] - m_license_id = m.get_value('about/license') - assert m_license_id == license_id - m_license_family = m.get_value('about/license_family') - assert m_license_family == license_family - m_license_files = ensure_list(m.get_value('about/license_file', '')) - license_files = ensure_list(license_files) - for m_license_file in m_license_files: - assert os.path.basename(m_license_file) in license_files - - -# CRAN packages to test skip entry. -# (package, skip_text) -cran_os_type_pkgs = [ - ('bigReg', 'skip: True # [not unix]'), - ('blatr', 'skip: True # [not win]') - ] - - -@pytest.mark.parametrize("package, skip_text", cran_os_type_pkgs) +@pytest.mark.parametrize( + "package,license_id,license_family,license_files", + [ + ("r-rmarkdown", "GPL-3", "GPL3", {"GPL-3"}), + ("r-fastdigest", "Artistic-2.0", "OTHER", {"Artistic-2.0"}), + ("r-tokenizers.bpe", "MPL-2.0", "OTHER", set()), + ("r-broom", "MIT", "MIT", {"MIT", "LICENSE"}), + ("r-meanr", "BSD_2_clause", "BSD", {"BSD_2_clause", "LICENSE"}), + ("r-base64enc", "GPL-2 | GPL-3", "GPL3", {"GPL-2", "GPL-3"}), + ("r-magree", "GPL-3 | GPL-2", "GPL3", {"GPL-3", "GPL-2"}), + ("r-mglm", "GPL-2", "GPL2", {"GPL-2"}), + ], +) +# @pytest.mark.flaky(rerun=5, reruns_delay=2) +def test_cran_license( + package: str, + license_id: str, + license_family: str, + license_files: Sequence[str], + tmp_path: Path, + testing_config, +): + api.skeletonize( + packages=package, repo="cran", output_dir=tmp_path, config=testing_config + ) + m = api.render(str(tmp_path / package / "meta.yaml"))[0][0] + + assert m.get_value("about/license") == license_id + assert m.get_value("about/license_family") == license_family + assert { + Path(license).name for license in m.get_value("about/license_file", "") + } == set(license_files) + + +@pytest.mark.parametrize( + "package,skip_text", + [ + ("bigReg", "skip: True # [not unix]"), + ("blatr", "skip: True # [not win]"), + ], +) @pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_os_type(package, skip_text, testing_workdir, testing_config): - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config) - fpath = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml') - with open(fpath) as f: - assert skip_text in f.read() +def test_cran_os_type(package: str, skip_text: str, tmp_path: Path, testing_config): + api.skeletonize( + 
packages=package, repo="cran", output_dir=tmp_path, config=testing_config + ) + assert skip_text in (tmp_path / f"r-{package.lower()}" / "meta.yaml").read_text() # Test cran skeleton argument --no-comments @pytest.mark.flaky(rerun=5, reruns_delay=2) -def test_cran_no_comments(testing_workdir, testing_config): +def test_cran_no_comments(tmp_path: Path, testing_config): package = "data.table" meta_yaml_comment = ' # This is required to make R link correctly on Linux.' build_sh_comment = '# Add more build steps here, if they are necessary.' @@ -88,16 +76,18 @@ def test_cran_no_comments(testing_workdir, testing_config): assert build_sh_comment in CRAN_BUILD_SH_SOURCE assert build_sh_shebang in CRAN_BUILD_SH_SOURCE - api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir, - config=testing_config, no_comments=True) + api.skeletonize( + packages=package, + repo="cran", + output_dir=tmp_path, + config=testing_config, + no_comments=True, + ) # Check that comments got removed - meta_yaml = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml') - with open(meta_yaml) as f: - assert meta_yaml_comment not in f.read() + meta_yaml_text = (tmp_path / f"r-{package.lower()}" / "meta.yaml").read_text() + assert meta_yaml_comment not in meta_yaml_text - build_sh = os.path.join(testing_workdir, 'r-' + package.lower(), 'build.sh') - with open(build_sh) as f: - build_sh_text = f.read() - assert build_sh_comment not in build_sh_text - assert build_sh_shebang in build_sh_text + build_sh_text = (tmp_path / f"r-{package.lower()}" / "build.sh").read_text() + assert build_sh_comment not in build_sh_text + assert build_sh_shebang in build_sh_text From e2763721a06d7023e86702c5e094709e4132cf41 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 15:05:54 +0100 Subject: [PATCH 049/366] `test_api_render.py`: remove unused `skipif` and converted `unittest.mock` to pytest-mocker (#4747) * Converted unittest.mock to pytest mocker * Remove unnecessary skipif for Python 2.7 --- tests/test_api_render.py | 49 ++++++++++++++++------------------------ 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/tests/test_api_render.py b/tests/test_api_render.py index eea75cb445..2e0c400474 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -4,20 +4,18 @@ This module tests the test API. These are high-level integration tests. 
Lower level unit tests should go in test_render.py """ - import os import re -import sys -from unittest import mock import pytest import yaml +from conda.common.compat import on_win + from conda_build import api, render from conda_build.conda_interface import subdir, cc_conda_build -from tests import utils -from .utils import metadata_dir, thisdir +from .utils import metadata_dir, variants_dir def test_render_need_download(testing_config): @@ -97,10 +95,12 @@ def test_get_output_file_path_jinja2(testing_config): "py{}{}_0_g262d444.tar.bz2".format(python, _hash)) -@mock.patch('conda_build.source') -def test_output_without_jinja_does_not_download(mock_source, testing_config): - api.get_output_file_path(os.path.join(metadata_dir, "source_git"), config=testing_config)[0] - mock_source.provide.assert_not_called() +def test_output_without_jinja_does_not_download(mocker, testing_config): + mock = mocker.patch("conda_build.source") + api.get_output_file_path( + os.path.join(metadata_dir, "source_git"), config=testing_config + ) + mock.assert_not_called() def test_pin_compatible_semver(testing_config): @@ -110,12 +110,7 @@ def test_pin_compatible_semver(testing_config): @pytest.mark.slow -@pytest.mark.skipif( - utils.on_win and sys.version_info < (3, 6), - reason="Failing tests on CI for Python 2.7" -) -@pytest.mark.xfail(sys.platform == "win32", - reason="Defaults channel has conflicting vc packages") +@pytest.mark.xfail(on_win, reason="Defaults channel has conflicting vc packages") def test_resolved_packages_recipe(testing_config): recipe_dir = os.path.join(metadata_dir, '_resolved_packages_host_build') metadata = api.render(recipe_dir, config=testing_config)[0][0] @@ -192,8 +187,11 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): os.environ['TEST_WORKDIR'] = testing_workdir try: - m = api.render(os.path.join(thisdir, 'test-recipes', 'variants', '19_used_variables'), - bypass_env_check=True, finalize=False)[0][0] + m = api.render( + os.path.join(variants_dir, "19_used_variables"), + bypass_env_check=True, + finalize=False, + )[0][0] # this one should have gotten clobbered by the values in the recipe assert m.config.variant['python'] not in python_versions # this confirms that we loaded the config file correctly @@ -226,8 +224,11 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): def test_ignore_build_only_deps(): - ms = api.render(os.path.join(thisdir, 'test-recipes', 'variants', 'python_in_build_only'), - bypass_env_check=True, finalize=False) + ms = api.render( + os.path.join(variants_dir, "python_in_build_only"), + bypass_env_check=True, + finalize=False, + ) assert len(ms) == 1 @@ -241,16 +242,6 @@ def test_merge_build_host_empty_host_section(): assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) -@pytest.mark.skipif(sys.platform != "linux2", reason="package on remote end is only on linux") -@pytest.mark.xfail(reason="It needs to be fixed for Python v2.7. #3681") -def test_run_exports_from_repo_without_channeldata(testing_config): - ms = api.render(os.path.join(metadata_dir, '_run_export_no_channeldata'), config=testing_config) - assert ms[0][0].meta['requirements']['build'] == ["exporty"] - # these two will be missing if run_exports has failed. 
- assert ms[0][0].meta['requirements']['host'] == ["exporty"] - assert ms[0][0].meta['requirements']['run'] == ["exporty"] - - def test_pin_expression_works_with_prereleases(testing_config): recipe = os.path.join(metadata_dir, '_pinning_prerelease') ms = api.render(recipe, config=testing_config) From 74d7ad67545a1d33122b941a51ff7c881da4af9f Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 15:06:25 +0100 Subject: [PATCH 050/366] `test_build.py`: removed unnecessary `skipif` markers and converted `os.path` to `pathlib` (#4746) * Drop skipif for Python 3+ test * Convert os.path to pathlib --- tests/test_build.py | 128 ++++++++++++++++++-------------------------- 1 file changed, 51 insertions(+), 77 deletions(-) diff --git a/tests/test_build.py b/tests/test_build.py index 6e0296391c..6f2e12aabb 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -4,28 +4,20 @@ This file tests the build.py module. It sits lower in the stack than the API tests, and is more unit-test oriented. """ - import json import os +from pathlib import Path import sys -import pytest +from conda.common.compat import on_win from conda_build import build, api -from conda_build.utils import on_win from .utils import metadata_dir, get_noarch_python_meta -prefix_tests = {"normal": os.path.sep} -if sys.platform == "win32": - prefix_tests.update({"double_backslash": "\\\\", - "forward_slash": "/"}) - - -def _write_prefix(filename, prefix, replacement): - with open(filename, "w") as f: - f.write(prefix.replace(os.path.sep, replacement)) - f.write("\n") +PREFIX_TESTS = {"normal": os.path.sep} +if on_win: + PREFIX_TESTS.update({"double_backslash": "\\\\", "forward_slash": "/"}) def test_find_prefix_files(testing_workdir): @@ -35,10 +27,10 @@ def test_find_prefix_files(testing_workdir): """ # create text files to be replaced files = [] - for slash_style in prefix_tests: - filename = os.path.join(testing_workdir, "%s.txt" % slash_style) - _write_prefix(filename, testing_workdir, prefix_tests[slash_style]) - files.append(filename) + for style, replacement in PREFIX_TESTS.items(): + filename = Path(testing_workdir, f"{style}.txt") + filename.write_text(testing_workdir.replace(os.path.sep, replacement)) + files.append(str(filename)) assert len(list(build.have_prefix_files(files, testing_workdir))) == len(files) @@ -81,16 +73,14 @@ def test_is_no_link(): assert build.is_no_link(no_link, "path/nope") is None -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="os.link is not available so can't setup test") def test_sorted_inode_first_path(testing_workdir): - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_one_hardlink = os.path.join(testing_workdir, "one_hl") - open(path_one, "a").close() - open(path_two, "a").close() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_hardlink = Path(testing_workdir, "one_hl") - os.link(path_one, path_one_hardlink) + path_one.touch() + path_two.touch() + os.link(path_one, path_hardlink) files = ["one", "two", "one_hl"] assert build.get_inode_paths(files, "one", testing_workdir) == ["one", "one_hl"] @@ -99,21 +89,18 @@ def test_sorted_inode_first_path(testing_workdir): def test_create_info_files_json(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_foo = os.path.join(testing_workdir, "foo") - open(path_one, 
"a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + Path(testing_workdir, "one").touch() + Path(testing_workdir, "two").touch() + Path(testing_workdir, "foo").touch() + files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "foo"] - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { + + assert json.loads((info_dir / "paths.json").read_text()) == { "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", "prefix_placeholder": "prefix/path", "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", @@ -125,28 +112,24 @@ def test_create_info_files_json(testing_workdir, testing_metadata): "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size_in_bytes": 0}], "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - reason="os.symlink is not available so can't setup test") def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_three = os.path.join(testing_workdir, "three") # do not make this one - path_foo = os.path.join(testing_workdir, "foo") - path_two_symlink = os.path.join(testing_workdir, "two_sl") - symlink_to_nowhere = os.path.join(testing_workdir, "nowhere_sl") - recursive_symlink = os.path.join(testing_workdir, "recursive_sl") - cycle1_symlink = os.path.join(testing_workdir, "cycle1_sl") - cycle2_symlink = os.path.join(testing_workdir, "cycle2_sl") - open(path_one, "a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_three = Path(testing_workdir, "three") # do not make this one + path_foo = Path(testing_workdir, "foo") + path_two_symlink = Path(testing_workdir, "two_sl") + symlink_to_nowhere = Path(testing_workdir, "nowhere_sl") + recursive_symlink = Path(testing_workdir, "recursive_sl") + cycle1_symlink = Path(testing_workdir, "cycle1_sl") + cycle2_symlink = Path(testing_workdir, "cycle2_sl") + + path_one.touch() + path_two.touch() + path_foo.touch() os.symlink(path_two, path_two_symlink) os.symlink(path_three, symlink_to_nowhere) @@ -160,8 +143,7 @@ def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { + assert json.loads((info_dir / "paths.json").read_text()) == { "paths": [ {"path_type": "softlink", "_path": "cycle1_sl", "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", @@ -189,31 +171,26 @@ def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size_in_bytes": 0}], "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output -@pytest.mark.skipif(on_win and sys.version[:3] == "2.7", - 
reason="os.link is not available so can't setup test") def test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): - info_dir = os.path.join(testing_workdir, "info") - os.mkdir(info_dir) - path_one = os.path.join(testing_workdir, "one") - path_two = os.path.join(testing_workdir, "two") - path_foo = os.path.join(testing_workdir, "foo") - path_one_hardlink = os.path.join(testing_workdir, "one_hl") - open(path_one, "a").close() - open(path_two, "a").close() - open(path_foo, "a").close() + info_dir = Path(testing_workdir, "info") + info_dir.mkdir() + path_one = Path(testing_workdir, "one") + path_two = Path(testing_workdir, "two") + path_foo = Path(testing_workdir, "foo") + path_one_hardlink = Path(testing_workdir, "one_hl") + + path_one.touch() + path_two.touch() + path_foo.touch() os.link(path_one, path_one_hardlink) + files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "one_hl", "foo"] - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, files_with_prefix) - files_json_path = os.path.join(info_dir, "paths.json") - expected_output = { + assert json.loads((info_dir / "paths.json").read_text()) == { "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", "prefix_placeholder": "prefix/path", "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", @@ -228,9 +205,6 @@ def test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size_in_bytes": 0}], "paths_version": 1} - with open(files_json_path) as files_json: - output = json.load(files_json) - assert output == expected_output def test_rewrite_output(testing_config, capsys): From 6ef45dc88bf0327caf9998fcf014a39adc43ec0d Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 15:11:57 +0100 Subject: [PATCH 051/366] `test_metadata.py`: Combine native compiler metadata tests (#4745) * Combine native compiler metadata tests * Add missing future import --- tests/test_metadata.py | 72 ++++++++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 9eed828f3c..d616d2ec4f 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -1,12 +1,14 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os import subprocess import pytest from conda_build.metadata import select_lines, MetaData -from conda_build import api, conda_interface +from conda_build import api from .utils import thisdir, metadata_dir from conda_build.utils import DEFAULT_SUBDIRS @@ -145,39 +147,41 @@ def test_build_bootstrap_env_by_path(testing_metadata): subprocess.check_call(cmd.split()) -@pytest.mark.parametrize('py_ver', [('2.7', 'vs2008_win-x86_64'), - ('3.4', 'vs2010_win-x86_64'), - ('3.7', 'vs2017_win-x86_64'), ]) -def test_native_compiler_metadata_win(testing_config, py_ver, mocker): - testing_config.platform = 'win' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), config=testing_config, - variants={'target_platform': 'win-x86_64'}, - permit_unsatisfiable_variants=True, finalize=False, - bypass_env_check=True, python=py_ver[0])[0][0] - # see parameterization - py_ver[1] is the compiler package name - assert any(dep.startswith(py_ver[1]) for dep in metadata.meta['requirements']['build']) - - -def test_native_compiler_metadata_linux(testing_config): - testing_config.platform 
= 'linux' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), - config=testing_config, permit_unsatisfiable_variants=True, - finalize=False, bypass_env_check=True)[0][0] - _64 = '64' if conda_interface.bits == 64 else '32' - assert any(dep.startswith('gcc_linux-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gxx_linux-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gfortran_linux-' + _64) for dep in metadata.meta['requirements']['build']) - - -def test_native_compiler_metadata_osx(testing_config): - testing_config.platform = 'osx' - metadata = api.render(os.path.join(metadata_dir, '_compiler_jinja2'), - config=testing_config, permit_unsatisfiable_variants=True, - finalize=False, bypass_env_check=True)[0][0] - _64 = '64' if conda_interface.bits == 64 else '32' - assert any(dep.startswith('clang_osx-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('clangxx_osx-' + _64) for dep in metadata.meta['requirements']['build']) - assert any(dep.startswith('gfortran_osx-' + _64) for dep in metadata.meta['requirements']['build']) +@pytest.mark.parametrize( + "platform,arch,python,compilers", + [ + ("win", "x86_64", "2.7", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.1", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.2", {"vs2008_win-x86_64"}), + ("win", "x86_64", "3.3", {"vs2010_win-x86_64"}), + ("win", "x86_64", "3.4", {"vs2010_win-x86_64"}), + ("win", "x86_64", "3.5", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.6", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.7", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.8", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.9", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.10", {"vs2017_win-x86_64"}), + ("win", "x86_64", "3.11", {"vs2017_win-x86_64"}), + ("linux", "32", "3.11", {"gcc_linux-32", "gxx_linux-32"}), + ("linux", "64", "3.11", {"gcc_linux-64", "gxx_linux-64"}), + ("osx", "32", "3.11", {"clang_osx-32", "clangxx_osx-32"}), + ("osx", "64", "3.11", {"clang_osx-64", "clangxx_osx-64"}), + ], +) +def test_native_compiler_metadata( + platform: str, arch: str, python: str, compilers: set[str], testing_config, mocker +): + testing_config.platform = platform + metadata = api.render( + os.path.join(metadata_dir, "_compiler_jinja2"), + config=testing_config, + variants={"target_platform": f"{platform}-{arch}"}, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + python=python, + )[0][0] + assert compilers <= set(metadata.meta["requirements"]["build"]) def test_compiler_metadata_cross_compiler(): From 0e4cec5c5d0cf1a386da834b92cf648806711520 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 15:13:29 +0100 Subject: [PATCH 052/366] `test_jinja_context.py`: remove unnecessary `skipif` clause and convert `tmpdir` to `tmp_path` (#4744) * Remove skip condition for setuptools<30.3.0 Conda-build requires `conda>=4.13`, which in turn depends on `setuptools>=31.0.1`, so there's no need to fence this test anymore. 
* Convert tmpdir to tmp_path * Add missing future import --- tests/test_jinja_context.py | 70 ++++++++++++++++++------------------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index a6e6eedefa..8654da5842 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -1,5 +1,10 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from pathlib import Path +from typing import Any + import pytest from conda_build import jinja_context @@ -115,53 +120,48 @@ def test_resolved_packages(testing_metadata): assert any('python' == pkg.split()[0] for pkg in packages) -try: - try: - # Recommended for setuptools 61.0.0+ - # (though may disappear in the future) - from setuptools.config.setupcfg import read_configuration - except ImportError: - from setuptools.config import read_configuration - del read_configuration -except ImportError: - _has_read_configuration = False -else: - _has_read_configuration = True - - -@pytest.mark.skipif(not _has_read_configuration, - reason="setuptools <30.3.0 cannot read metadata / options from 'setup.cfg'") -def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmpdir): - setup_py = tmpdir.join('setup.py') - setup_cfg = tmpdir.join('setup.cfg') - setup_py.write( +def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmp_path: Path): + setup_py = tmp_path / "setup.py" + setup_cfg = tmp_path / "setup.cfg" + setup_py.write_text( 'from setuptools import setup\n' 'setup(name="name_from_setup_py")\n' ) - setup_cfg.write( + setup_cfg.write_text( '[metadata]\n' 'name = name_from_setup_cfg\n' 'version = version_from_setup_cfg\n' '[options.extras_require]\n' 'extra = extra_package\n' ) - setup_file = str(setup_py) - setuptools_data = jinja_context.load_setup_py_data(testing_metadata, setup_file) + setuptools_data = jinja_context.load_setup_py_data(testing_metadata, str(setup_py)) # ensure that setup.cfg has priority over setup.py assert setuptools_data['name'] == 'name_from_setup_cfg' assert setuptools_data['version'] == 'version_from_setup_cfg' assert setuptools_data['extras_require'] == {'extra': ['extra_package']} -@pytest.mark.parametrize("filename,fmt,data,expected", [ - ("file.json", None, '{"a": 1}', {"a": 1}), - ("json_file", "json", '{"a": 1}', {"a": 1}), - ("file.toml", None, '[tbl]\na = 1', {"tbl": {"a": 1}}), - ("toml_file", "toml", '[tbl]\na = 1', {"tbl": {"a": 1}}), - ("file.yaml", None, 'a: 1\nb:\n - c: 2', {"a": 1, "b": [{"c": 2}]}), -]) -def test_load_file_data(tmpdir, filename, fmt, data, expected, testing_metadata): - f = tmpdir.join(filename) - f.write(data) - fn = str(f) - assert jinja_context.load_file_data(fn, fmt, config=testing_metadata.config) == expected +@pytest.mark.parametrize( + "filename,fmt,data,expected", + [ + ("file.json", None, '{"a": 1}', {"a": 1}), + ("json_file", "json", '{"a": 1}', {"a": 1}), + ("file.toml", None, "[tbl]\na = 1", {"tbl": {"a": 1}}), + ("toml_file", "toml", "[tbl]\na = 1", {"tbl": {"a": 1}}), + ("file.yaml", None, "a: 1\nb:\n - c: 2", {"a": 1, "b": [{"c": 2}]}), + ], +) +def test_load_file_data( + tmp_path: Path, + filename: str, + fmt: str | None, + data: str, + expected: Any, + testing_metadata, +): + path = tmp_path / filename + path.write_text(data) + assert ( + jinja_context.load_file_data(str(path), fmt, config=testing_metadata.config) + == expected + ) From d21c6fc2ce724a93fb8c3ee8ef432cd5d1180ed6 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 
26 Jan 2023 15:17:15 +0100 Subject: [PATCH 053/366] `test_patch.py`: Expand patch strip level test to cover more cases (#4743) * Expand patch strip level test to cover more cases * Add missing future import --- conda_build/source.py | 28 +++++++-------- tests/test_patch.py | 79 ++++++++++++++++++++++++++++++++----------- 2 files changed, 74 insertions(+), 33 deletions(-) diff --git a/conda_build/source.py b/conda_build/source.py index 5dbaa59d52..58954602be 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import locale import os from os.path import join, isdir, isfile, abspath, basename, exists, normpath, expanduser @@ -10,7 +12,7 @@ import tempfile import time from pathlib import Path -from typing import Optional +from typing import Iterable from .conda_interface import download, TemporaryDirectory from .conda_interface import hashsum_file @@ -495,7 +497,7 @@ def get_repository_info(recipe_path): _RE_CRLF = re.compile(rb"\r\n") -def _ensure_LF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: +def _ensure_LF(src: os.PathLike, dst: os.PathLike | None = None) -> Path: """Replace windows line endings with Unix. Return path to modified file.""" src = Path(src) dst = Path(dst or src) # overwrite src if dst is undefined @@ -503,7 +505,7 @@ def _ensure_LF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: return dst -def _ensure_CRLF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: +def _ensure_CRLF(src: os.PathLike, dst: os.PathLike | None = None) -> Path: """Replace unix line endings with win. Return path to modified file.""" src = Path(src) dst = Path(dst or src) # overwrite src if dst is undefined @@ -511,23 +513,21 @@ def _ensure_CRLF(src: os.PathLike, dst: Optional[os.PathLike] = None) -> Path: return dst -def _guess_patch_strip_level(filesstr, src_dir): - """ Determine the patch strip level automatically. 
""" - maxlevel = None - files = {filestr.encode(errors='ignore') for filestr in filesstr} - src_dir = src_dir.encode(errors='ignore') +def _guess_patch_strip_level( + patches: Iterable[str | os.PathLike], src_dir: str | os.PathLike +) -> tuple[int, bool]: + """Determine the patch strip level automatically.""" + patches = set(map(Path, patches)) + maxlevel = min(len(patch.parent.parts) for patch in patches) guessed = False - for file in files: - numslash = file.count(b'/') - maxlevel = numslash if maxlevel is None else min(maxlevel, numslash) if maxlevel == 0: patchlevel = 0 else: histo = {i: 0 for i in range(maxlevel + 1)} - for file in files: - parts = file.split(b'/') + for patch in patches: + parts = patch.parts for level in range(maxlevel + 1): - if os.path.exists(join(src_dir, *parts[-len(parts) + level:])): + if Path(src_dir, *parts[-len(parts) + level :]).exists(): histo[level] += 1 order = sorted(histo, key=histo.get, reverse=True) if histo[order[0]] == histo[order[1]]: diff --git a/tests/test_patch.py b/tests/test_patch.py index 6ca2702f24..e5e69beeaf 100644 --- a/tests/test_patch.py +++ b/tests/test_patch.py @@ -1,6 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import os +from __future__ import annotations + +from pathlib import Path from textwrap import dedent from types import SimpleNamespace from subprocess import CalledProcessError @@ -15,24 +17,63 @@ ) -def test_patch_strip_level(testing_workdir, monkeypatch): - patchfiles = {'some/common/prefix/one.txt', - 'some/common/prefix/two.txt', - 'some/common/prefix/three.txt'} - folders = ('some', 'common', 'prefix') - files = ('one.txt', 'two.txt', 'three.txt') - os.makedirs(os.path.join(*folders)) - for file in files: - with open(os.path.join(os.path.join(*folders), file), 'w') as f: - f.write('hello\n') - assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (0, False) - monkeypatch.chdir(folders[0]) - assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (1, False) - monkeypatch.chdir(folders[1]) - assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (2, False) - monkeypatch.chdir(folders[2]) - assert _guess_patch_strip_level(patchfiles, os.getcwd()) == (3, False) - monkeypatch.chdir(testing_workdir) +@pytest.mark.parametrize( + "patches,results", + [ + pytest.param( + [ + Path("one.txt"), + Path("some", "common", "prefix", "two.txt"), + Path("some", "common", "prefix", "three.txt"), + ], + [(0, False), (0, False), (0, False), (0, False)], + id="strip level 0", + ), + pytest.param( + [ + Path("some", "one.txt"), + Path("some", "common", "prefix", "two.txt"), + Path("some", "common", "prefix", "three.txt"), + ], + [(0, False), (1, False), (0, True), (0, True)], + id="strip level 1", + ), + pytest.param( + [ + Path("some", "common", "one.txt"), + Path("some", "common", "prefix", "two.txt"), + Path("some", "common", "prefix", "three.txt"), + ], + [(0, False), (1, False), (2, False), (0, True)], + id="strip level 2", + ), + pytest.param( + [ + Path("some", "common", "prefix", "one.txt"), + Path("some", "common", "prefix", "two.txt"), + Path("some", "common", "prefix", "three.txt"), + ], + [(0, False), (1, False), (2, False), (3, False)], + id="strip level 3", + ), + ], +) +def test_patch_strip_level( + patches: Path, results: list[tuple[int, bool]], tmp_path: Path +): + # generate dummy files + for patch in patches: + (tmp_path / patch).parent.mkdir(parents=True, exist_ok=True) + (tmp_path / patch).touch() + + src_dir = tmp_path + assert _guess_patch_strip_level(patches, 
src_dir) == results[0] + src_dir = src_dir / "some" + assert _guess_patch_strip_level(patches, src_dir) == results[1] + src_dir = src_dir / "common" + assert _guess_patch_strip_level(patches, src_dir) == results[2] + src_dir = src_dir / "prefix" + assert _guess_patch_strip_level(patches, src_dir) == results[3] @pytest.fixture From 2f5dadb3a28145585e83af0d1c6d53fe24c0dac1 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 26 Jan 2023 15:21:57 +0100 Subject: [PATCH 054/366] Minor `test_cran_skeleton.py` refactor (#4729) * Inline parametrization * Remove duplicate thisdir * Fix multiline blocks * Additional indentations * Use conda.common.io.dashlist --- conda_build/skeletons/cran.py | 5 +- tests/test_cran_skeleton.py | 156 +++++++++++++++++++++++++--------- tests/utils.py | 2 + 3 files changed, 121 insertions(+), 42 deletions(-) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 9c9e0e2c72..1ff51c2635 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -28,6 +28,7 @@ except ImportError: from yaml import SafeDumper +from conda.common.io import dashlist from conda_build import source, metadata from conda_build.config import get_or_merge_config from conda_build.conda_interface import TemporaryDirectory, cc_conda_build @@ -1581,7 +1582,9 @@ def get_license_info(license_text, allowed_license_families): license_text = " | ".join(license_texts) or license_text # Build the license_file entry and ensure it is empty if no license file - license_file = "license_file:\n - " + "\n - ".join(license_files) if license_files else "" + license_file = "" + if license_files: + license_file = f"license_file:{dashlist(license_files, indent=4)}\n" # Only one family is allowed, so guessing it once license_family = guess_license_family(license_text, allowed_license_families) diff --git a/tests/test_cran_skeleton.py b/tests/test_cran_skeleton.py index 2116fff0f7..b17fe5c59d 100644 --- a/tests/test_cran_skeleton.py +++ b/tests/test_cran_skeleton.py @@ -3,40 +3,111 @@ ''' Unit tests of the CRAN skeleton utility functions ''' - - import os import pytest +from conda.auxlib.ish import dals from conda_build.license_family import allowed_license_families from conda_build.skeletons.cran import (get_license_info, read_description_contents, remove_comments) +from .utils import cran_dir -thisdir = os.path.dirname(os.path.realpath(__file__)) - - -# (license_string, license_id, license_family, license_files) -cran_licenses = [('GPL-3', 'GPL-3', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\''), - ('Artistic License 2.0', 'Artistic-2.0', 'OTHER', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/Artistic-2.0\''), - ('MPL-2.0', 'MPL-2.0', 'OTHER', ''), - ('MIT + file LICENSE', 'MIT', 'MIT', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT\'\n - LICENSE'), - ('BSD 2-clause License + file LICENSE', 'BSD_2_clause', 'BSD', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_2_clause\'\n - LICENSE'), - ('GPL-2 | GPL-3', 'GPL-2 | GPL-3', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\'\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\''), - ('GPL-3 | GPL-2', 'GPL-3 | GPL-2', 'GPL3', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3\'\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\''), - ('GPL (>= 2)', 'GPL-2', 'GPL2', - 'license_file:\n - \'{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2\''), - ] - - 
-@pytest.mark.parametrize("license_string, license_id, license_family, license_files", cran_licenses) +@pytest.mark.parametrize( + "license_string, license_id, license_family, license_files", + [ + pytest.param( + "GPL-3", + "GPL-3", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + """ + ), + id="GPL-3", + ), + pytest.param( + "Artistic License 2.0", + "Artistic-2.0", + "OTHER", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/Artistic-2.0' + """ + ), + id="Artistic-2.0", + ), + pytest.param("MPL-2.0", "MPL-2.0", "OTHER", "", id="MPL-2.0"), + pytest.param( + "MIT + file LICENSE", + "MIT", + "MIT", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/MIT' + - LICENSE + """ + ), + id="MIT", + ), + pytest.param( + "BSD 2-clause License + file LICENSE", + "BSD_2_clause", + "BSD", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/BSD_2_clause' + - LICENSE + """ + ), + id="BSD_2_clause", + ), + pytest.param( + "GPL-2 | GPL-3", + "GPL-2 | GPL-3", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + """ + ), + id="GPL-2 | GPL-3", + ), + pytest.param( + "GPL-3 | GPL-2", + "GPL-3 | GPL-2", + "GPL3", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3' + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + """ + ), + id="GPL-3 | GPL-2", + ), + pytest.param( + "GPL (>= 2)", + "GPL-2", + "GPL2", + dals( + """ + license_file: + - '{{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-2' + """ + ), + id="GPL-2", + ), + ], +) def test_get_license_info(license_string, license_id, license_family, license_files): observed = get_license_info(license_string, allowed_license_families) assert observed[0] == license_id @@ -45,8 +116,8 @@ def test_get_license_info(license_string, license_id, license_family, license_fi def test_read_description_contents(): - description = os.path.join(thisdir, 'test-cran-skeleton', 'rpart', 'DESCRIPTION') - with open(description, 'rb') as fp: + description = os.path.join(cran_dir, "rpart", "DESCRIPTION") + with open(description, "rb") as fp: contents = read_description_contents(fp) assert contents['Package'] == 'rpart' assert contents['Priority'] == 'recommended' @@ -57,17 +128,20 @@ def test_read_description_contents(): def test_remove_comments(): - example = ''' -#!keep -# remove - # remove -keep -keep # keep -''' - expected = ''' -#!keep -keep -keep # keep -''' - observed = remove_comments(example) - assert observed == expected + with_comments = dals( + """ + #!keep + # remove + # remove + keep + keep # keep + """ + ) + without_comments = dals( + """ + #!keep + keep + keep # keep + """ + ) + assert remove_comments(with_comments) == without_comments diff --git a/tests/utils.py b/tests/utils.py index 70279188f2..9f94cd4a0e 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -28,6 +28,7 @@ def numpy_installed(): go_path = tests_path / "test-recipes" / "go-package" published_path = tests_path / "test-recipes" / "published_code" archive_path = tests_path / "archives" +cran_path = tests_path / "test-cran-skeleton" # backport thisdir = str(tests_path) @@ -39,6 +40,7 @@ def numpy_installed(): go_dir = str(go_path) published_dir = str(published_path) archive_dir = str(archive_path) +cran_dir = str(cran_path) def is_valid_dir(*parts: Path | str) -> bool: From 3e354656c93a439317a68a419b5555d65a1216a8 Mon Sep 17 00:00:00 
2001 From: Ken Odegard Date: Fri, 27 Jan 2023 06:02:12 +0100 Subject: [PATCH 055/366] `test_develop.py`: Refactor to use pathlib & abstract tests (#4739) * Refactor to use pathlib & abstract tests * Move duplicate paths into global DEVELOP_PATHS * rm_rf doesn't support pathlib --- tests/test_develop.py | 147 +++++++++++++++++++----------------------- 1 file changed, 67 insertions(+), 80 deletions(-) diff --git a/tests/test_develop.py b/tests/test_develop.py index e70708de88..add9f65c03 100644 --- a/tests/test_develop.py +++ b/tests/test_develop.py @@ -1,113 +1,100 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Simple tests for testing functions in develop module - lower level than going through API. -''' -import os -from os.path import dirname, join, exists +""" +from pathlib import Path +from typing import Generator + +import pytest from conda_build.develop import _uninstall, write_to_conda_pth from conda_build.utils import rm_rf -import pytest +from .utils import thisdir @pytest.fixture(scope="session") -def sp_dir(request): - ''' +def site_packages() -> Generator[Path, None, None]: + """ create site-packges/ directory in same place where test is located. This is where tests look conda.pth file. It is a session scoped fixture and it has a finalizer function invoked in the end to remove site-packages/ directory - ''' - base_dir = dirname(__file__) - sp = join(base_dir, 'site-packages') - if exists(sp): - rm_rf(sp) - - os.mkdir(sp) + """ + site_packages = Path(thisdir, "site-packages") + if site_packages.exists(): + rm_rf(str(site_packages)) - def cleanup(): - # session scoped cleanup is called at end of the session - rm_rf(sp) + site_packages.mkdir(exist_ok=True) - request.addfinalizer(cleanup) + yield site_packages - return sp + rm_rf(str(site_packages)) @pytest.fixture(scope="function") -def conda_pth(sp_dir): - ''' +def conda_pth(site_packages: Path) -> Generator[Path, None, None]: + """ Returns the path to conda.pth - though we don't expect name to change from conda.pth, better to keep this in one place Removes 'conda.pth' if it exists so each test starts without a conda.pth file - ''' - pth = join(sp_dir, 'conda.pth') - if exists(pth): - os.remove(pth) + """ + path = site_packages / "conda.pth" + if path.exists(): + path.unlink() - return pth + yield path + if path.exists(): + path.unlink() -# Note: following list is data used for testing - do not change it -_path_in_dev_mode = ['/Users/jsandhu/Documents/projects/CythonExample', - '/Users/jsandhu/Documents/projects/TestOne', - '/Users/jsandhu/Documents/projects/TestOne', - '/Users/jsandhu/Documents/projects/TestTwo'] -# following list of tuples contains the path and the number of lines -# added/remaining after invoking develop/uninstall. 
-# These are used to make assertions -_toadd_and_num_after_install = zip(_path_in_dev_mode, (1, 2, 2, 3)) -_torm_and_num_after_uninstall = zip(_path_in_dev_mode, (2, 1, 1, 0)) +DEVELOP_PATHS = ("/path/to/one", "/path/to/two", "/path/to/three") -def test_write_to_conda_pth(sp_dir, conda_pth): - ''' +def test_write_to_conda_pth(site_packages: Path, conda_pth: Path): + """ `conda develop pkg_path` invokes write_to_conda_pth() to write/append to - conda.pth - this is a basic unit test for write_to_conda_pth - - :param str sp_dir: path to site-packages directory returned by fixture - :param str conda_pth: path to conda.pth returned by fixture - ''' - assert not exists(conda_pth) - - for pth, exp_num_pths in _toadd_and_num_after_install: - write_to_conda_pth(sp_dir, pth) - assert exists(conda_pth) - # write to path twice but ensure it only gets written to fine once - write_to_conda_pth(sp_dir, pth) - with open(conda_pth) as f: - lines = f.readlines() - assert (pth + '\n') in lines - assert len(lines) == exp_num_pths - - -def test_uninstall(sp_dir, conda_pth): - ''' + conda.pth + """ + assert not conda_pth.exists() + + for count, path in enumerate(DEVELOP_PATHS, start=1): + # adding path + write_to_conda_pth(site_packages, path) + assert conda_pth.exists() + + develop_paths = list(filter(None, conda_pth.read_text().split("\n"))) + assert path in develop_paths + assert len(develop_paths) == count + + # adding path a second time has no effect + write_to_conda_pth(site_packages, path) + + assert list(filter(None, conda_pth.read_text().split("\n"))) == develop_paths + + +def test_uninstall(site_packages: Path, conda_pth: Path): + """ `conda develop --uninstall pkg_path` invokes uninstall() to remove path - from conda.pth - this is a unit test for uninstall - - It also includes a cleanup function that deletes the conda.pth file - - :param str sp_dir: path to site-packages directory returned by fixture - :param str conda_pth: path to conda.pth returned by fixture - ''' - # first write data in conda.pth if it doesn't yet exist - # if all tests are invoked, then conda.pth exists - if not exists(conda_pth): - for pth in _path_in_dev_mode: - write_to_conda_pth(sp_dir, pth) - - for to_rm, exp_num_pths in _torm_and_num_after_uninstall: - # here's where the testing begins - _uninstall(sp_dir, to_rm) - assert exists(conda_pth) - - with open(conda_pth) as f: - lines = f.readlines() - assert to_rm + '\n' not in lines - assert len(lines) == exp_num_pths + from conda.pth + """ + for path in DEVELOP_PATHS: + write_to_conda_pth(site_packages, path) + + for count, path in enumerate(DEVELOP_PATHS, start=1): + # removing path + _uninstall(site_packages, path) + assert conda_pth.exists() + + develop_paths = list(filter(None, conda_pth.read_text().split("\n"))) + assert path not in develop_paths + assert len(develop_paths) == len(DEVELOP_PATHS) - count + + # removing path a second time has no effect + _uninstall(site_packages, path) + + assert list(filter(None, conda_pth.read_text().split("\n"))) == develop_paths From cb4fb0a9e63589c55c741b8e2f7b606a4ce181d8 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 27 Jan 2023 08:35:19 +0100 Subject: [PATCH 056/366] `test_api_skeleton.py`: Reorg fixtures, remove pre-Python 2.7 condition, require `ruamel.yaml` (#4748) * Require ruamel.yaml * Inline parameterization and update skipif * Move fixtures to the top * Remove pre-Python 2.7 condition * Add missing future import * Convert all skeletonize/render to use tmp_path * Fix wording and include link to lua GH issue --- 
tests/requirements.txt | 1 + tests/test_api_skeleton.py | 456 +++++++++++++++++++------------------ 2 files changed, 238 insertions(+), 219 deletions(-) diff --git a/tests/requirements.txt b/tests/requirements.txt index 45c537eec1..6daa8592f2 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -31,5 +31,6 @@ python-libarchive-c pytz requests ripgrep +ruamel.yaml toml tqdm diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 2233a9dda8..ab54d16278 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -1,116 +1,54 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import fnmatch +from __future__ import annotations + import os +from pathlib import Path import subprocess import sys from pkg_resources import parse_version import pytest +import ruamel.yaml from conda_build.skeletons.pypi import get_package_metadata, \ get_entry_points, is_setuptools_enabled, convert_to_flat_list, \ get_dependencies, get_import_tests, get_tests_require, get_home, \ get_summary, get_license_name, clean_license_name -try: - import ruamel_yaml -except ImportError: - try: - import ruamel.yaml as ruamel_yaml - except ImportError: - raise ImportError("No ruamel_yaml library available.\n" - "To proceed, conda install ruamel_yaml") - from conda_build import api from conda_build.exceptions import DependencyNeedsBuildingError -import conda_build.os_utils.external as external from conda_build.utils import on_win -thisdir = os.path.dirname(os.path.realpath(__file__)) - -repo_packages = [('', 'pypi', 'pip', '8.1.2'), - ('r', 'cran', 'acs', ''), - ( - 'r', 'cran', - 'https://github.com/twitter/AnomalyDetection.git', - ''), - ('perl', 'cpan', 'Moo', ''), - ('', 'rpm', 'libX11-devel', ''), - # ('lua', luarocks', 'LuaSocket', ''), - ] - - -@pytest.mark.parametrize("prefix, repo, package, version", repo_packages) -def test_repo(prefix, repo, package, version, testing_workdir, testing_config): - api.skeletonize(package, repo, version=version, output_dir=testing_workdir, - config=testing_config) - try: - base_package, _ = os.path.splitext(os.path.basename(package)) - package_name = "-".join( - [prefix, base_package]) if prefix else base_package - contents = os.listdir(testing_workdir) - assert len([content for content in contents - if content.startswith(package_name.lower()) and - os.path.isdir(os.path.join(testing_workdir, content))]) - except: - print(os.listdir(testing_workdir)) - raise - - -@pytest.mark.slow -def test_name_with_version_specified(testing_config): - api.skeletonize( - packages="sympy", - repo="pypi", - version="1.10", - config=testing_config, - ) - m = api.render("sympy/meta.yaml")[0][0] - assert m.version() == "1.10" - -def test_pypi_url(testing_config): - api.skeletonize( - packages="https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29", - repo="pypi", - config=testing_config, - ) - m = api.render("sympy/meta.yaml")[0][0] - assert m.version() == "1.10" +SYMPY_URL = "https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29" - -@pytest.fixture -def url_pylint_package(): - return "https://pypi.python.org/packages/source/p/pylint/pylint-2.3.1.tar.gz#" \ - "sha256=723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" +PYLINT_VERSION = "2.3.1" +PYLINT_HASH_TYPE = "sha256" +PYLINT_HASH_VALUE = "723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" +PYLINT_FILENAME = f"pylint-{PYLINT_VERSION}.tar.gz" +PYLINT_URL = 
f"https://pypi.python.org/packages/source/p/pylint/{PYLINT_FILENAME}#{PYLINT_HASH_TYPE}={PYLINT_HASH_VALUE}" @pytest.fixture -def mock_metada_pylint(url_pylint_package): - import re - - version, hash_type, hash_value = re.findall( - r"pylint-(.*).tar.gz#(.*)=(.*)$", url_pylint_package - )[0] - +def mock_metadata(): return { - 'run_depends': '', - 'build_depends': '', - 'entry_points': '', - 'test_commands': '', - 'tests_require': '', - 'version': 'UNKNOWN', - 'pypiurl': url_pylint_package, - 'filename': f"black-{version}.tar.gz", - 'digest': [hash_type, hash_value], - 'import_tests': '', - 'summary': '' + "run_depends": "", + "build_depends": "", + "entry_points": "", + "test_commands": "", + "tests_require": "", + "version": "UNKNOWN", + "pypiurl": PYLINT_URL, + "filename": PYLINT_FILENAME, + "digest": [PYLINT_HASH_TYPE, PYLINT_HASH_VALUE], + "import_tests": "", + "summary": "", } @pytest.fixture -def pkginfo_pylint(): +def pylint_pkginfo(): # Hardcoding it to avoid to use the get_pkginfo because it takes too much time return { 'classifiers': [ @@ -158,14 +96,113 @@ def pkginfo_pylint(): } -def test_get_entry_points(pkginfo_pylint, result_metadata_pylint): - pkginfo = pkginfo_pylint +@pytest.fixture +def pylint_metadata(): + return { + "run_depends": ["astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], + "build_depends": [ + "pip", + "astroid >=2.2.0,<3", + "isort >=4.2.5,<5", + "mccabe >=0.6,<0.7", + ], + "entry_points": [ + "pylint = pylint:run_pylint", + "epylint = pylint:run_epylint", + "pyreverse = pylint:run_pyreverse", + "symilar = pylint:run_symilar", + ], + "test_commands": [ + "pylint --help", + "epylint --help", + "pyreverse --help", + "symilar --help", + ], + "tests_require": ["pytest"], + "version": PYLINT_VERSION, + "pypiurl": PYLINT_URL, + "filename": PYLINT_FILENAME, + "digest": [PYLINT_HASH_TYPE, PYLINT_HASH_VALUE], + "import_tests": [ + "pylint", + "pylint.checkers", + "pylint.extensions", + "pylint.pyreverse", + "pylint.reporters", + "pylint.reporters.ureports", + ], + "summary": "python code static checker", + "packagename": "pylint", + "home": "https://github.com/PyCQA/pylint", + "license": "GNU General Public (GPL)", + "license_family": "LGPL", + } + + +@pytest.mark.parametrize( + "prefix, repo, package, version", + [ + ("", "pypi", "pip", "8.1.2"), + ("r-", "cran", "acs", None), + ("r-", "cran", "https://github.com/twitter/AnomalyDetection.git", None), + ("perl-", "cpan", "Moo", None), + ("", "rpm", "libX11-devel", None), + # skeleton("luarocks") appears broken and needs work + # https://github.com/conda/conda-build/issues/4756 + # ("lua-", "luarocks", "LuaSocket", None), + ], +) +def test_repo( + prefix: str, + repo: str, + package: str, + version: str | None, + tmp_path: Path, + testing_config, +): + api.skeletonize( + package, + repo, + version=version, + output_dir=tmp_path, + config=testing_config, + ) + + package_name = f"{prefix}{Path(package).stem}".lower() + assert len( + [ + content + for content in tmp_path.iterdir() + if content.name.startswith(package_name) and content.is_dir() + ] + ) + + +@pytest.mark.parametrize( + "package,version", + [ + pytest.param("sympy", "1.10", id="with version"), + pytest.param(SYMPY_URL, None, id="with url"), + ], +) +def test_sympy(package: str, version: str | None, tmp_path: Path, testing_config): + api.skeletonize( + packages=package, + repo="pypi", + version=version, + config=testing_config, + output_dir=tmp_path, + ) + m = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] + assert m.version() == 
"1.10" + + +def test_get_entry_points(pylint_pkginfo, pylint_metadata): + pkginfo = pylint_pkginfo entry_points = get_entry_points(pkginfo) - assert entry_points["entry_points"] == result_metadata_pylint[ - "entry_points"] - assert entry_points["test_commands"] == result_metadata_pylint[ - "test_commands"] + assert entry_points["entry_points"] == pylint_metadata["entry_points"] + assert entry_points["test_commands"] == pylint_metadata["test_commands"] def test_convert_to_flat_list(): @@ -191,51 +228,6 @@ def test_is_setuptools_enabled(): }) -@pytest.fixture -def result_metadata_pylint(url_pylint_package): - return { - 'run_depends': [ - 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7' - ], - 'build_depends': [ - 'pip', 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7' - ], - 'entry_points': [ - 'pylint = pylint:run_pylint', - 'epylint = pylint:run_epylint', - 'pyreverse = pylint:run_pyreverse', - 'symilar = pylint:run_symilar' - ], - 'test_commands': [ - 'pylint --help', - 'epylint --help', - 'pyreverse --help', - 'symilar --help' - ], - 'tests_require': ['pytest'], - 'version': '2.3.1', - 'pypiurl': url_pylint_package, - 'filename': 'black-2.3.1.tar.gz', - 'digest': [ - 'sha256', - '723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1' - ], - 'import_tests': [ - 'pylint', - 'pylint.checkers', - 'pylint.extensions', - 'pylint.pyreverse', - 'pylint.reporters', - 'pylint.reporters.ureports' - ], - 'summary': 'python code static checker', - 'packagename': 'pylint', - 'home': 'https://github.com/PyCQA/pylint', - 'license': 'GNU General Public (GPL)', - 'license_family': 'LGPL' - } - - def test_get_dependencies(): assert get_dependencies( ['astroid >=2.2.0,<3 #COMMENTS', 'isort >=4.2.5,<5', @@ -251,9 +243,8 @@ def test_get_dependencies(): 'mccabe >=0.6,<0.7'] -def test_get_import_tests(pkginfo_pylint, result_metadata_pylint): - assert get_import_tests(pkginfo_pylint) \ - == result_metadata_pylint["import_tests"] +def test_get_import_tests(pylint_pkginfo, pylint_metadata): + assert get_import_tests(pylint_pkginfo) == pylint_metadata["import_tests"] def test_get_home(): @@ -269,34 +260,27 @@ def test_get_summary(): assert get_summary({"summary": 'SUMMARY "QUOTES"'}) == r"SUMMARY \"QUOTES\"" -def test_license_name(url_pylint_package, pkginfo_pylint): +def test_license_name(pylint_pkginfo): license_name = "GNU General Public License (GPL)" - assert get_license_name(url_pylint_package, pkginfo_pylint, True, {}) \ - == license_name + assert get_license_name(PYLINT_URL, pylint_pkginfo, True, {}) == license_name assert clean_license_name(license_name) == "GNU General Public (GPL)" assert clean_license_name("MIT License") == "MIT" -def test_get_tests_require(pkginfo_pylint, result_metadata_pylint): - assert get_tests_require(pkginfo_pylint) == result_metadata_pylint[ - "tests_require"] +def test_get_tests_require(pylint_pkginfo, pylint_metadata): + assert get_tests_require(pylint_pkginfo) == pylint_metadata["tests_require"] -def test_get_package_metadata( - testing_config, - url_pylint_package, - mock_metada_pylint, - result_metadata_pylint -): +def test_get_package_metadata(testing_config, mock_metadata, pylint_metadata): get_package_metadata( - url_pylint_package, - mock_metada_pylint, + PYLINT_URL, + mock_metadata, {}, ".", "3.7", False, False, - [url_pylint_package], + [PYLINT_URL], False, True, [], @@ -304,42 +288,53 @@ def test_get_package_metadata( config=testing_config, setup_options=[], ) - assert mock_metada_pylint == result_metadata_pylint + assert 
mock_metadata == pylint_metadata @pytest.mark.slow -def test_pypi_with_setup_options(testing_config): +def test_pypi_with_setup_options(tmp_path: Path, testing_config): # Use photutils package below because skeleton will fail unless the setup.py is given # the flag --offline because of a bootstrapping a helper file that # occurs by default. # Test that the setup option is used in constructing the skeleton. - api.skeletonize(packages='photutils', repo='pypi', version='0.2.2', - setup_options='--offline', - config=testing_config) + api.skeletonize( + packages="photutils", + repo="pypi", + version="0.2.2", + setup_options="--offline", + config=testing_config, + output_dir=tmp_path, + ) # Check that the setup option occurs in bld.bat and build.sh. - m = api.render('photutils')[0][0] - assert '--offline' in m.meta['build']['script'] + m = api.render(str(tmp_path / "photutils"))[0][0] + assert "--offline" in m.meta["build"]["script"] -def test_pypi_pin_numpy(testing_config): +def test_pypi_pin_numpy(tmp_path: Path, testing_config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. - api.skeletonize(packages='msumastro', repo='pypi', version='0.9.0', - config=testing_config, - pin_numpy=True) - with open(os.path.join('msumastro', 'meta.yaml')) as f: - assert f.read().count('numpy x.x') == 2 + api.skeletonize( + packages="msumastro", + repo="pypi", + version="0.9.0", + config=testing_config, + pin_numpy=True, + output_dir=tmp_path, + ) + assert (tmp_path / "msumastro" / "meta.yaml").read_text().count("numpy x.x") == 2 with pytest.raises(DependencyNeedsBuildingError): api.build('msumastro') -def test_pypi_version_sorting(testing_config): +def test_pypi_version_sorting(tmp_path: Path, testing_config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. 
- api.skeletonize(packages='impyla', repo='pypi', config=testing_config) - m = api.render('impyla')[0][0] + api.skeletonize( + packages="impyla", repo="pypi", config=testing_config, output_dir=tmp_path + ) + m = api.render(str(tmp_path / "impyla"))[0][0] assert parse_version(m.version()) >= parse_version("0.13.8") @@ -348,74 +343,84 @@ def test_list_skeletons(): assert set(skeletons) == {'pypi', 'cran', 'cpan', 'luarocks', 'rpm'} -def test_pypi_with_entry_points(): - api.skeletonize('planemo', repo='pypi', python_version="3.7") - assert os.path.isdir('planemo') +def test_pypi_with_entry_points(tmp_path: Path): + api.skeletonize("planemo", repo="pypi", python_version="3.7", output_dir=tmp_path) + assert (tmp_path / "planemo").is_dir() -def test_pypi_with_version_arg(): +def test_pypi_with_version_arg(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1442 - api.skeletonize('PrettyTable', 'pypi', version='0.7.2') - m = api.render('prettytable')[0][0] + api.skeletonize("PrettyTable", "pypi", version="0.7.2", output_dir=tmp_path) + m = api.render(str(tmp_path / "prettytable"))[0][0] assert parse_version(m.version()) == parse_version("0.7.2") @pytest.mark.slow -def test_pypi_with_extra_specs(testing_config): +def test_pypi_with_extra_specs(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/1697 # For mpi4py: testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') extra_specs = ['cython', 'mpi4py'] if not on_win: - extra_specs.append('nomkl') - api.skeletonize('bigfile', 'pypi', extra_specs=extra_specs, - version='0.1.24', python="3.6", config=testing_config) - m = api.render('bigfile')[0][0] + extra_specs.append("nomkl") + api.skeletonize( + "bigfile", + "pypi", + extra_specs=extra_specs, + version="0.1.24", + python="3.6", + config=testing_config, + output_dir=tmp_path, + ) + m = api.render(str(tmp_path / "bigfile"))[0][0] assert parse_version(m.version()) == parse_version("0.1.24") assert any('cython' in req for req in m.meta['requirements']['host']) assert any('mpi4py' in req for req in m.meta['requirements']['host']) @pytest.mark.slow -def test_pypi_with_version_inconsistency(testing_config): +def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/189 # For mpi4py: extra_specs = ['mpi4py'] if not on_win: - extra_specs.append('nomkl') - testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') - api.skeletonize('mpi4py_test', 'pypi', extra_specs=extra_specs, - version='0.0.10', python="3.6", config=testing_config) - m = api.render('mpi4py_test')[0][0] + extra_specs.append("nomkl") + testing_config.channel_urls.append("https://repo.anaconda.com/pkgs/free") + api.skeletonize( + "mpi4py_test", + "pypi", + extra_specs=extra_specs, + version="0.0.10", + python="3.6", + config=testing_config, + output_dir=tmp_path, + ) + m = api.render(str(tmp_path / "mpi4py_test"))[0][0] assert parse_version(m.version()) == parse_version("0.0.10") -def test_pypi_with_basic_environment_markers(): +def test_pypi_with_basic_environment_markers(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1974 - api.skeletonize('coconut', 'pypi', version='1.2.2') - m = api.render('coconut')[0][0] + api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) + m = api.render(tmp_path / "coconut")[0][0] build_reqs = str(m.meta['requirements']['host']) run_reqs = 
str(m.meta['requirements']['run']) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs - if sys.version_info >= (2, 7): - assert "pygments" in build_reqs - assert "pygments" in run_reqs - else: - assert "pygments" not in build_reqs - assert "pygments" not in run_reqs + assert "pygments" in build_reqs + assert "pygments" in run_reqs -def test_setuptools_test_requirements(): - api.skeletonize(packages='hdf5storage', repo='pypi') - m = api.render('hdf5storage')[0][0] - assert m.meta['test']['requires'] == ['nose >=1.0'] +def test_setuptools_test_requirements(tmp_path: Path): + api.skeletonize(packages="hdf5storage", repo="pypi", output_dir=tmp_path) + m = api.render(str(tmp_path / "hdf5storage"))[0][0] + assert m.meta["test"]["requires"] == ["nose >=1.0"] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") -def test_pypi_section_order_preserved(): +def test_pypi_section_order_preserved(tmp_path: Path): """ Test whether sections have been written in the correct order. """ @@ -424,16 +429,18 @@ def test_pypi_section_order_preserved(): REQUIREMENTS_ORDER, PYPI_META_STATIC) - api.skeletonize(packages='sympy', repo='pypi') + api.skeletonize(packages="sympy", repo="pypi", output_dir=tmp_path) # Since we want to check the order of items in the recipe (not whether # the metadata values themselves are sensible), read the file as (ordered) # yaml, and check the order. - with open('sympy/meta.yaml') as file: - lines = [ln for ln in file.readlines() if not ln.startswith("{%")] + lines = [ + line + for line in (tmp_path / "sympy" / "meta.yaml").read_text().splitlines() + if not line.startswith("{%") + ] # The loader below preserves the order of entries... - recipe = ruamel_yaml.load('\n'.join(lines), - Loader=ruamel_yaml.RoundTripLoader) + recipe = ruamel.yaml.load("\n".join(lines), Loader=ruamel.yaml.RoundTripLoader) major_sections = list(recipe.keys()) # Blank fields are omitted when skeletonizing, so prune any missing ones @@ -448,19 +455,31 @@ def test_pypi_section_order_preserved(): @pytest.mark.slow @pytest.mark.flaky(rerun=5, reruns_delay=2) -@pytest.mark.skipif(not external.find_executable("shellcheck"), reason="requires shellcheck >=0.7.0") +@pytest.mark.skipif(on_win, reason="shellcheck is not available on Windows") @pytest.mark.parametrize( - "package, repo", [("r-rmarkdown", "cran"), ("Perl::Lint", "cpan"), ("screen", "rpm")] + "package, repo", + [ + ("r-rmarkdown", "cran"), + ("Perl::Lint", "cpan"), + ("screen", "rpm"), + ], ) -def test_build_sh_shellcheck_clean(package, repo, testing_workdir, testing_config): - api.skeletonize(packages=package, repo=repo, output_dir=testing_workdir, config=testing_config) - - matches = [] - for root, dirnames, filenames in os.walk(testing_workdir): - for filename in fnmatch.filter(filenames, "build.sh"): - matches.append(os.path.join(root, filename)) +def test_build_sh_shellcheck_clean( + package: str, repo: str, tmp_path: Path, testing_config +): + api.skeletonize( + packages=package, + repo=repo, + output_dir=tmp_path, + config=testing_config, + ) - build_sh = matches[0] + build_sh = next( + Path(root, filename) + for root, _, filenames in os.walk(tmp_path) + for filename in filenames + if filename == "build.sh" + ) cmd = [ "shellcheck", "--enable=all", @@ -471,7 +490,6 @@ def test_build_sh_shellcheck_clean(package, repo, testing_workdir, testing_confi ] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - sc_stdout, _ = 
p.communicate() - findings = sc_stdout.decode(sys.stdout.encoding).replace("\r\n", "\n").splitlines() - assert findings == [] + stdout, _ = p.communicate() + assert not stdout assert p.returncode == 0 From 96f10effd1a96a86eb124838b36f48ebc9dd9ba3 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 27 Jan 2023 17:58:07 +0100 Subject: [PATCH 057/366] `test_cli.py`: split into subcommand specific test files (#4749) * Split test_cli into subcommand specific tests * Change sys.argv mutation into monkeypatch --- tests/cli/test_main_build.py | 484 +++++++++++++++++++ tests/cli/test_main_convert.py | 37 ++ tests/cli/test_main_debug.py | 45 +- tests/cli/test_main_develop.py | 29 ++ tests/cli/test_main_index.py | 11 + tests/cli/test_main_inspect.py | 83 ++++ tests/cli/test_main_metapackage.py | 102 ++++ tests/cli/test_main_render.py | 167 +++++++ tests/cli/test_main_skeleton.py | 54 +++ tests/test_cli.py | 751 ----------------------------- 10 files changed, 984 insertions(+), 779 deletions(-) create mode 100644 tests/cli/test_main_build.py create mode 100644 tests/cli/test_main_convert.py create mode 100644 tests/cli/test_main_develop.py create mode 100644 tests/cli/test_main_index.py create mode 100644 tests/cli/test_main_inspect.py create mode 100644 tests/cli/test_main_metapackage.py create mode 100644 tests/cli/test_main_render.py create mode 100644 tests/cli/test_main_skeleton.py delete mode 100644 tests/test_cli.py diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py new file mode 100644 index 0000000000..8290362849 --- /dev/null +++ b/tests/cli/test_main_build.py @@ -0,0 +1,484 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import re +import sys + +import pytest + +from conda_build.conda_interface import cc_conda_build, context, reset_context + +import conda_build +from conda_build import api +from conda_build.cli import main_build, main_render +from conda_build.config import Config, zstd_compression_level_default +from conda_build.utils import on_win, get_build_folders, package_has_file +from conda_build.conda_interface import TemporaryDirectory +from conda_build.exceptions import DependencyNeedsBuildingError +from ..utils import metadata_dir + + +def _reset_config(search_path=None): + reset_context(search_path) + cc_conda_build.clear() + cc_conda_build.update( + context.conda_build if hasattr(context, "conda_build") else {} + ) + + +@pytest.mark.sanity +def test_build(): + args = [ + "--no-anaconda-upload", + os.path.join(metadata_dir, "empty_sections"), + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + + +@pytest.mark.serial +def test_build_add_channel(): + """This recipe requires the conda_build_test_requirement package, which is + only on the conda_build_test channel. This verifies that the -c argument + works.""" + + args = [ + "-c", + "conda_build_test", + "--no-activate", + "--no-anaconda-upload", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + main_build.execute(args) + + +def test_build_without_channel_fails(testing_workdir): + # remove the conda forge channel from the arguments and make sure that we fail. If we don't, + # we probably have channels in condarc, and this is not a good test. 
+ args = [ + "--no-anaconda-upload", + "--no-activate", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + with pytest.raises(DependencyNeedsBuildingError): + main_build.execute(args) + + +def test_no_filename_hash(testing_workdir, testing_metadata, capfd): + api.output_yaml(testing_metadata, "meta.yaml") + args = ["--output", testing_workdir, "--old-build-string"] + main_render.execute(args) + output, error = capfd.readouterr() + assert not re.search("h[0-9a-f]{%d}" % testing_metadata.config.hash_length, output) + + args = [ + "--no-anaconda-upload", + "--no-activate", + testing_workdir, + "--old-build-string", + ] + main_build.execute(args) + output, error = capfd.readouterr() + assert not re.search( + "test_no_filename_hash.*h[0-9a-f]{%d}" % testing_metadata.config.hash_length, + output, + ) + assert not re.search( + "test_no_filename_hash.*h[0-9a-f]{%d}" % testing_metadata.config.hash_length, + error, + ) + + +def test_build_output_build_path( + testing_workdir, testing_metadata, testing_config, capfd +): + api.output_yaml(testing_metadata, "meta.yaml") + testing_config.verbose = False + testing_config.debug = False + args = ["--output", testing_workdir] + main_build.execute(args) + test_path = os.path.join( + sys.prefix, + "conda-bld", + testing_config.host_subdir, + "test_build_output_build_path-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + assert test_path == output.rstrip(), error + assert error == "" + + +def test_build_output_build_path_multiple_recipes( + testing_workdir, testing_metadata, testing_config, capfd +): + api.output_yaml(testing_metadata, "meta.yaml") + testing_config.verbose = False + skip_recipe = os.path.join(metadata_dir, "build_skip") + args = ["--output", testing_workdir, skip_recipe] + + main_build.execute(args) + + test_path = lambda pkg: os.path.join( + sys.prefix, "conda-bld", testing_config.host_subdir, pkg + ) + test_paths = [ + test_path("test_build_output_build_path_multiple_recipes-1.0-1.tar.bz2"), + ] + + output, error = capfd.readouterr() + # assert error == "" + assert output.rstrip().splitlines() == test_paths, error + + +def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config): + args = [ + os.path.join(metadata_dir, "has_prefix_files"), + "--croot", + testing_config.croot, + "--no-anaconda-upload", + ] + outputs = main_build.execute(args) + data = package_has_file(outputs[0], "binary-has-prefix", refresh_mode="forced") + assert data + if hasattr(data, "decode"): + data = data.decode("UTF-8") + assert "conda-build-test-has-prefix-files_1" in data + + +@pytest.mark.sanity +@pytest.mark.skipif(on_win, reason="prefix is always short on win.") +def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): + recipe_path = os.path.join(metadata_dir, "_test_long_test_prefix") + args = [recipe_path, "--no-anaconda-upload"] + main_build.execute(args) + + args.append("--no-long-test-prefix") + with pytest.raises(SystemExit): + main_build.execute(args) + + +def test_build_no_build_id(testing_workdir, testing_config): + args = [ + os.path.join(metadata_dir, "has_prefix_files"), + "--no-build-id", + "--croot", + testing_config.croot, + "--no-activate", + "--no-anaconda-upload", + ] + outputs = main_build.execute(args) + data = package_has_file(outputs[0], "binary-has-prefix", refresh_mode="forced") + assert data + if hasattr(data, "decode"): + data = data.decode("UTF-8") + assert "has_prefix_files_1" not in data + + +def test_build_multiple_recipes(testing_metadata, testing_workdir, 
testing_config): + """Test that building two recipes in one CLI call separates the build environment for each""" + os.makedirs("recipe1") + os.makedirs("recipe2") + api.output_yaml(testing_metadata, "recipe1/meta.yaml") + with open("recipe1/run_test.py", "w") as f: + f.write( + "import os; assert 'test_build_multiple_recipes' in os.getenv('PREFIX')" + ) + testing_metadata.meta["package"]["name"] = "package2" + api.output_yaml(testing_metadata, "recipe2/meta.yaml") + with open("recipe2/run_test.py", "w") as f: + f.write("import os; assert 'package2' in os.getenv('PREFIX')") + args = ["--no-anaconda-upload", "recipe1", "recipe2"] + main_build.execute(args) + + +def test_build_output_folder(testing_workdir, testing_metadata, capfd): + api.output_yaml(testing_metadata, "meta.yaml") + with TemporaryDirectory() as tmp: + out = os.path.join(tmp, "out") + args = [ + testing_workdir, + "--no-build-id", + "--croot", + tmp, + "--no-activate", + "--no-anaconda-upload", + "--output-folder", + out, + ] + output = main_build.execute(args)[0] + assert os.path.isfile( + os.path.join( + out, testing_metadata.config.host_subdir, os.path.basename(output) + ) + ) + + +def test_build_source(testing_workdir): + with TemporaryDirectory() as tmp: + args = [ + os.path.join(metadata_dir, "_pyyaml_find_header"), + "--source", + "--no-build-id", + "--croot", + tmp, + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + assert os.path.isfile(os.path.join(tmp, "work", "setup.py")) + + +@pytest.mark.serial +def test_purge(testing_workdir, testing_metadata): + """ + purge clears out build folders - things like some_pkg_12048309850135 + + It does not clear out build packages from folders like osx-64 or linux-64. + """ + api.output_yaml(testing_metadata, "meta.yaml") + outputs = api.build(testing_workdir, notest=True) + args = ["purge"] + main_build.execute(args) + dirs = get_build_folders(testing_metadata.config.croot) + assert not dirs + # make sure artifacts are kept - only temporary folders get nuked + assert all(os.path.isfile(fn) for fn in outputs) + + +@pytest.mark.serial +def test_purge_all(testing_workdir, testing_metadata): + """ + purge-all clears out build folders as well as build packages in the osx-64 folders and such + """ + api.output_yaml(testing_metadata, "meta.yaml") + with TemporaryDirectory() as tmpdir: + testing_metadata.config.croot = tmpdir + outputs = api.build( + testing_workdir, config=testing_metadata.config, notest=True + ) + args = ["purge-all", "--croot", tmpdir] + main_build.execute(args) + assert not get_build_folders(testing_metadata.config.croot) + assert not any(os.path.isfile(fn) for fn in outputs) + + +@pytest.mark.serial +def test_no_force_upload(mocker, testing_workdir, testing_metadata, request): + with open(os.path.join(testing_workdir, ".condarc"), "w") as f: + f.write("anaconda_upload: True\n") + f.write("conda_build:\n") + f.write(" force_upload: False\n") + del testing_metadata.meta["test"] + api.output_yaml(testing_metadata, "meta.yaml") + args = ["--no-force-upload", testing_workdir] + call = mocker.patch.object(conda_build.build.subprocess, "call") + request.addfinalizer(_reset_config) + _reset_config([os.path.join(testing_workdir, ".condarc")]) + main_build.execute(args) + pkg = api.get_output_file_path(testing_metadata) + assert call.called_once_with(["anaconda", "upload", pkg]) + args = [testing_workdir] + with open(os.path.join(testing_workdir, ".condarc"), "w") as f: + f.write("anaconda_upload: True\n") + main_build.execute(args) + assert 
call.called_once_with(["anaconda", "upload", "--force", pkg]) + + +@pytest.mark.slow +def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): + monkeypatch.setenv("CONDA_PY", "36") + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + api.output_yaml(testing_metadata, "meta.yaml") + outputs = api.build(testing_workdir, notest=True) + assert any("py36" in output for output in outputs) + + +def test_build_skip_existing(testing_workdir, capfd, mocker): + # build the recipe first + empty_sections = os.path.join(metadata_dir, "empty_sections") + args = ["--no-anaconda-upload", empty_sections] + main_build.execute(args) + args.insert(0, "--skip-existing") + import conda_build.source + + provide = mocker.patch.object(conda_build.source, "provide") + main_build.execute(args) + provide.assert_not_called() + output, error = capfd.readouterr() + assert "are already built" in output or "are already built" in error + + +def test_build_skip_existing_croot(testing_workdir, capfd): + # build the recipe first + empty_sections = os.path.join(metadata_dir, "empty_sections") + args = ["--no-anaconda-upload", "--croot", testing_workdir, empty_sections] + main_build.execute(args) + args.insert(0, "--skip-existing") + main_build.execute(args) + output, error = capfd.readouterr() + assert "are already built" in output + + +@pytest.mark.sanity +def test_package_test(testing_workdir, testing_metadata): + """Test calling conda build -t - rather than """ + api.output_yaml(testing_metadata, "recipe/meta.yaml") + output = api.build(testing_workdir, config=testing_metadata.config, notest=True)[0] + args = ["-t", output] + main_build.execute(args) + + +def test_activate_scripts_not_included(testing_workdir): + recipe = os.path.join(metadata_dir, "_activate_scripts_not_included") + args = ["--no-anaconda-upload", "--croot", testing_workdir, recipe] + main_build.execute(args) + out = api.get_output_file_paths(recipe, croot=testing_workdir)[0] + for f in ( + "bin/activate", + "bin/deactivate", + "bin/conda", + "Scripts/activate.bat", + "Scripts/deactivate.bat", + "Scripts/conda.bat", + "Scripts/activate.exe", + "Scripts/deactivate.exe", + "Scripts/conda.exe", + "Scripts/activate", + "Scripts/deactivate", + "Scripts/conda", + ): + assert not package_has_file(out, f) + + +def test_relative_path_croot(): + # this tries to build a package while specifying the croot with a relative path: + # conda-build --no-test --croot ./relative/path + + empty_sections = os.path.join(metadata_dir, "empty_with_build_script") + croot_rel = os.path.join(".", "relative", "path") + args = ["--no-anaconda-upload", "--croot", croot_rel, empty_sections] + outputfile = main_build.execute(args) + + assert len(outputfile) == 1 + assert os.path.isfile(outputfile[0]) + + +def test_relative_path_test_artifact(): + # this test builds a package into (cwd)/relative/path and then calls: + # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 + + empty_sections = os.path.join(metadata_dir, "empty_with_build_script") + croot_rel = os.path.join(".", "relative", "path") + croot_abs = os.path.abspath(os.path.normpath(croot_rel)) + + # build the package + args = ["--no-anaconda-upload", "--no-test", "--croot", croot_abs, empty_sections] + output_file_abs = main_build.execute(args) + assert len(output_file_abs) == 1 + + output_file_rel = os.path.join( + croot_rel, os.path.relpath(output_file_abs[0], croot_abs) + ) + + # run the test stage with relative path + args = ["--no-anaconda-upload", "--test", 
output_file_rel] + main_build.execute(args) + + +def test_relative_path_test_recipe(): + # this test builds a package into (cwd)/relative/path and then calls: + # conda-build --test --croot ./relative/path/ /abs/path/to/recipe + + empty_sections = os.path.join(metadata_dir, "empty_with_build_script") + croot_rel = os.path.join(".", "relative", "path") + croot_abs = os.path.abspath(os.path.normpath(croot_rel)) + + # build the package + args = ["--no-anaconda-upload", "--no-test", "--croot", croot_abs, empty_sections] + output_file_abs = main_build.execute(args) + assert len(output_file_abs) == 1 + + # run the test stage with relative croot + args = ["--no-anaconda-upload", "--test", "--croot", croot_rel, empty_sections] + main_build.execute(args) + + +def test_test_extra_dep(testing_metadata): + testing_metadata.meta["test"]["imports"] = ["imagesize"] + api.output_yaml(testing_metadata, "meta.yaml") + output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] + + # tests version constraints. CLI would quote this - "click <6.7" + args = [output, "-t", "--extra-deps", "imagesize <1.0"] + # extra_deps will add it in + main_build.execute(args) + + # missing click dep will fail tests + with pytest.raises(SystemExit): + args = [output, "-t"] + # extra_deps will add it in + main_build.execute(args) + + +@pytest.mark.parametrize( + "additional_args, is_long_test_prefix", + [([], True), (["--long-test-prefix"], True), (["--no-long-test-prefix"], False)], +) +def test_long_test_prefix(additional_args, is_long_test_prefix): + args = ["non_existing_recipe"] + additional_args + parser, args = main_build.parse_args(args) + config = Config(**args.__dict__) + assert config.long_test_prefix is is_long_test_prefix + + +@pytest.mark.serial +@pytest.mark.parametrize( + "zstd_level_condarc, zstd_level_cli", + [ + (None, None), + (1, None), + (1, 2), + ], +) +def test_zstd_compression_level( + testing_workdir, request, zstd_level_condarc, zstd_level_cli +): + assert zstd_compression_level_default not in {zstd_level_condarc, zstd_level_cli} + if zstd_level_condarc: + with open(os.path.join(testing_workdir, ".condarc"), "w") as f: + print( + "conda_build:", + f" zstd_compression_level: {zstd_level_condarc}", + sep="\n", + file=f, + ) + request.addfinalizer(_reset_config) + _reset_config([os.path.join(testing_workdir, ".condarc")]) + args = ["non_existing_recipe"] + if zstd_level_cli: + args.append(f"--zstd-compression-level={zstd_level_cli}") + parser, args = main_build.parse_args(args) + config = Config(**args.__dict__) + if zstd_level_cli: + assert config.zstd_compression_level == zstd_level_cli + elif zstd_level_condarc: + assert config.zstd_compression_level == zstd_level_condarc + else: + assert config.zstd_compression_level == zstd_compression_level_default + + +def test_user_warning(tmpdir, recwarn): + dir_recipe_path = tmpdir.mkdir("recipe-path") + recipe = dir_recipe_path.join("meta.yaml") + recipe.write("") + + main_build.parse_args([str(recipe)]) + assert ( + f"RECIPE_PATH received is a file ({recipe}).\n" + "It should be a path to a folder.\n" + "Forcing conda-build to use the recipe file." 
+ ) == str(recwarn.pop(UserWarning).message) + + main_build.parse_args([str(dir_recipe_path)]) + assert not recwarn.list diff --git a/tests/cli/test_main_convert.py b/tests/cli/test_main_convert.py new file mode 100644 index 0000000000..0be658b9d3 --- /dev/null +++ b/tests/cli/test_main_convert.py @@ -0,0 +1,37 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os + +import pytest + +from conda_build.cli import main_convert +from conda_build.conda_interface import download +from conda_build.tarcheck import TarCheck +from conda_build.utils import on_win + + +@pytest.mark.xfail( + on_win, + reason="This is a flaky test that doesn't seem to be working well on Windows.", +) +def test_convert(testing_workdir, testing_config): + # download a sample py2.7 package + f = "https://repo.anaconda.com/pkgs/free/win-64/affine-2.0.0-py27_0.tar.bz2" + pkg_name = "affine-2.0.0-py27_0.tar.bz2" + download(f, pkg_name) + # convert it to all platforms + args = ["-o", "converted", "--platform", "all", pkg_name] + main_convert.execute(args) + platforms = ["osx-64", "win-32", "linux-64", "linux-32"] + for platform in platforms: + dirname = os.path.join("converted", platform) + if platform != "win-64": + assert os.path.isdir(dirname) + assert pkg_name in os.listdir(dirname) + testing_config.host_subdir = platform + with TarCheck( + os.path.join(dirname, pkg_name), config=testing_config + ) as tar: + tar.correct_subdir() + else: + assert not os.path.isdir(dirname) diff --git a/tests/cli/test_main_debug.py b/tests/cli/test_main_debug.py index 991000279b..056b2723b6 100644 --- a/tests/cli/test_main_debug.py +++ b/tests/cli/test_main_debug.py @@ -1,40 +1,30 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import io -import os.path +from pathlib import Path import sys from unittest import mock import pytest -from pytest import CaptureFixture +from pytest import CaptureFixture, MonkeyPatch from conda_build.cli import main_debug as debug, validators as valid -@pytest.fixture(scope='module') -def main_debug_help() -> str: - """Read what the current help message should be and return it as a fixture""" - sys.argv = ['conda-debug'] - parser = debug.get_parser() +def test_main_debug_help_message(capsys: CaptureFixture, monkeypatch: MonkeyPatch): + monkeypatch.setattr(sys, "argv", ["conda-debug", "-h"]) + help_blurb = debug.get_parser().format_help() - with io.StringIO() as fp: - parser.print_usage(file=fp) - fp.seek(0) - yield fp.read() - - sys.argv = [] - - -def test_main_debug_help_message(capsys: CaptureFixture, main_debug_help: str): with pytest.raises(SystemExit): debug.main() captured = capsys.readouterr() - assert main_debug_help in captured.err + assert help_blurb in captured.out -def test_main_debug_file_does_not_exist(capsys: CaptureFixture): - sys.argv = ['conda-debug', 'file-does-not-exist'] +def test_main_debug_file_does_not_exist( + capsys: CaptureFixture, monkeypatch: MonkeyPatch +): + monkeypatch.setattr(sys, "argv", ["conda-debug", "file-does-not-exist"]) with pytest.raises(SystemExit): debug.main() @@ -43,21 +33,20 @@ def test_main_debug_file_does_not_exist(capsys: CaptureFixture): assert valid.CONDA_PKG_OR_RECIPE_ERROR_MESSAGE in captured.err -def test_main_debug_happy_path(tmpdir, capsys: CaptureFixture): +def test_main_debug_happy_path( + tmp_path: Path, capsys: CaptureFixture, monkeypatch: MonkeyPatch +): """ Happy path through the main_debug.main function. 
""" - with mock.patch("conda_build.api.debug") as mock_debug: - fake_pkg_file = os.path.join(tmpdir, "fake-conda-pkg.conda") - fp = open(fake_pkg_file, "w") - fp.write("text") - fp.close() - sys.argv = ['conda-debug', fake_pkg_file] + fake = tmp_path / "fake-conda-pkg.conda" + fake.touch() + monkeypatch.setattr(sys, "argv", ["conda-debug", str(fake)]) + with mock.patch("conda_build.api.debug") as mock_debug: debug.main() captured = capsys.readouterr() - assert captured.err == '' assert len(mock_debug.mock_calls) == 2 diff --git a/tests/cli/test_main_develop.py b/tests/cli/test_main_develop.py new file mode 100644 index 0000000000..ede3758cfb --- /dev/null +++ b/tests/cli/test_main_develop.py @@ -0,0 +1,29 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import sys + +from conda_build.cli import main_develop +from conda_build.conda_interface import download +from conda_build.utils import get_site_packages, tar_xf + + +def test_develop(testing_env): + f = "https://pypi.io/packages/source/c/conda_version_test/conda_version_test-0.1.0-1.tar.gz" + download(f, "conda_version_test.tar.gz") + tar_xf("conda_version_test.tar.gz", testing_env) + extract_folder = "conda_version_test-0.1.0-1" + cwd = os.getcwd() + args = ["-p", testing_env, extract_folder] + main_develop.execute(args) + py_ver = ".".join((str(sys.version_info.major), str(sys.version_info.minor))) + with open( + os.path.join(get_site_packages(testing_env, py_ver), "conda.pth") + ) as f_pth: + assert cwd in f_pth.read() + args = ["--uninstall", "-p", testing_env, extract_folder] + main_develop.execute(args) + with open( + os.path.join(get_site_packages(testing_env, py_ver), "conda.pth") + ) as f_pth: + assert cwd not in f_pth.read() diff --git a/tests/cli/test_main_index.py b/tests/cli/test_main_index.py new file mode 100644 index 0000000000..75b1926f78 --- /dev/null +++ b/tests/cli/test_main_index.py @@ -0,0 +1,11 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os.path + +from conda_build.cli import main_index + + +def testing_index(testing_workdir): + args = ["."] + main_index.execute(args) + assert os.path.isfile("noarch/repodata.json") diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py new file mode 100644 index 0000000000..9e25986609 --- /dev/null +++ b/tests/cli/test_main_inspect.py @@ -0,0 +1,83 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import re +import sys +import yaml + +import pytest + +from conda_build import api +from conda_build.cli import main_inspect +from conda_build.utils import on_win +from ..utils import metadata_dir + + +def test_inspect_installable(testing_workdir): + args = ["channels", "--test-installable", "conda-team"] + main_inspect.execute(args) + + +def test_inspect_linkages(testing_workdir, capfd): + # get a package that has known object output + args = ["linkages", "python"] + if sys.platform == "win32": + with pytest.raises(SystemExit) as exc: + main_inspect.execute(args) + assert "conda inspect linkages is only implemented in Linux and OS X" in exc + else: + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "libncursesw" in output + + +def test_inspect_objects(testing_workdir, capfd): + # get a package that has known object output + args = ["objects", "python"] + if sys.platform != "darwin": + with pytest.raises(SystemExit) as exc: + main_inspect.execute(args) + assert "conda inspect objects is only implemented in OS X" in exc 
+ else: + main_inspect.execute(args) + output, error = capfd.readouterr() + assert re.search("rpath:.*@loader_path", output) + + +@pytest.mark.skipif(on_win, reason="Windows prefix length doesn't matter (yet?)") +def test_inspect_prefix_length(testing_workdir, capfd): + from conda_build import api + + # build our own known-length package here + test_base = os.path.expanduser("~/cbtmp") + config = api.Config(croot=test_base, anaconda_upload=False, verbose=True) + recipe_path = os.path.join(metadata_dir, "has_prefix_files") + config.prefix_length = 80 + outputs = api.build(recipe_path, config=config, notest=True) + + args = ["prefix-lengths"] + outputs + with pytest.raises(SystemExit): + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "Packages with binary prefixes shorter than" in output + assert all(fn in output for fn in outputs) + + config.prefix_length = 255 + # reset the build id so that a new one is computed + config._build_id = "" + api.build(recipe_path, config=config, notest=True) + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "No packages found with binary prefixes shorter" in output + + +def test_inspect_hash_input(testing_metadata, testing_workdir, capfd): + testing_metadata.meta["requirements"]["build"] = ["zlib"] + api.output_yaml(testing_metadata, "meta.yaml") + output = api.build(testing_workdir, notest=True)[0] + with open(os.path.join(testing_workdir, "conda_build_config.yaml"), "w") as f: + yaml.dump({"zlib": ["1.2.11"]}, f) + args = ["hash-inputs", output] + main_inspect.execute(args) + output, error = capfd.readouterr() + assert "zlib" in output diff --git a/tests/cli/test_main_metapackage.py b/tests/cli/test_main_metapackage.py new file mode 100644 index 0000000000..e56d2b7452 --- /dev/null +++ b/tests/cli/test_main_metapackage.py @@ -0,0 +1,102 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from glob import glob +import json +import os +import sys + +from conda_build.cli import main_metapackage +from conda_build.utils import package_has_file + + +def test_metapackage(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = ["metapackage_test", "1.0", "-d", "bzip2", "--no-anaconda-upload"] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + sys.prefix, + "conda-bld", + testing_config.host_subdir, + "metapackage_test-1.0-0.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + + +def test_metapackage_build_number(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = [ + "metapackage_test_build_number", + "1.0", + "-d", + "bzip2", + "--build-number", + "1", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + sys.prefix, + "conda-bld", + testing_config.host_subdir, + "metapackage_test_build_number-1.0-1.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + + +def test_metapackage_build_string(testing_config, testing_workdir): + """the metapackage command creates a package with runtime dependencies specified on the CLI""" + args = [ + "metapackage_test_build_string", + "1.0", + "-d", + "bzip2", + "--build-string", + "frank", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + test_path = glob( + os.path.join( + sys.prefix, + "conda-bld", + testing_config.host_subdir, + "metapackage_test_build_string-1.0-frank*.tar.bz2", + ) + )[0] + 
assert os.path.isfile(test_path) + + +def test_metapackage_metadata(testing_config, testing_workdir): + args = [ + "metapackage_testing_metadata", + "1.0", + "-d", + "bzip2", + "--home", + "http://abc.com", + "--summary", + "wee", + "--license", + "BSD", + "--no-anaconda-upload", + ] + main_metapackage.execute(args) + + test_path = glob( + os.path.join( + sys.prefix, + "conda-bld", + testing_config.host_subdir, + "metapackage_testing_metadata-1.0-0.tar.bz2", + ) + )[0] + assert os.path.isfile(test_path) + info = json.loads(package_has_file(test_path, "info/index.json")) + assert info["license"] == "BSD" + info = json.loads(package_has_file(test_path, "info/about.json")) + assert info["home"] == "http://abc.com" + assert info["summary"] == "wee" diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py new file mode 100644 index 0000000000..33e0345c6d --- /dev/null +++ b/tests/cli/test_main_render.py @@ -0,0 +1,167 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os +import sys +import yaml + +import pytest + +from conda_build import api +from conda_build.cli import main_render +from conda_build.conda_interface import TemporaryDirectory +from ..utils import metadata_dir + + +def test_render_add_channel(): + """This recipe requires the conda_build_test_requirement package, which is + only on the conda_build_test channel. This verifies that the -c argument + works for rendering.""" + with TemporaryDirectory() as tmpdir: + rendered_filename = os.path.join(tmpdir, "out.yaml") + args = [ + "-c", + "conda_build_test", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + rendered_filename, + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta["requirements"]["build"] + if "conda_build_test_requirement" in pkg + ][0] + required_package_details = required_package_string.split(" ") + assert len(required_package_details) > 1, ( + "Expected version number on successful " + "rendering, but got only {}".format(required_package_details) + ) + assert ( + required_package_details[1] == "1.0" + ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" + + +def test_render_without_channel_fails(): + # do make extra channel available, so the required package should not be found + with TemporaryDirectory() as tmpdir: + rendered_filename = os.path.join(tmpdir, "out.yaml") + args = [ + "--override-channels", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + rendered_filename, + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta.get("requirements", {}).get("build", []) + if "conda_build_test_requirement" in pkg + ][0] + assert ( + required_package_string == "conda_build_test_requirement" + ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" + + +def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): + api.output_yaml(testing_metadata, "meta.yaml") + args = ["--output", testing_workdir] + main_render.execute(args) + test_path = os.path.join( + sys.prefix, + "conda-bld", + testing_metadata.config.host_subdir, + "test_render_output_build_path-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + 
assert output.rstrip() == test_path, error + assert error == "" + + +def test_render_output_build_path_and_file( + testing_workdir, testing_metadata, capfd, caplog +): + api.output_yaml(testing_metadata, "meta.yaml") + rendered_filename = "out.yaml" + args = ["--output", "--file", rendered_filename, testing_workdir] + main_render.execute(args) + test_path = os.path.join( + sys.prefix, + "conda-bld", + testing_metadata.config.host_subdir, + "test_render_output_build_path_and_file-1.0-1.tar.bz2", + ) + output, error = capfd.readouterr() + assert output.rstrip() == test_path, error + assert error == "" + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + assert rendered_meta["package"]["name"] == "test_render_output_build_path_and_file" + + +def test_render_output_build_path_set_python(testing_workdir, testing_metadata, capfd): + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + api.output_yaml(testing_metadata, "meta.yaml") + # build the other major thing, whatever it is + if sys.version_info.major == 3: + version = "2.7" + else: + version = "3.5" + + api.output_yaml(testing_metadata, "meta.yaml") + metadata = api.render(testing_workdir, python=version)[0][0] + + args = ["--output", testing_workdir, "--python", version] + main_render.execute(args) + + _hash = metadata.hash_dependencies() + test_path = ( + "test_render_output_build_path_set_python-1.0-py{}{}{}_1.tar.bz2".format( + version.split(".")[0], version.split(".")[1], _hash + ) + ) + output, error = capfd.readouterr() + assert os.path.basename(output.rstrip()) == test_path, error + + +@pytest.mark.slow +def test_render_with_python_arg_reduces_subspace(capfd): + recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") + # build the package + args = [recipe, "--python=2.7", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 2 + + args = [recipe, "--python=3.9", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 1 + + # should raise an error, because python 3.6 is not in the matrix, so we don't know which vc + # to associate with + args = [recipe, "--python=3.6", "--output"] + with pytest.raises(ValueError): + main_render.execute(args) + + +def test_render_with_python_arg_CLI_reduces_subspace(capfd): + recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") + # build the package + args = [recipe, "--variants", "{python: [2.7, 3.9]}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 3 + + args = [recipe, "--variants", "{python: 2.7}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 2 + + args = [recipe, "--variants", "{python: 3.9}", "--output"] + main_render.execute(args) + out, err = capfd.readouterr() + assert len(out.splitlines()) == 1 diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py new file mode 100644 index 0000000000..807e42b763 --- /dev/null +++ b/tests/cli/test_main_skeleton.py @@ -0,0 +1,54 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import os + +import pytest + +from conda_build import api + +from conda_build.cli import main_build, main_skeleton + + +@pytest.mark.sanity +def test_skeleton_pypi(testing_workdir, testing_config): + args = ["pypi", "peppercorn"] + main_skeleton.execute(args) + assert 
os.path.isdir("peppercorn") + + # ensure that recipe generated is buildable + main_build.execute(("peppercorn",)) + + +@pytest.mark.sanity +def test_skeleton_pypi_compatible_versions(testing_workdir, testing_config): + args = ["pypi", "openshift"] + main_skeleton.execute(args) + assert os.path.isdir("openshift") + + +@pytest.mark.slow +def test_skeleton_pypi_arguments_work(testing_workdir): + """ + These checks whether skeleton executes without error when these + options are specified on the command line AND whether the underlying + functionality works as a regression test for: + + https://github.com/conda/conda-build/pull/1384 + """ + args = ["pypi", "msumastro", "--version=1.1.6", "--pin-numpy"] + main_skeleton.execute(args) + assert os.path.isdir("msumastro") + + # Deliberately bypass metadata reading in conda build to get as + # close to the "ground truth" as possible. + with open(os.path.join("msumastro", "meta.yaml")) as f: + assert f.read().count("numpy x.x") == 2 + + args = ["pypi", "photutils", "--version=0.2.2", "--setup-options=--offline"] + main_skeleton.execute(args) + assert os.path.isdir("photutils") + # Check that the setup option occurs in bld.bat and build.sh. + + m = api.render("photutils")[0][0] + assert "--offline" in m.meta["build"]["script"] + assert m.version() == "0.2.2" diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index a365b5cff7..0000000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,751 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# For the most part, all functionality should be tested with the api tests, -# because they actually provide coverage. These tests are here to make -# sure that the CLI still works. - -from glob import glob -import json -import os -import re -import sys -import yaml - -import pytest - -from conda_build.conda_interface import cc_conda_build, context, download, reset_context -from conda_build.tarcheck import TarCheck - -from conda_build import api -from conda_build.config import Config, zstd_compression_level_default -from conda_build.utils import get_site_packages, on_win, get_build_folders, package_has_file, tar_xf -from conda_build.conda_interface import TemporaryDirectory -from conda_build.exceptions import DependencyNeedsBuildingError -import conda_build -from .utils import metadata_dir - -import conda_build.cli.main_build as main_build -import conda_build.cli.main_render as main_render -import conda_build.cli.main_convert as main_convert -import conda_build.cli.main_develop as main_develop -import conda_build.cli.main_metapackage as main_metapackage -import conda_build.cli.main_skeleton as main_skeleton -import conda_build.cli.main_inspect as main_inspect -import conda_build.cli.main_index as main_index - - -def _reset_config(search_path=None): - reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, 'conda_build') else {} - ) - - -@pytest.mark.sanity -def test_build(): - args = ['--no-anaconda-upload', os.path.join(metadata_dir, "empty_sections"), '--no-activate', - '--no-anaconda-upload'] - main_build.execute(args) - - -@pytest.mark.serial -def test_build_add_channel(): - """This recipe requires the conda_build_test_requirement package, which is - only on the conda_build_test channel. 
This verifies that the -c argument - works.""" - - args = ['-c', 'conda_build_test', '--no-activate', '--no-anaconda-upload', - os.path.join(metadata_dir, "_recipe_requiring_external_channel")] - main_build.execute(args) - - -def test_build_without_channel_fails(): - # remove the conda forge channel from the arguments and make sure that we fail. If we don't, - # we probably have channels in condarc, and this is not a good test. - args = ['--no-anaconda-upload', '--no-activate', - os.path.join(metadata_dir, "_recipe_requiring_external_channel")] - with pytest.raises(DependencyNeedsBuildingError): - main_build.execute(args) - - -def test_render_add_channel(): - """This recipe requires the conda_build_test_requirement package, which is - only on the conda_build_test channel. This verifies that the -c argument - works for rendering.""" - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, 'out.yaml') - args = ['-c', 'conda_build_test', os.path.join(metadata_dir, - "_recipe_requiring_external_channel"), '--file', rendered_filename] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [pkg for pkg in rendered_meta['requirements']['build'] if - 'conda_build_test_requirement' in pkg][0] - required_package_details = required_package_string.split(' ') - assert len(required_package_details) > 1, ("Expected version number on successful " - "rendering, but got only {}".format(required_package_details)) - assert required_package_details[1] == '1.0', f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" - - -def test_render_without_channel_fails(): - # do make extra channel available, so the required package should not be found - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, 'out.yaml') - args = ['--override-channels', os.path.join(metadata_dir, "_recipe_requiring_external_channel"), '--file', rendered_filename] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [pkg for pkg in - rendered_meta.get('requirements', {}).get('build', []) - if 'conda_build_test_requirement' in pkg][0] - assert required_package_string == 'conda_build_test_requirement', \ - f"Expected to get only base package name because it should not be found, but got :{required_package_string}" - - -def test_no_filename_hash(testing_workdir, testing_metadata, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--output', testing_workdir, '--old-build-string'] - main_render.execute(args) - output, error = capfd.readouterr() - assert not re.search('h[0-9a-f]{%d}' % testing_metadata.config.hash_length, output) - - args = ['--no-anaconda-upload', '--no-activate', testing_workdir, '--old-build-string'] - main_build.execute(args) - output, error = capfd.readouterr() - assert not re.search('test_no_filename_hash.*h[0-9a-f]{%d}' % testing_metadata.config.hash_length, output) - assert not re.search('test_no_filename_hash.*h[0-9a-f]{%d}' % testing_metadata.config.hash_length, error) - - -def test_render_output_build_path(testing_workdir, testing_metadata, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--output', testing_workdir] - main_render.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_metadata.config.host_subdir, - "test_render_output_build_path-1.0-1.tar.bz2") - output, error = 
capfd.readouterr() - assert output.rstrip() == test_path, error - assert error == "" - - -def test_render_output_build_path_and_file(testing_workdir, testing_metadata, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - rendered_filename = 'out.yaml' - args = ['--output', '--file', rendered_filename, testing_workdir] - main_render.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_metadata.config.host_subdir, - "test_render_output_build_path_and_file-1.0-1.tar.bz2") - output, error = capfd.readouterr() - assert output.rstrip() == test_path, error - assert error == "" - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - assert rendered_meta['package']['name'] == 'test_render_output_build_path_and_file' - - -def test_build_output_build_path(testing_workdir, testing_metadata, testing_config, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - testing_config.verbose = False - testing_config.debug = False - args = ['--output', testing_workdir] - main_build.execute(args) - test_path = os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - "test_build_output_build_path-1.0-1.tar.bz2") - output, error = capfd.readouterr() - assert test_path == output.rstrip(), error - assert error == "" - - -def test_build_output_build_path_multiple_recipes(testing_workdir, testing_metadata, - testing_config, capfd): - api.output_yaml(testing_metadata, 'meta.yaml') - testing_config.verbose = False - skip_recipe = os.path.join(metadata_dir, "build_skip") - args = ['--output', testing_workdir, skip_recipe] - - main_build.execute(args) - - test_path = lambda pkg: os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, pkg) - test_paths = [test_path("test_build_output_build_path_multiple_recipes-1.0-1.tar.bz2"), ] - - output, error = capfd.readouterr() - # assert error == "" - assert output.rstrip().splitlines() == test_paths, error - - -def test_slash_in_recipe_arg_keeps_build_id(testing_config): - args = [os.path.join(metadata_dir, "has_prefix_files"), '--croot', testing_config.croot, - '--no-anaconda-upload'] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], 'binary-has-prefix', refresh_mode='forced') - assert data - if hasattr(data, 'decode'): - data = data.decode('UTF-8') - assert 'conda-build-test-has-prefix-files_1' in data - - -@pytest.mark.sanity -@pytest.mark.skipif(on_win, reason="prefix is always short on win.") -def test_build_long_test_prefix_default_enabled(): - recipe_path = os.path.join(metadata_dir, '_test_long_test_prefix') - args = [recipe_path, '--no-anaconda-upload'] - main_build.execute(args) - - args.append('--no-long-test-prefix') - with pytest.raises(SystemExit): - main_build.execute(args) - - -def test_build_no_build_id(testing_config): - args = [os.path.join(metadata_dir, "has_prefix_files"), '--no-build-id', - '--croot', testing_config.croot, '--no-activate', '--no-anaconda-upload'] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], 'binary-has-prefix', refresh_mode='forced') - assert data - if hasattr(data, 'decode'): - data = data.decode('UTF-8') - assert 'has_prefix_files_1' not in data - - -def test_build_multiple_recipes(testing_metadata): - """Test that building two recipes in one CLI call separates the build environment for each""" - os.makedirs('recipe1') - os.makedirs('recipe2') - api.output_yaml(testing_metadata, 'recipe1/meta.yaml') - with open('recipe1/run_test.py', 'w') as f: - f.write("import os; assert 
'test_build_multiple_recipes' in os.getenv('PREFIX')") - testing_metadata.meta['package']['name'] = 'package2' - api.output_yaml(testing_metadata, 'recipe2/meta.yaml') - with open('recipe2/run_test.py', 'w') as f: - f.write("import os; assert 'package2' in os.getenv('PREFIX')") - args = ['--no-anaconda-upload', 'recipe1', 'recipe2'] - main_build.execute(args) - - -def test_build_output_folder(testing_workdir, testing_metadata): - api.output_yaml(testing_metadata, 'meta.yaml') - with TemporaryDirectory() as tmp: - out = os.path.join(tmp, 'out') - args = [testing_workdir, '--no-build-id', - '--croot', tmp, '--no-activate', '--no-anaconda-upload', - '--output-folder', out] - output = main_build.execute(args)[0] - assert os.path.isfile(os.path.join(out, testing_metadata.config.host_subdir, - os.path.basename(output))) - - -def test_build_source(): - with TemporaryDirectory() as tmp: - args = [os.path.join(metadata_dir, '_pyyaml_find_header'), '--source', '--no-build-id', - '--croot', tmp, '--no-activate', '--no-anaconda-upload', ] - main_build.execute(args) - assert os.path.isfile(os.path.join(tmp, 'work', 'setup.py')) - - -def test_render_output_build_path_set_python(testing_workdir, testing_metadata, capfd): - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - api.output_yaml(testing_metadata, 'meta.yaml') - # build the other major thing, whatever it is - if sys.version_info.major == 3: - version = "2.7" - else: - version = "3.5" - - api.output_yaml(testing_metadata, 'meta.yaml') - metadata = api.render(testing_workdir, python=version)[0][0] - - args = ['--output', testing_workdir, '--python', version] - main_render.execute(args) - - _hash = metadata.hash_dependencies() - test_path = "test_render_output_build_path_set_python-1.0-py{}{}{}_1.tar.bz2".format( - version.split('.')[0], version.split('.')[1], _hash) - output, error = capfd.readouterr() - assert os.path.basename(output.rstrip()) == test_path, error - - -@pytest.mark.sanity -def test_skeleton_pypi(): - args = ['pypi', 'peppercorn'] - main_skeleton.execute(args) - assert os.path.isdir('peppercorn') - - # ensure that recipe generated is buildable - main_build.execute(('peppercorn',)) - - -@pytest.mark.sanity -def test_skeleton_pypi_compatible_versions(): - args = ['pypi', 'openshift'] - main_skeleton.execute(args) - assert os.path.isdir('openshift') - - -@pytest.mark.slow -def test_skeleton_pypi_arguments_work(): - """ - These checks whether skeleton executes without error when these - options are specified on the command line AND whether the underlying - functionality works as a regression test for: - - https://github.com/conda/conda-build/pull/1384 - """ - args = ['pypi', 'msumastro', '--version=1.1.6', '--pin-numpy'] - main_skeleton.execute(args) - assert os.path.isdir('msumastro') - - # Deliberately bypass metadata reading in conda build to get as - # close to the "ground truth" as possible. - with open(os.path.join('msumastro', 'meta.yaml')) as f: - assert f.read().count('numpy x.x') == 2 - - args = ['pypi', 'photutils', '--version=0.2.2', '--setup-options=--offline'] - main_skeleton.execute(args) - assert os.path.isdir('photutils') - # Check that the setup option occurs in bld.bat and build.sh. 
- - m = api.render('photutils')[0][0] - assert '--offline' in m.meta['build']['script'] - assert m.version() == '0.2.2' - - -def test_metapackage(testing_config): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test', '1.0', '-d', 'bzip2', '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test-1.0-0.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_build_number(testing_config): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test_build_number', '1.0', '-d', 'bzip2', '--build-number', '1', - '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test_build_number-1.0-1.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_build_string(testing_config): - """the metapackage command creates a package with runtime dependencies specified on the CLI""" - args = ['metapackage_test_build_string', '1.0', '-d', 'bzip2', '--build-string', 'frank', - '--no-anaconda-upload'] - main_metapackage.execute(args) - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_test_build_string-1.0-frank*.tar.bz2'))[0] - assert os.path.isfile(test_path) - - -def test_metapackage_metadata(testing_config): - args = ['metapackage_testing_metadata', '1.0', '-d', 'bzip2', "--home", "http://abc.com", - "--summary", "wee", "--license", "BSD", '--no-anaconda-upload'] - main_metapackage.execute(args) - - test_path = glob(os.path.join(sys.prefix, "conda-bld", testing_config.host_subdir, - 'metapackage_testing_metadata-1.0-0.tar.bz2'))[0] - assert os.path.isfile(test_path) - info = json.loads(package_has_file(test_path, 'info/index.json')) - assert info['license'] == 'BSD' - info = json.loads(package_has_file(test_path, 'info/about.json')) - assert info['home'] == 'http://abc.com' - assert info['summary'] == 'wee' - - -def testing_index(): - args = ['.'] - main_index.execute(args) - assert os.path.isfile('noarch/repodata.json') - - -def test_inspect_installable(): - args = ['channels', '--test-installable', 'conda-team'] - main_inspect.execute(args) - - -def test_inspect_linkages(capfd): - # get a package that has known object output - args = ['linkages', 'python'] - if sys.platform == 'win32': - with pytest.raises(SystemExit) as exc: - main_inspect.execute(args) - assert 'conda inspect linkages is only implemented in Linux and OS X' in exc - else: - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'libncursesw' in output - - -def test_inspect_objects(capfd): - # get a package that has known object output - args = ['objects', 'python'] - if sys.platform != 'darwin': - with pytest.raises(SystemExit) as exc: - main_inspect.execute(args) - assert 'conda inspect objects is only implemented in OS X' in exc - else: - main_inspect.execute(args) - output, error = capfd.readouterr() - assert re.search('rpath:.*@loader_path', output) - - -@pytest.mark.skipif(on_win, reason="Windows prefix length doesn't matter (yet?)") -def test_inspect_prefix_length(capfd): - from conda_build import api - # build our own known-length package here - test_base = os.path.expanduser("~/cbtmp") - config = api.Config(croot=test_base, anaconda_upload=False, verbose=True) - recipe_path = 
os.path.join(metadata_dir, "has_prefix_files") - config.prefix_length = 80 - outputs = api.build(recipe_path, config=config, notest=True) - - args = ['prefix-lengths'] + outputs - with pytest.raises(SystemExit): - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'Packages with binary prefixes shorter than' in output - assert all(fn in output for fn in outputs) - - config.prefix_length = 255 - # reset the build id so that a new one is computed - config._build_id = "" - api.build(recipe_path, config=config, notest=True) - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'No packages found with binary prefixes shorter' in output - - -def test_inspect_hash_input(testing_metadata, testing_workdir, capfd): - testing_metadata.meta['requirements']['build'] = ['zlib'] - api.output_yaml(testing_metadata, 'meta.yaml') - output = api.build(testing_workdir, notest=True)[0] - with open(os.path.join(testing_workdir, 'conda_build_config.yaml'), 'w') as f: - yaml.dump({'zlib': ['1.2.11']}, f) - args = ['hash-inputs', output] - main_inspect.execute(args) - output, error = capfd.readouterr() - assert 'zlib' in output - - -def test_develop(testing_env): - f = "https://pypi.io/packages/source/c/conda_version_test/conda_version_test-0.1.0-1.tar.gz" - download(f, "conda_version_test.tar.gz") - tar_xf("conda_version_test.tar.gz", testing_env) - extract_folder = 'conda_version_test-0.1.0-1' - cwd = os.getcwd() - args = ['-p', testing_env, extract_folder] - main_develop.execute(args) - py_ver = '.'.join((str(sys.version_info.major), str(sys.version_info.minor))) - with open(os.path.join(get_site_packages(testing_env, py_ver), 'conda.pth')) as f_pth: - assert cwd in f_pth.read() - args = ['--uninstall', '-p', testing_env, extract_folder] - main_develop.execute(args) - with open(os.path.join(get_site_packages(testing_env, py_ver), 'conda.pth')) as f_pth: - assert cwd not in f_pth.read() - - -@pytest.mark.xfail(on_win, reason="This is a flaky test that doesn't seem to be working well on Windows.") -def test_convert(testing_config): - # download a sample py2.7 package - f = 'https://repo.anaconda.com/pkgs/free/win-64/affine-2.0.0-py27_0.tar.bz2' - pkg_name = "affine-2.0.0-py27_0.tar.bz2" - download(f, pkg_name) - # convert it to all platforms - args = ['-o', 'converted', '--platform', 'all', pkg_name] - main_convert.execute(args) - platforms = ['osx-64', 'win-32', 'linux-64', 'linux-32'] - for platform in platforms: - dirname = os.path.join('converted', platform) - if platform != 'win-64': - assert os.path.isdir(dirname) - assert pkg_name in os.listdir(dirname) - testing_config.host_subdir = platform - with TarCheck(os.path.join(dirname, pkg_name), config=testing_config) as tar: - tar.correct_subdir() - else: - assert not os.path.isdir(dirname) - - -@pytest.mark.serial -def test_purge(testing_workdir, testing_metadata): - """ - purge clears out build folders - things like some_pkg_12048309850135 - - It does not clear out build packages from folders like osx-64 or linux-64. 
- """ - api.output_yaml(testing_metadata, 'meta.yaml') - outputs = api.build(testing_workdir, notest=True) - args = ['purge'] - main_build.execute(args) - dirs = get_build_folders(testing_metadata.config.croot) - assert not dirs - # make sure artifacts are kept - only temporary folders get nuked - assert all(os.path.isfile(fn) for fn in outputs) - - -@pytest.mark.serial -def test_purge_all(testing_workdir, testing_metadata): - """ - purge-all clears out build folders as well as build packages in the osx-64 folders and such - """ - api.output_yaml(testing_metadata, 'meta.yaml') - with TemporaryDirectory() as tmpdir: - testing_metadata.config.croot = tmpdir - outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) - args = ['purge-all', '--croot', tmpdir] - main_build.execute(args) - assert not get_build_folders(testing_metadata.config.croot) - assert not any(os.path.isfile(fn) for fn in outputs) - - -@pytest.mark.serial -def test_no_force_upload(mocker, testing_workdir, testing_metadata, request): - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - f.write('anaconda_upload: True\n') - f.write('conda_build:\n') - f.write(' force_upload: False\n') - del testing_metadata.meta['test'] - api.output_yaml(testing_metadata, 'meta.yaml') - args = ['--no-force-upload', testing_workdir] - call = mocker.patch.object(conda_build.build.subprocess, 'call') - request.addfinalizer(_reset_config) - _reset_config([os.path.join(testing_workdir, '.condarc')]) - main_build.execute(args) - pkg = api.get_output_file_path(testing_metadata) - assert call.called_once_with(['anaconda', 'upload', pkg]) - args = [testing_workdir] - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - f.write('anaconda_upload: True\n') - main_build.execute(args) - assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) - - -@pytest.mark.slow -def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): - monkeypatch.setenv('CONDA_PY', '36') - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - api.output_yaml(testing_metadata, 'meta.yaml') - outputs = api.build(testing_workdir, notest=True) - assert any('py36' in output for output in outputs) - - -def test_build_skip_existing(capfd, mocker): - # build the recipe first - empty_sections = os.path.join(metadata_dir, "empty_sections") - args = ['--no-anaconda-upload', empty_sections] - main_build.execute(args) - args.insert(0, '--skip-existing') - import conda_build.source - provide = mocker.patch.object(conda_build.source, 'provide') - main_build.execute(args) - provide.assert_not_called() - output, error = capfd.readouterr() - assert ("are already built" in output or "are already built" in error) - - -def test_build_skip_existing_croot(testing_workdir, capfd): - # build the recipe first - empty_sections = os.path.join(metadata_dir, "empty_sections") - args = ['--no-anaconda-upload', '--croot', testing_workdir, empty_sections] - main_build.execute(args) - args.insert(0, '--skip-existing') - main_build.execute(args) - output, error = capfd.readouterr() - assert "are already built" in output - - -@pytest.mark.sanity -def test_package_test(testing_workdir, testing_metadata): - """Test calling conda build -t - rather than """ - api.output_yaml(testing_metadata, 'recipe/meta.yaml') - output = api.build(testing_workdir, config=testing_metadata.config, notest=True)[0] - args = ['-t', output] - main_build.execute(args) - - -def test_activate_scripts_not_included(testing_workdir): - 
recipe = os.path.join(metadata_dir, '_activate_scripts_not_included') - args = ['--no-anaconda-upload', '--croot', testing_workdir, recipe] - main_build.execute(args) - out = api.get_output_file_paths(recipe, croot=testing_workdir)[0] - for f in ('bin/activate', 'bin/deactivate', 'bin/conda', - 'Scripts/activate.bat', 'Scripts/deactivate.bat', 'Scripts/conda.bat', - 'Scripts/activate.exe', 'Scripts/deactivate.exe', 'Scripts/conda.exe', - 'Scripts/activate', 'Scripts/deactivate', 'Scripts/conda'): - assert not package_has_file(out, f) - - -def test_relative_path_croot(): - # this tries to build a package while specifying the croot with a relative path: - # conda-build --no-test --croot ./relative/path - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - args = ['--no-anaconda-upload', '--croot', croot_rel, empty_sections] - outputfile = main_build.execute(args) - - assert len(outputfile) == 1 - assert os.path.isfile(outputfile[0]) - - -def test_relative_path_test_artifact(): - # this test builds a package into (cwd)/relative/path and then calls: - # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) - - # build the package - args = ['--no-anaconda-upload', '--no-test', '--croot', croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 - - output_file_rel = os.path.join(croot_rel, os.path.relpath(output_file_abs[0], croot_abs)) - - # run the test stage with relative path - args = ['--no-anaconda-upload', '--test', output_file_rel] - main_build.execute(args) - - -def test_relative_path_test_recipe(): - # this test builds a package into (cwd)/relative/path and then calls: - # conda-build --test --croot ./relative/path/ /abs/path/to/recipe - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join('.', 'relative', 'path') - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) - - # build the package - args = ['--no-anaconda-upload', '--no-test', '--croot', croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 - - # run the test stage with relative croot - args = ['--no-anaconda-upload', '--test', '--croot', croot_rel, empty_sections] - main_build.execute(args) - - -@pytest.mark.slow -def test_render_with_python_arg_reduces_subspace(capfd): - recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") - # build the package - args = [recipe, '--python=2.7', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 2 - - args = [recipe, '--python=3.9', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 1 - - # should raise an error, because python 3.6 is not in the matrix, so we don't know which vc - # to associate with - args = [recipe, '--python=3.6', '--output'] - with pytest.raises(ValueError): - main_render.execute(args) - - -def test_render_with_python_arg_CLI_reduces_subspace(capfd): - recipe = os.path.join(metadata_dir, "..", "variants", "20_subspace_selection_cli") - # build the package - args = [recipe, '--variants', '{python: [2.7, 3.9]}', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 
3 - - args = [recipe, '--variants', '{python: 2.7}', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 2 - - args = [recipe, '--variants', '{python: 3.9}', '--output'] - main_render.execute(args) - out, err = capfd.readouterr() - assert len(out.splitlines()) == 1 - - -def test_test_extra_dep(testing_metadata): - testing_metadata.meta['test']['imports'] = ['imagesize'] - api.output_yaml(testing_metadata, 'meta.yaml') - output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] - - # tests version constraints. CLI would quote this - "click <6.7" - args = [output, '-t', '--extra-deps', 'imagesize <1.0'] - # extra_deps will add it in - main_build.execute(args) - - # missing click dep will fail tests - with pytest.raises(SystemExit): - args = [output, '-t'] - # extra_deps will add it in - main_build.execute(args) - - -@pytest.mark.parametrize( - 'additional_args, is_long_test_prefix', - [ - ([], True), - (['--long-test-prefix'], True), - (['--no-long-test-prefix'], False) - ], -) -def test_long_test_prefix(additional_args, is_long_test_prefix): - args = ['non_existing_recipe'] + additional_args - parser, args = main_build.parse_args(args) - config = Config(**args.__dict__) - assert config.long_test_prefix is is_long_test_prefix - - -@pytest.mark.serial -@pytest.mark.parametrize( - 'zstd_level_condarc, zstd_level_cli', - [ - (None, None), - (1, None), - (1, 2), - ], -) -def test_zstd_compression_level(testing_workdir, request, zstd_level_condarc, zstd_level_cli): - assert zstd_compression_level_default not in {zstd_level_condarc, zstd_level_cli} - if zstd_level_condarc: - with open(os.path.join(testing_workdir, '.condarc'), 'w') as f: - print( - 'conda_build:', - f' zstd_compression_level: {zstd_level_condarc}', - sep='\n', - file=f, - ) - request.addfinalizer(_reset_config) - _reset_config([os.path.join(testing_workdir, '.condarc')]) - args = ['non_existing_recipe'] - if zstd_level_cli: - args.append(f'--zstd-compression-level={zstd_level_cli}') - parser, args = main_build.parse_args(args) - config = Config(**args.__dict__) - if zstd_level_cli: - assert config.zstd_compression_level == zstd_level_cli - elif zstd_level_condarc: - assert config.zstd_compression_level == zstd_level_condarc - else: - assert config.zstd_compression_level == zstd_compression_level_default - - -def test_user_warning(tmpdir, recwarn): - dir_recipe_path = tmpdir.mkdir("recipe-path") - recipe = dir_recipe_path.join("meta.yaml") - recipe.write("") - - main_build.parse_args([str(recipe)]) - assert ( - f"RECIPE_PATH received is a file ({recipe}).\n" - "It should be a path to a folder.\n" - "Forcing conda-build to use the recipe file." 
-    ) == str(recwarn.pop(UserWarning).message)
-
-    main_build.parse_args([str(dir_recipe_path)])
-    assert not recwarn.list

From 44defeda1c4cf252a42c07dbd813a03bcc500a51 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 30 Jan 2023 18:22:00 +0100
Subject: [PATCH 058/366] [pre-commit.ci] pre-commit autoupdate (#4759)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/Lucas-C/pre-commit-hooks: v1.4.1 → v1.4.2](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.4.1...v1.4.2)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 527258861b..17a6f7927e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -68,7 +68,7 @@ repos:
         # ignore all tests, not just tests data
         exclude: ^tests/
   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.4.1
+    rev: v1.4.2
     hooks:
       - id: insert-license
         files: \.py$

From 26be73a368d80a27f255a09cb4c013eac3888087 Mon Sep 17 00:00:00 2001
From: Ken Odegard
Date: Tue, 31 Jan 2023 21:07:16 +0100
Subject: [PATCH 059/366] `test_api_debug`: collapse all debug tests into parametrize (#4757)

* Adding -vv option in test.yml
* Remove unused skipif and unused imports
* Collapse debug tests into parametrize

---------

Co-authored-by: Bianca Henderson
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Srivas Venkatesh <110486050+sven6002@users.noreply.github.com>
---
 tests/test_api_debug.py | 214 +++++++++++++++++++---------------------
 1 file changed, 103 insertions(+), 111 deletions(-)

diff --git a/tests/test_api_debug.py b/tests/test_api_debug.py
index 10415c15d1..c41cc8f062 100644
--- a/tests/test_api_debug.py
+++ b/tests/test_api_debug.py
@@ -4,119 +4,111 @@
 This module tests the test API. These are high-level integration tests.
Lower level unit tests should go in test_render.py """ +from __future__ import annotations -import os -from glob import glob - -import pytest +from contextlib import nullcontext +from pathlib import Path import subprocess -import sys - -from conda_build import api -from tests import utils - -from .utils import metadata_dir, thisdir, on_win - -recipe_path = os.path.join(metadata_dir, "_debug_pkg") -ambiguous_recipe_path = os.path.join(metadata_dir, "_debug_pkg_multiple_outputs") -tarball_path = os.path.join(thisdir, "archives", "test_debug_pkg-1.0-0.tar.bz2") - -if on_win: - shell_cmd = ["cmd.exe", "/d", "/c"] -else: - shell_cmd = ["bash", "-c"] - - -def assert_correct_folders(work_dir, build=True): - base_dir = os.path.dirname(work_dir) - build_set = "_b*", "_h*" - test_set = "_t*", "test_tmp" - for prefix in build_set: - assert bool(glob(os.path.join(base_dir, prefix))) == build - for prefix in test_set: - assert bool(glob(os.path.join(base_dir, prefix))) != build - - -def check_build_files_present(work_dir, build=True): - if on_win: - assert os.path.exists(os.path.join(work_dir, "bld.bat")) == build - else: - assert os.path.exists(os.path.join(work_dir, "conda_build.sh")) == build - - -def check_test_files_present(work_dir, test=True): - if on_win: - assert os.path.exists(os.path.join(work_dir, "conda_test_runner.bat")) == test - else: - assert os.path.exists(os.path.join(work_dir, "conda_test_runner.sh")) == test - - -@pytest.mark.slow -def test_debug_recipe_default_path(testing_config): - activation_string = api.debug(recipe_path, config=testing_config) - assert activation_string and "debug_1" in activation_string - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir) - +from conda.common.compat import on_win +import pytest -@pytest.mark.skipif( - utils.on_win and sys.version_info <= (3, 4), - reason="Skipping on windows and vc<14" +from conda_build.api import debug + +from .utils import metadata_path, archive_path + + +DEBUG_PKG = metadata_path / "_debug_pkg" +MULTI_OUT = metadata_path / "_debug_pkg_multiple_outputs" +TARBALL = archive_path / "test_debug_pkg-1.0-0.tar.bz2" +SHELL_CMD = ("cmd.exe", "/d", "/c") if on_win else ("bash", "-c") + + +@pytest.mark.parametrize( + "recipe,path,config,output_id,has_error,has_build", + [ + # w/ config + pytest.param(DEBUG_PKG, False, True, None, False, True, id="recipe w/ config"), + pytest.param(TARBALL, False, True, None, False, False, id="tarball w/ config"), + # w/ path + pytest.param(DEBUG_PKG, True, False, None, False, True, id="recipe w/ path"), + pytest.param(TARBALL, True, False, None, False, False, id="tarball w/ path"), + # w/ outputs + pytest.param( + MULTI_OUT, + False, + False, + "output1*", + False, + True, + id="outputs w/ valid filtering", + ), + pytest.param( + MULTI_OUT, + False, + False, + None, + True, + False, + id="outputs w/ no filtering", + ), + pytest.param( + MULTI_OUT, + False, + False, + "frank", + True, + False, + id="outputs w/ invalid filtering", + ), + ], ) -def test_debug_package_default_path(testing_config): - activation_string = api.debug(tarball_path, config=testing_config) - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - 
subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, False) - check_test_files_present(work_dir, True) - assert_correct_folders(work_dir, build=False) - - -@pytest.mark.slow -def test_debug_recipe_custom_path(testing_workdir): - activation_string = api.debug(recipe_path, path=testing_workdir) - assert activation_string and "debug_1" not in activation_string - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir) - - -def test_debug_package_custom_path(testing_workdir): - activation_string = api.debug(tarball_path, path=testing_workdir) - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, False) - check_test_files_present(work_dir, True) - assert_correct_folders(work_dir, build=False) - - -def test_specific_output(): - activation_string = api.debug(ambiguous_recipe_path, output_id="output1*") - _, work_dir, _, src_command, env_activation_script = activation_string.split() - _shell_cmd = shell_cmd + [' '.join((src_command, env_activation_script))] - subprocess.check_call(_shell_cmd, cwd=work_dir) - check_build_files_present(work_dir, True) - check_test_files_present(work_dir, False) - assert_correct_folders(work_dir, build=True) - - -@pytest.mark.sanity -def test_error_on_ambiguous_output(): - with pytest.raises(ValueError): - api.debug(ambiguous_recipe_path) - - -@pytest.mark.sanity -def test_error_on_unmatched_output(): - with pytest.raises(ValueError): - api.debug(ambiguous_recipe_path, output_id="frank") +def test_debug( + recipe: Path, + path: bool, + config: bool, + output_id: str | None, + has_error: bool, + has_build: bool, + tmp_path: Path, + testing_config, +): + with pytest.raises(ValueError) if has_error else nullcontext(): + activation = debug( + str(recipe), + path=tmp_path if path else None, + config=testing_config if config else None, + output_id=output_id, + ) + + # if we expected an error there wont be anything else to test + if has_error: + return + + # e.g.: activation = "cd /path/to/work && source /path/to/work/build_env_setup.sh" + _, work_dir, _, source, script = activation.split() + work_path = Path(work_dir) + + # recipes and tarballs are installed into different locations + if recipe.suffixes[-2:] == [".tar", ".bz2"]: + assert work_path.name == "test_tmp" + elif path: + assert work_path.parent == tmp_path + else: + assert work_path.parent.name.startswith("debug_") + + # check build files are present + name = "bld.bat" if on_win else "conda_build.sh" + assert (work_path / name).exists() is has_build + for prefix in ("_b*", "_h*"): + assert bool(next(work_path.parent.glob(prefix), False)) is has_build + + # check test files are present + name = f"conda_test_runner{('.bat' if on_win else '.sh')}" + has_test = not has_build + assert (work_path / name).exists() is has_test + for prefix in ("_t*", "test_tmp"): + assert bool(next(work_path.parent.glob(prefix), False)) is has_test + + # ensure it's possible to activate the environment + subprocess.check_call([*SHELL_CMD, f"{source} {script}"], cwd=work_path) From 10ad77cb244c6355934d484581cc131b0ad49e5a Mon Sep 17 00:00:00 2001 From: 
Ken Odegard Date: Wed, 1 Feb 2023 05:02:58 +0100 Subject: [PATCH 060/366] Remove unnecessary fixtures (#4761) * Remove unused helper methods * Remove local recipe fixtures * Numpy is always installed, no need to check --- tests/test_api_build.py | 23 ++++++++++--------- tests/test_published_examples.py | 8 ++++++- tests/test_subpackages.py | 23 +++++++++++-------- tests/utils.py | 39 +------------------------------- 4 files changed, 33 insertions(+), 60 deletions(-) diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 4948bb2bc0..59af1b1c42 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -7,6 +7,7 @@ from glob import glob import logging import os +from pathlib import Path import re import subprocess import sys @@ -37,7 +38,7 @@ from conda_build.conda_interface import reset_context from conda.exceptions import ClobberError, CondaMultiError -from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, numpy_installed +from .utils import get_valid_recipes, metadata_dir, fail_dir, add_mangling # define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere empty_sections = os.path.join(metadata_dir, "empty_sections") @@ -77,26 +78,27 @@ def describe_root(cwd=None): return tag -@pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir) - if is_valid_dir(metadata_dir, dirname)]) -def recipe(request): - return os.path.join(metadata_dir, request.param) - - # This tests any of the folders in the test-recipes/metadata folder that don't start with _ @pytest.mark.slow @pytest.mark.serial -def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(metadata_dir) + ], +) +def test_recipe_builds(recipe: Path, testing_config, testing_workdir, monkeypatch): # TODO: After we fix #3754 this mark can be removed. This specific test # ``source_setup_py_data_subdir`` reproduces the problem. 
- if os.path.basename(recipe) == "source_setup_py_data_subdir": + if recipe.name == "source_setup_py_data_subdir": pytest.xfail("Issue related to #3754 on conda-build.") # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - api.build(recipe, config=testing_config) + api.build(str(recipe), config=testing_config) @pytest.mark.serial @@ -494,7 +496,6 @@ def test_build_metadata_object(testing_metadata): @pytest.mark.serial -@pytest.mark.skipif(not numpy_installed(), reason="numpy not installed in base environment") def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') # this shows an error that is OK to ignore: diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index 45c17fbd31..0df93c8747 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -21,7 +21,13 @@ def test_skeleton_pypi(): @pytest.mark.sanity -@pytest.mark.parametrize("recipe", get_valid_recipes(published_path)) +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(published_path) + ], +) def test_recipe_builds(recipe, testing_config): # These variables are defined solely for testing purposes, # so they can be checked within build scripts diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index f42e21ab45..569834d239 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -3,26 +3,29 @@ from glob import glob import json import os -import pytest +from pathlib import Path import re import sys +import pytest + from conda_build.render import finalize_metadata from conda_build.conda_interface import subdir from conda_build import api, utils -from .utils import subpackage_dir, is_valid_dir - - -@pytest.fixture(params=[dirname for dirname in os.listdir(subpackage_dir) - if is_valid_dir(subpackage_dir, dirname)]) -def recipe(request): - return os.path.join(subpackage_dir, request.param) +from .utils import subpackage_dir, get_valid_recipes @pytest.mark.slow -def test_subpackage_recipes(recipe, testing_config): - api.build(recipe, config=testing_config) +@pytest.mark.parametrize( + "recipe", + [ + pytest.param(recipe, id=recipe.name) + for recipe in get_valid_recipes(subpackage_dir) + ], +) +def test_subpackage_recipes(recipe: Path, testing_config): + api.build(str(recipe), config=testing_config) @pytest.mark.sanity diff --git a/tests/utils.py b/tests/utils.py index 9f94cd4a0e..8c839a6c3e 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,21 +2,14 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import contextlib import os from pathlib import Path import shlex import sys from typing import Generator -import pytest -from conda.common.compat import on_mac, on_win +from conda.common.compat import on_mac from conda_build.metadata import MetaData -from conda_build.conda_interface import linked - - -def numpy_installed(): - return any([True for dist in linked(sys.prefix) if dist.name == "numpy"]) tests_path = Path(__file__).parent @@ -146,37 +139,7 @@ def assert_package_consistency(package_path): assert len(errors) == 0, "\n".join(errors) -@contextlib.contextmanager -def put_bad_conda_on_path(testing_workdir): - path_backup = os.environ["PATH"] - # it is easier to add an intentionally bad path than it is to try to 
scrub any existing path
-    os.environ["PATH"] = os.pathsep.join([testing_workdir, os.environ["PATH"]])
-
-    exe_name = "conda.bat" if on_win else "conda"
-    out_exe = os.path.join(testing_workdir, exe_name)
-    with open(out_exe, "w") as f:
-        f.write("exit 1")
-    st = os.stat(out_exe)
-    os.chmod(out_exe, st.st_mode | 0o111)
-    try:
-        yield
-    except:
-        raise
-    finally:
-        os.environ["PATH"] = path_backup
-
-
 def get_noarch_python_meta(meta):
     d = meta.meta
     d["build"]["noarch"] = "python"
     return MetaData.fromdict(d, config=meta.config)
-
-
-@pytest.fixture(autouse=True)
-def skip_serial(request):
-    if (
-        request.node.get_marker("serial")
-        and getattr(request.config, "slaveinput", {}).get("slaveid", "local") != "local"
-    ):
-        # under xdist and serial
-        pytest.skip("serial")

From bc104566effa06417b15b8a7abadec1f0ceb5b9c Mon Sep 17 00:00:00 2001
From: Daniel Holth
Date: Fri, 3 Feb 2023 17:41:57 -0500
Subject: [PATCH 061/366] format environ.py with black (#4765)

* format environ.py with black
* pyupgrade change
---
 conda_build/environ.py | 1047 ++++++++++++++++++++++++----------------
 conda_build/utils.py   |  966 ++++++++++++++++++++++--------------
 2 files changed, 1231 insertions(+), 782 deletions(-)

diff --git a/conda_build/environ.py b/conda_build/environ.py
index 5a57359a4b..841b622ee7 100644
--- a/conda_build/environ.py
+++ b/conda_build/environ.py
@@ -14,9 +14,20 @@
 from glob import glob
 from os.path import join, normpath

-from .conda_interface import (CondaError, LinkError, LockError, NoPackagesFoundError,
-                              PaddingError, UnsatisfiableError)
-from .conda_interface import display_actions, execute_actions, execute_plan, install_actions
+from .conda_interface import (
+    CondaError,
+    LinkError,
+    LockError,
+    NoPackagesFoundError,
+    PaddingError,
+    UnsatisfiableError,
+)
+from .conda_interface import (
+    display_actions,
+    execute_actions,
+    execute_plan,
+    install_actions,
+)
 from .conda_interface import package_cache, TemporaryDirectory
 from .conda_interface import pkgs_dirs, root_dir, create_default_packages
 from .conda_interface import reset_context
@@ -33,37 +44,47 @@
 # these are things that we provide env vars for more explicitly. This list disables the
 # pass-through of variant values to env vars for these keys.
-LANGUAGES = ('PERL', 'LUA', 'R', "NUMPY", 'PYTHON') -R_PACKAGES = ('r-base', 'mro-base', 'r-impl') +LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") +R_PACKAGES = ("r-base", "mro-base", "r-impl") def get_perl_ver(config): - return '.'.join(config.variant.get('perl', get_default_variant(config)['perl']).split('.')[:2]) + return ".".join( + config.variant.get("perl", get_default_variant(config)["perl"]).split(".")[:2] + ) def get_lua_ver(config): - return '.'.join(config.variant.get('lua', get_default_variant(config)['lua']).split('.')[:2]) + return ".".join( + config.variant.get("lua", get_default_variant(config)["lua"]).split(".")[:2] + ) def get_py_ver(config): - py = config.variant.get('python', get_default_variant(config)['python']) - if not hasattr(py, 'split'): + py = config.variant.get("python", get_default_variant(config)["python"]) + if not hasattr(py, "split"): py = py[0] - return '.'.join(py.split('.')[:2]) + return ".".join(py.split(".")[:2]) def get_r_ver(config): - return '.'.join(config.variant.get('r_base', - get_default_variant(config)['r_base']).split('.')[:3]) + return ".".join( + config.variant.get("r_base", get_default_variant(config)["r_base"]).split(".")[ + :3 + ] + ) def get_npy_ver(config): - conda_npy = ''.join(str(config.variant.get('numpy') or - get_default_variant(config)['numpy']).split('.')) + conda_npy = "".join( + str(config.variant.get("numpy") or get_default_variant(config)["numpy"]).split( + "." + ) + ) # Convert int -> string, e.g. # 17 -> '1.7' # 110 -> '1.10' - return conda_npy[0] + '.' + conda_npy[1:] + return conda_npy[0] + "." + conda_npy[1:] def get_lua_include_dir(config): @@ -71,8 +92,9 @@ def get_lua_include_dir(config): @lru_cache(maxsize=None) -def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=False, - expected_rev='HEAD'): +def verify_git_repo( + git_exe, git_dir, git_url, git_commits_since_tag, debug=False, expected_rev="HEAD" +): env = os.environ.copy() log = utils.get_logger(__name__) @@ -83,51 +105,61 @@ def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=Fals OK = True - env['GIT_DIR'] = git_dir + env["GIT_DIR"] = git_dir try: # Verify current commit (minus our locally applied patches) matches expected commit - current_commit = utils.check_output_env([git_exe, - "log", - "-n1", - "--format=%H", - "HEAD" + "^" * git_commits_since_tag], - env=env, stderr=stderr) - current_commit = current_commit.decode('utf-8') - expected_tag_commit = utils.check_output_env([git_exe, "log", "-n1", "--format=%H", - expected_rev], - env=env, stderr=stderr) - expected_tag_commit = expected_tag_commit.decode('utf-8') + current_commit = utils.check_output_env( + [ + git_exe, + "log", + "-n1", + "--format=%H", + "HEAD" + "^" * git_commits_since_tag, + ], + env=env, + stderr=stderr, + ) + current_commit = current_commit.decode("utf-8") + expected_tag_commit = utils.check_output_env( + [git_exe, "log", "-n1", "--format=%H", expected_rev], env=env, stderr=stderr + ) + expected_tag_commit = expected_tag_commit.decode("utf-8") if current_commit != expected_tag_commit: return False # Verify correct remote url. Need to find the git cache directory, # and check the remote from there. 
- cache_details = utils.check_output_env([git_exe, "remote", "-v"], env=env, - stderr=stderr) - cache_details = cache_details.decode('utf-8') - cache_dir = cache_details.split('\n')[0].split()[1] + cache_details = utils.check_output_env( + [git_exe, "remote", "-v"], env=env, stderr=stderr + ) + cache_details = cache_details.decode("utf-8") + cache_dir = cache_details.split("\n")[0].split()[1] if not isinstance(cache_dir, str): # On Windows, subprocess env can't handle unicode. - cache_dir = cache_dir.encode(sys.getfilesystemencoding() or 'utf-8') + cache_dir = cache_dir.encode(sys.getfilesystemencoding() or "utf-8") try: - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) except subprocess.CalledProcessError: - if sys.platform == 'win32' and cache_dir.startswith('/'): + if sys.platform == "win32" and cache_dir.startswith("/"): cache_dir = utils.convert_unix_path_to_win(cache_dir) - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) - remote_details = remote_details.decode('utf-8') - remote_url = remote_details.split('\n')[0].split()[1] + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) + remote_details = remote_details.decode("utf-8") + remote_url = remote_details.split("\n")[0].split()[1] # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. - if sys.platform == 'win32' and remote_url.startswith('/'): + if sys.platform == "win32" and remote_url.startswith("/"): remote_url = utils.convert_unix_path_to_win(git_url) if os.path.exists(remote_url): @@ -167,18 +199,21 @@ def get_git_info(git_exe, repo, debug): # grab information from describe env = os.environ.copy() - env['GIT_DIR'] = repo + env["GIT_DIR"] = repo keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"] try: - output = utils.check_output_env([git_exe, "describe", "--tags", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--tags", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: d.update(dict(zip(keys, parts))) - d['GIT_DESCRIBE_TAG_PEP440'] = str(get_version_from_git_tag(output)) + d["GIT_DESCRIBE_TAG_PEP440"] = str(get_version_from_git_tag(output)) except subprocess.CalledProcessError: msg = ( "Failed to obtain git tag information.\n" @@ -191,35 +226,42 @@ def get_git_info(git_exe, repo, debug): # Try to get the short hash from describing with all refs (not just the tags). 
if "GIT_DESCRIBE_HASH" not in d: try: - output = utils.check_output_env([git_exe, "describe", "--all", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--all", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: # Don't save GIT_DESCRIBE_TAG and GIT_DESCRIBE_NUMBER because git (probably) # described a branch. We just want to save the short hash. - d['GIT_DESCRIBE_HASH'] = parts[-1] + d["GIT_DESCRIBE_HASH"] = parts[-1] except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) try: # get the _full_ hash of the current HEAD - output = utils.check_output_env([git_exe, "rev-parse", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - - d['GIT_FULL_HASH'] = output + output = utils.check_output_env( + [git_exe, "rev-parse", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + + d["GIT_FULL_HASH"] = output except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) # set up the build string if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d: - d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"], - d["GIT_DESCRIBE_HASH"]) + d["GIT_BUILD_STR"] = "{}_{}".format( + d["GIT_DESCRIBE_NUMBER"], d["GIT_DESCRIBE_HASH"] + ) # issues on Windows with the next line of the command prompt being recorded here. 
assert not any("\n" in value for value in d.values()) @@ -228,29 +270,41 @@ def get_git_info(git_exe, repo, debug): def get_hg_build_info(repo): env = os.environ.copy() - env['HG_DIR'] = repo + env["HG_DIR"] = repo env = {str(key): str(value) for key, value in env.items()} d = {} - cmd = ["hg", "log", "--template", - "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", - "--rev", "."] + cmd = [ + "hg", + "log", + "--template", + "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", + "--rev", + ".", + ] output = utils.check_output_env(cmd, env=env, cwd=os.path.dirname(repo)) - output = output.decode('utf-8') - rev, short_id, tag, distance, branch = output.split('|') - if tag != 'null': - d['HG_LATEST_TAG'] = tag + output = output.decode("utf-8") + rev, short_id, tag, distance, branch = output.split("|") + if tag != "null": + d["HG_LATEST_TAG"] = tag if branch == "": - branch = 'default' - d['HG_BRANCH'] = branch - d['HG_NUM_ID'] = rev - d['HG_LATEST_TAG_DISTANCE'] = distance - d['HG_SHORT_ID'] = short_id - d['HG_BUILD_STR'] = '{}_{}'.format(d['HG_NUM_ID'], d['HG_SHORT_ID']) + branch = "default" + d["HG_BRANCH"] = branch + d["HG_NUM_ID"] = rev + d["HG_LATEST_TAG_DISTANCE"] = distance + d["HG_SHORT_ID"] = short_id + d["HG_BUILD_STR"] = "{}_{}".format(d["HG_NUM_ID"], d["HG_SHORT_ID"]) return d -def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash=False, variant=None): +def get_dict( + m, + prefix=None, + for_env=True, + skip_build_id=False, + escape_backslash=False, + variant=None, +): if not prefix: prefix = m.config.host_prefix @@ -272,8 +326,7 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash d.update(os_vars(m, prefix)) # features - d.update({feat.upper(): str(int(value)) for feat, value in - feature_list}) + d.update({feat.upper(): str(int(value)) for feat, value in feature_list}) variant = variant or m.config.variant for k, v in variant.items(): @@ -283,34 +336,36 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash def conda_build_vars(prefix, config): - src_dir = config.test_dir if os.path.basename(prefix)[:2] == '_t' else config.work_dir + src_dir = ( + config.test_dir if os.path.basename(prefix)[:2] == "_t" else config.work_dir + ) return { - 'CONDA_BUILD': '1', - 'PYTHONNOUSERSITE': '1', - 'CONDA_DEFAULT_ENV': config.host_prefix, - 'ARCH': str(config.host_arch), + "CONDA_BUILD": "1", + "PYTHONNOUSERSITE": "1", + "CONDA_DEFAULT_ENV": config.host_prefix, + "ARCH": str(config.host_arch), # This is the one that is most important for where people put artifacts that get bundled. # It is fed from our function argument, and can be any of: # 1. Build prefix - when host requirements are not explicitly set, # then prefix = build prefix = host prefix # 2. Host prefix - when host requirements are explicitly set, prefix = host prefix # 3. Test prefix - during test runs, this points at the test prefix - 'PREFIX': prefix, + "PREFIX": prefix, # This is for things that are specifically build tools. Things that run on the build # platform, but probably should not be linked against, since they may not run on the # destination host platform # It can be equivalent to config.host_prefix if the host section is not explicitly set. 
- 'BUILD_PREFIX': config.build_prefix, - 'SYS_PREFIX': sys.prefix, - 'SYS_PYTHON': sys.executable, - 'SUBDIR': config.host_subdir, - 'build_platform': config.build_subdir, - 'SRC_DIR': src_dir, - 'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''), - 'HTTP_PROXY': os.getenv('HTTP_PROXY', ''), - 'REQUESTS_CA_BUNDLE': os.getenv('REQUESTS_CA_BUNDLE', ''), - 'DIRTY': '1' if config.dirty else '', - 'ROOT': root_dir, + "BUILD_PREFIX": config.build_prefix, + "SYS_PREFIX": sys.prefix, + "SYS_PYTHON": sys.executable, + "SUBDIR": config.host_subdir, + "build_platform": config.build_subdir, + "SRC_DIR": src_dir, + "HTTPS_PROXY": os.getenv("HTTPS_PROXY", ""), + "HTTP_PROXY": os.getenv("HTTP_PROXY", ""), + "REQUESTS_CA_BUNDLE": os.getenv("REQUESTS_CA_BUNDLE", ""), + "DIRTY": "1" if config.dirty else "", + "ROOT": root_dir, } @@ -320,140 +375,151 @@ def python_vars(metadata, prefix, escape_backslash): sp_dir = utils.get_site_packages(prefix, py_ver) if utils.on_win and escape_backslash: - stdlib_dir = stdlib_dir.replace('\\', '\\\\') - sp_dir = sp_dir.replace('\\', '\\\\') + stdlib_dir = stdlib_dir.replace("\\", "\\\\") + sp_dir = sp_dir.replace("\\", "\\\\") vars_ = { - 'CONDA_PY': ''.join(py_ver.split('.')[:2]), - 'PY3K': str(int(int(py_ver[0]) >= 3)), - 'PY_VER': py_ver, - 'STDLIB_DIR': stdlib_dir, - 'SP_DIR': sp_dir, - } - build_or_host = 'host' if metadata.is_cross else 'build' + "CONDA_PY": "".join(py_ver.split(".")[:2]), + "PY3K": str(int(int(py_ver[0]) >= 3)), + "PY_VER": py_ver, + "STDLIB_DIR": stdlib_dir, + "SP_DIR": sp_dir, + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'python' in deps or metadata.name(fail_ok=True) == 'python': + if "python" in deps or metadata.name(fail_ok=True) == "python": python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - python_bin = python_bin.replace('\\', '\\\\') + python_bin = python_bin.replace("\\", "\\\\") - vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PYTHON': python_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PYTHON": python_bin, + } + ) - np_ver = metadata.config.variant.get('numpy', get_default_variant(metadata.config)['numpy']) - vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2]) - vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2]) - vars_['NPY_DISTUTILS_APPEND_FLAGS'] = '1' + np_ver = metadata.config.variant.get( + "numpy", get_default_variant(metadata.config)["numpy"] + ) + vars_["NPY_VER"] = ".".join(np_ver.split(".")[:2]) + vars_["CONDA_NPY"] = "".join(np_ver.split(".")[:2]) + vars_["NPY_DISTUTILS_APPEND_FLAGS"] = "1" return vars_ def perl_vars(metadata, prefix, escape_backslash): vars_ = { - 'PERL_VER': get_perl_ver(metadata.config), - 'CONDA_PERL': get_perl_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "PERL_VER": get_perl_ver(metadata.config), + "CONDA_PERL": get_perl_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'perl' in deps or metadata.name(fail_ok=True) == 'perl': + if "perl" in deps or metadata.name(fail_ok=True) == "perl": perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - perl_bin = perl_bin.replace('\\', '\\\\') + perl_bin = perl_bin.replace("\\", 
"\\\\") - vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PERL': perl_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PERL": perl_bin, + } + ) return vars_ def lua_vars(metadata, prefix, escape_backslash): vars_ = { - 'LUA_VER': get_lua_ver(metadata.config), - 'CONDA_LUA': get_lua_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "LUA_VER": get_lua_ver(metadata.config), + "CONDA_LUA": get_lua_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'lua' in deps: + if "lua" in deps: lua_bin = metadata.config.lua_bin(prefix, metadata.config.host_subdir) lua_include_dir = get_lua_include_dir(metadata.config) if utils.on_win and escape_backslash: - lua_bin = lua_bin.replace('\\', '\\\\') - lua_include_dir = lua_include_dir.replace('\\', '\\\\') + lua_bin = lua_bin.replace("\\", "\\\\") + lua_include_dir = lua_include_dir.replace("\\", "\\\\") - vars_.update({ - 'LUA': lua_bin, - 'LUA_INCLUDE_DIR': lua_include_dir, - }) + vars_.update( + { + "LUA": lua_bin, + "LUA_INCLUDE_DIR": lua_include_dir, + } + ) return vars_ def r_vars(metadata, prefix, escape_backslash): vars_ = { - 'R_VER': get_r_ver(metadata.config), - 'CONDA_R': get_r_ver(metadata.config), - } + "R_VER": get_r_ver(metadata.config), + "CONDA_R": get_r_ver(metadata.config), + } - build_or_host = 'host' if metadata.is_cross else 'build' + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if any(r_pkg in deps for r_pkg in R_PACKAGES) or \ - metadata.name(fail_ok=True) in R_PACKAGES: + if ( + any(r_pkg in deps for r_pkg in R_PACKAGES) + or metadata.name(fail_ok=True) in R_PACKAGES + ): r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir) # set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages - r_user = join(prefix, 'Libs', 'R') + r_user = join(prefix, "Libs", "R") if utils.on_win and escape_backslash: - r_bin = r_bin.replace('\\', '\\\\') + r_bin = r_bin.replace("\\", "\\\\") - vars_.update({ - 'R': r_bin, - 'R_USER': r_user, - }) + vars_.update( + { + "R": r_bin, + "R_USER": r_user, + } + ) return vars_ def meta_vars(meta, skip_build_id=False): d = {} - for var_name in ensure_list(meta.get_value('build/script_env', [])): - if '=' in var_name: - var_name, value = var_name.split('=', 1) + for var_name in ensure_list(meta.get_value("build/script_env", [])): + if "=" in var_name: + var_name, value = var_name.split("=", 1) else: value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' is undefined." % var_name, - UserWarning + "The environment variable '%s' is undefined." % var_name, UserWarning ) else: d[var_name] = value warnings.warn( "The environment variable '%s' is being passed through with value '%s'. " "If you are splitting build and test phases with --no-test, please ensure " - "that this value is also set similarly at test time." % - (var_name, "" if meta.config.suppress_variables else value), - UserWarning + "that this value is also set similarly at test time." 
+ % (var_name, "" if meta.config.suppress_variables else value), + UserWarning, ) - folder = meta.get_value('source/0/folder', '') + folder = meta.get_value("source/0/folder", "") repo_dir = join(meta.config.work_dir, folder) - git_dir = join(repo_dir, '.git') - hg_dir = join(repo_dir, '.hg') + git_dir = join(repo_dir, ".git") + hg_dir = join(repo_dir, ".hg") if not isinstance(git_dir, str): # On Windows, subprocess env can't handle unicode. - git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8') + git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8") - git_exe = external.find_executable('git', meta.config.build_prefix) + git_exe = external.find_executable("git", meta.config.build_prefix) if git_exe and os.path.exists(git_dir): # We set all 'source' metavars using the FIRST source entry in meta.yaml. - git_url = meta.get_value('source/0/git_url') + git_url = meta.get_value("source/0/git_url") if os.path.exists(git_url): - if sys.platform == 'win32': + if sys.platform == "win32": git_url = utils.convert_unix_path_to_win(git_url) # If git_url is a relative path instead of a url, convert it to an abspath git_url = normpath(join(meta.path, git_url)) @@ -461,30 +527,34 @@ def meta_vars(meta, skip_build_id=False): _x = False if git_url: - _x = verify_git_repo(git_exe, - git_dir, - git_url, - meta.config.git_commits_since_tag, - meta.config.debug, - meta.get_value('source/0/git_rev', 'HEAD')) - - if _x or meta.get_value('source/0/path'): + _x = verify_git_repo( + git_exe, + git_dir, + git_url, + meta.config.git_commits_since_tag, + meta.config.debug, + meta.get_value("source/0/git_rev", "HEAD"), + ) + + if _x or meta.get_value("source/0/path"): d.update(get_git_info(git_exe, git_dir, meta.config.debug)) - elif external.find_executable('hg', meta.config.build_prefix) and os.path.exists(hg_dir): + elif external.find_executable("hg", meta.config.build_prefix) and os.path.exists( + hg_dir + ): d.update(get_hg_build_info(hg_dir)) # use `get_value` to prevent early exit while name is still unresolved during rendering - d['PKG_NAME'] = meta.get_value('package/name') - d['PKG_VERSION'] = meta.version() - d['PKG_BUILDNUM'] = str(meta.build_number()) + d["PKG_NAME"] = meta.get_value("package/name") + d["PKG_VERSION"] = meta.version() + d["PKG_BUILDNUM"] = str(meta.build_number()) if meta.final and not skip_build_id: - d['PKG_BUILD_STRING'] = str(meta.build_id()) - d['PKG_HASH'] = meta.hash_dependencies() + d["PKG_BUILD_STRING"] = str(meta.build_id()) + d["PKG_HASH"] = meta.hash_dependencies() else: - d['PKG_BUILD_STRING'] = 'placeholder' - d['PKG_HASH'] = '1234567' - d['RECIPE_DIR'] = meta.path + d["PKG_BUILD_STRING"] = "placeholder" + d["PKG_HASH"] = "1234567" + d["RECIPE_DIR"] = meta.path return d @@ -493,9 +563,10 @@ def get_cpu_count(): if sys.platform == "darwin": # multiprocessing.cpu_count() is not reliable on OSX # See issue #645 on github.com/conda/conda-build - out, _ = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True, - stdout=subprocess.PIPE).communicate() - return out.decode('utf-8').strip() + out, _ = subprocess.Popen( + "sysctl -n hw.logicalcpu", shell=True, stdout=subprocess.PIPE + ).communicate() + return out.decode("utf-8").strip() else: try: return str(multiprocessing.cpu_count()) @@ -505,13 +576,13 @@ def get_cpu_count(): def get_shlib_ext(host_platform): # Return the shared library extension. 
- if host_platform.startswith('win'): - return '.dll' - elif host_platform in ['osx', 'darwin']: - return '.dylib' - elif host_platform.startswith('linux'): - return '.so' - elif host_platform == 'noarch': + if host_platform.startswith("win"): + return ".dll" + elif host_platform in ["osx", "darwin"]: + return ".dylib" + elif host_platform.startswith("linux"): + return ".so" + elif host_platform == "noarch": # noarch packages should not contain shared libraries, use the system # platform if this is requested return get_shlib_ext(sys.platform) @@ -522,89 +593,91 @@ def get_shlib_ext(host_platform): def windows_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" # We have gone for the clang values here. - win_arch = 'i386' if str(m.config.host_arch) == '32' else 'amd64' - win_msvc = '19.0.0' - library_prefix = join(prefix, 'Library') - drive, tail = m.config.host_prefix.split(':') - get_default('SCRIPTS', join(prefix, 'Scripts')) - get_default('LIBRARY_PREFIX', library_prefix) - get_default('LIBRARY_BIN', join(library_prefix, 'bin')) - get_default('LIBRARY_INC', join(library_prefix, 'include')) - get_default('LIBRARY_LIB', join(library_prefix, 'lib')) - get_default('CYGWIN_PREFIX', ''.join(('/cygdrive/', drive.lower(), tail.replace('\\', '/')))) + win_arch = "i386" if str(m.config.host_arch) == "32" else "amd64" + win_msvc = "19.0.0" + library_prefix = join(prefix, "Library") + drive, tail = m.config.host_prefix.split(":") + get_default("SCRIPTS", join(prefix, "Scripts")) + get_default("LIBRARY_PREFIX", library_prefix) + get_default("LIBRARY_BIN", join(library_prefix, "bin")) + get_default("LIBRARY_INC", join(library_prefix, "include")) + get_default("LIBRARY_LIB", join(library_prefix, "lib")) + get_default( + "CYGWIN_PREFIX", "".join(("/cygdrive/", drive.lower(), tail.replace("\\", "/"))) + ) # see https://en.wikipedia.org/wiki/Environment_variable#Default_values - get_default('ALLUSERSPROFILE') - get_default('APPDATA') - get_default('CommonProgramFiles') - get_default('CommonProgramFiles(x86)') - get_default('CommonProgramW6432') - get_default('COMPUTERNAME') - get_default('ComSpec') - get_default('HOMEDRIVE') - get_default('HOMEPATH') - get_default('LOCALAPPDATA') - get_default('LOGONSERVER') - get_default('NUMBER_OF_PROCESSORS') - get_default('PATHEXT') - get_default('ProgramData') - get_default('ProgramFiles') - get_default('ProgramFiles(x86)') - get_default('ProgramW6432') - get_default('PROMPT') - get_default('PSModulePath') - get_default('PUBLIC') - get_default('SystemDrive') - get_default('SystemRoot') - get_default('TEMP') - get_default('TMP') - get_default('USERDOMAIN') - get_default('USERNAME') - get_default('USERPROFILE') - get_default('windir') + get_default("ALLUSERSPROFILE") + get_default("APPDATA") + get_default("CommonProgramFiles") + get_default("CommonProgramFiles(x86)") + get_default("CommonProgramW6432") + get_default("COMPUTERNAME") + get_default("ComSpec") + get_default("HOMEDRIVE") + get_default("HOMEPATH") + get_default("LOCALAPPDATA") + get_default("LOGONSERVER") + get_default("NUMBER_OF_PROCESSORS") + get_default("PATHEXT") + get_default("ProgramData") + get_default("ProgramFiles") + get_default("ProgramFiles(x86)") + get_default("ProgramW6432") + get_default("PROMPT") + get_default("PSModulePath") + get_default("PUBLIC") + get_default("SystemDrive") + get_default("SystemRoot") + get_default("TEMP") + get_default("TMP") + get_default("USERDOMAIN") + get_default("USERNAME") + get_default("USERPROFILE") + 
get_default("windir") # CPU data, see https://github.com/conda/conda-build/issues/2064 - get_default('PROCESSOR_ARCHITEW6432') - get_default('PROCESSOR_ARCHITECTURE') - get_default('PROCESSOR_IDENTIFIER') - get_default('BUILD', win_arch + '-pc-windows-' + win_msvc) + get_default("PROCESSOR_ARCHITEW6432") + get_default("PROCESSOR_ARCHITECTURE") + get_default("PROCESSOR_IDENTIFIER") + get_default("BUILD", win_arch + "-pc-windows-" + win_msvc) for k in os.environ.keys(): - if re.match('VS[0-9]{2,3}COMNTOOLS', k): + if re.match("VS[0-9]{2,3}COMNTOOLS", k): get_default(k) - elif re.match('VS[0-9]{4}INSTALLDIR', k): + elif re.match("VS[0-9]{4}INSTALLDIR", k): get_default(k) def unix_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - get_default('HOME', 'UNKNOWN') - get_default('PKG_CONFIG_PATH', join(prefix, 'lib', 'pkgconfig')) - get_default('CMAKE_GENERATOR', 'Unix Makefiles') - get_default('SSL_CERT_FILE') + get_default("HOME", "UNKNOWN") + get_default("PKG_CONFIG_PATH", join(prefix, "lib", "pkgconfig")) + get_default("CMAKE_GENERATOR", "Unix Makefiles") + get_default("SSL_CERT_FILE") def osx_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - if str(m.config.host_arch) == '32': - OSX_ARCH = 'i386' + if str(m.config.host_arch) == "32": + OSX_ARCH = "i386" MACOSX_DEPLOYMENT_TARGET = 10.9 - elif str(m.config.host_arch) == 'arm64': - OSX_ARCH = 'arm64' + elif str(m.config.host_arch) == "arm64": + OSX_ARCH = "arm64" MACOSX_DEPLOYMENT_TARGET = 11.0 else: - OSX_ARCH = 'x86_64' + OSX_ARCH = "x86_64" MACOSX_DEPLOYMENT_TARGET = 10.9 - if str(m.config.arch) == '32': - BUILD = 'i386-apple-darwin13.4.0' - elif str(m.config.arch) == 'arm64': - BUILD = 'arm64-apple-darwin20.0.0' + if str(m.config.arch) == "32": + BUILD = "i386-apple-darwin13.4.0" + elif str(m.config.arch) == "arm64": + BUILD = "arm64-apple-darwin20.0.0" else: - BUILD = 'x86_64-apple-darwin13.4.0' + BUILD = "x86_64-apple-darwin13.4.0" # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. # rpath = ' -Wl,-rpath,%(PREFIX)s/lib' % d # SIP workaround, DYLD_* no longer works. # d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d - get_default('OSX_ARCH', OSX_ARCH) - get_default('MACOSX_DEPLOYMENT_TARGET', MACOSX_DEPLOYMENT_TARGET) - get_default('BUILD', BUILD) + get_default("OSX_ARCH", OSX_ARCH) + get_default("MACOSX_DEPLOYMENT_TARGET", MACOSX_DEPLOYMENT_TARGET) + get_default("BUILD", BUILD) @lru_cache(maxsize=None) @@ -618,32 +691,35 @@ def linux_vars(m, get_default, prefix): build_arch = platform_machine # Python reports x86_64 when running a i686 Python binary on a 64-bit CPU # unless run through linux32. Issue a warning when we detect this. - if build_arch == 'x86_64' and platform_architecture[0] == '32bit': + if build_arch == "x86_64" and platform_architecture[0] == "32bit": print("Warning: You are running 32-bit Python on a 64-bit linux installation") print(" but have not launched it via linux32. Various qeuries *will*") print(" give unexpected results (uname -m, platform.machine() etc)") - build_arch = 'i686' + build_arch = "i686" # the GNU triplet is powerpc, not ppc. This matters. 
- if build_arch.startswith('ppc'): - build_arch = build_arch.replace('ppc', 'powerpc') - if build_arch.startswith('powerpc') or build_arch.startswith('aarch64') \ - or build_arch.startswith('s390x'): - build_distro = 'cos7' + if build_arch.startswith("ppc"): + build_arch = build_arch.replace("ppc", "powerpc") + if ( + build_arch.startswith("powerpc") + or build_arch.startswith("aarch64") + or build_arch.startswith("s390x") + ): + build_distro = "cos7" else: - build_distro = 'cos6' + build_distro = "cos6" # There is also QEMU_SET_ENV, but that needs to be # filtered so it only contains the result of `linux_vars` # which, before this change was empty, and after it only # contains other QEMU env vars. - get_default('CFLAGS') - get_default('CXXFLAGS') - get_default('LDFLAGS') - get_default('QEMU_LD_PREFIX') - get_default('QEMU_UNAME') - get_default('DEJAGNU') - get_default('DISPLAY') - get_default('LD_RUN_PATH', prefix + '/lib') - get_default('BUILD', build_arch + '-conda_' + build_distro + '-linux-gnu') + get_default("CFLAGS") + get_default("CXXFLAGS") + get_default("LDFLAGS") + get_default("QEMU_LD_PREFIX") + get_default("QEMU_UNAME") + get_default("DEJAGNU") + get_default("DISPLAY") + get_default("LD_RUN_PATH", prefix + "/lib") + get_default("BUILD", build_arch + "-conda_" + build_distro + "-linux-gnu") def set_from_os_or_variant(out_dict, key, variant, default): @@ -666,19 +742,21 @@ def system_vars(env_dict, m, prefix): def os_vars(m, prefix): d = dict() # note the dictionary is passed in here - variables are set in that dict if they are non-null - get_default = lambda key, default='': set_from_os_or_variant(d, key, m.config.variant, default) + get_default = lambda key, default="": set_from_os_or_variant( + d, key, m.config.variant, default + ) - get_default('CPU_COUNT', get_cpu_count()) - get_default('LANG') - get_default('LC_ALL') - get_default('MAKEFLAGS') - d['SHLIB_EXT'] = get_shlib_ext(m.config.host_platform) - d['PATH'] = os.environ.copy()['PATH'] + get_default("CPU_COUNT", get_cpu_count()) + get_default("LANG") + get_default("LC_ALL") + get_default("MAKEFLAGS") + d["SHLIB_EXT"] = get_shlib_ext(m.config.host_platform) + d["PATH"] = os.environ.copy()["PATH"] if not m.config.activate: d = prepend_bin_path(d, m.config.host_prefix) - if sys.platform == 'win32': + if sys.platform == "win32": windows_vars(m, get_default, prefix) else: unix_vars(m, get_default, prefix) @@ -711,7 +789,7 @@ def _load_all_json(path): root, _, files = next(utils.walk(path)) result = {} for f in files: - if f.endswith('.json'): + if f.endswith(".json"): result[f] = _load_json(join(root, f)) return result @@ -725,11 +803,11 @@ def __init__(self, path): created. 
""" self.path = path - self._meta = join(path, 'conda-meta') + self._meta = join(path, "conda-meta") if os.path.isdir(path) and os.path.isdir(self._meta): self._packages = {} else: - raise InvalidEnvironment(f'Unable to load environment {path}') + raise InvalidEnvironment(f"Unable to load environment {path}") def _read_package_json(self): if not self._packages: @@ -743,8 +821,8 @@ def package_specs(self): json_objs = self._packages.values() specs = [] for i in json_objs: - p, v, b = i['name'], i['version'], i['build'] - specs.append(f'{p} {v} {b}') + p, v, b = i["name"], i["version"], i["build"] + specs.append(f"{p} {v} {b}") return specs @@ -752,10 +830,22 @@ def package_specs(self): last_index_ts = 0 -def get_install_actions(prefix, specs, env, retries=0, subdir=None, - verbose=True, debug=False, locking=True, - bldpkgs_dirs=None, timeout=900, disable_pip=False, - max_env_retry=3, output_folder=None, channel_urls=None): +def get_install_actions( + prefix, + specs, + env, + retries=0, + subdir=None, + verbose=True, + debug=False, + locking=True, + bldpkgs_dirs=None, + timeout=900, + disable_pip=False, + max_env_retry=3, + output_folder=None, + channel_urls=None, +): global cached_actions global last_index_ts actions = {} @@ -772,20 +862,34 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, capture = utils.capture for feature, value in feature_list: if value: - specs.append('%s@' % feature) + specs.append("%s@" % feature) bldpkgs_dirs = ensure_list(bldpkgs_dirs) - index, index_ts, _ = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder, - channel_urls=channel_urls, debug=debug, verbose=verbose, - locking=locking, timeout=timeout) - specs = tuple(utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith('@')) + index, index_ts, _ = get_build_index( + subdir, + list(bldpkgs_dirs)[0], + output_folder=output_folder, + channel_urls=channel_urls, + debug=debug, + verbose=verbose, + locking=locking, + timeout=timeout, + ) + specs = tuple( + utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@") + ) - if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and - last_index_ts >= index_ts): + if ( + specs, + env, + subdir, + channel_urls, + disable_pip, + ) in cached_actions and last_index_ts >= index_ts: actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() if "PREFIX" in actions: - actions['PREFIX'] = prefix + actions["PREFIX"] = prefix elif specs: # this is hiding output like: # Fetching package metadata ........... @@ -796,60 +900,101 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, actions = install_actions(prefix, index, specs, force=True) except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, AssertionError, BuildLockError) as exc: - if 'lock' in str(exc): - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc) or - isinstance(exc, AssertionError)): - locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout) + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + AssertionError, + BuildLockError, + ) as exc: + if "lock" in str(exc): + log.warn( + "failed to get install actions, retrying. 
exception was: %s", + str(exc), + ) + elif ( + "requires a minimum conda version" in str(exc) + or "link a source that does not" in str(exc) + or isinstance(exc, AssertionError) + ): + locks = utils.get_conda_operation_locks( + locking, bldpkgs_dirs, timeout + ) with utils.try_acquire_locks(locks, timeout=timeout): pkg_dir = str(exc) folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + while ( + os.path.dirname(pkg_dir) not in pkgs_dirs + and folder < 20 + ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retries < max_env_retry: - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - actions = get_install_actions(prefix, tuple(specs), env, - retries=retries + 1, - subdir=subdir, - verbose=verbose, - debug=debug, - locking=locking, - bldpkgs_dirs=tuple(bldpkgs_dirs), - timeout=timeout, - disable_pip=disable_pip, - max_env_retry=max_env_retry, - output_folder=output_folder, - channel_urls=tuple(channel_urls)) + log.warn( + "failed to get install actions, retrying. exception was: %s", + str(exc), + ) + actions = get_install_actions( + prefix, + tuple(specs), + env, + retries=retries + 1, + subdir=subdir, + verbose=verbose, + debug=debug, + locking=locking, + bldpkgs_dirs=tuple(bldpkgs_dirs), + timeout=timeout, + disable_pip=disable_pip, + max_env_retry=max_env_retry, + output_folder=output_folder, + channel_urls=tuple(channel_urls), + ) else: - log.error("Failed to get install actions, max retries exceeded.") + log.error( + "Failed to get install actions, max retries exceeded." + ) raise if disable_pip: - for pkg in ('pip', 'setuptools', 'wheel'): + for pkg in ("pip", "setuptools", "wheel"): # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified - if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs): - actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg] + if not any( + re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + ): + actions["LINK"] = [ + spec for spec in actions["LINK"] if spec.name != pkg + ] utils.trim_empty_keys(actions) cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() last_index_ts = index_ts return actions -def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0, - locks=None, is_cross=False, is_conda=False): - ''' +def create_env( + prefix, + specs_or_actions, + env, + config, + subdir, + clear_cache=True, + retry=0, + locks=None, + is_cross=False, + is_conda=False, +): + """ Create a conda envrionment for the given prefix and specs. 
- ''' + """ if config.debug: external_logger_context = utils.LoggingContext(logging.DEBUG) else: @@ -874,85 +1019,141 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, try: with utils.try_acquire_locks(locks, timeout=config.timeout): # input is a list - it's specs in MatchSpec format - if not hasattr(specs_or_actions, 'keys'): + if not hasattr(specs_or_actions, "keys"): specs = list(set(specs_or_actions)) - actions = get_install_actions(prefix, tuple(specs), env, - subdir=subdir, - verbose=config.verbose, - debug=config.debug, - locking=config.locking, - bldpkgs_dirs=tuple(config.bldpkgs_dirs), - timeout=config.timeout, - disable_pip=config.disable_pip, - max_env_retry=config.max_env_retry, - output_folder=config.output_folder, - channel_urls=tuple(config.channel_urls)) + actions = get_install_actions( + prefix, + tuple(specs), + env, + subdir=subdir, + verbose=config.verbose, + debug=config.debug, + locking=config.locking, + bldpkgs_dirs=tuple(config.bldpkgs_dirs), + timeout=config.timeout, + disable_pip=config.disable_pip, + max_env_retry=config.max_env_retry, + output_folder=config.output_folder, + channel_urls=tuple(config.channel_urls), + ) else: actions = specs_or_actions - index, _, _ = get_build_index(subdir=subdir, - bldpkgs_dir=config.bldpkgs_dir, - output_folder=config.output_folder, - channel_urls=config.channel_urls, - debug=config.debug, - verbose=config.verbose, - locking=config.locking, - timeout=config.timeout) + index, _, _ = get_build_index( + subdir=subdir, + bldpkgs_dir=config.bldpkgs_dir, + output_folder=config.output_folder, + channel_urls=config.channel_urls, + debug=config.debug, + verbose=config.verbose, + locking=config.locking, + timeout=config.timeout, + ) utils.trim_empty_keys(actions) display_actions(actions, index) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) - with env_var('CONDA_QUIET', not config.verbose, reset_context): - with env_var('CONDA_JSON', not config.verbose, reset_context): + with env_var("CONDA_QUIET", not config.verbose, reset_context): + with env_var("CONDA_JSON", not config.verbose, reset_context): execute_actions(actions, index) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, BuildLockError) as exc: - if (("too short in" in str(exc) or - re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or - isinstance(exc, PaddingError)) and - config.prefix_length > 80): + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + BuildLockError, + ) as exc: + if ( + "too short in" in str(exc) + or re.search( + "post-link failed for: (?:[a-zA-Z]*::)?openssl", str(exc) + ) + or isinstance(exc, PaddingError) + ) and config.prefix_length > 80: if config.prefix_length_fallback: - log.warn("Build prefix failed with prefix length %d", - config.prefix_length) + log.warn( + "Build prefix failed with prefix length %d", + config.prefix_length, + ) log.warn("Error was: ") log.warn(str(exc)) - log.warn("One or more of your package dependencies needs to be rebuilt " - "with a longer prefix length.") - log.warn("Falling back to legacy prefix length of 80 characters.") - log.warn("Your package will not install into prefixes > 80 characters.") + log.warn( + "One or more of your package dependencies needs to be rebuilt " + "with a longer prefix length." + ) + log.warn( + "Falling back to legacy prefix length of 80 characters." + ) + log.warn( + "Your package will not install into prefixes > 80 characters." 
+ ) config.prefix_length = 80 - host = '_h_env' in prefix + host = "_h_env" in prefix # Set this here and use to create environ # Setting this here is important because we use it below (symlink) prefix = config.host_prefix if host else config.build_prefix - actions['PREFIX'] = prefix - - create_env(prefix, actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, is_cross=is_cross) + actions["PREFIX"] = prefix + + create_env( + prefix, + actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + is_cross=is_cross, + ) else: raise - elif 'lock' in str(exc): + elif "lock" in str(exc): if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc)): + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) + elif "requires a minimum conda version" in str( + exc + ) or "link a source that does not" in str(exc): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = str(exc) folder = 0 while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -960,18 +1161,37 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, raise # HACK: some of the time, conda screws up somehow and incomplete packages result. # Just retry. - except (AssertionError, OSError, ValueError, RuntimeError, LockError) as exc: + except ( + AssertionError, + OSError, + ValueError, + RuntimeError, + LockError, + ) as exc: if isinstance(exc, AssertionError): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = os.path.dirname(os.path.dirname(str(exc))) - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. 
exception was: %s", str(exc) + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -989,7 +1209,7 @@ def remove_existing_packages(dirs, fns, config): for fn in fns: all_files = [fn] if not os.path.isabs(fn): - all_files = glob(os.path.join(folder, fn + '*')) + all_files = glob(os.path.join(folder, fn + "*")) for entry in all_files: utils.rm_rf(entry) @@ -1005,8 +1225,8 @@ def clean_pkg_cache(dist, config): locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) with utils.try_acquire_locks(locks, timeout=config.timeout): rmplan = [ - 'RM_EXTRACTED {0} local::{0}'.format(dist), - 'RM_FETCHED {0} local::{0}'.format(dist), + "RM_EXTRACTED {0} local::{0}".format(dist), + "RM_FETCHED {0} local::{0}".format(dist), ] execute_plan(rmplan) @@ -1014,12 +1234,18 @@ def clean_pkg_cache(dist, config): # Conda's cleanup is still necessary - it keeps track of its own in-memory # list of downloaded things. for folder in pkgs_dirs: - if (os.path.exists(os.path.join(folder, dist)) or - os.path.exists(os.path.join(folder, dist + '.tar.bz2')) or - any(pkg_id in package_cache() for pkg_id in [dist, 'local::' + dist])): + if ( + os.path.exists(os.path.join(folder, dist)) + or os.path.exists(os.path.join(folder, dist + ".tar.bz2")) + or any( + pkg_id in package_cache() for pkg_id in [dist, "local::" + dist] + ) + ): log = utils.get_logger(__name__) - log.debug("Conda caching error: %s package remains in cache after removal", - dist) + log.debug( + "Conda caching error: %s package remains in cache after removal", + dist, + ) log.debug("manually removing to compensate") cache = package_cache() keys = [key for key in cache.keys() if dist in key] @@ -1033,18 +1259,23 @@ def clean_pkg_cache(dist, config): def get_pinned_deps(m, section): - with TemporaryDirectory(prefix='_') as tmpdir: - actions = get_install_actions(tmpdir, - tuple(m.ms_depends(section)), section, - subdir=m.config.target_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - runtime_deps = [' '.join(link.dist_name.rsplit('-', 2)) for link in actions.get('LINK', [])] + with TemporaryDirectory(prefix="_") as tmpdir: + actions = get_install_actions( + tmpdir, + tuple(m.ms_depends(section)), + section, + subdir=m.config.target_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + runtime_deps = [ + " ".join(link.dist_name.rsplit("-", 2)) for link in actions.get("LINK", []) + ] return runtime_deps diff --git a/conda_build/utils.py b/conda_build/utils.py index ded2ac728a..5c8affae92 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -13,8 +13,18 @@ import logging.config import mmap import os -from os.path import (dirname, getmtime, getsize, isdir, join, isfile, abspath, islink, - expanduser, expandvars) +from os.path import ( + dirname, + getmtime, + getsize, + isdir, + join, + isfile, + abspath, + islink, + 
expanduser, + expandvars, +) import re import stat import subprocess @@ -37,26 +47,44 @@ import conda_package_handling.api try: - from conda.base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 + from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + ) except Exception: - from conda.base.constants import CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1 + from conda.base.constants import ( + CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1, + ) + CONDA_PACKAGE_EXTENSION_V2 = ".conda" CONDA_PACKAGE_EXTENSIONS = (CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) -from conda.api import PackageCacheData # noqa +from conda.api import PackageCacheData # noqa + +from .conda_interface import ( + hashsum_file, + md5_file, + unix_path_to_win, + win_path_to_unix, +) # noqa +from .conda_interface import root_dir, pkgs_dirs # noqa +from .conda_interface import StringIO # noqa +from .conda_interface import VersionOrder, MatchSpec # noqa +from .conda_interface import cc_conda_build # noqa +from .conda_interface import Dist # noqa +from .conda_interface import context # noqa +from .conda_interface import ( + download, + TemporaryDirectory, + get_conda_channel, + CondaHTTPError, +) # noqa -from .conda_interface import hashsum_file, md5_file, unix_path_to_win, win_path_to_unix # noqa -from .conda_interface import root_dir, pkgs_dirs # noqa -from .conda_interface import StringIO # noqa -from .conda_interface import VersionOrder, MatchSpec # noqa -from .conda_interface import cc_conda_build # noqa -from .conda_interface import Dist # noqa -from .conda_interface import context # noqa -from .conda_interface import download, TemporaryDirectory, get_conda_channel, CondaHTTPError # noqa # NOQA because it is not used in this file. -from conda_build.conda_interface import rm_rf as _rm_rf # noqa -from conda_build.exceptions import BuildLockError # noqa -from conda_build.os_utils import external # noqa +from conda_build.conda_interface import rm_rf as _rm_rf # noqa +from conda_build.exceptions import BuildLockError # noqa +from conda_build.os_utils import external # noqa import urllib.parse as urlparse import urllib.request as urllib @@ -71,14 +99,15 @@ def glob(pathname, recursive=True): # NOQA because it is not used in this file. 
from contextlib import ExitStack # NOQA + PermissionError = PermissionError # NOQA FileNotFoundError = FileNotFoundError -on_win = (sys.platform == 'win32') +on_win = sys.platform == "win32" -codec = getpreferredencoding() or 'utf-8' +codec = getpreferredencoding() or "utf-8" on_win = sys.platform == "win32" -root_script_dir = os.path.join(root_dir, 'Scripts' if on_win else 'bin') +root_script_dir = os.path.join(root_dir, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE @@ -156,19 +185,20 @@ def directory_size_slow(path): def directory_size(path): - ''' - ''' + """ """ try: if on_win: command = 'dir /s "{}"' # Windows path can have spaces out = subprocess.check_output(command.format(path), shell=True) else: command = "du -s {}" - out = subprocess.check_output(command.format(path).split(), stderr=subprocess.PIPE) + out = subprocess.check_output( + command.format(path).split(), stderr=subprocess.PIPE + ) - if hasattr(out, 'decode'): + if hasattr(out, "decode"): try: - out = out.decode(errors='ignore') + out = out.decode(errors="ignore") # This isn't important anyway so give up. Don't try search on bytes. except (UnicodeDecodeError, IndexError): if on_win: @@ -177,10 +207,10 @@ def directory_size(path): pass if on_win: # Windows can give long output, we need only 2nd to last line - out = out.strip().rsplit('\r\n', 2)[-2] + out = out.strip().rsplit("\r\n", 2)[-2] pattern = r"\s([\d\W]+).+" # Language and punctuation neutral out = re.search(pattern, out.strip()).group(1).strip() - out = out.replace(',', '').replace('.', '').replace(' ', '') + out = out.replace(",", "").replace(".", "").replace(" ", "") else: out = out.split()[0] except subprocess.CalledProcessError: @@ -215,11 +245,11 @@ def _setup_rewrite_pipe(env): replacements[v] = k r_fd, w_fd = os.pipe() - r = os.fdopen(r_fd, 'rt') - if sys.platform == 'win32': - replacement_t = '%{}%' + r = os.fdopen(r_fd, "rt") + if sys.platform == "win32": + replacement_t = "%{}%" else: - replacement_t = '${}' + replacement_t = "${}" def rewriter(): while True: @@ -236,7 +266,7 @@ def rewriter(): except UnicodeDecodeError: try: txt = os.read(r, 10000) - sys.stdout.write(txt or '') + sys.stdout.write(txt or "") except TypeError: pass @@ -264,19 +294,26 @@ def __init__(self, *args, **kwargs): def _execute(self, *args, **kwargs): try: import psutil - psutil_exceptions = psutil.NoSuchProcess, psutil.AccessDenied, psutil.NoSuchProcess + + psutil_exceptions = ( + psutil.NoSuchProcess, + psutil.AccessDenied, + psutil.NoSuchProcess, + ) except ImportError as e: psutil = None psutil_exceptions = (OSError, ValueError) log = get_logger(__name__) log.warn(f"psutil import failed. Error was {e}") - log.warn("only disk usage and time statistics will be available. Install psutil to " - "get CPU time and memory usage statistics.") + log.warn( + "only disk usage and time statistics will be available. Install psutil to " + "get CPU time and memory usage statistics." 
+ ) # The polling interval (in seconds) - time_int = kwargs.pop('time_int', 2) + time_int = kwargs.pop("time_int", 2) - disk_usage_dir = kwargs.get('cwd', sys.prefix) + disk_usage_dir = kwargs.get("cwd", sys.prefix) # Create a process of this (the parent) process parent = psutil.Process(os.getpid()) if psutil else DummyPsutilProcess() @@ -285,7 +322,11 @@ def _execute(self, *args, **kwargs): # Using the convenience Popen class provided by psutil start_time = time.time() - _popen = psutil.Popen(*args, **kwargs) if psutil else subprocess.Popen(*args, **kwargs) + _popen = ( + psutil.Popen(*args, **kwargs) + if psutil + else subprocess.Popen(*args, **kwargs) + ) try: while self.returncode is None: # We need to get all of the children of our process since our @@ -306,8 +347,8 @@ def _execute(self, *args, **kwargs): # we are instead looping over children and getting each individually. # https://psutil.readthedocs.io/en/latest/#psutil.Process.cpu_times cpu_stats = child.cpu_times() - child_cpu_usage['sys'] = cpu_stats.system - child_cpu_usage['user'] = cpu_stats.user + child_cpu_usage["sys"] = cpu_stats.system + child_cpu_usage["user"] = cpu_stats.user cpu_usage[child.pid] = child_cpu_usage except psutil_exceptions: # process already died. Just ignore it. @@ -317,8 +358,8 @@ def _execute(self, *args, **kwargs): # Sum the memory usage of all the children together (2D columnwise sum) self.rss = max(rss, self.rss) self.vms = max(vms, self.vms) - self.cpu_sys = sum(child['sys'] for child in cpu_usage.values()) - self.cpu_user = sum(child['user'] for child in cpu_usage.values()) + self.cpu_sys = sum(child["sys"] for child in cpu_usage.values()) + self.cpu_user = sum(child["user"] for child in cpu_usage.values()) self.processes = max(processes, self.processes) # Get disk usage @@ -337,74 +378,83 @@ def _execute(self, *args, **kwargs): return _popen.stdout, _popen.stderr def __repr__(self): - return str({'elapsed': self.elapsed, - 'rss': self.rss, - 'vms': self.vms, - 'disk': self.disk, - 'processes': self.processes, - 'cpu_user': self.cpu_user, - 'cpu_sys': self.cpu_sys, - 'returncode': self.returncode}) + return str( + { + "elapsed": self.elapsed, + "rss": self.rss, + "vms": self.vms, + "disk": self.disk, + "processes": self.processes, + "cpu_user": self.cpu_user, + "cpu_sys": self.cpu_sys, + "returncode": self.returncode, + } + ) def _func_defaulting_env_to_os_environ(func, *popenargs, **kwargs): - if 'env' not in kwargs: + if "env" not in kwargs: kwargs = kwargs.copy() env_copy = os.environ.copy() - kwargs.update({'env': env_copy}) - kwargs['env'] = {str(key): str(value) for key, value in kwargs['env'].items()} + kwargs.update({"env": env_copy}) + kwargs["env"] = {str(key): str(value) for key, value in kwargs["env"].items()} _args = [] - if 'stdin' not in kwargs: - kwargs['stdin'] = subprocess.PIPE + if "stdin" not in kwargs: + kwargs["stdin"] = subprocess.PIPE for arg in popenargs: # arguments to subprocess need to be bytestrings - if sys.version_info.major < 3 and hasattr(arg, 'encode'): + if sys.version_info.major < 3 and hasattr(arg, "encode"): arg = arg.encode(codec) - elif sys.version_info.major >= 3 and hasattr(arg, 'decode'): + elif sys.version_info.major >= 3 and hasattr(arg, "decode"): arg = arg.decode(codec) _args.append(str(arg)) - stats = kwargs.get('stats') - if 'stats' in kwargs: - del kwargs['stats'] + stats = kwargs.get("stats") + if "stats" in kwargs: + del kwargs["stats"] - rewrite_stdout_env = kwargs.pop('rewrite_stdout_env', None) + rewrite_stdout_env = 
kwargs.pop("rewrite_stdout_env", None) if rewrite_stdout_env: - kwargs['stdout'] = _setup_rewrite_pipe(rewrite_stdout_env) + kwargs["stdout"] = _setup_rewrite_pipe(rewrite_stdout_env) out = None if stats is not None: proc = PopenWrapper(_args, **kwargs) - if func == 'output': + if func == "output": out = proc.out.read() if proc.returncode != 0: raise subprocess.CalledProcessError(proc.returncode, _args) - stats.update({'elapsed': proc.elapsed, - 'disk': proc.disk, - 'processes': proc.processes, - 'cpu_user': proc.cpu_user, - 'cpu_sys': proc.cpu_sys, - 'rss': proc.rss, - 'vms': proc.vms}) + stats.update( + { + "elapsed": proc.elapsed, + "disk": proc.disk, + "processes": proc.processes, + "cpu_user": proc.cpu_user, + "cpu_sys": proc.cpu_sys, + "rss": proc.rss, + "vms": proc.vms, + } + ) else: - if func == 'call': + if func == "call": subprocess.check_call(_args, **kwargs) else: - if 'stdout' in kwargs: - del kwargs['stdout'] + if "stdout" in kwargs: + del kwargs["stdout"] out = subprocess.check_output(_args, **kwargs) return out def check_call_env(popenargs, **kwargs): - return _func_defaulting_env_to_os_environ('call', *popenargs, **kwargs) + return _func_defaulting_env_to_os_environ("call", *popenargs, **kwargs) def check_output_env(popenargs, **kwargs): - return _func_defaulting_env_to_os_environ('output', stdout=subprocess.PIPE, - *popenargs, **kwargs).rstrip() + return _func_defaulting_env_to_os_environ( + "output", stdout=subprocess.PIPE, *popenargs, **kwargs + ).rstrip() def bytes2human(n): @@ -413,14 +463,14 @@ def bytes2human(n): # '9.8K' # >>> bytes2human(100001221) # '95.4M' - symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y") prefix = {} for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] - return f'{value:.1f}{s}' + return f"{value:.1f}{s}" return "%sB" % n @@ -438,17 +488,20 @@ def get_recipe_abspath(recipe): and needs cleanup. """ if isfile(recipe): - if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): + if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith( + CONDA_PACKAGE_EXTENSIONS + ): recipe_dir = tempfile.mkdtemp() if recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): import conda_package_handling.api + conda_package_handling.api.extract(recipe, recipe_dir) else: tar_xf(recipe, recipe_dir) # At some stage the old build system started to tar up recipes. - recipe_tarfile = os.path.join(recipe_dir, 'info', 'recipe.tar') + recipe_tarfile = os.path.join(recipe_dir, "info", "recipe.tar") if isfile(recipe_tarfile): - tar_xf(recipe_tarfile, os.path.join(recipe_dir, 'info')) + tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: print("Ignoring non-recipe: %s" % recipe) @@ -471,7 +524,7 @@ def try_acquire_locks(locks, timeout): http://stackoverflow.com/questions/9814008/multiple-mutex-locking-strategies-and-why-libraries-dont-use-address-comparison """ t = time.time() - while (time.time() - t < timeout): + while time.time() - t < timeout: # Continuously try to acquire all locks. # By passing a short timeout to each individual lock, we give other # processes that might be trying to acquire the same locks (and may @@ -495,7 +548,7 @@ def try_acquire_locks(locks, timeout): # If we reach this point, we weren't able to acquire all locks within # the specified timeout. 
We shouldn't be holding any locks anymore at # this point, so we just raise an exception. - raise BuildLockError('Failed to acquire all locks') + raise BuildLockError("Failed to acquire all locks") try: yield @@ -518,8 +571,12 @@ def _copy_with_shell_fallback(src, dst): continue if not is_copied: try: - subprocess.check_call(f'cp -a {src} {dst}', shell=True, - stderr=subprocess.PIPE, stdout=subprocess.PIPE) + subprocess.check_call( + f"cp -a {src} {dst}", + shell=True, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) except subprocess.CalledProcessError as e: if not os.path.isfile(dst): raise OSError(f"Failed to copy {src} to {dst}. Error was: {e}") @@ -534,7 +591,9 @@ def get_prefix_replacement_paths(src, dst): return os.path.join(*ssplit), os.path.join(*dsplit) -def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False): +def copy_into( + src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False +): """Copy all the files and directories in src to the directory dst""" log = get_logger(__name__) if symlinks and islink(src): @@ -555,7 +614,15 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl except: pass # lchmod not available elif isdir(src): - merge_tree(src, dst, symlinks, timeout=timeout, lock=lock, locking=locking, clobber=clobber) + merge_tree( + src, + dst, + symlinks, + timeout=timeout, + lock=lock, + locking=locking, + clobber=clobber, + ) else: if isdir(dst): @@ -574,7 +641,7 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl src_folder = os.getcwd() if os.path.islink(src) and not os.path.exists(os.path.realpath(src)): - log.warn('path %s is a broken symlink - ignoring copy', src) + log.warn("path %s is a broken symlink - ignoring copy", src) return if not lock and locking: @@ -591,8 +658,9 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl try: _copy_with_shell_fallback(src, dst_fn) except shutil.Error: - log.debug("skipping %s - already exists in %s", - os.path.basename(src), dst) + log.debug( + "skipping %s - already exists in %s", os.path.basename(src), dst + ) def move_with_fallback(src, dst): @@ -604,7 +672,9 @@ def move_with_fallback(src, dst): os.unlink(src) except PermissionError: log = get_logger(__name__) - log.debug(f"Failed to copy/remove path from {src} to {dst} due to permission error") + log.debug( + f"Failed to copy/remove path from {src} to {dst} due to permission error" + ) # http://stackoverflow.com/a/22331852/1170370 @@ -618,8 +688,8 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): lst = [x for x in lst if x not in excl] # do not copy lock files - if '.conda_lock' in lst: - lst.remove('.conda_lock') + if ".conda_lock" in lst: + lst.remove(".conda_lock") dst_lst = [os.path.join(dst, item) for item in lst] @@ -645,7 +715,9 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): return dst_lst -def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False): +def merge_tree( + src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False +): """ Merge src into dst recursively by copying all files from src into dst. Return a list of all files copied. 
@@ -655,17 +727,20 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c """ dst = os.path.normpath(os.path.normcase(dst)) src = os.path.normpath(os.path.normcase(src)) - assert not dst.startswith(src), ("Can't merge/copy source into subdirectory of itself. " - "Please create separate spaces for these things.\n" - " src: {}\n" - " dst: {}".format(src, dst)) + assert not dst.startswith(src), ( + "Can't merge/copy source into subdirectory of itself. " + "Please create separate spaces for these things.\n" + " src: {}\n" + " dst: {}".format(src, dst) + ) new_files = copytree(src, dst, symlinks=symlinks, dry_run=True) existing = [f for f in new_files if isfile(f)] if existing and not clobber: - raise OSError("Can't merge {} into {}: file exists: " - "{}".format(src, dst, existing[0])) + raise OSError( + "Can't merge {} into {}: file exists: " "{}".format(src, dst, existing[0]) + ) locks = [] if locking: @@ -679,8 +754,10 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c # purpose here is that we want *one* lock per location on disk. It can be locked or unlocked # at any time, but the lock within this process should all be tied to the same tracking # mechanism. -_lock_folders = (os.path.join(root_dir, 'locks'), - os.path.expanduser(os.path.join('~', '.conda_build_locks'))) +_lock_folders = ( + os.path.join(root_dir, "locks"), + os.path.expanduser(os.path.join("~", ".conda_build_locks")), +) def get_lock(folder, timeout=900): @@ -690,28 +767,30 @@ def get_lock(folder, timeout=900): except OSError: location = folder b_location = location - if hasattr(b_location, 'encode'): + if hasattr(b_location, "encode"): b_location = b_location.encode() # Hash the entire filename to avoid collisions. lock_filename = hashlib.sha256(b_location).hexdigest() - if hasattr(lock_filename, 'decode'): + if hasattr(lock_filename, "decode"): lock_filename = lock_filename.decode() for locks_dir in _lock_folders: try: if not os.path.isdir(locks_dir): os.makedirs(locks_dir) lock_file = os.path.join(locks_dir, lock_filename) - with open(lock_file, 'w') as f: + with open(lock_file, "w") as f: f.write("") fl = filelock.FileLock(lock_file, timeout) break except OSError: continue else: - raise RuntimeError("Could not write locks folder to either system location ({})" - "or user location ({}). Aborting.".format(*_lock_folders)) + raise RuntimeError( + "Could not write locks folder to either system location ({})" + "or user location ({}). 
Aborting.".format(*_lock_folders) + ) return fl @@ -728,48 +807,63 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): lock = get_lock(folder, timeout=timeout) locks.append(lock) # lock used to generally indicate a conda operation occurring - locks.append(get_lock('conda-operation', timeout=timeout)) + locks.append(get_lock("conda-operation", timeout=timeout)) return locks -def relative(f, d='lib'): - assert not f.startswith('/'), f - assert not d.startswith('/'), d - d = d.strip('/').split('/') - if d == ['.']: +def relative(f, d="lib"): + assert not f.startswith("/"), f + assert not d.startswith("/"), d + d = d.strip("/").split("/") + if d == ["."]: d = [] - f = dirname(f).split('/') - if f == ['']: + f = dirname(f).split("/") + if f == [""]: f = [] while d and f and d[0] == f[0]: d.pop(0) f.pop(0) - return '/'.join(((['..'] * len(f)) if f else ['.']) + d) + return "/".join((([".."] * len(f)) if f else ["."]) + d) # This is the lowest common denominator of the formats supported by our libarchive/python-libarchive-c # packages across all platforms -decompressible_exts = ('.7z', '.tar', '.tar.bz2', '.tar.gz', '.tar.lzma', '.tar.xz', - '.tar.z', '.tar.zst', '.tgz', '.whl', '.zip', '.rpm', '.deb') - - -def _tar_xf_fallback(tarball, dir_path, mode='r:*'): - if tarball.lower().endswith('.tar.z'): - uncompress = external.find_executable('uncompress') +decompressible_exts = ( + ".7z", + ".tar", + ".tar.bz2", + ".tar.gz", + ".tar.lzma", + ".tar.xz", + ".tar.z", + ".tar.zst", + ".tgz", + ".whl", + ".zip", + ".rpm", + ".deb", +) + + +def _tar_xf_fallback(tarball, dir_path, mode="r:*"): + if tarball.lower().endswith(".tar.z"): + uncompress = external.find_executable("uncompress") if not uncompress: - uncompress = external.find_executable('gunzip') + uncompress = external.find_executable("gunzip") if not uncompress: - sys.exit("""\ + sys.exit( + """\ uncompress (or gunzip) is required to unarchive .z source files. 
-""") - check_call_env([uncompress, '-f', tarball]) +""" + ) + check_call_env([uncompress, "-f", tarball]) tarball = tarball[:-2] t = tarfile.open(tarball, mode) members = t.getmembers() for i, member in enumerate(members, 0): if os.path.isabs(member.name): - member.name = os.path.relpath(member.name, '/') + member.name = os.path.relpath(member.name, "/") cwd = os.path.realpath(os.getcwd()) if not os.path.realpath(member.name).startswith(cwd): member.name = member.name.replace("../", "") @@ -783,6 +877,7 @@ def _tar_xf_fallback(tarball, dir_path, mode='r:*'): def tar_xf_file(tarball, entries): from conda_build.utils import ensure_list + entries = ensure_list(entries) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) @@ -814,11 +909,13 @@ def tar_xf_getnames(tarball): def tar_xf(tarball, dir_path): - flags = libarchive.extract.EXTRACT_TIME | \ - libarchive.extract.EXTRACT_PERM | \ - libarchive.extract.EXTRACT_SECURE_NODOTDOT | \ - libarchive.extract.EXTRACT_SECURE_SYMLINKS | \ - libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + flags = ( + libarchive.extract.EXTRACT_TIME + | libarchive.extract.EXTRACT_PERM + | libarchive.extract.EXTRACT_SECURE_NODOTDOT + | libarchive.extract.EXTRACT_SECURE_SYMLINKS + | libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + ) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) try: @@ -828,17 +925,21 @@ def tar_xf(tarball, dir_path): # try again, maybe we are on Windows and the archive contains symlinks # https://github.com/conda/conda-build/issues/3351 # https://github.com/libarchive/libarchive/pull/1030 - if tarball.lower().endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tar.z', '.tar.xz')): + if tarball.lower().endswith( + (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tar.z", ".tar.xz") + ): _tar_xf_fallback(tarball, dir_path) else: raise def file_info(path): - return {'size': getsize(path), - 'md5': md5_file(path), - 'sha256': hashsum_file(path, 'sha256'), - 'mtime': getmtime(path)} + return { + "size": getsize(path), + "md5": md5_file(path), + "sha256": hashsum_file(path, "sha256"), + "mtime": getmtime(path), + } def comma_join(items): @@ -854,7 +955,11 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ' and '.join(items) if len(items) <= 2 else ', '.join(items[:-1]) + ', and ' + items[-1] + return ( + " and ".join(items) + if len(items) <= 2 + else ", ".join(items[:-1]) + ", and " + items[-1] + ) def safe_print_unicode(*args, **kwargs): @@ -867,12 +972,12 @@ def safe_print_unicode(*args, **kwargs): :param end: ending character (defaults to '\n') :param errors: error handler for encoding errors (defaults to 'replace') """ - sep = kwargs.pop('sep', ' ') - end = kwargs.pop('end', '\n') - errors = kwargs.pop('errors', 'replace') + sep = kwargs.pop("sep", " ") + end = kwargs.pop("end", "\n") + errors = kwargs.pop("errors", "replace") func = sys.stdout.buffer.write line = sep.join(args) + end - encoding = sys.stdout.encoding or 'utf8' + encoding = sys.stdout.encoding or "utf8" func(line.encode(encoding, errors)) @@ -901,7 +1006,7 @@ def rec_glob(path, patterns, ignores=None): def convert_unix_path_to_win(path): - if external.find_executable('cygpath'): + if external.find_executable("cygpath"): cmd = f"cygpath -w {path}" path = subprocess.getoutput(cmd) @@ -911,7 +1016,7 @@ def convert_unix_path_to_win(path): def convert_win_path_to_unix(path): - if external.find_executable('cygpath'): + if external.find_executable("cygpath"): cmd = f"cygpath -u {path}" path = 
subprocess.getoutput(cmd) @@ -923,25 +1028,25 @@ def convert_win_path_to_unix(path): # Used for translating local paths into url (file://) paths # http://stackoverflow.com/a/14298190/1170370 def path2url(path): - return urlparse.urljoin('file:', urllib.pathname2url(path)) + return urlparse.urljoin("file:", urllib.pathname2url(path)) def get_stdlib_dir(prefix, py_ver): - if sys.platform == 'win32': - lib_dir = os.path.join(prefix, 'Lib') + if sys.platform == "win32": + lib_dir = os.path.join(prefix, "Lib") else: - lib_dir = os.path.join(prefix, 'lib') - python_folder = glob(os.path.join(lib_dir, 'python?.*')) + lib_dir = os.path.join(prefix, "lib") + python_folder = glob(os.path.join(lib_dir, "python?.*")) python_folder = sorted(filterfalse(islink, python_folder)) if python_folder: lib_dir = os.path.join(lib_dir, python_folder[0]) else: - lib_dir = os.path.join(lib_dir, f'python{py_ver}') + lib_dir = os.path.join(lib_dir, f"python{py_ver}") return lib_dir def get_site_packages(prefix, py_ver): - return os.path.join(get_stdlib_dir(prefix, py_ver), 'site-packages') + return os.path.join(get_stdlib_dir(prefix, py_ver), "site-packages") def get_build_folders(croot): @@ -950,16 +1055,22 @@ def get_build_folders(croot): def prepend_bin_path(env, prefix, prepend_prefix=False): - env['PATH'] = join(prefix, "bin") + os.pathsep + env['PATH'] + env["PATH"] = join(prefix, "bin") + os.pathsep + env["PATH"] if sys.platform == "win32": - env['PATH'] = join(prefix, "Library", "mingw-w64", "bin") + os.pathsep + \ - join(prefix, "Library", "usr", "bin") + os.pathsep + \ - join(prefix, "Library", "bin") + os.pathsep + \ - join(prefix, "Scripts") + os.pathsep + \ - env['PATH'] + env["PATH"] = ( + join(prefix, "Library", "mingw-w64", "bin") + + os.pathsep + + join(prefix, "Library", "usr", "bin") + + os.pathsep + + join(prefix, "Library", "bin") + + os.pathsep + + join(prefix, "Scripts") + + os.pathsep + + env["PATH"] + ) prepend_prefix = True # windows has Python in the prefix. Use it. if prepend_prefix: - env['PATH'] = prefix + os.pathsep + env['PATH'] + env["PATH"] = prefix + os.pathsep + env["PATH"] return env @@ -970,13 +1081,13 @@ def prepend_bin_path(env, prefix, prepend_prefix=False): def sys_path_prepended(prefix): path_backup = sys.path[:] if on_win: - sys.path.insert(1, os.path.join(prefix, 'lib', 'site-packages')) + sys.path.insert(1, os.path.join(prefix, "lib", "site-packages")) else: - lib_dir = os.path.join(prefix, 'lib') - python_dir = glob(os.path.join(lib_dir, r'python[0-9\.]*')) + lib_dir = os.path.join(prefix, "lib") + python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*")) if python_dir: python_dir = python_dir[0] - sys.path.insert(1, os.path.join(python_dir, 'site-packages')) + sys.path.insert(1, os.path.join(python_dir, "site-packages")) try: yield finally: @@ -986,17 +1097,19 @@ def sys_path_prepended(prefix): @contextlib.contextmanager def path_prepended(prefix, prepend_prefix=True): # FIXME: Unclear why prepend_prefix=True for all platforms. 
- old_path = os.environ['PATH'] - os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)['PATH'] + old_path = os.environ["PATH"] + os.environ["PATH"] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)[ + "PATH" + ] try: yield finally: - os.environ['PATH'] = old_path + os.environ["PATH"] = old_path -bin_dirname = 'Scripts' if sys.platform == 'win32' else 'bin' +bin_dirname = "Scripts" if sys.platform == "win32" else "bin" -entry_pat = re.compile(r'\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$') +entry_pat = re.compile(r"\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$") def iter_entry_points(items): @@ -1008,22 +1121,24 @@ def iter_entry_points(items): def create_entry_point(path, module, func, config): - import_name = func.split('.')[0] - pyscript = PY_TMPL % { - 'module': module, 'func': func, 'import_name': import_name} + import_name = func.split(".")[0] + pyscript = PY_TMPL % {"module": module, "func": func, "import_name": import_name} if on_win: - with open(path + '-script.py', 'w') as fo: - if os.path.isfile(os.path.join(config.host_prefix, 'python_d.exe')): - fo.write('#!python_d\n') + with open(path + "-script.py", "w") as fo: + if os.path.isfile(os.path.join(config.host_prefix, "python_d.exe")): + fo.write("#!python_d\n") fo.write(pyscript) - copy_into(join(dirname(__file__), f'cli-{str(config.host_arch)}.exe'), - path + '.exe', config.timeout) + copy_into( + join(dirname(__file__), f"cli-{str(config.host_arch)}.exe"), + path + ".exe", + config.timeout, + ) else: if os.path.islink(path): os.remove(path) - with open(path, 'w') as fo: + with open(path, "w") as fo: if not config.noarch: - fo.write('#!%s\n' % config.host_python) + fo.write("#!%s\n" % config.host_python) fo.write(pyscript) os.chmod(path, 0o775) @@ -1051,36 +1166,45 @@ def get_ext_files(start_path, pattern): def convert_path_for_cygwin_or_msys2(exe, path): "If exe is a Cygwin or MSYS2 executable then filters it through `cygpath -u`" - if sys.platform != 'win32': + if sys.platform != "win32": return path if exe not in _posix_exes_cache: with open(exe, "rb") as exe_file: exe_binary = exe_file.read() - msys2_cygwin = re.findall(b'(cygwin1.dll|msys-2.0.dll)', exe_binary) + msys2_cygwin = re.findall(b"(cygwin1.dll|msys-2.0.dll)", exe_binary) _posix_exes_cache[exe] = True if msys2_cygwin else False if _posix_exes_cache[exe]: try: - path = check_output_env(['cygpath', '-u', - path]).splitlines()[0].decode(getpreferredencoding()) + path = ( + check_output_env(["cygpath", "-u", path]) + .splitlines()[0] + .decode(getpreferredencoding()) + ) except OSError: log = get_logger(__name__) - log.debug('cygpath executable not found. Passing native path. This is OK for msys2.') + log.debug( + "cygpath executable not found. Passing native path. This is OK for msys2." + ) return path def get_skip_message(m): - return ("Skipped: {} from {} defines build/skip for this configuration ({}).".format( - m.name(), m.path, - {k: m.config.variant[k] for k in m.get_used_vars()})) + return "Skipped: {} from {} defines build/skip for this configuration ({}).".format( + m.name(), m.path, {k: m.config.variant[k] for k in m.get_used_vars()} + ) -def package_has_file(package_path, file_path, refresh_mode='modified'): +def package_has_file(package_path, file_path, refresh_mode="modified"): # This version does nothing to the package cache. 
with TemporaryDirectory() as td: - if file_path.startswith('info'): - conda_package_handling.api.extract(package_path, dest_dir=td, components='info') + if file_path.startswith("info"): + conda_package_handling.api.extract( + package_path, dest_dir=td, components="info" + ) else: - conda_package_handling.api.extract(package_path, dest_dir=td, components=file_path) + conda_package_handling.api.extract( + package_path, dest_dir=td, components=file_path + ) resolved_file_path = os.path.join(td, file_path) if os.path.exists(resolved_file_path): # TODO :: Remove this text-mode load. Files are binary. @@ -1088,7 +1212,7 @@ def package_has_file(package_path, file_path, refresh_mode='modified'): with open(resolved_file_path) as f: content = f.read() except UnicodeDecodeError: - with open(resolved_file_path, 'rb') as f: + with open(resolved_file_path, "rb") as f: content = f.read() else: content = False @@ -1145,7 +1269,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, '__iter__'): + if isinstance(arg, str) or not hasattr(arg, "__iter__"): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1204,13 +1328,13 @@ def expand_globs(path_list, root_dir): glob_files = glob(path) if not glob_files: log = get_logger(__name__) - log.error(f'Glob {path} did not match in root_dir {root_dir}') + log.error(f"Glob {path} did not match in root_dir {root_dir}") # https://docs.python.org/3/library/glob.html#glob.glob states that # "whether or not the results are sorted depends on the file system". # Avoid this potential ambiguity by sorting. (see #4185) files.extend(sorted(glob_files)) - prefix_path_re = re.compile('^' + re.escape(f'{root_dir}{os.path.sep}')) - files = [prefix_path_re.sub('', f, 1) for f in files] + prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}")) + files = [prefix_path_re.sub("", f, 1) for f in files] return files @@ -1228,12 +1352,16 @@ def find_recipe(path): if os.path.isfile(path): if os.path.basename(path) in VALID_METAS: return path - raise OSError("{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS))) + raise OSError( + "{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS)) + ) results = list(rec_glob(path, VALID_METAS, ignores=(".AppleDouble",))) if not results: - raise OSError("No meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "No meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) if len(results) == 1: return results[0] @@ -1243,20 +1371,40 @@ def find_recipe(path): metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))] if len(metas) == 1: - get_logger(__name__).warn("Multiple meta files found. " - "The %s file in the base directory (%s) " - "will be used." % (metas[0], path)) + get_logger(__name__).warn( + "Multiple meta files found. " + "The %s file in the base directory (%s) " + "will be used." 
% (metas[0], path) + ) return os.path.join(path, metas[0]) - raise OSError("More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) class LoggingContext: - default_loggers = ['conda', 'binstar', 'install', 'conda.install', 'fetch', 'conda.instructions', - 'fetch.progress', 'print', 'progress', 'dotupdate', 'stdoutlog', 'requests', - 'conda.core.package_cache', 'conda.plan', 'conda.gateways.disk.delete', - 'conda_build', 'conda_build.index', 'conda_build.noarch_python', - 'urllib3.connectionpool'] + default_loggers = [ + "conda", + "binstar", + "install", + "conda.install", + "fetch", + "conda.instructions", + "fetch.progress", + "print", + "progress", + "dotupdate", + "stdoutlog", + "requests", + "conda.core.package_cache", + "conda.plan", + "conda.gateways.disk.delete", + "conda_build", + "conda_build.index", + "conda_build.noarch_python", + "urllib3.connectionpool", + ] def __init__(self, level=logging.WARN, handler=None, close=True, loggers=None): self.level = level @@ -1274,8 +1422,11 @@ def __enter__(self): if isinstance(logger, str): log = logging.getLogger(logger) self.old_levels[logger] = log.level - log.setLevel(self.level if ('install' not in logger or - self.level < logging.INFO) else self.level + 10) + log.setLevel( + self.level + if ("install" not in logger or self.level < logging.INFO) + else self.level + 10 + ) if self.handler: self.logger.addHandler(self.handler) @@ -1295,23 +1446,23 @@ def __exit__(self, et, ev, tb): def get_installed_packages(path): - ''' + """ Scan all json files in 'path' and return a dictionary with their contents. Files are assumed to be in 'index.json' format. - ''' + """ installed = dict() - for filename in glob(os.path.join(path, 'conda-meta', '*.json')): + for filename in glob(os.path.join(path, "conda-meta", "*.json")): with open(filename) as file: data = json.load(file) - installed[data['name']] = data + installed[data["name"]] = data return installed def _convert_lists_to_sets(_dict): for k, v in _dict.items(): - if hasattr(v, 'keys'): + if hasattr(v, "keys"): _dict[k] = HashableDict(_convert_lists_to_sets(v)) - elif hasattr(v, '__iter__') and not isinstance(v, str): + elif hasattr(v, "__iter__") and not isinstance(v, str): try: _dict[k] = sorted(list(set(v))) except TypeError: @@ -1320,8 +1471,8 @@ def _convert_lists_to_sets(_dict): class HashableDict(dict): - """use hashable frozen dictionaries for resources and resource types so that they can be in sets - """ + """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self = _convert_lists_to_sets(self) @@ -1339,7 +1490,7 @@ def represent_hashabledict(dumper, data): value.append((node_key, node_value)) - return yaml.nodes.MappingNode('tag:yaml.org,2002:map', value) + return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) yaml.add_representer(HashableDict, represent_hashabledict) @@ -1349,6 +1500,7 @@ def represent_hashabledict(dumper, data): @contextlib.contextmanager def capture(): import sys + oldout, olderr = sys.stdout, sys.stderr try: out = [StringIO(), StringIO()] @@ -1383,19 +1535,19 @@ def env_var(name, value, callback=None): def trim_empty_keys(dict_): to_remove = set() - negative_means_empty = ('final', 'noarch_python', 'zip_keys') + negative_means_empty = ("final", "noarch_python", "zip_keys") for k, v in dict_.items(): - if 
hasattr(v, 'keys'): + if hasattr(v, "keys"): trim_empty_keys(v) # empty lists and empty strings, and None are always empty. - if v == list() or v == '' or v is None or v == dict(): + if v == list() or v == "" or v is None or v == dict(): to_remove.add(k) # other things that evaluate as False may not be "empty" - things can be manually set to # false, and we need to keep that setting. if not v and k in negative_means_empty: to_remove.add(k) - if 'zip_keys' in dict_ and not any(v for v in dict_['zip_keys']): - to_remove.add('zip_keys') + if "zip_keys" in dict_ and not any(v for v in dict_["zip_keys"]): + to_remove.add("zip_keys") for k in to_remove: del dict_[k] @@ -1403,17 +1555,17 @@ def trim_empty_keys(dict_): def _increment(version, alpha_ver): try: if alpha_ver: - suffix = 'a' + suffix = "a" else: - suffix = '.0a0' + suffix = ".0a0" last_version = str(int(version) + 1) + suffix except ValueError: last_version = chr(ord(version) + 1) return last_version -def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): - pins = [len(p.split('.')) if p else None for p in (min_pin, max_pin)] +def apply_pin_expressions(version, min_pin="x.x.x.x.x.x.x", max_pin="x"): + pins = [len(p.split(".")) if p else None for p in (min_pin, max_pin)] parsed_version = VersionOrder(version).version[1:] nesting_position = None flat_list = [] @@ -1423,9 +1575,9 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): flat_list.extend(item) else: flat_list.append(item) - if max_pin and len(max_pin.split('.')) > len(flat_list): + if max_pin and len(max_pin.split(".")) > len(flat_list): pins[1] = len(flat_list) - versions = ['', ''] + versions = ["", ""] # first idx is lower bound pin; second is upper bound pin. # pin value is number of places to pin. for p_idx, pin in enumerate(pins): @@ -1439,8 +1591,8 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): v = _increment(v, alpha_ver) versions[p_idx] += str(v) if v_idx != nesting_position: - versions[p_idx] += '.' - if versions[p_idx][-1] == '.': + versions[p_idx] += "." + if versions[p_idx][-1] == ".": versions[p_idx] = versions[p_idx][:-1] if versions[0]: if version.endswith(".*"): @@ -1452,55 +1604,68 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): if version_order < VersionOrder(versions[0]): # If the minimum is greater than the version this is a pre-release build. 
# Use the version as the lower bound - versions[0] = '>=' + version + versions[0] = ">=" + version else: - versions[0] = '>=' + versions[0] + versions[0] = ">=" + versions[0] if versions[1]: - versions[1] = '<' + versions[1] - return ','.join([v for v in versions if v]) - - -def filter_files(files_list, prefix, filter_patterns=(r'(.*[\\/])?\.git[\\/].*', - r'(.*[\\/])?\.git$', - r'(.*)?\.DS_Store.*', - r'.*\.la$', - r'conda-meta.*', - r'.*\.conda_trash(?:_\d+)*$')): + versions[1] = "<" + versions[1] + return ",".join([v for v in versions if v]) + + +def filter_files( + files_list, + prefix, + filter_patterns=( + r"(.*[\\/])?\.git[\\/].*", + r"(.*[\\/])?\.git$", + r"(.*)?\.DS_Store.*", + r".*\.la$", + r"conda-meta.*", + r".*\.conda_trash(?:_\d+)*$", + ), +): """Remove things like the .git directory from the list of files to be copied""" for pattern in filter_patterns: r = re.compile(pattern) files_list = set(files_list) - set(filter(r.match, files_list)) - return [f for f in files_list - if not os.path.isdir(os.path.join(prefix, f)) or - os.path.islink(os.path.join(prefix, f))] + return [ + f + for f in files_list + if not os.path.isdir(os.path.join(prefix, f)) + or os.path.islink(os.path.join(prefix, f)) + ] def filter_info_files(files_list, prefix): - return filter_files(files_list, prefix, filter_patterns=( - 'info[\\\\/]index.json', - 'info[\\\\/]files', - 'info[\\\\/]paths.json', - 'info[\\\\/]about.json', - 'info[\\\\/]has_prefix', - 'info[\\\\/]hash_input_files', # legacy, not used anymore - 'info[\\\\/]hash_input.json', - 'info[\\\\/]run_exports.yaml', # legacy - 'info[\\\\/]run_exports.json', # current - 'info[\\\\/]git', - 'info[\\\\/]recipe[\\\\/].*', - 'info[\\\\/]recipe_log.json', - 'info[\\\\/]recipe.tar', - 'info[\\\\/]test[\\\\/].*', - 'info[\\\\/]LICENSE.txt', # legacy, some tests rely on this - 'info[\\\\/]licenses[\\\\/]*', - 'info[\\\\/]prelink_messages[\\\\/]*', - 'info[\\\\/]requires', - 'info[\\\\/]meta', - 'info[\\\\/]platform', - 'info[\\\\/]no_link', - 'info[\\\\/]link.json', - 'info[\\\\/]icon.png', - )) + return filter_files( + files_list, + prefix, + filter_patterns=( + "info[\\\\/]index.json", + "info[\\\\/]files", + "info[\\\\/]paths.json", + "info[\\\\/]about.json", + "info[\\\\/]has_prefix", + "info[\\\\/]hash_input_files", # legacy, not used anymore + "info[\\\\/]hash_input.json", + "info[\\\\/]run_exports.yaml", # legacy + "info[\\\\/]run_exports.json", # current + "info[\\\\/]git", + "info[\\\\/]recipe[\\\\/].*", + "info[\\\\/]recipe_log.json", + "info[\\\\/]recipe.tar", + "info[\\\\/]test[\\\\/].*", + "info[\\\\/]LICENSE.txt", # legacy, some tests rely on this + "info[\\\\/]licenses[\\\\/]*", + "info[\\\\/]prelink_messages[\\\\/]*", + "info[\\\\/]requires", + "info[\\\\/]meta", + "info[\\\\/]platform", + "info[\\\\/]no_link", + "info[\\\\/]link.json", + "info[\\\\/]icon.png", + ), + ) def rm_rf(path, config=None): @@ -1544,12 +1709,12 @@ def filter(self, record): warning_error_stderr_filter = GreaterThanFilter(logging.INFO) # set filelock's logger to only show warnings by default -logging.getLogger('filelock').setLevel(logging.WARN) +logging.getLogger("filelock").setLevel(logging.WARN) # quiet some of conda's less useful output -logging.getLogger('conda.core.linked_data').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.delete').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.test').setLevel(logging.WARN) +logging.getLogger("conda.core.linked_data").setLevel(logging.WARN) 
+logging.getLogger("conda.gateways.disk.delete").setLevel(logging.WARN) +logging.getLogger("conda.gateways.disk.test").setLevel(logging.WARN) def reset_deduplicator(): @@ -1561,15 +1726,17 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None - if cc_conda_build.get('log_config_file'): - config_file = abspath(expanduser(expandvars(cc_conda_build.get('log_config_file')))) + if cc_conda_build.get("log_config_file"): + config_file = abspath( + expanduser(expandvars(cc_conda_build.get("log_config_file"))) + ) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. if config_file: with open(config_file) as f: config_dict = yaml.safe_load(f) logging.config.dictConfig(config_dict) - level = config_dict.get('loggers', {}).get(name, {}).get('level', level) + level = config_dict.get("loggers", {}).get(name, {}).get("level", level) log = logging.getLogger(name) log.setLevel(level) if dedupe: @@ -1592,25 +1759,30 @@ def _equivalent(base_value, value, path): equivalent = value == base_value if isinstance(value, str) and isinstance(base_value, str): if not os.path.isabs(base_value): - base_value = os.path.abspath(os.path.normpath(os.path.join(path, base_value))) + base_value = os.path.abspath( + os.path.normpath(os.path.join(path, base_value)) + ) if not os.path.isabs(value): value = os.path.abspath(os.path.normpath(os.path.join(path, value))) equivalent |= base_value == value return equivalent -def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True): +def merge_or_update_dict( + base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True +): if base == new: return base log = get_logger(__name__) for key, value in new.items(): if key in base or add_missing_keys: base_value = base.get(key, value) - if hasattr(value, 'keys'): - base_value = merge_or_update_dict(base_value, value, path, merge, - raise_on_clobber=raise_on_clobber) + if hasattr(value, "keys"): + base_value = merge_or_update_dict( + base_value, value, path, merge, raise_on_clobber=raise_on_clobber + ) base[key] = base_value - elif hasattr(value, '__iter__') and not isinstance(value, str): + elif hasattr(value, "__iter__") and not isinstance(value, str): if merge: if base_value != value: try: @@ -1624,10 +1796,17 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, else: base[key] = value else: - if (base_value and merge and not _equivalent(base_value, value, path) and - raise_on_clobber): - log.debug('clobbering key {} (original value {}) with value {}'.format(key, - base_value, value)) + if ( + base_value + and merge + and not _equivalent(base_value, value, path) + and raise_on_clobber + ): + log.debug( + "clobbering key {} (original value {}) with value {}".format( + key, base_value, value + ) + ) if value is None and key in base: del base[key] else: @@ -1636,49 +1815,60 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, def merge_dicts_of_lists(dol1, dol2): - ''' + """ From Alex Martelli: https://stackoverflow.com/a/1495821/3257826 - ''' + """ keys = set(dol1).union(dol2) no = [] return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} def prefix_files(prefix): - ''' + """ Returns a set of all files in prefix. 
- ''' + """ res = set() prefix_rep = prefix + os.path.sep for root, dirs, files in walk(prefix): for fn in files: # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, '', 1)) + res.add(join(root, fn).replace(prefix_rep, "", 1)) for dn in dirs: path = join(root, dn) if islink(path): - res.add(path.replace(prefix_rep, '', 1)) - res.update(expand_globs((path, ), prefix)) + res.add(path.replace(prefix_rep, "", 1)) + res.update(expand_globs((path,), prefix)) return res -def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_PROT_WRITE, - access=None, offset=0): - ''' +def mmap_mmap( + fileno, + length, + tagname=None, + flags=0, + prot=mmap_PROT_READ | mmap_PROT_WRITE, + access=None, + offset=0, +): + """ Hides the differences between mmap.mmap on Windows and Unix. Windows has `tagname`. Unix does not, but makes up for it with `flags` and `prot`. On both, the default value for `access` is determined from how the file was opened so must not be passed in at all to get this default behaviour. - ''' + """ if on_win: if access: - return mmap.mmap(fileno, length, tagname=tagname, access=access, offset=offset) + return mmap.mmap( + fileno, length, tagname=tagname, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, tagname=tagname) else: if access: - return mmap.mmap(fileno, length, flags=flags, prot=prot, access=access, offset=offset) + return mmap.mmap( + fileno, length, flags=flags, prot=prot, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, flags=flags, prot=prot) @@ -1686,21 +1876,21 @@ def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_ def remove_pycache_from_scripts(build_prefix): """Remove pip created pycache directory from bin or Scripts.""" if on_win: - scripts_path = os.path.join(build_prefix, 'Scripts') + scripts_path = os.path.join(build_prefix, "Scripts") else: - scripts_path = os.path.join(build_prefix, 'bin') + scripts_path = os.path.join(build_prefix, "bin") if os.path.isdir(scripts_path): for entry in os.listdir(scripts_path): entry_path = os.path.join(scripts_path, entry) - if os.path.isdir(entry_path) and entry.strip(os.sep) == '__pycache__': + if os.path.isdir(entry_path) and entry.strip(os.sep) == "__pycache__": shutil.rmtree(entry_path) - elif os.path.isfile(entry_path) and entry_path.endswith('.pyc'): + elif os.path.isfile(entry_path) and entry_path.endswith(".pyc"): os.remove(entry_path) -def sort_list_in_nested_structure(dictionary, omissions=''): +def sort_list_in_nested_structure(dictionary, omissions=""): """Recurse through a nested dictionary and sort any lists that are found. If the list that is found contains anything but strings, it is skipped @@ -1713,9 +1903,11 @@ def sort_list_in_nested_structure(dictionary, omissions=''): section = dictionary[field][key] if isinstance(section, dict): sort_list_in_nested_structure(section) - elif (isinstance(section, list) and - '{}/{}' .format(field, key) not in omissions and - all(isinstance(item, str) for item in section)): + elif ( + isinstance(section, list) + and f"{field}/{key}" not in omissions + and all(isinstance(item, str) for item in section) + ): section.sort() # there's a possibility for nested lists containing dictionaries @@ -1738,51 +1930,63 @@ def sort_list_in_nested_structure(dictionary, omissions=''): # if you are seeing mysterious unsatisfiable errors, with the package you're building being the # unsatisfiable part, then you probably need to update this regex. 
-spec_needing_star_re = re.compile(r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$") # NOQA +spec_needing_star_re = re.compile( + r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$" +) # NOQA spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$") def ensure_valid_spec(spec, warn=False): if isinstance(spec, MatchSpec): - if (hasattr(spec, 'version') and spec.version and (not spec.get('build', '')) and - spec_ver_needing_star_re.match(str(spec.version))): - if str(spec.name) not in ('python', 'numpy') or str(spec.version) != 'x.x': - spec = MatchSpec("{} {}".format(str(spec.name), str(spec.version) + '.*')) + if ( + hasattr(spec, "version") + and spec.version + and (not spec.get("build", "")) + and spec_ver_needing_star_re.match(str(spec.version)) + ): + if str(spec.name) not in ("python", "numpy") or str(spec.version) != "x.x": + spec = MatchSpec( + "{} {}".format(str(spec.name), str(spec.version) + ".*") + ) else: match = spec_needing_star_re.match(spec) # ignore exact pins (would be a 3rd group) if match and not match.group(3): - if match.group(1) in ('python', 'numpy') and match.group(2) == 'x.x': + if match.group(1) in ("python", "numpy") and match.group(2) == "x.x": spec = spec_needing_star_re.sub(r"\1 \2", spec) else: if "*" not in spec: - if match.group(1) not in ('python', 'vc') and warn: + if match.group(1) not in ("python", "vc") and warn: log = get_logger(__name__) - log.warn("Adding .* to spec '{}' to ensure satisfiability. Please " - "consider putting {{{{ var_name }}}}.* or some relational " - "operator (>/=/<=) on this spec in meta.yaml, or if req is " - "also a build req, using {{{{ pin_compatible() }}}} jinja2 " - "function instead. See " - "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level" # NOQA - .format(spec)) + log.warn( + "Adding .* to spec '{}' to ensure satisfiability. Please " + "consider putting {{{{ var_name }}}}.* or some relational " + "operator (>/=/<=) on this spec in meta.yaml, or if req is " + "also a build req, using {{{{ pin_compatible() }}}} jinja2 " + "function instead. 
See " + "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level".format( # NOQA + spec + ) + ) spec = spec_needing_star_re.sub(r"\1 \2.*", spec) return spec def insert_variant_versions(requirements_dict, variant, env): - build_deps = (ensure_list(requirements_dict.get('build')) + - ensure_list(requirements_dict.get('host'))) + build_deps = ensure_list(requirements_dict.get("build")) + ensure_list( + requirements_dict.get("host") + ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r'^(%s)(?:\s*$)' % key.replace('_', '[-_]')) + regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): - if x and (env in ('build', 'host') or x.group(1) in build_deps): + if x and (env in ("build", "host") or x.group(1) in build_deps): del reqs[i] if not isinstance(val, str): val = val[0] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), val)))) + reqs.insert(i, ensure_valid_spec(" ".join((x.group(1), val)))) xx_re = re.compile(r"([0-9a-zA-Z\.\-\_]+)\s+x\.x") @@ -1791,7 +1995,10 @@ def insert_variant_versions(requirements_dict, variant, env): for i, x in enumerate(matches): if x: del reqs[i] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), variant.get(x.group(1)))))) + reqs.insert( + i, + ensure_valid_spec(" ".join((x.group(1), variant.get(x.group(1))))), + ) if reqs: requirements_dict[env] = reqs @@ -1822,10 +2029,13 @@ def match_peer_job(target_matchspec, other_m, this_m=None): def expand_reqs(reqs_entry): - if not hasattr(reqs_entry, 'keys'): + if not hasattr(reqs_entry, "keys"): original = ensure_list(reqs_entry)[:] - reqs_entry = {'host': ensure_list(original), - 'run': ensure_list(original)} if original else {} + reqs_entry = ( + {"host": ensure_list(original), "run": ensure_list(original)} + if original + else {} + ) else: for sec in reqs_entry: reqs_entry[sec] = ensure_list(reqs_entry[sec]) @@ -1840,8 +2050,8 @@ def sha256_checksum(filename, buffersize=65536): if not isfile(filename): return None sha256 = hashlib.sha256() - with open(filename, 'rb') as f: - for block in iter(lambda: f.read(buffersize), b''): + with open(filename, "rb") as f: + for block in iter(lambda: f.read(buffersize), b""): sha256.update(block) return sha256.hexdigest() @@ -1865,11 +2075,11 @@ def write_bat_activation_text(file_handle, m): # exists to identify a valid conda environment # conda 4.6 changes this one final time, by adding a '--stack' flag to the 'activate' # command, and 'activate' does not stack environments by default without that flag - history_file = join(m.config.host_prefix, 'conda-meta', 'history') + history_file = join(m.config.host_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() + open(history_file, "a").close() file_handle.write( f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate "{m.config.host_prefix}"\n' @@ -1880,47 +2090,51 @@ def write_bat_activation_text(file_handle, m): f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate --stack "{m.config.build_prefix}"\n' ) from conda_build.os_utils.external import find_executable - ccache = find_executable('ccache', m.config.build_prefix, False) + + ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: if isinstance(ccache, list): ccache = ccache[0] ccache_methods = {} - ccache_methods['env_vars'] = 
False - ccache_methods['symlinks'] = False - ccache_methods['native'] = False - if hasattr(m.config, 'ccache_method'): + ccache_methods["env_vars"] = False + ccache_methods["symlinks"] = False + ccache_methods["native"] = False + if hasattr(m.config, "ccache_method"): ccache_methods[m.config.ccache_method] = True for method, value in ccache_methods.items(): if value: - if method == 'env_vars': + if method == "env_vars": file_handle.write(f'set "CC={ccache} %CC%"\n') file_handle.write(f'set "CXX={ccache} %CXX%"\n') - elif method == 'symlinks': - dirname_ccache_ln_bin = join(m.config.build_prefix, 'ccache-ln-bin') - file_handle.write(f'mkdir {dirname_ccache_ln_bin}\n') - file_handle.write(f'pushd {dirname_ccache_ln_bin}\n') + elif method == "symlinks": + dirname_ccache_ln_bin = join(m.config.build_prefix, "ccache-ln-bin") + file_handle.write(f"mkdir {dirname_ccache_ln_bin}\n") + file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") # If you use mklink.exe instead of mklink here it breaks as it's a builtin. - for ext in ('.exe', ''): + for ext in (".exe", ""): # MSVC - file_handle.write(f'mklink cl{ext} {ccache}\n') - file_handle.write(f'mklink link{ext} {ccache}\n') + file_handle.write(f"mklink cl{ext} {ccache}\n") + file_handle.write(f"mklink link{ext} {ccache}\n") # GCC - file_handle.write(f'mklink gcc{ext} {ccache}\n') - file_handle.write(f'mklink g++{ext} {ccache}\n') - file_handle.write(f'mklink cc{ext} {ccache}\n') - file_handle.write(f'mklink c++{ext} {ccache}\n') - file_handle.write(f'mklink as{ext} {ccache}\n') - file_handle.write(f'mklink ar{ext} {ccache}\n') - file_handle.write(f'mklink nm{ext} {ccache}\n') - file_handle.write(f'mklink ranlib{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ar{ext} {ccache}\n') - file_handle.write(f'mklink gcc-nm{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ranlib{ext} {ccache}\n') - file_handle.write('popd\n') - file_handle.write('set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n'.format( - dirname_ccache_ln=dirname_ccache_ln_bin, - dirname_ccache=os.path.dirname(ccache))) - elif method == 'native': + file_handle.write(f"mklink gcc{ext} {ccache}\n") + file_handle.write(f"mklink g++{ext} {ccache}\n") + file_handle.write(f"mklink cc{ext} {ccache}\n") + file_handle.write(f"mklink c++{ext} {ccache}\n") + file_handle.write(f"mklink as{ext} {ccache}\n") + file_handle.write(f"mklink ar{ext} {ccache}\n") + file_handle.write(f"mklink nm{ext} {ccache}\n") + file_handle.write(f"mklink ranlib{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ar{ext} {ccache}\n") + file_handle.write(f"mklink gcc-nm{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ranlib{ext} {ccache}\n") + file_handle.write("popd\n") + file_handle.write( + "set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n".format( + dirname_ccache_ln=dirname_ccache_ln_bin, + dirname_ccache=os.path.dirname(ccache), + ) + ) + elif method == "native": pass else: print("ccache method {} not implemented") @@ -1931,15 +2145,15 @@ def write_bat_activation_text(file_handle, m): def download_channeldata(channel_url): global channeldata_cache - if channel_url.startswith('file://') or channel_url not in channeldata_cache: + if channel_url.startswith("file://") or channel_url not in channeldata_cache: urls = get_conda_channel(channel_url).urls() - urls = {url.rsplit('/', 1)[0] for url in urls} + urls = {url.rsplit("/", 1)[0] for url in urls} data = {} for url in urls: with TemporaryDirectory() as td: tf = os.path.join(td, "channeldata.json") try: - download(url + '/channeldata.json', tf) + 
download(url + "/channeldata.json", tf) with open(tf) as f: new_channeldata = json.load(f) except (JSONDecodeError, CondaHTTPError): @@ -1977,20 +2191,26 @@ def shutil_move_more_retrying(src, dest, debug_name): log.info(f"shutil.move({debug_name})={src}, dest={dest})") shutil.move(src, dest) if attempts_left != 5: - log.warning("shutil.move({}={}, dest={}) succeeded on attempt number {}".format(debug_name, src, dest, - 6 - attempts_left)) + log.warning( + "shutil.move({}={}, dest={}) succeeded on attempt number {}".format( + debug_name, src, dest, 6 - attempts_left + ) + ) attempts_left = -1 except: attempts_left = attempts_left - 1 if attempts_left > 0: log.warning( "Failed to rename {} directory, check with strace, struss or procmon. " - "Will sleep for 3 seconds and try again!".format(debug_name)) + "Will sleep for 3 seconds and try again!".format(debug_name) + ) import time + time.sleep(3) elif attempts_left != -1: log.error( - f"Failed to rename {debug_name} directory despite sleeping and retrying.") + f"Failed to rename {debug_name} directory despite sleeping and retrying." + ) def is_conda_pkg(pkg_path: str) -> bool: @@ -1999,8 +2219,6 @@ def is_conda_pkg(pkg_path: str) -> bool: """ path = Path(pkg_path) - return ( - path.is_file() and ( - any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) - ) + return path.is_file() and ( + any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) ) From 9c294f040b6265c9dc36b4b7d5da924eec4b922e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 7 Feb 2023 08:31:32 +0100 Subject: [PATCH 062/366] [pre-commit.ci] pre-commit autoupdate (#4768) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/pylint: v2.16.0b1 → v2.16.1](https://github.com/PyCQA/pylint/compare/v2.16.0b1...v2.16.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 17a6f7927e..e41b255bb2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.16.0b1 + rev: v2.16.1 hooks: - id: pylint args: [--exit-zero] From 56d65e11c6d33d5ccf053804d5f28527f1033a99 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Wed, 8 Feb 2023 12:21:04 -0500 Subject: [PATCH 063/366] Remove more unused test fixtures (#4769) --- tests/test_api_build.py | 58 ++++++++++++++++++++--------------------- tests/test_index.py | 4 +-- tests/test_metadata.py | 2 +- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 59af1b1c42..bd80b5068f 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -88,7 +88,7 @@ def describe_root(cwd=None): for recipe in get_valid_recipes(metadata_dir) ], ) -def test_recipe_builds(recipe: Path, testing_config, testing_workdir, monkeypatch): +def test_recipe_builds(recipe: Path, testing_config, monkeypatch): # TODO: After we fix #3754 this mark can be removed. This specific test # ``source_setup_py_data_subdir`` reproduces the problem. 
if recipe.name == "source_setup_py_data_subdir": @@ -136,7 +136,7 @@ def test_ignore_some_prefix_files(testing_config, monkeypatch): @pytest.mark.serial @pytest.mark.xfail -def test_token_upload(testing_workdir, testing_metadata): +def test_token_upload(testing_metadata): folder_uuid = uuid.uuid4().hex # generated with conda_test_account user, command: # anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda' @@ -167,7 +167,7 @@ def test_token_upload(testing_workdir, testing_metadata): @pytest.mark.sanity @pytest.mark.serial @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) -def test_no_anaconda_upload_condarc(service_name, testing_workdir, testing_config, capfd): +def test_no_anaconda_upload_condarc(service_name, testing_config, capfd): api.build(empty_sections, config=testing_config, notest=True) output, error = capfd.readouterr() assert "Automatic uploading is disabled" in output, error @@ -233,7 +233,7 @@ def test_early_abort(testing_config, capfd): assert "Hello World" in output -def test_output_build_path_git_source(testing_workdir, testing_config): +def test_output_build_path_git_source(testing_config): recipe_path = os.path.join(metadata_dir, "source_git_jinja2") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_paths(m)[0] @@ -262,7 +262,7 @@ def test_build_with_activate_does_activate(): @pytest.mark.sanity @pytest.mark.skipif(sys.platform == "win32", reason="no binary prefix manipulation done on windows.") -def test_binary_has_prefix_files(testing_workdir, testing_config): +def test_binary_has_prefix_files(testing_config): api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config) @@ -270,11 +270,11 @@ def test_binary_has_prefix_files(testing_workdir, testing_config): @pytest.mark.sanity @pytest.mark.skipif(sys.platform == "win32", reason="no binary prefix manipulation done on windows.") -def test_binary_has_prefix_files_non_utf8(testing_workdir, testing_config): +def test_binary_has_prefix_files_non_utf8(testing_config): api.build(os.path.join(metadata_dir, '_binary_has_utf_non_8'), config=testing_config) -def test_relative_path_git_versioning(testing_workdir, testing_config): +def test_relative_path_git_versioning(testing_config): # conda_build_test_recipe is a manual step. Clone it at the same level as # your conda-build source. 
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', @@ -286,7 +286,7 @@ def test_relative_path_git_versioning(testing_workdir, testing_config): assert tag in output -def test_relative_git_url_git_versioning(testing_workdir, testing_config): +def test_relative_git_url_git_versioning(testing_config): cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'conda_build_test_recipe')) tag = describe_root(cwd) @@ -295,7 +295,7 @@ def test_relative_git_url_git_versioning(testing_workdir, testing_config): assert tag in output -def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config): +def test_dirty_variable_available_in_build_scripts(testing_config): recipe = os.path.join(metadata_dir, "_dirty_skip_section") testing_config.dirty = True api.build(recipe, config=testing_config) @@ -378,7 +378,7 @@ def test_build_msvc_compiler(msvc_ver, monkeypatch): @pytest.mark.sanity @pytest.mark.parametrize("platform", platforms) @pytest.mark.parametrize("target_compiler", compilers) -def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config): +def test_cmake_generator(platform, target_compiler, testing_config): testing_config.variant['python'] = target_compiler testing_config.activate = True api.build(os.path.join(metadata_dir, '_cmake_generator'), config=testing_config) @@ -386,19 +386,19 @@ def test_cmake_generator(platform, target_compiler, testing_workdir, testing_con @pytest.mark.skipif(sys.platform == "win32", reason="No windows symlinks") -def test_symlink_fail(testing_workdir, testing_config): +def test_symlink_fail(testing_config): with pytest.raises((SystemExit, FileNotFoundError)): api.build(os.path.join(fail_dir, "symlinks"), config=testing_config) @pytest.mark.sanity -def test_pip_in_meta_yaml_fail(testing_workdir, testing_config): +def test_pip_in_meta_yaml_fail(testing_config): with pytest.raises(ValueError, match='environment.yml'): api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config) @pytest.mark.sanity -def test_recursive_fail(testing_workdir, testing_config): +def test_recursive_fail(testing_config): with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError), match="recursive-build2"): api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config) @@ -408,13 +408,13 @@ def test_recursive_fail(testing_workdir, testing_config): @pytest.mark.sanity -def test_jinja_typo(testing_workdir, testing_config): +def test_jinja_typo(testing_config): with pytest.raises(SystemExit, match="GIT_DSECRIBE_TAG"): api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config) @pytest.mark.sanity -def test_skip_existing(testing_workdir, testing_config, capfd): +def test_skip_existing(testing_config, capfd): # build the recipe first api.build(empty_sections, config=testing_config) api.build(empty_sections, config=testing_config, skip_existing=True) @@ -445,14 +445,14 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): assert "are already built" in output -def test_failed_tests_exit_build(testing_workdir, testing_config): +def test_failed_tests_exit_build(testing_config): """https://github.com/conda/conda-build/issues/1112""" with pytest.raises(SystemExit, match="TESTS FAILED"): api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config) @pytest.mark.sanity -def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): +def test_requirements_txt_for_run_reqs(testing_config): """ If 
run reqs are blank, then conda-build looks for requirements.txt in the recipe folder. There has been a report of issue with unsatisfiable requirements at @@ -470,7 +470,7 @@ def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): sys.version_info >= (3, 10), reason="Python 3.10+, py_compile terminates once it finds an invalid file", ) -def test_compileall_compiles_all_good_files(testing_workdir, testing_config): +def test_compileall_compiles_all_good_files(testing_config): output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] good_files = ['f1.py', 'f3.py'] bad_file = 'f2_bad.py' @@ -484,7 +484,7 @@ def test_compileall_compiles_all_good_files(testing_workdir, testing_config): @pytest.mark.sanity @pytest.mark.skipif(not on_win, reason="only Windows is insane enough to have backslashes in paths") -def test_backslash_in_always_include_files_path(testing_config): +def test_backslash_in_always_include_files_path(): api.build(os.path.join(metadata_dir, '_backslash_in_include_files')) with pytest.raises(RuntimeError): api.build(os.path.join(fail_dir, 'backslash_in_include_files')) @@ -673,7 +673,7 @@ def test_noarch(testing_workdir): assert (os.path.sep + "noarch" + os.path.sep not in output or noarch) -def test_disable_pip(testing_config, testing_metadata): +def test_disable_pip(testing_metadata): testing_metadata.config.disable_pip = True testing_metadata.meta['requirements'] = {'host': ['python'], 'run': ['python']} @@ -699,7 +699,7 @@ def test_rpath_unix(testing_config, variants_conda_build_sysroot): ) -def test_noarch_none_value(testing_workdir, testing_config): +def test_noarch_none_value(testing_config): recipe = os.path.join(metadata_dir, "_noarch_none") with pytest.raises(exceptions.CondaBuildException): api.build(recipe, config=testing_config) @@ -729,7 +729,7 @@ def test_about_json_content(testing_metadata): @pytest.mark.parametrize( "name,field", [("license", "license_file"), ("prelink_message", "prelink_message")] ) -def test_about_license_file_and_prelink_message(testing_workdir, testing_config, name, field): +def test_about_license_file_and_prelink_message(testing_config, name, field): base_dir = os.path.join(metadata_dir, f"_about_{field}/recipes") recipe = os.path.join(base_dir, "single") @@ -889,7 +889,7 @@ def test_info_files_json(testing_config): assert file.get("file_mode") is None -def test_build_expands_wildcards(mocker, testing_workdir): +def test_build_expands_wildcards(mocker): build_tree = mocker.patch("conda_build.build.build_tree") config = api.Config() files = ['abc', 'acb'] @@ -1165,7 +1165,7 @@ def test_unknown_selectors(testing_config): # the locks can be very flaky on GitHub Windows Runners # https://github.com/conda/conda-build/issues/4685 @pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_failed_recipe_leaves_folders(testing_config, testing_workdir): +def test_failed_recipe_leaves_folders(testing_config): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] locks = get_conda_operation_locks(m.config) @@ -1216,7 +1216,7 @@ def test_no_locking(testing_config): @pytest.mark.sanity -def test_test_dependencies(testing_workdir, testing_config): +def test_test_dependencies(testing_config): recipe = os.path.join(fail_dir, 'check_test_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: @@ -1227,7 +1227,7 @@ def test_test_dependencies(testing_workdir, testing_config): @pytest.mark.sanity -def test_runtime_dependencies(testing_workdir, 
testing_config): +def test_runtime_dependencies(testing_config): recipe = os.path.join(fail_dir, 'check_runtime_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: @@ -1280,14 +1280,14 @@ def test_python_xx(testing_config): @pytest.mark.sanity -def test_indirect_numpy_dependency(testing_metadata, testing_workdir, testing_config): +def test_indirect_numpy_dependency(testing_metadata, testing_workdir): testing_metadata.meta['requirements']['build'] = ['pandas'] api.output_yaml(testing_metadata, os.path.join(testing_workdir, 'meta.yaml')) api.render(testing_workdir, numpy='1.13', notest=True) @pytest.mark.sanity -def test_dependencies_with_notest(testing_workdir, testing_config): +def test_dependencies_with_notest(testing_config): recipe = os.path.join(metadata_dir, '_test_dependencies') api.build(recipe, config=testing_config, notest=True) @@ -1572,7 +1572,7 @@ def assert_keyword(keyword): @pytest.mark.slow -def test_activated_prefixes_in_actual_path(testing_config, testing_metadata): +def test_activated_prefixes_in_actual_path(testing_metadata): """ Check if build and host env are properly added to PATH in the correct order. Do this in an actual build and not just in a unit test to avoid regression. diff --git a/tests/test_index.py b/tests/test_index.py index 9e0acddc15..990b9d18a7 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -939,7 +939,7 @@ def test_index_of_removed_pkg(testing_metadata): assert not repodata["packages"] -def test_patch_instructions_with_missing_subdir(testing_workdir): +def test_patch_instructions_with_missing_subdir(): os.makedirs("linux-64") os.makedirs("zos-z") conda_build.api.update_index(".") # what is the current working directory? @@ -1152,7 +1152,7 @@ def test_current_index_reduces_space(): } -def test_current_index_version_keys_keep_older_packages(testing_workdir): +def test_current_index_version_keys_keep_older_packages(): pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") # pass no version file diff --git a/tests/test_metadata.py b/tests/test_metadata.py index d616d2ec4f..30ec75f50a 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -169,7 +169,7 @@ def test_build_bootstrap_env_by_path(testing_metadata): ], ) def test_native_compiler_metadata( - platform: str, arch: str, python: str, compilers: set[str], testing_config, mocker + platform: str, arch: str, python: str, compilers: set[str], testing_config ): testing_config.platform = platform metadata = api.render( From 925c2274ed517926b69aa1e5287d8629e4905be5 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 9 Feb 2023 10:50:53 +0100 Subject: [PATCH 064/366] Install downstream packages in correct `subdir` (#4763) --- conda_build/render.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/conda_build/render.py b/conda_build/render.py index 9f4e3c7cd2..f3d1801ec6 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -5,6 +5,7 @@ import json import os from os.path import isdir, isfile, abspath +from pathlib import Path import random import re import shutil @@ -217,9 +218,18 @@ def find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False): with tarfile.open(pkg_file, 'w:bz2') as archive: for entry in os.listdir(pkg_dir): archive.add(os.path.join(pkg_dir, entry), arcname=entry) - pkg_subdir = os.path.join(m.config.croot, m.config.host_subdir) + + # use the package's subdir + try: + info = json.loads(Path(pkg_dir, "info", "index.json").read_text()) + subdir = info["subdir"] + 
except (FileNotFoundError, KeyError): + subdir = m.config.host_subdir + + pkg_subdir = os.path.join(m.config.croot, subdir) pkg_loc = os.path.join(pkg_subdir, os.path.basename(pkg_file)) shutil.move(pkg_file, pkg_loc) + break return pkg_loc From df585c11d69808f3dfb9ff5c86e9e9426e6afd62 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Feb 2023 11:21:52 -0600 Subject: [PATCH 065/366] [pre-commit.ci] pre-commit autoupdate (#4772) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/akaihola/darker: 1.6.1 → 1.7.0](https://github.com/akaihola/darker/compare/1.6.1...1.7.0) - [github.com/PyCQA/pylint: v2.16.1 → v2.16.2](https://github.com/PyCQA/pylint/compare/v2.16.1...v2.16.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e41b255bb2..c46d27a68d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,7 +42,7 @@ repos: - id: pyupgrade args: ["--py37-plus"] - repo: https://github.com/akaihola/darker - rev: 1.6.1 + rev: 1.7.0 hooks: - id: darker additional_dependencies: [black==22.10.0] @@ -56,7 +56,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.16.1 + rev: v2.16.2 hooks: - id: pylint args: [--exit-zero] From 5ebe12b1a1e4409f968f74c1404f33cf8d023aa9 Mon Sep 17 00:00:00 2001 From: Ernst Luring Date: Thu, 23 Feb 2023 20:40:15 +0100 Subject: [PATCH 066/366] Fixes the git lfs error by adding git lfs fetching (#4318) --- conda_build/source.py | 18 ++++++++++++++++++ news/gh-4318-Fixes-git-lfs-error.rst | 24 ++++++++++++++++++++++++ 2 files changed, 42 insertions(+) create mode 100644 news/gh-4318-Fixes-git-lfs-error.rst diff --git a/conda_build/source.py b/conda_build/source.py index 58954602be..572666ccdb 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -173,6 +173,20 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False shutil.move(os.path.join(tmpdir, f), os.path.join(src_dir, f)) +def check_git_lfs(git, cwd): + try: + lfs_list_output = check_output_env([git, 'lfs', 'ls-files', '--all'], cwd=cwd) + return lfs_list_output and lfs_list_output.strip() + except CalledProcessError: + return False + + +def git_lfs_fetch(git, cwd, stdout, stderr): + lfs_version = check_output_env([git, 'lfs', 'version'], cwd=cwd) + log.info(lfs_version) + check_call_env([git, 'lfs', 'fetch', 'origin', '--all'], cwd=cwd, stdout=stdout, stderr=stderr) + + def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_cache, git_ref=None, git_depth=-1, is_top_level=True, verbose=True): """ Mirror (and checkout) a Git repository recursively. 
@@ -216,6 +230,8 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca try: if git_ref != 'HEAD': check_call_env([git, 'fetch'], cwd=mirror_dir, stdout=stdout, stderr=stderr) + if check_git_lfs(git, mirror_dir): + git_lfs_fetch(git, mirror_dir, stdout, stderr) else: # Unlike 'git clone', fetch doesn't automatically update the cache's HEAD, # So here we explicitly store the remote HEAD in the cache's local refs/heads, @@ -242,6 +258,8 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca args += ['--depth', str(git_depth)] try: check_call_env(args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr) + if check_git_lfs(git, mirror_dir): + git_lfs_fetch(git, mirror_dir, stdout, stderr) except CalledProcessError: # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. diff --git a/news/gh-4318-Fixes-git-lfs-error.rst b/news/gh-4318-Fixes-git-lfs-error.rst new file mode 100644 index 0000000000..9c87811510 --- /dev/null +++ b/news/gh-4318-Fixes-git-lfs-error.rst @@ -0,0 +1,24 @@ +Enhancements: +------------- + +* + +Bug fixes: +---------- + +* Fixes the failing `git clone` when source has LFS files. (#4318) + +Deprecations: +------------- + +* + +Docs: +----- + +* + +Other: +------ + +* From be737d7b073948ae7ae670c297deaba8d73aeca6 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Thu, 23 Feb 2023 17:22:31 -0500 Subject: [PATCH 067/366] Use tomli or Python 3.11 tomllib (#4783) * use tomli or Python 3.11 tomllib --- conda_build/jinja_context.py | 24 +++++++++++++++++++++--- news/tomllib-tomli | 19 +++++++++++++++++++ recipe/meta.yaml | 2 +- setup.py | 2 +- tests/requirements.txt | 2 +- 5 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 news/tomllib-tomli diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 585aece94e..f742153718 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -1,7 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from functools import partial -from io import StringIO +from io import StringIO, TextIOBase import json import os import pathlib @@ -12,8 +12,11 @@ from warnings import warn import jinja2 -import toml import yaml +try: + import tomllib # Python 3.11 +except: + import tomli as tomllib from .environ import get_dict as get_environ from .utils import get_installed_packages, apply_pin_expressions, get_logger, HashableDict @@ -496,11 +499,26 @@ def resolved_packages(m, env, permit_undefined_jinja=False, return package_names +def _toml_load(stream): + """ + Load .toml from a pathname. + """ + if isinstance(stream, (TextIOBase, str)): + if isinstance(stream, TextIOBase): + data = stream.read() + else: + data = stream + return tomllib.loads(data) + + # tomllib prefers binary files + return tomllib.load(stream) + + _file_parsers = { "json": json.load, "yaml": yaml.safe_load, "yml": yaml.safe_load, - "toml": toml.load, + "toml": _toml_load } diff --git a/news/tomllib-tomli b/news/tomllib-tomli new file mode 100644 index 0000000000..0eec19abce --- /dev/null +++ b/news/tomllib-tomli @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Use tomllib (Python 3.11+) or tomli for .toml support. 
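For context, the compatibility idiom this patch introduces in conda_build/jinja_context.py (prefer the standard-library tomllib on Python 3.11+, fall back to the tomli backport, which exposes the same loads()/load() API) can be exercised on its own roughly as in the sketch below. The sketch is illustrative and not part of the patch; the patch itself guards the import with a bare except clause, and tomli must be installed separately on Python 3.10 and older.

    try:
        import tomllib  # standard library on Python 3.11+
    except ImportError:
        import tomli as tomllib  # third-party backport with the same API

    # loads() parses TOML text; load() expects a binary file object.
    config = tomllib.loads('[tool.demo]\nlevel = 19\n')
    assert config["tool"]["demo"]["level"] == 19
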
diff --git a/recipe/meta.yaml b/recipe/meta.yaml index afc694561e..8868ca77a3 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -48,7 +48,7 @@ requirements: - six - glob2 >=0.6 - pytz - - toml + - tomli # [py<311] - tqdm - conda-package-handling >=1.3 - python-libarchive-c diff --git a/setup.py b/setup.py index 52802c523a..355a64cc48 100755 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ "beautifulsoup4", "chardet", "pytz", - "toml", + "tomli", "tqdm", "psutil", "six", diff --git a/tests/requirements.txt b/tests/requirements.txt index 6daa8592f2..e5d8e6c11b 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -32,5 +32,5 @@ pytz requests ripgrep ruamel.yaml -toml +tomli tqdm From 142fd02c287c93126da0369f6f3d33e2af48e48e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 23 Feb 2023 16:46:16 -0600 Subject: [PATCH 068/366] Convert manual conda_build_test_recipe clone into a fixture (#4781) * Create conda_build_test_recipe_path fixture to clone repo * Create conda_build_test_recipe_envvar fixture to expose environment variable * Remove manual conda_build_test_recipe clone from CI --- .github/workflows/tests.yml | 21 ----- CONTRIBUTING.md | 4 - Makefile | 13 +-- conda_build/api.py | 6 +- news/4781-convert-manual-clone-fixture | 19 +++++ tests/cli/test_main_build.py | 21 +++-- tests/conftest.py | 27 +++++++ .../fail/source_git_jinja2_oops/meta.yaml | 2 +- .../_osx_is_app_missing_python_app/meta.yaml | 2 +- .../meta.yaml | 2 +- .../meta.yaml | 2 +- .../metadata/_source_setuptools/meta.yaml | 2 +- .../metadata/empty_sections/meta.yaml | 2 +- .../empty_with_build_script/meta.yaml | 2 +- .../metadata/jinja2_build_str/meta.yaml | 2 +- .../metadata/osx_is_app/meta.yaml | 2 +- .../metadata/source_multiple/meta.yaml | 2 +- .../metadata/source_path/meta.yaml | 2 +- .../metadata/source_regex/meta.yaml | 2 +- .../source_regex_from_recipe_dir/meta.yaml | 2 +- .../metadata/source_setup_py_data/meta.yaml | 2 +- .../source_setup_py_data_subdir/meta.yaml | 2 +- .../building_jinja2_direct_env_vars/meta.yaml | 2 +- .../building_jinja2_environ/meta.yaml | 2 +- .../building_jinja2_setup_py_data/meta.yaml | 2 +- .../_alternate_type_wheel/meta.yaml | 2 +- .../_git_in_output_version/meta.yaml | 2 +- tests/test_api_build.py | 80 ++++++++++++------- tests/test_api_test.py | 9 ++- tests/test_published_examples.py | 7 +- tests/test_subpackages.py | 2 +- 31 files changed, 152 insertions(+), 97 deletions(-) create mode 100644 news/4781-convert-manual-clone-fixture diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4c10e922f0..3123682447 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -88,13 +88,6 @@ jobs: path: ./src fetch-depth: 0 - - name: Checkout test recipe - uses: actions/checkout@v3 - with: - repository: conda/conda_build_test_recipe - path: ./conda_build_test_recipe - fetch-depth: 0 - - name: Timestamp run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV shell: bash @@ -199,13 +192,6 @@ jobs: path: .\src fetch-depth: 0 - - name: Checkout test recipe - uses: actions/checkout@v3 - with: - repository: conda/conda_build_test_recipe - path: .\conda_build_test_recipe - fetch-depth: 0 - - name: Timestamp run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV shell: bash @@ -315,13 +301,6 @@ jobs: path: ./src fetch-depth: 0 - - name: Checkout test recipe - uses: actions/checkout@v3 - with: - repository: conda/conda_build_test_recipe - path: ./conda_build_test_recipe - fetch-depth: 0 - - name: Timestamp run: echo "TIMESTAMP=$(date -u 
"+%Y%m")" >> $GITHUB_ENV shell: bash diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4c1c2227bf..675651e3ae 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -64,10 +64,6 @@ impact the functionality of `conda/conda-build` installed in your base environme ## Testing -Running our test suite requires cloning one other repo at the same level as `conda-build`: -https://github.com/conda/conda_build_test_recipe - this is necessary for relative path tests -outside of `conda-build`'s build tree. - Follow the installation instructions above to properly set up your environment for testing. The test suite runs with `pytest`. The following are some useful commands for running specific diff --git a/Makefile b/Makefile index f5f85b1a35..649d38b766 100644 --- a/Makefile +++ b/Makefile @@ -19,28 +19,23 @@ env-docs: .PHONY: $(MAKECMDGOALS) .PHONY: setup -setup: ../conda_build_test_recipe +setup: $(CONDA) create --name $(ENV_NAME) --file tests/requirements.txt --channel defaults python=$(PYTHON_VERSION) # Runs all tests .PHONY: test -test: ../conda_build_test_recipe $(TMPDIR) +test: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ --basetemp $(TMPDIR) # Run the serial tests .PHONY: test-serial -test-serial: ../conda_build_test_recipe $(TMPDIR) +test-serial: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ -m "serial" --basetemp $(TMPDIR) # Run the not serial tests AKA parallel tests .PHONY: test-parallel -test-parallel: ../conda_build_test_recipe $(TMPDIR) +test-parallel: $(TMPDIR) $(CONDA) run --no-capture-output -n $(ENV_NAME) python -m pytest tests/ -m "not serial" --basetemp $(TMPDIR) -# Checkout the required test recipes -# Requires write access to the directory above this -../conda_build_test_recipe: - git clone https://github.com/conda/conda_build_test_recipe ../conda_build_test_recipe - $(TMPDIR): mkdir -p $(TMPDIR) diff --git a/conda_build/api.py b/conda_build/api.py index 3b05833245..82a6e1bbbd 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -11,7 +11,8 @@ # imports are done locally to keep the api clean and limited strictly # to conda-build's functionality. - +from os.path import dirname, expanduser, join +from pathlib import Path import sys as _sys # make the Config class available in the api namespace @@ -20,7 +21,6 @@ from conda_build.utils import ensure_list as _ensure_list from conda_build.utils import expand_globs as _expand_globs from conda_build.utils import get_logger as _get_logger -from os.path import dirname, expanduser, join def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, @@ -100,7 +100,7 @@ def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, con metadata = recipe_path_or_metadata else: raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}") - elif isinstance(recipe_path_or_metadata, str): + elif isinstance(recipe_path_or_metadata, (str, Path)): # first, render the parent recipe (potentially multiple outputs, depending on variants). 
metadata = render(recipe_path_or_metadata, no_download_source=no_download_source, variants=variants, config=config, finalize=True, **kwargs) diff --git a/news/4781-convert-manual-clone-fixture b/news/4781-convert-manual-clone-fixture new file mode 100644 index 0000000000..37289bf87d --- /dev/null +++ b/news/4781-convert-manual-clone-fixture @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Eliminate test setup's manual clone of https://github.com/conda/conda_build_test_recipe in favor of a session fixture. (#4781) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 8290362849..f35fdaa4de 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -27,7 +27,7 @@ def _reset_config(search_path=None): @pytest.mark.sanity -def test_build(): +def test_build_empty_sections(conda_build_test_recipe_envvar: str): args = [ "--no-anaconda-upload", os.path.join(metadata_dir, "empty_sections"), @@ -294,7 +294,12 @@ def test_conda_py_no_period(testing_workdir, testing_metadata, monkeypatch): assert any("py36" in output for output in outputs) -def test_build_skip_existing(testing_workdir, capfd, mocker): +def test_build_skip_existing( + testing_workdir, + capfd, + mocker, + conda_build_test_recipe_envvar: str, +): # build the recipe first empty_sections = os.path.join(metadata_dir, "empty_sections") args = ["--no-anaconda-upload", empty_sections] @@ -309,7 +314,11 @@ def test_build_skip_existing(testing_workdir, capfd, mocker): assert "are already built" in output or "are already built" in error -def test_build_skip_existing_croot(testing_workdir, capfd): +def test_build_skip_existing_croot( + testing_workdir, + capfd, + conda_build_test_recipe_envvar: str, +): # build the recipe first empty_sections = os.path.join(metadata_dir, "empty_sections") args = ["--no-anaconda-upload", "--croot", testing_workdir, empty_sections] @@ -351,7 +360,7 @@ def test_activate_scripts_not_included(testing_workdir): assert not package_has_file(out, f) -def test_relative_path_croot(): +def test_relative_path_croot(conda_build_test_recipe_envvar: str): # this tries to build a package while specifying the croot with a relative path: # conda-build --no-test --croot ./relative/path @@ -364,7 +373,7 @@ def test_relative_path_croot(): assert os.path.isfile(outputfile[0]) -def test_relative_path_test_artifact(): +def test_relative_path_test_artifact(conda_build_test_recipe_envvar: str): # this test builds a package into (cwd)/relative/path and then calls: # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 @@ -386,7 +395,7 @@ def test_relative_path_test_artifact(): main_build.execute(args) -def test_relative_path_test_recipe(): +def test_relative_path_test_recipe(conda_build_test_recipe_envvar: str): # this test builds a package into (cwd)/relative/path and then calls: # conda-build --test --croot ./relative/path/ /abs/path/to/recipe diff --git a/tests/conftest.py b/tests/conftest.py index d29f92ef5e..7480bf8039 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from collections import defaultdict +from pathlib import Path import os import subprocess import sys @@ -225,3 +226,29 @@ def variants_conda_build_sysroot(monkeypatch, request): ).stdout.strip(), ) return request.param + + +@pytest.fixture(scope="session") +def conda_build_test_recipe_path(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Clone 
conda_build_test_recipe. + + This exposes the special dummy package "source code" used to test various git/svn/local recipe configurations. + """ + # clone conda_build_test_recipe locally + repo = tmp_path_factory.mktemp("conda_build_test_recipe", numbered=False) + subprocess.run( + ["git", "clone", "https://github.com/conda/conda_build_test_recipe", str(repo)], + check=True, + ) + return repo + + +@pytest.fixture +def conda_build_test_recipe_envvar( + conda_build_test_recipe_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> str: + """Exposes the cloned conda_build_test_recipe as an environment variable.""" + name = "CONDA_BUILD_TEST_RECIPE_PATH" + monkeypatch.setenv(name, conda_build_test_recipe_path) + return name diff --git a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml index 7213b08a6d..441ba67fd6 100644 --- a/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml +++ b/tests/test-recipes/fail/source_git_jinja2_oops/meta.yaml @@ -3,7 +3,7 @@ package: version: {{ GIT_DSECRIBE_TAG }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.20.2 requirements: diff --git a/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml b/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml index f48cef77a6..de141ac10e 100644 --- a/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml +++ b/tests/test-recipes/metadata/_osx_is_app_missing_python_app/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: entry_points: diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml index e913098671..44de041e88 100644 --- a/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml @@ -7,4 +7,4 @@ build: string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml index 47abc99dee..e89e55fcc5 100644 --- a/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml @@ -7,4 +7,4 @@ build: string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/_source_setuptools/meta.yaml b/tests/test-recipes/metadata/_source_setuptools/meta.yaml index 0383f2f894..0d4d1a89d4 100644 --- a/tests/test-recipes/metadata/_source_setuptools/meta.yaml +++ b/tests/test-recipes/metadata/_source_setuptools/meta.yaml @@ -13,7 +13,7 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 requirements: diff --git a/tests/test-recipes/metadata/empty_sections/meta.yaml b/tests/test-recipes/metadata/empty_sections/meta.yaml index 4b5d82b6db..e19a684f7e 100644 --- 
a/tests/test-recipes/metadata/empty_sections/meta.yaml +++ b/tests/test-recipes/metadata/empty_sections/meta.yaml @@ -3,7 +3,7 @@ package: version: 0.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: diff --git a/tests/test-recipes/metadata/empty_with_build_script/meta.yaml b/tests/test-recipes/metadata/empty_with_build_script/meta.yaml index a466377831..8ee7bf4efa 100644 --- a/tests/test-recipes/metadata/empty_with_build_script/meta.yaml +++ b/tests/test-recipes/metadata/empty_with_build_script/meta.yaml @@ -3,7 +3,7 @@ package: version: 0.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: diff --git a/tests/test-recipes/metadata/jinja2_build_str/meta.yaml b/tests/test-recipes/metadata/jinja2_build_str/meta.yaml index 2ad274b10f..97f45ae361 100644 --- a/tests/test-recipes/metadata/jinja2_build_str/meta.yaml +++ b/tests/test-recipes/metadata/jinja2_build_str/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: string: {{ PKG_BUILDNUM }}_g{{ GIT_FULL_HASH[:7] }} diff --git a/tests/test-recipes/metadata/osx_is_app/meta.yaml b/tests/test-recipes/metadata/osx_is_app/meta.yaml index c6f6a887c2..2195d740ce 100644 --- a/tests/test-recipes/metadata/osx_is_app/meta.yaml +++ b/tests/test-recipes/metadata/osx_is_app/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} build: entry_points: diff --git a/tests/test-recipes/metadata/source_multiple/meta.yaml b/tests/test-recipes/metadata/source_multiple/meta.yaml index bb35ae7356..bbd2cb4f03 100644 --- a/tests/test-recipes/metadata/source_multiple/meta.yaml +++ b/tests/test-recipes/metadata/source_multiple/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - - path: ../../../../../conda_build_test_recipe + - path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} - git_url: https://github.com/conda/conda_build_test_recipe git_tag: 1.20.2 diff --git a/tests/test-recipes/metadata/source_path/meta.yaml b/tests/test-recipes/metadata/source_path/meta.yaml index a352bf502e..3ac5ab81f8 100644 --- a/tests/test-recipes/metadata/source_path/meta.yaml +++ b/tests/test-recipes/metadata/source_path/meta.yaml @@ -3,4 +3,4 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/metadata/source_regex/meta.yaml b/tests/test-recipes/metadata/source_regex/meta.yaml index 1e1a34873d..e0f07f5527 100644 --- a/tests/test-recipes/metadata/source_regex/meta.yaml +++ b/tests/test-recipes/metadata/source_regex/meta.yaml @@ -10,7 +10,7 @@ package: version: {{ data.group(1) }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 build: diff --git a/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml index 57e002f047..931cf10b8b 100644 --- a/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml +++ b/tests/test-recipes/metadata/source_regex_from_recipe_dir/meta.yaml @@ -10,7 +10,7 @@ package: version: {{ data.group(1) }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ 
environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 build: diff --git a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml index fd76f67c43..d4a3b21e7b 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml @@ -11,7 +11,7 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 build: diff --git a/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml index 624dc3e72d..81d29feece 100644 --- a/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data_subdir/meta.yaml @@ -11,7 +11,7 @@ package: version: {{ data.get('version') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 folder: src diff --git a/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml b/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml index 5b67ea45ca..3ed3d1cf53 100644 --- a/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_direct_env_vars/meta.yaml @@ -10,4 +10,4 @@ build: string: {{ GIT_BUILD_STR }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml b/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml index f5e84cdcfd..cd32d276af 100644 --- a/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_environ/meta.yaml @@ -10,4 +10,4 @@ build: string: {{ environ.get('GIT_BUILD_STR', '') }} source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} diff --git a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml index 44f74bec88..8596e5c574 100644 --- a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml @@ -7,7 +7,7 @@ package: # source will be downloaded prior to filling in jinja templates # Example assumes that this folder has setup.py in it source: - git_url: ../../../../../conda_build_test_recipe + git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} git_tag: 1.21.0 requirements: diff --git a/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml b/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml index dda10e6be8..ba0bd54f85 100644 --- a/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml +++ b/tests/test-recipes/split-packages/_alternate_type_wheel/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} outputs: - type: wheel diff --git a/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml b/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml index 1b363535e7..8ef7c2c8d8 100644 --- 
a/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml +++ b/tests/test-recipes/split-packages/_git_in_output_version/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - path: ../../../../../conda_build_test_recipe + path: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} requirements: build: diff --git a/tests/test_api_build.py b/tests/test_api_build.py index bd80b5068f..3896b08511 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -38,10 +38,13 @@ from conda_build.conda_interface import reset_context from conda.exceptions import ClobberError, CondaMultiError -from .utils import get_valid_recipes, metadata_dir, fail_dir, add_mangling - -# define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere -empty_sections = os.path.join(metadata_dir, "empty_sections") +from .utils import ( + get_valid_recipes, + metadata_dir, + fail_dir, + add_mangling, + metadata_path, +) def represent_ordereddict(dumper, data): @@ -88,7 +91,12 @@ def describe_root(cwd=None): for recipe in get_valid_recipes(metadata_dir) ], ) -def test_recipe_builds(recipe: Path, testing_config, monkeypatch): +def test_recipe_builds( + recipe: Path, + testing_config, + monkeypatch: pytest.MonkeyPatch, + conda_build_test_recipe_envvar: str, +): # TODO: After we fix #3754 this mark can be removed. This specific test # ``source_setup_py_data_subdir`` reproduces the problem. if recipe.name == "source_setup_py_data_subdir": @@ -167,8 +175,13 @@ def test_token_upload(testing_metadata): @pytest.mark.sanity @pytest.mark.serial @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) -def test_no_anaconda_upload_condarc(service_name, testing_config, capfd): - api.build(empty_sections, config=testing_config, notest=True) +def test_no_anaconda_upload_condarc( + service_name: str, + testing_config, + capfd, + conda_build_test_recipe_envvar: str, +): + api.build(str(metadata_path / "empty_sections"), config=testing_config, notest=True) output, error = capfd.readouterr() assert "Automatic uploading is disabled" in output, error @@ -176,9 +189,11 @@ def test_no_anaconda_upload_condarc(service_name, testing_config, capfd): @pytest.mark.sanity @pytest.mark.serial @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) -def test_offline(service_name, testing_config): - with env_var('CONDA_OFFLINE', 'True', reset_context): - api.build(empty_sections, config=testing_config) +def test_offline( + service_name: str, testing_config, conda_build_test_recipe_envvar: str +): + with env_var("CONDA_OFFLINE", "True", reset_context): + api.build(str(metadata_path / "empty_sections"), config=testing_config) def test_git_describe_info_on_branch(testing_config): @@ -274,24 +289,29 @@ def test_binary_has_prefix_files_non_utf8(testing_config): api.build(os.path.join(metadata_dir, '_binary_has_utf_non_8'), config=testing_config) -def test_relative_path_git_versioning(testing_config): - # conda_build_test_recipe is a manual step. Clone it at the same level as - # your conda-build source. 
- cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', - 'conda_build_test_recipe')) - tag = describe_root(cwd) - output = api.get_output_file_path(os.path.join(metadata_dir, - "_source_git_jinja2_relative_path"), - config=testing_config)[0] +def test_relative_path_git_versioning( + testing_config, + conda_build_test_recipe_path: Path, + conda_build_test_recipe_envvar: str, +): + tag = describe_root(conda_build_test_recipe_path) + output = api.get_output_file_paths( + metadata_path / "_source_git_jinja2_relative_path", + config=testing_config, + )[0] assert tag in output -def test_relative_git_url_git_versioning(testing_config): - cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', - 'conda_build_test_recipe')) - tag = describe_root(cwd) - recipe = os.path.join(metadata_dir, "_source_git_jinja2_relative_git_url") - output = api.get_output_file_path(recipe, config=testing_config)[0] +def test_relative_git_url_git_versioning( + testing_config, + conda_build_test_recipe_path: Path, + conda_build_test_recipe_envvar: str, +): + tag = describe_root(conda_build_test_recipe_path) + output = api.get_output_file_paths( + metadata_path / "_source_git_jinja2_relative_git_url", + config=testing_config, + )[0] assert tag in output @@ -414,10 +434,12 @@ def test_jinja_typo(testing_config): @pytest.mark.sanity -def test_skip_existing(testing_config, capfd): +def test_skip_existing(testing_config, capfd, conda_build_test_recipe_envvar: str): # build the recipe first - api.build(empty_sections, config=testing_config) - api.build(empty_sections, config=testing_config, skip_existing=True) + api.build(str(metadata_path / "empty_sections"), config=testing_config) + api.build( + str(metadata_path / "empty_sections"), config=testing_config, skip_existing=True + ) output, error = capfd.readouterr() assert "are already built" in output @@ -954,7 +976,7 @@ def test_workdir_removal_warning(testing_config, caplog): @pytest.mark.sanity @pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") -def test_append_python_app_osx(testing_config): +def test_append_python_app_osx(testing_config, conda_build_test_recipe_envvar: str): """Recipes that use osx_is_app need to have python.app in their runtime requirements. conda-build will add it if it's missing.""" diff --git a/tests/test_api_test.py b/tests/test_api_test.py index 1bce60a224..ed432e66b4 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -3,7 +3,6 @@ """ This module tests the test API. These are high-level integration tests. 
""" - import os import pytest @@ -37,8 +36,12 @@ def test_package_test_without_recipe_in_package(testing_metadata): api.test(output, config=testing_metadata.config) -def test_package_with_jinja2_does_not_redownload_source(testing_config, mocker): - recipe = os.path.join(metadata_dir, 'jinja2_build_str') +def test_package_with_jinja2_does_not_redownload_source( + testing_config, + mocker, + conda_build_test_recipe_envvar: str, +): + recipe = os.path.join(metadata_dir, "jinja2_build_str") metadata = api.render(recipe, config=testing_config, dirty=True)[0][0] outputs = api.build(metadata, notest=True, anaconda_upload=False) # this recipe uses jinja2, which should trigger source download, except that source download diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index 0df93c8747..28390f28d2 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os +from pathlib import Path import sys import pytest @@ -28,7 +29,11 @@ def test_skeleton_pypi(): for recipe in get_valid_recipes(published_path) ], ) -def test_recipe_builds(recipe, testing_config): +def test_recipe_builds( + recipe: Path, + testing_config, + conda_build_test_recipe_envvar: str, +): # These variables are defined solely for testing purposes, # so they can be checked within build scripts build(str(recipe), config=testing_config) diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 569834d239..46ca19893f 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -107,7 +107,7 @@ def test_intradependencies(testing_config): outputs2_set) -def test_git_in_output_version(testing_config): +def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): recipe = os.path.join(subpackage_dir, '_git_in_output_version') outputs = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) assert len(outputs) == 1 From 31dc4f8cc856bf13583d7fb6aa797a4de73308c7 Mon Sep 17 00:00:00 2001 From: T Coxon <97948946+tttc3@users.noreply.github.com> Date: Fri, 24 Feb 2023 00:34:03 +0000 Subject: [PATCH 069/366] Updated broken example recipe links. (#4580) --- docs/source/concepts/recipe.rst | 9 +++++---- .../user-guide/recipes/sample-recipes.rst | 18 ++++++++--------- news/4580-update-broken-links | 20 +++++++++++++++++++ 3 files changed, 34 insertions(+), 13 deletions(-) create mode 100644 news/4580-update-broken-links diff --git a/docs/source/concepts/recipe.rst b/docs/source/concepts/recipe.rst index 3f287182c5..91b323324f 100644 --- a/docs/source/concepts/recipe.rst +++ b/docs/source/concepts/recipe.rst @@ -76,8 +76,8 @@ Conda-build performs the following steps: * Runs the test scripts. -The `conda-recipes`_ repo -contains example recipes for many conda packages. +The archived `conda-recipes`_ repo, `AnacondaRecipes`_ aggregate repo, +and `conda-forge`_ feedstocks repo contain example recipes for many conda packages. .. caution:: All recipe files, including ``meta.yaml`` and build @@ -129,7 +129,7 @@ The prefix will take the form:: /conda-bld//h_env_placeholder… -`Conda-forge`_ downloads your package source and then builds the conda +Conda-build downloads your package source and then builds the conda package in the context of the build environment. For example, you may direct it to download from a Git repo or pull down a tarball from another source. See the :ref:`source-section` for more information. 
@@ -212,5 +212,6 @@ components of a recipe, including: .. _`conda packages`: https://conda.io/projects/conda/en/latest/user-guide/concepts/packages.html .. _`conda-recipes`: https://github.com/continuumio/conda-recipes -.. _`Conda-forge`: https://anaconda.org/conda-forge +.. _`AnacondaRecipes`: https://github.com/AnacondaRecipes/aggregate +.. _`conda-forge`: https://github.com/conda-forge/feedstocks/tree/main/feedstocks .. _PyPI: https://pypi.python.org/pypi diff --git a/docs/source/user-guide/recipes/sample-recipes.rst b/docs/source/user-guide/recipes/sample-recipes.rst index b5b079fec9..c4d2e40649 100644 --- a/docs/source/user-guide/recipes/sample-recipes.rst +++ b/docs/source/user-guide/recipes/sample-recipes.rst @@ -7,21 +7,21 @@ that are not Python related. The first 2 sample recipes, ``boost`` and ``libtiff``, are examples of non-Python libraries, meaning they do not require Python to run or build. -* `boost `_ is an example +* `boost `_ is an example of a popular programming library and illustrates the use of selectors in a recipe. -* `libtiff `_ is +* `libtiff `_ is another example of a compiled library, which shows how conda can apply patches to source directories before building the package. -* `msgpack `_, - `blosc `_, and - `cytoolz `_ +* `msgpack `_, + `blosc `_, and + `cytoolz `_ are examples of Python libraries with extensions. -* `toolz `_, - `sympy `_, - `six `_, and - `gensim `_ are +* `toolz `_, + `sympy `_, + `six `_, and + `gensim `_ are examples of Python-only libraries. ``gensim`` works on Python 2, and all of the others work on both Python 2 and Python 3. diff --git a/news/4580-update-broken-links b/news/4580-update-broken-links new file mode 100644 index 0000000000..0c580fd993 --- /dev/null +++ b/news/4580-update-broken-links @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Updated broken links to example conda recipes and updated link to the now archived conda-recipes, with additional links to AnacondaRecipes aggregated feedstocks and conda-forge feedstocks. (#4580) + + +### Other + +* From ae841ab1ecd7f7deb713148e6554655707640c20 Mon Sep 17 00:00:00 2001 From: Peet Whittaker Date: Fri, 24 Feb 2023 00:35:43 +0000 Subject: [PATCH 070/366] Update conda-build.rst with missing args (#4662) --- .../source/resources/commands/conda-build.rst | 31 ++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/docs/source/resources/commands/conda-build.rst b/docs/source/resources/commands/conda-build.rst index 7c79ee7c7d..1a8f39dab7 100644 --- a/docs/source/resources/commands/conda-build.rst +++ b/docs/source/resources/commands/conda-build.rst @@ -145,6 +145,9 @@ conda-build Run the post-build logic. Implies --no-test and --noana- conda-upload. + -p, --test-run-post + Run the post-build logic during testing. + --skip-existing Skip recipes for which there already exists an existing build (locally or in the channels). @@ -176,20 +179,36 @@ conda-build Disable force upload to anaconda.org, preventing overwriting any existing packages + --zstd-compression-level {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22} + When building v2 packages, set the compression level used by + conda-package-handling. Defaults to 19. Note that using levels + above 19 is not advised due to high memory consumption. + --no-activate do not activate the build and test envs; just prepend to PATH --no-build-id do not generate unique build folder names. Use if having issues - with paths being too long. 
+ with paths being too long. Deprecated, please use --build-id-pat + instead + + --build-id-pat BUILD_ID_PAT + specify a templated pattern to use as build folder names. Use if + having issues with paths being too long. --croot CROOT Build root folder. Equivalent to CONDA_BLD_PATH, but applies only to this call of conda-build. --verify + run verification on recipes or packages when building + + --no-verify do not run verification on recipes or packages when building + --strict-verify + Exit if any conda-verify check fail, instead of only printing them + --output-folder OUTPUT_FOLDER folder to dump output package to. Package are moved here if build or test succeeds. Destination folder must exist prior to @@ -236,6 +255,16 @@ conda-build built packages. This is currently the default behavior, but will change in conda-build 4.0. + --error-overdepending + Enable error when packages with names beginning lib or which have + run_exports are not auto-loaded by the OSes DSO loading mechanism + by any of the files in this package. + + --no-error-overdepending + Disable error when packages with names beginning lib or which have + run_exports are not auto-loaded by the OSes DSO loading mechanism + by any of the files in this package. + --long-test-prefix Use a long prefix for the test prefix, as well as the build pre- fix. Affects only Linux and Mac. Prefix length matches the From c7e667fb526468ff0d88a42093f7cfc58bf70b43 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Fri, 24 Feb 2023 03:53:29 +0100 Subject: [PATCH 071/366] =?UTF-8?q?=F0=9F=94=84=20Synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4785)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/project.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index fc5a9a3600..d0f01160b9 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -15,5 +15,7 @@ jobs: steps: - uses: actions/add-to-project@v0.3.0 with: - project-url: https://github.com/orgs/conda/projects/2 + # issues are added to the Planning project + # PRs are added to the Review project + project-url: https://github.com/orgs/conda/projects/${{ github.event_name == 'issues' && 2 || 16 }} github-token: ${{ secrets.PROJECT_TOKEN }} From 3e94bd248c2e8cf617bbe68cdd2a7a3531d948e7 Mon Sep 17 00:00:00 2001 From: Daniel Bast <2790401+dbast@users.noreply.github.com> Date: Fri, 24 Feb 2023 19:27:20 +0100 Subject: [PATCH 072/366] Re-enable coverage reporting to codecov (#4767) * Re-enable coverage reporting to codecov Allows so see the current test coverage (or untested / potential dead code). The action `codecov/codecov-action@v3` requries no secrete for public repos. The repo already contains a .codecov.yml that disables PR comments and the README already contains a codecov badge, so no further steps required to get this enabled again. 
* News * Measure branch coverage additionally to line coverage --------- Co-authored-by: Ken Odegard --- .github/workflows/tests.yml | 15 +++++++++++++++ news/4767-reenable-coverage-reporting | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 news/4767-reenable-coverage-reporting diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3123682447..e00dcf85b2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -128,6 +128,7 @@ jobs: --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ --cov conda_build \ --cov-append \ + --cov-branch \ --cov-report xml \ --replay-record-dir="${{ env.REPLAY_DIR }}" \ --replay-base-name="${{ env.REPLAY_NAME }}" \ @@ -135,6 +136,10 @@ jobs: -m "${{ env.PYTEST_MARKER }}" \ ./tests + - uses: codecov/codecov-action@v3 + with: + flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64 + - name: Tar Allure Results if: always() run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" @@ -233,6 +238,7 @@ jobs: --basetemp "${{ runner.temp }}\${{ matrix.test-type}}" ` --cov conda_build ` --cov-append ` + --cov-branch ` --cov-report xml ` --replay-record-dir="${{ env.REPLAY_DIR }}" ` --replay-base-name="${{ env.REPLAY_NAME }}" ` @@ -240,6 +246,10 @@ jobs: -m "${{ env.PYTEST_MARKER }}" ` .\tests + - uses: codecov/codecov-action@v3 + with: + flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64 + - name: Tar Allure Results if: always() run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" @@ -342,6 +352,7 @@ jobs: --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ --cov conda_build \ --cov-append \ + --cov-branch \ --cov-report xml \ --replay-record-dir="${{ env.REPLAY_DIR }}" \ --replay-base-name="${{ env.REPLAY_NAME }}" \ @@ -349,6 +360,10 @@ jobs: -m "${{ env.PYTEST_MARKER }}" \ ./tests + - uses: codecov/codecov-action@v3 + with: + flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64 + - name: Tar Allure Results if: always() run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" diff --git a/news/4767-reenable-coverage-reporting b/news/4767-reenable-coverage-reporting new file mode 100644 index 0000000000..8c9b433a90 --- /dev/null +++ b/news/4767-reenable-coverage-reporting @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Re-enable code coverage reporting to codecov. 
(#4767) From 2b1d02ad0e5f0e8e6de47743ca506f387801a14e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 27 Feb 2023 11:27:09 -0600 Subject: [PATCH 073/366] Remove src working directory for CI (#4784) --- .github/workflows/tests.yml | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e00dcf85b2..ab9ef39801 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -58,7 +58,6 @@ jobs: defaults: run: shell: bash -l {0} - working-directory: ./src strategy: fail-fast: false matrix: @@ -85,7 +84,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 with: - path: ./src fetch-depth: 0 - name: Timestamp @@ -101,7 +99,7 @@ jobs: - name: Setup miniconda uses: conda-incubator/setup-miniconda@v2 with: - condarc-file: ./src/ci/github/.condarc + condarc-file: ./ci/github/.condarc python-version: ${{ matrix.python-version }} run-post: false # skip post cleanup @@ -165,9 +163,6 @@ jobs: if: needs.changes.outputs.code == 'true' runs-on: windows-2019 - defaults: - run: - working-directory: .\src strategy: fail-fast: false matrix: @@ -194,7 +189,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 with: - path: .\src fetch-depth: 0 - name: Timestamp @@ -210,7 +204,7 @@ jobs: - name: Setup miniconda uses: conda-incubator/setup-miniconda@v2 with: - condarc-file: .\src\ci\github\.condarc + condarc-file: .\ci\github\.condarc python-version: ${{ matrix.python-version }} run-post: false # skip post cleanup @@ -281,7 +275,6 @@ jobs: defaults: run: shell: bash -l {0} - working-directory: ./src strategy: fail-fast: false matrix: @@ -308,7 +301,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 with: - path: ./src fetch-depth: 0 - name: Timestamp @@ -324,7 +316,7 @@ jobs: - name: Setup miniconda uses: conda-incubator/setup-miniconda@v2 with: - condarc-file: ./src/ci/github/.condarc + condarc-file: ./ci/github/.condarc python-version: ${{ matrix.python-version }} run-post: false # skip post cleanup From c91ac132151de449f89857712303b7f8c95ea3d1 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Fri, 3 Mar 2023 16:21:17 -0500 Subject: [PATCH 074/366] Remove unnecessary reassignments and unused vars (#4790) * Remove unnecessary reassignments and unused vars * Update conda_build/conda_interface.py Co-authored-by: Ken Odegard --- conda_build/conda_interface.py | 206 ++++++++++++++------------------- 1 file changed, 85 insertions(+), 121 deletions(-) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 733dc31090..b21ee1007c 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -4,123 +4,93 @@ from functools import partial import os -from importlib import import_module +from importlib import import_module # noqa: F401 import warnings -from conda import __version__ as CONDA_VERSION +from conda import __version__ as CONDA_VERSION # noqa: F401 -CONDA_VERSION = CONDA_VERSION - - -def try_exports(module, attr): - # this assumes conda.exports exists, so only use for conda 4.3 onward - try: - return getattr(import_module('conda.exports'), attr) - except AttributeError: - return getattr(import_module(module), attr) - - -try: - # This monkey patch is addressed at #1825. The ensure_use_local is an outdated vestige - # and no longer has any relevant effect. 
- import conda.cli.common - conda.cli.common.ensure_use_local = lambda x: None -except ImportError: - # no need to patch if it doesn't exist - pass - -# All of these conda's are older than our minimum dependency -conda_43 = True -conda_44 = True -conda_45 = True -conda_46 = True -conda_47 = True -conda_48 = True -conda_411 = True - -from conda.exports import ( # noqa: E402 +from conda.exports import ( # noqa: F401 + Channel, display_actions, execute_actions, execute_plan, install_actions, ) -display_actions, execute_actions, execute_plan = display_actions, execute_actions, execute_plan -install_actions = install_actions - -from conda.exports import _toposort # NOQA - -_toposort = _toposort - -from conda.auxlib.packaging import _get_version_from_git_tag # NOQA - -get_version_from_git_tag = _get_version_from_git_tag - -from conda.exports import TmpDownload, download, handle_proxy_407 # NOQA -from conda.exports import untracked, walk_prefix # NOQA -from conda.exports import MatchSpec, NoPackagesFound, Resolve, Unsatisfiable, normalized_version # NOQA -from conda.exports import human_bytes, hashsum_file, md5_file, memoized, unix_path_to_win, win_path_to_unix, url_path # NOQA -from conda.exports import get_index # NOQA -from conda.exports import (Completer, InstalledPackages, add_parser_channels, # NOQA - add_parser_prefix, # NOQA - specs_from_args, spec_from_line, specs_from_url) # NOQA -from conda.exports import ArgumentParser # NOQA -from conda.exports import (is_linked, linked, linked_data, prefix_placeholder, # NOQA - rm_rf, symlink_conda, package_cache) # NOQA -from conda.exports import CondaSession # NOQA -from conda.exports import (StringIO, input, lchmod, # NOQA - TemporaryDirectory) # NOQA -from conda.exports import VersionOrder # NOQA - - -TmpDownload = TmpDownload -download, handle_proxy_407, untracked, walk_prefix = download, handle_proxy_407, untracked, walk_prefix # NOQA -MatchSpec, Resolve, normalized_version = MatchSpec, Resolve, normalized_version -human_bytes, hashsum_file, md5_file, memoized = human_bytes, hashsum_file, md5_file, memoized -unix_path_to_win, win_path_to_unix, url_path = unix_path_to_win, win_path_to_unix, url_path -get_index, Completer, InstalledPackages = get_index, Completer, InstalledPackages -add_parser_channels, add_parser_prefix = add_parser_channels, add_parser_prefix -specs_from_args, spec_from_line, specs_from_url = specs_from_args, spec_from_line, specs_from_url -is_linked, linked, linked_data, prefix_placeholder = is_linked, linked, linked_data, prefix_placeholder # NOQA -rm_rf, symlink_conda, package_cache = rm_rf, symlink_conda, package_cache -input, lchmod = input, lchmod -TemporaryDirectory = TemporaryDirectory -ArgumentParser, CondaSession, VersionOrder = ArgumentParser, CondaSession, VersionOrder - - -from conda.core.package_cache import ProgressiveFetchExtract # NOQA -from conda.models.dist import Dist, IndexRecord # NOQA - -ProgressiveFetchExtract = ProgressiveFetchExtract -Dist, IndexRecord = Dist, IndexRecord +from conda.exports import _toposort # noqa: F401 -import configparser # NOQA -configparser = configparser - - -from conda.exports import FileMode, PathType # NOQA -FileMode, PathType = FileMode, PathType -from conda.exports import EntityEncoder # NOQA - -EntityEncoder, FileMode, PathType = EntityEncoder, FileMode, PathType - - -CondaError = try_exports("conda.exceptions", "CondaError") -CondaHTTPError = try_exports("conda.exceptions", "CondaHTTPError") -LinkError = try_exports("conda.exceptions", "LinkError") -LockError = 
try_exports("conda.exceptions", "LockError") -NoPackagesFoundError = try_exports("conda.exceptions", "NoPackagesFoundError") -PaddingError = try_exports("conda.exceptions", "PaddingError") -UnsatisfiableError = try_exports("conda.exceptions", "UnsatisfiableError") +from conda.auxlib.packaging import ( # noqa: F401 + _get_version_from_git_tag as get_version_from_git_tag, +) -non_x86_linux_machines = try_exports("conda.base.context", "non_x86_linux_machines") -context = try_exports("conda.base.context", "context") -context_get_prefix = try_exports("conda.base.context", "get_prefix") -reset_context = try_exports("conda.base.context", "reset_context") -get_conda_build_local_url = try_exports("conda.models.channel", "get_conda_build_local_url") +from conda.exports import TmpDownload, download, handle_proxy_407 # noqa: F401 +from conda.exports import untracked, walk_prefix # noqa: F401 +from conda.exports import ( # noqa: F401 + MatchSpec, + NoPackagesFound, + Resolve, + Unsatisfiable, + normalized_version, +) +from conda.exports import ( # noqa: F401 + human_bytes, + hashsum_file, + md5_file, + memoized, + unix_path_to_win, + win_path_to_unix, + url_path, +) +from conda.exports import get_index # noqa: F401 +from conda.exports import ( # noqa: F401 + Completer, + InstalledPackages, + add_parser_channels, + add_parser_prefix, + specs_from_args, + spec_from_line, + specs_from_url, +) +from conda.exports import ArgumentParser # noqa: F401 +from conda.exports import ( # noqa: F401 + is_linked, + linked, + linked_data, + prefix_placeholder, + rm_rf, + symlink_conda, + package_cache, +) +from conda.exports import CondaSession # noqa: F401 +from conda.exports import StringIO, input, lchmod, TemporaryDirectory # noqa: F401 +from conda.exports import VersionOrder # noqa: F401 + +from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 +from conda.models.dist import Dist, IndexRecord # noqa: F401 + +import configparser # noqa: F401 + +from conda.exports import FileMode, PathType # noqa: F401 +from conda.exports import EntityEncoder # noqa: F401 +from conda.exceptions import ( # noqa: F401 + CondaError, + CondaHTTPError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, +) +from conda.base.context import ( # noqa: F401 + non_x86_machines as non_x86_linux_machines, + context, + get_prefix as context_get_prefix, + reset_context, +) +from conda.models.channel import get_conda_build_local_url # noqa: F401 +# TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload -bits = context.bits default_python = context.default_python envs_dirs = context.envs_dirs pkgs_dirs = list(context.pkgs_dirs) @@ -134,21 +104,12 @@ def try_exports(module, attr): get_prefix = partial(context_get_prefix, context) cc_conda_build = context.conda_build if hasattr(context, 'conda_build') else {} -from conda.exports import Channel # NOQA get_conda_channel = Channel.from_value -# disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. +# Disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. 
os.environ['CONDA_ALLOW_SOFTLINKS'] = 'false' reset_context() -get_local_urls = lambda: list(get_conda_build_local_url()) or [] -arch_name = context.arch_name - - -CondaError, CondaHTTPError, get_prefix, LinkError = CondaError, CondaHTTPError, get_prefix, LinkError # NOQA -LockError, non_x86_linux_machines, NoPackagesFoundError = LockError, non_x86_linux_machines, NoPackagesFoundError # NOQA -PaddingError, UnsatisfiableError = PaddingError, UnsatisfiableError - class CrossPlatformStLink: def __call__(self, path: str | os.PathLike) -> int: @@ -165,12 +126,13 @@ def st_nlink(cls, path: str | os.PathLike) -> int: class SignatureError(Exception): + # TODO: What is this? 🤔 pass def which_package(path): """ - given the path (of a (presumably) conda installed file) iterate over + Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. """ @@ -187,7 +149,7 @@ def which_package(path): def which_prefix(path): """ - given the path (to a (presumably) conda installed file) return the + Given the path (to a (presumably) conda installed file) return the environment prefix in which the file in located """ from os.path import abspath, join, isdir, dirname @@ -195,7 +157,7 @@ def which_prefix(path): iteration = 0 while iteration < 20: if isdir(join(prefix, 'conda-meta')): - # we found the it, so let's return it + # we found it, so let's return it break if prefix == dirname(prefix): # we cannot chop off any more directories, so we didn't find it @@ -207,8 +169,10 @@ def which_prefix(path): def get_installed_version(prefix, pkgs): - """Primarily used by conda-forge, but may be useful in general for checking when a package - needs to be updated""" + """ + Primarily used by conda-forge, but may be useful in general for checking when + a package needs to be updated + """ from conda_build.utils import ensure_list pkgs = ensure_list(pkgs) linked_pkgs = linked(prefix) @@ -220,6 +184,6 @@ def get_installed_version(prefix, pkgs): return versions -# when deactivating envs (e.g. switching from root to build/test) this env var is used, -# except the PR that removed this has been reverted (for now) and Windows doesnt need it. +# When deactivating envs (e.g. switching from root to build/test) this env var is used, +# except the PR that removed this has been reverted (for now) and Windows doesn't need it. 
env_path_backup_var_exists = os.environ.get('CONDA_PATH_BACKUP', None) From e69d3ef80a19ed44162fc6329233e447077b2c45 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Mar 2023 11:35:47 -0600 Subject: [PATCH 075/366] [pre-commit.ci] pre-commit autoupdate (#4794) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/pylint: v2.16.2 → v2.16.4](https://github.com/PyCQA/pylint/compare/v2.16.2...v2.16.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c46d27a68d..51b8c05c6a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.16.2 + rev: v2.16.4 hooks: - id: pylint args: [--exit-zero] From ab1d12470deadc05b8dfcf9777691bcfd2b51ec2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20K=C3=BChnl?= Date: Tue, 7 Mar 2023 22:37:07 +0100 Subject: [PATCH 076/366] Remove false-positives in the list of Perl core modules (#4592) * Remove false-positives in the list of Perl core modules * test: Add CPAN skeleton test for Perl core modules --------- Co-authored-by: Felix Kuehnl Co-authored-by: Marcel Bargull --- conda_build/skeletons/cpan.py | 4 +++- news/4592-perl-core-module-detection | 20 ++++++++++++++++++++ tests/test_cpan_skeleton.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 news/4592-perl-core-module-detection create mode 100644 tests/test_cpan_skeleton.py diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 94a1efa5c7..b7cb2275d7 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -325,7 +325,9 @@ def install_perl_get_core_modules(version): with TemporaryDirectory() as tmpdir: environ.create_env(tmpdir, [f'perl={version}'], env='host', config=config, subdir=subdirs[0]) args = [f'{join(tmpdir, *subdirs[1:])}', '-e', - 'use Module::CoreList; print join "\n", Module::CoreList->find_modules(qr/.*/);'] + 'use Module::CoreList; ' + 'my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); ' + 'print join "\n", @modules;'] from subprocess import check_output all_core_modules = check_output(args, shell=False).decode('utf-8').replace('\r\n', '\n').split('\n') return all_core_modules diff --git a/news/4592-perl-core-module-detection b/news/4592-perl-core-module-detection new file mode 100644 index 0000000000..222844d97f --- /dev/null +++ b/news/4592-perl-core-module-detection @@ -0,0 +1,20 @@ + +### Enhancements + +* + +### Bug fixes + +* fix many false-positives during the detection of Perl core modules in `conda skeleton cpan` + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_cpan_skeleton.py b/tests/test_cpan_skeleton.py new file mode 100644 index 0000000000..89a6bb49cf --- /dev/null +++ b/tests/test_cpan_skeleton.py @@ -0,0 +1,28 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Unit tests of the CPAN skeleton utility functions +""" + + +from pathlib import Path + +import pytest + +from conda_build.variants import get_default_variant +from conda_build.skeletons.cpan import get_core_modules_for_this_perl_version + + +@pytest.mark.slow +def 
test_core_modules(testing_config): + """ + Check expected core modules are recognized + (excluding known removed ones, e.g., Module::Build) + """ + cache_dir = Path(testing_config.src_cache_root, ".conda-build", "pickled.cb") + perl_version = testing_config.variant.get( + "perl", get_default_variant(testing_config)["perl"] + ) + core_modules = get_core_modules_for_this_perl_version(perl_version, str(cache_dir)) + assert "Config" in core_modules + assert "Module::Build" not in core_modules From f7e8bc358fe10bc3d60b1e2f2b88797633abb652 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20K=C3=BChnl?= Date: Tue, 7 Mar 2023 22:45:52 +0100 Subject: [PATCH 077/366] Add C compiler dep when `.xs` files are found. (#4599) * Added `.xs` to the list of extensions that trigger the addition of a C compiler app for Perl recipes created by conda skeleton. * test: Add CPAN skeleton test for XS --------- Co-authored-by: Felix Kuehnl Co-authored-by: Marcel Bargull --- conda_build/skeletons/cpan.py | 2 +- news/4599-add-cpan-xs-detection | 19 +++++++++++++++++++ tests/test_api_skeleton_cpan.py | 23 +++++++++++++++++++++++ 3 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 news/4599-add-cpan-xs-detection create mode 100644 tests/test_api_skeleton_cpan.py diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index b7cb2275d7..bbde883050 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -205,7 +205,7 @@ def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77', '.f95', '.f03')) for f in tf]) # Fortran builds use CC to perform the link (they do not call the linker directly). need_c = True if need_f else \ - any([f.name.lower().endswith('.c') for f in tf]) + any([f.name.lower().endswith(('.c', '.xs')) for f in tf]) need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) for f in tf]) need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) diff --git a/news/4599-add-cpan-xs-detection b/news/4599-add-cpan-xs-detection new file mode 100644 index 0000000000..c0938e4785 --- /dev/null +++ b/news/4599-add-cpan-xs-detection @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* conda skeleton cpan now correctly adds a C compiler as dependency if the distribution contains an `.xs` file + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py new file mode 100644 index 0000000000..815cb43522 --- /dev/null +++ b/tests/test_api_skeleton_cpan.py @@ -0,0 +1,23 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +''' +Integrative tests of the CPAN skeleton that start from +conda_build.api.skeletonize and check the output files +''' + + +import pytest + +from conda_build import api +from conda_build.jinja_context import compiler + + +@pytest.mark.slow +@pytest.mark.flaky(rerun=5, reruns_delay=2) +def test_xs_needs_c_compiler(testing_config): + """Perl packages with XS files need a C compiler""" + # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. 
+ api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) + m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][0] + build_requirements = m.get_value("requirements/build") + assert compiler("c", testing_config) in build_requirements From 4a7f632f5e3f79288d494526025f18fdda82c743 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 7 Mar 2023 23:27:14 +0100 Subject: [PATCH 078/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4789)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/issues.yml | 10 +++------- .github/workflows/labels.yml | 6 +++--- .github/workflows/project.yml | 2 +- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 6c4eea15e0..b4e6de62f3 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -11,19 +11,15 @@ env: SUPPORT_LBL: pending::support jobs: - # NOTE: doesn't catch cases where multiple users act as the author/reporter, - # this is just an effort to catch the majority of support cases + # NOTE: will update label if anyone responds, not just the author/reporter # TODO: create conda-issue-sorting team and modify this to toggle label based on # whether a non-issue-sorting engineer commented pending_support: - # if [pending::feedback] and the author responds + # if [pending::feedback] and anyone responds if: >- !github.event.repository.fork - && github.event_name == 'issue_comment' - && github.event.action == 'created' && !github.event.issue.pull_request && contains(github.event.issue.labels.*.name, 'pending::feedback') - && github.event.issue.user.login == github.event.comment.user.login runs-on: ubuntu-latest steps: # remove [pending::feedback] @@ -31,7 +27,7 @@ jobs: with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} - # add [pending::feedback], if still open + # add [pending::support], if still open - uses: actions-ecosystem/action-add-labels@v1.1.0 if: github.event.issue.state == 'open' with: diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 072572a709..b1eb9db1ec 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -21,18 +21,18 @@ jobs: steps: - uses: actions/checkout@v3 - id: has_local - uses: andstor/file-existence-action@v1.0.1 + uses: andstor/file-existence-action@v2.0.0 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@v2.3.0 + uses: EndBug/label-sync@v2.3.2 if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@v2.3.0 + uses: EndBug/label-sync@v2.3.2 if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index d0f01160b9..8c3f9f87ad 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -13,7 +13,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v0.3.0 + - uses: actions/add-to-project@v0.4.1 with: # issues are added to the Planning project # PRs are added to the Review project From 6aafd4a370b093ca22cf6fe7d52b2927ef049d47 Mon Sep 17 00:00:00 2001 From: Min RK Date: Tue, 7 Mar 2023 23:28:31 
+0100 Subject: [PATCH 079/366] Document build.force_use_keys and build.force_ignore_keys (#4776) Co-authored-by: Bianca Henderson --- docs/source/resources/define-metadata.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 13981d868c..59d9444e51 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -312,6 +312,22 @@ OR * package uses {{ compiler() }} jinja2 function +You can also influence which variables are considered for the hash with: + +.. code-block:: yaml + + build: + force_use_keys: + - package_1 + force_ignore_keys: + - package_2 + +This will ensure that the value of ``package_2`` will *not* be considered for the hash, +and ``package_1`` *will* be, regardless of what conda-build discovers is used by its inspection. + +This may be useful to further split complex multi-output builds, to ensure each package is built, +or to ensure the right package hash when using more complex templating or scripting. + Python entry points ------------------- From 52983545c79eb0b3213511ced84a4831021cdfbc Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 10 Mar 2023 17:17:13 +0100 Subject: [PATCH 080/366] Drop Python 3.7 support. (#4797) --- .github/workflows/tests.yml | 6 +++--- conda_build/cli/main_render.py | 2 +- conda_build/skeletons/pypi.py | 2 +- news/4797-drop-python37-support | 19 +++++++++++++++++++ setup.py | 2 -- tests/test_index.py | 1 - 6 files changed, 24 insertions(+), 8 deletions(-) create mode 100644 news/4797-drop-python37-support diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ab9ef39801..da7637be2a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -62,7 +62,7 @@ jobs: fail-fast: false matrix: # test all lower versions (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.7', '3.8', '3.9'] + python-version: ['3.8', '3.9'] conda-version: [release] test-type: [serial, parallel] include: @@ -167,7 +167,7 @@ jobs: fail-fast: false matrix: # test lower version (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.7'] + python-version: ['3.8'] conda-version: [release] test-type: [serial, parallel] include: @@ -279,7 +279,7 @@ jobs: fail-fast: false matrix: # test lower version (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.7'] + python-version: ['3.8'] conda-version: [release] test-type: [serial, parallel] include: diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index b2280db449..72973a1802 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -143,7 +143,7 @@ def get_render_parser(): nargs=1, action=ParseYAMLArgument, help=('Variants to extend the build matrix. Must be a valid YAML instance, ' - 'such as "{python: [3.6, 3.7]}"')) + 'such as "{python: [3.8, 3.9]}"')) add_parser_channels(p) return p diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 61f5baafb0..765751ef19 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -489,7 +489,7 @@ def add_parser(repos): action='store', default=default_python, help="""Version of Python to use to run setup.py. 
Default is %(default)s.""", - choices=['2.7', '3.5', '3.6', '3.7', '3.8'], + choices=['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11'], ) pypi.add_argument( diff --git a/news/4797-drop-python37-support b/news/4797-drop-python37-support new file mode 100644 index 0000000000..7f68466549 --- /dev/null +++ b/news/4797-drop-python37-support @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Drop Python 3.7 support. (#4796) + +### Docs + +* + +### Other + +* diff --git a/setup.py b/setup.py index 355a64cc48..5ee01e6cf1 100755 --- a/setup.py +++ b/setup.py @@ -51,8 +51,6 @@ "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10" diff --git a/tests/test_index.py b/tests/test_index.py index 990b9d18a7..4375cfe7b5 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -606,7 +606,6 @@ def _build_test_index(workdir): """ Copy repodata.json, packages to workdir for testing. """ - # Python 3.7 workaround "no dirs_exist_ok flag" index_hotfix_pkgs = join(here, "index_hotfix_pkgs") for path in os.scandir(index_hotfix_pkgs): From bd49d736019d3450d99aca21fd46f9e1b959a2a4 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Mon, 13 Mar 2023 04:16:31 -0700 Subject: [PATCH 081/366] Drop dependency on Setuptools (#4443) Instead of relying on `setuptools` to get functionality from `pkg_resources` (which is slowly being refactored away), leverage `packaging` for the functionality used by Conda-Build. Co-authored-by: Jannis Leidel --- .pre-commit-config.yaml | 1 + conda_build/skeletons/cpan.py | 2 +- conda_build/skeletons/pypi.py | 2 +- conda_build/variants.py | 2 +- conda_build/version.py | 160 ++++++++++++++++++++++++++++++++++ recipe/meta.yaml | 2 +- setup.py | 2 +- tests/test_api_skeleton.py | 2 +- 8 files changed, 167 insertions(+), 6 deletions(-) create mode 100644 conda_build/version.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 51b8c05c6a..f2d1cc76da 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -73,3 +73,4 @@ repos: - id: insert-license files: \.py$ args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] + exclude: ^conda_build/version.py diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index bbde883050..a1c53735dc 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -7,7 +7,6 @@ import codecs import hashlib -from pkg_resources import parse_version from glob import glob import gzip import json @@ -28,6 +27,7 @@ from conda_build.config import get_or_merge_config from conda_build.utils import on_win, check_call_env from conda_build.variants import get_default_variant +from conda_build.version import _parse as parse_version import requests from conda_build import environ diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 765751ef19..152be0b164 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -12,7 +12,7 @@ from os import makedirs, listdir, getcwd, chdir from os.path import join, isdir, exists, isfile, abspath -from pkg_resources import parse_version +from conda_build.version import _parse as parse_version import re from shutil import copy2 import subprocess diff --git a/conda_build/variants.py b/conda_build/variants.py index 
19c1e87a5f..6e1295459c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -8,7 +8,6 @@ from functools import lru_cache from itertools import product import os.path -from pkg_resources import parse_version import re import sys @@ -16,6 +15,7 @@ from conda_build.conda_interface import subdir from conda_build.conda_interface import cc_conda_build +from conda_build.version import _parse as parse_version from conda_build.utils import ensure_list, get_logger, islist, on_win, trim_empty_keys DEFAULT_VARIANTS = { diff --git a/conda_build/version.py b/conda_build/version.py new file mode 100644 index 0000000000..880d0d2c8a --- /dev/null +++ b/conda_build/version.py @@ -0,0 +1,160 @@ +# Copyright (C) Donald Stufft and individual contributors +# SPDX-License-Identifier: BSD-2-Clause +""" +This file was partially copied from the packaging.version module before the +LegacyVersion class was removed to continue to support version parsing in +a backward-compatible way where PEP 440 support can't be used. + +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import re +from typing import Iterator, List, Tuple, Union +from packaging.version import _BaseVersion, Version, InvalidVersion + +LegacyCmpKey = Tuple[int, Tuple[str, ...]] + + +def _parse(version: str) -> Union["_LegacyVersion", "Version"]: + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`_LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. 
+ """ + try: + return Version(version) + except InvalidVersion: + return _LegacyVersion(version) + + +class _LegacyVersion(_BaseVersion): + + def __init__(self, version: str) -> None: + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self) -> str: + return self._version + + def __repr__(self) -> str: + return f"<_LegacyVersion('{self}')>" + + @property + def public(self) -> str: + return self._version + + @property + def base_version(self) -> str: + return self._version + + @property + def epoch(self) -> int: + return -1 + + @property + def release(self) -> None: + return None + + @property + def pre(self) -> None: + return None + + @property + def post(self) -> None: + return None + + @property + def dev(self) -> None: + return None + + @property + def local(self) -> None: + return None + + @property + def is_prerelease(self) -> bool: + return False + + @property + def is_postrelease(self) -> bool: + return False + + @property + def is_devrelease(self) -> bool: + return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s: str) -> Iterator[str]: + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version: str) -> LegacyCmpKey: + + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. 
+ parts: List[str] = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return epoch, tuple(parts) diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 8868ca77a3..67bab3e894 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -34,6 +34,7 @@ requirements: - filelock - futures # [py<3] - jinja2 + - packaging - patchelf # [linux] - patch >=2.6 # [not win] - m2-patch >=2.6 # [win] @@ -44,7 +45,6 @@ requirements: - pyyaml - requests - scandir # [py<34] - - setuptools - six - glob2 >=0.6 - pytz diff --git a/setup.py b/setup.py index 5ee01e6cf1..2cb1d8ea56 100755 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ "psutil", "six", "libarchive-c", - "setuptools", + "packaging", # "conda-package-handling", # remove comment once released on PyPI "glob2", ] diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index ab54d16278..645d01039a 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -7,7 +7,7 @@ import subprocess import sys -from pkg_resources import parse_version +from conda_build.version import _parse as parse_version import pytest import ruamel.yaml From 6c0e065e99c23afcc9764e05bff42c70cc413e07 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 13 Mar 2023 17:24:00 +0100 Subject: [PATCH 082/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4800)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- HOW_WE_USE_GITHUB.md | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index e9cf001e64..491e92aa04 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -6,7 +6,7 @@ [project-sorting]: https://github.com/orgs/conda/projects/2/views/11 [project-support]: https://github.com/orgs/conda/projects/2/views/12 [project-backlog]: https://github.com/orgs/conda/projects/2/views/13 -[project-sprint]: https://github.com/orgs/conda/projects/2/views/14 +[project-in-progress]: https://github.com/orgs/conda/projects/2/views/14 [docs-toc]: https://github.blog/changelog/2021-04-13-table-of-contents-support-in-markdown-files/ [docs-actions]: https://docs.github.com/en/actions @@ -58,8 +58,8 @@ flowchart LR board_backlog-- refine -->board_backlog end - subgraph flow_sprint [Sprint] - board_sprint{{Sprint}} + subgraph flow_progress [In Progress] + board_progress{{In Progress}} end state_new(New Issues) @@ -69,9 +69,9 @@ flowchart LR board_sorting-- investigated -->board_backlog board_sorting-- duplicates, off-topic -->state_closed board_support-- resolved, unresponsive -->state_closed - board_backlog-- pending work -->board_sprint + board_backlog-- pending work -->board_progress board_backlog-- resolved, irrelevant -->state_closed - board_sprint-- resolved -->state_closed + board_progress-- resolved -->state_closed ``` In order to explain how various `conda` issues are evaluated, the following document will provide information about our sorting process in the form of an FAQ. 
@@ -129,7 +129,7 @@ The additional tabs in the project board that the issues can be moved to include All sorted issues will be reviewed by sorting engineers during a weekly Refinement meeting in order to understand how those particular issues fit into the short- and long-term roadmap of `conda`. These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests. -Once issues are deemed ready to be worked on, they will be moved to the [`conda` Backlog tab of the Planning board][project-backlog] on GitHub. Once actively in progress, the issues will be moved to the [Sprint tab of the Planning board][project-sprint] and then closed out once the work is complete. +Once issues are deemed ready to be worked on, they will be moved to the [`conda` Backlog tab of the Planning board][project-backlog] on GitHub. Once actively in progress, the issues will be moved to the [In Progress tab of the Planning board][project-in-progress] and then closed out once the work is complete. #### What is the purpose of having a "Backlog"? @@ -137,15 +137,6 @@ Once issues are deemed ready to be worked on, they will be moved to the [`conda` Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release. Weekly Refinement meetings are a time when the `conda` engineers will transition issues from "[Sorting][project-sorting]" to "[Backlog][project-backlog]". Additionally, this time of handoff will include discussions around the kind of issues that were raised, which provides an opportunity to identify any patterns that may point to a larger problem. -#### What is the purpose of a "development sprint"? - -After issues have been sorted and backlogged, they will eventually be moved into the "Sprint Candidate", "Short-Term", "Medium-Term", "Long-Term", or "No Time Frame" sections of the [Backlog tab of the Planning board][project-backlog] and get one or more sprint cycles dedicated to them. - -The purpose of a development sprint is to enable a steady delivery of enhancements, features, and bug fixes by setting aside pre-determined portions of time that are meant for focusing on specifically-assigned items. - -Sprints also serve to focus the engineering team's attention on more accurate planning for what is to come during the entire release cycle, as well as keep the scope of development work concise. They enable the setting aside of dedicated time for the engineers to resolve any problems with the work involved, instead of pushing these problems to the end of the release cycle when there may not be any time remaining to fix issues. - - #### How does labeling work? Labeling is a very important means for sorting engineers to keep track of the current state of an issue with regards to the asynchronous nature of communicating with users. Utilizing the proper labels helps to identify the severity of the issue as well as to quickly understand the current state of a discussion. 
From c5146a02c1b86033a7989718b1511969838db5dd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 14 Mar 2023 14:46:18 +0100 Subject: [PATCH 083/366] [pre-commit.ci] pre-commit autoupdate (#4802) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f2d1cc76da..2c5a2676cb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,12 +56,12 @@ repos: hooks: - id: flake8 - repo: https://github.com/PyCQA/pylint - rev: v2.16.4 + rev: v2.17.0 hooks: - id: pylint args: [--exit-zero] - repo: https://github.com/PyCQA/bandit - rev: 1.7.4 + rev: 1.7.5 hooks: - id: bandit args: [--exit-zero] From 6751f09968f00ae175c5c9df941f0171f156984b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 20 Mar 2023 20:19:15 +0100 Subject: [PATCH 084/366] Add tests for #4763 (#4803) --- conda_build/render.py | 56 ++++++++++++++++++------------------ news/4763-subdir | 19 ++++++++++++ tests/test_render.py | 67 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 114 insertions(+), 28 deletions(-) create mode 100644 news/4763-subdir diff --git a/conda_build/render.py b/conda_build/render.py index f3d1801ec6..499f99a2a9 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + from collections import OrderedDict, defaultdict from functools import lru_cache import json @@ -8,7 +10,6 @@ from pathlib import Path import random import re -import shutil import string import subprocess import sys @@ -200,37 +201,36 @@ def _filter_run_exports(specs, ignore_list): return filtered_specs -def find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False): - _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs) - pkg_loc = None - for pkgs_dir in _pkgs_dirs: - pkg_dir = os.path.join(pkgs_dir, pkg_dist) - pkg_file = os.path.join(pkgs_dir, pkg_dist + CONDA_PACKAGE_EXTENSION_V1) - if not files_only and os.path.isdir(pkg_dir): - pkg_loc = pkg_dir - break - elif os.path.isfile(pkg_file): - pkg_loc = pkg_file - break - elif files_only and os.path.isdir(pkg_dir): - pkg_loc = pkg_file - # create the tarball on demand. This is so that testing on archives works. 
- with tarfile.open(pkg_file, 'w:bz2') as archive: - for entry in os.listdir(pkg_dir): - archive.add(os.path.join(pkg_dir, entry), arcname=entry) - - # use the package's subdir +def find_pkg_dir_or_file_in_pkgs_dirs( + distribution: str, m: MetaData, files_only: bool = False +) -> str | None: + for cache in map(Path, (*pkgs_dirs, *m.config.bldpkgs_dirs)): + package = cache / (distribution + CONDA_PACKAGE_EXTENSION_V1) + if package.is_file(): + return str(package) + + directory = cache / distribution + if directory.is_dir(): + if not files_only: + return str(directory) + + # get the package's subdir try: - info = json.loads(Path(pkg_dir, "info", "index.json").read_text()) - subdir = info["subdir"] + subdir = json.loads((directory / "info" / "index.json").read_text())[ + "subdir" + ] except (FileNotFoundError, KeyError): subdir = m.config.host_subdir - pkg_subdir = os.path.join(m.config.croot, subdir) - pkg_loc = os.path.join(pkg_subdir, os.path.basename(pkg_file)) - shutil.move(pkg_file, pkg_loc) - break - return pkg_loc + # create the tarball on demand so testing on archives works + package = Path( + m.config.croot, subdir, distribution + CONDA_PACKAGE_EXTENSION_V1 + ) + with tarfile.open(package, "w:bz2") as archive: + for entry in directory.iterdir(): + archive.add(entry, arcname=entry.name) + + return str(package) @lru_cache(maxsize=None) diff --git a/news/4763-subdir b/news/4763-subdir new file mode 100644 index 0000000000..16512f6b8e --- /dev/null +++ b/news/4763-subdir @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Install downstream packages in correct subdir. (#4763, #4803) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_render.py b/tests/test_render.py index 2fca77ad25..ff1e67666b 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -1,11 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json import os +from pathlib import Path +from uuid import uuid4 import pytest from conda_build import api from conda_build import render +from conda_build.metadata import MetaData +from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1 @pytest.mark.parametrize( @@ -45,3 +52,63 @@ def test_pin_run_as_build_preserve_string(testing_metadata): {'pkg': '1.2.3 somestring_h1234'} ) assert dep == 'pkg >=1.2.3,<1.3.0a0 somestring*' + + +@pytest.mark.parametrize( + "create_package,subdir,is_file,files_only", + [ + pytest.param(False, None, None, None, id="not found"), + pytest.param(True, None, False, False, id="directory"), + pytest.param(True, None, False, True, id="on demand"), + pytest.param(True, "magic", False, True, id="on demand, different subdir"), + pytest.param(True, None, True, None, id="file"), + ], +) +def test_find_package( + testing_metadata: MetaData, + tmp_path: Path, + create_package: bool, + subdir: str | None, + is_file: bool, + files_only: bool, +): + """ + Testing our ability to find the package directory or archive. + + The render.find_pkg_dir_or_file_in_pkgs_dirs function will scan the various + locations where packages may exist locally and returns the full package path + if found. 
+ """ + # setup + distribution = uuid4().hex[:20] + testing_metadata.config.croot = tmp_path + host_cache = tmp_path / testing_metadata.config.host_subdir + host_cache.mkdir() + subdir = subdir or testing_metadata.config.host_subdir + other_cache = tmp_path / subdir + other_cache.mkdir(exist_ok=True) + + # generate a dummy package as needed + package = None + if create_package: + # generate dummy package + if is_file: + (host_cache / (distribution + CONDA_PACKAGE_EXTENSION_V1)).touch() + else: + info = host_cache / distribution / "info" + info.mkdir(parents=True) + (info / "index.json").write_text(json.dumps({"subdir": subdir})) + + # expected package path + if is_file or files_only: + package = other_cache / (distribution + CONDA_PACKAGE_EXTENSION_V1) + else: + package = other_cache / distribution + + # attempt to find the package and check we found the expected path + found = render.find_pkg_dir_or_file_in_pkgs_dirs( + distribution, + testing_metadata, + files_only=files_only, + ) + assert package is found is None or package.samefile(found) From 2335d1021e38d80869ce7c0e349cef025d736972 Mon Sep 17 00:00:00 2001 From: Johnny Date: Wed, 22 Mar 2023 00:34:22 +0100 Subject: [PATCH 085/366] Update supported Python version in setup.py (#4804) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2cb1d8ea56..a634b5b7d2 100755 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10" ], - python_requires=">=3.6", + python_requires=">=3.8", description="tools for building conda packages", long_description=Path("README.md").read_text(), packages=[ From 904e0fbdea22d417d3a3eb0f588f6b384779b962 Mon Sep 17 00:00:00 2001 From: Ryan Date: Wed, 22 Mar 2023 02:11:02 -0600 Subject: [PATCH 086/366] Some minor code cleanup (#4791) --- conda_build/index.py | 2 -- conda_build/metapackage.py | 2 +- conda_build/os_utils/elf.py | 7 ------- 3 files changed, 1 insertion(+), 10 deletions(-) diff --git a/conda_build/index.py b/conda_build/index.py index 342c92ad16..4db7a224c4 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,7 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -# Copyright (C) 2018 Anaconda, Inc -# SPDX-License-Identifier: Proprietary import bz2 from collections import OrderedDict diff --git a/conda_build/metapackage.py b/conda_build/metapackage.py index fd993c1236..0d06fa8a34 100644 --- a/conda_build/metapackage.py +++ b/conda_build/metapackage.py @@ -7,7 +7,7 @@ def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, dependencies=(), home=None, license_name=None, summary=None, config=None): - # local import to avoid circular import, we provid create_metapackage in api + # local import to avoid circular import, we provide create_metapackage in api from conda_build.api import build if not config: diff --git a/conda_build/os_utils/elf.py b/conda_build/os_utils/elf.py index e8b2386f55..7aa9d594d4 100644 --- a/conda_build/os_utils/elf.py +++ b/conda_build/os_utils/elf.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import sys from os.path import islink, isfile @@ -20,9 +19,3 @@ def is_elf(path): with open(path, 'rb') as fi: head = fi.read(4) return bool(head == MAGIC) - - -if __name__ == '__main__': - if sys.platform.startswith('linux'): - for path in '/usr/bin/ls', '/etc/mtab': - print(path, is_elf(path)) From 30af5caad7ea79377613f5c6de17409b446b1337 Mon 
Sep 17 00:00:00 2001 From: Jannis Leidel Date: Wed, 22 Mar 2023 15:04:07 +0100 Subject: [PATCH 087/366] Release 3.24.0 (#4801) --- .authors.yml | 52 +++++++++++++++---- .mailmap | 6 +++ AUTHORS.md | 6 +++ CHANGELOG.md | 52 +++++++++++++++++-- news/4580-update-broken-links | 20 ------- news/4592-perl-core-module-detection | 20 ------- news/4599-add-cpan-xs-detection | 19 ------- news/4677-remove-pre-conda413-code | 19 ------- news/4691-update-test-matrix | 19 ------- ...changed-to-anacondaorg-in-conda-build-docs | 19 ------- news/4728-deprecate-CrossPlatformStLink | 19 ------- news/4763-subdir | 19 ------- news/4767-reenable-coverage-reporting | 19 ------- news/4781-convert-manual-clone-fixture | 19 ------- news/4797-drop-python37-support | 19 ------- news/gh-4318-Fixes-git-lfs-error.rst | 24 --------- news/tomllib-tomli | 19 ------- 17 files changed, 101 insertions(+), 269 deletions(-) delete mode 100644 news/4580-update-broken-links delete mode 100644 news/4592-perl-core-module-detection delete mode 100644 news/4599-add-cpan-xs-detection delete mode 100644 news/4677-remove-pre-conda413-code delete mode 100644 news/4691-update-test-matrix delete mode 100644 news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs delete mode 100644 news/4728-deprecate-CrossPlatformStLink delete mode 100644 news/4763-subdir delete mode 100644 news/4767-reenable-coverage-reporting delete mode 100644 news/4781-convert-manual-clone-fixture delete mode 100644 news/4797-drop-python37-support delete mode 100644 news/gh-4318-Fixes-git-lfs-error.rst delete mode 100644 news/tomllib-tomli diff --git a/.authors.yml b/.authors.yml index a5a07b6390..626c87d60d 100644 --- a/.authors.yml +++ b/.authors.yml @@ -161,7 +161,7 @@ aliases: - MinRK github: minrk - num_commits: 14 + num_commits: 15 first_commit: 2014-02-13 19:43:59 - name: Matty G email: meawoppl@gmail.com @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 138 + num_commits: 140 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -1056,7 +1056,7 @@ github: spalmrot-tic - name: Daniel Bast email: 2790401+dbast@users.noreply.github.com - num_commits: 14 + num_commits: 15 first_commit: 2019-06-07 02:44:13 github: dbast - name: Duncan Macleod @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 63 + num_commits: 106 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 22 + num_commits: 33 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 29 + num_commits: 38 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 5 + num_commits: 14 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 36 + num_commits: 42 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1268,7 +1268,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 3 + num_commits: 6 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1300,12 +1300,12 @@ - name: Katherine Kinnaman email: kkinnaman@anaconda.com github: kathatherine - num_commits: 1 + num_commits: 2 first_commit: 2022-07-07 10:56:31 - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 1 + num_commits: 2 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1334,3 +1334,33 @@ github: brettcannon num_commits: 1 first_commit: 2022-11-16 21:54:14 +- name: Srivas Venkatesh + email: 110486050+sven6002@users.noreply.github.com + num_commits: 1 + first_commit: 2022-12-14 19:50:36 + github: sven6002 +- name: Ernst Luring + email: ernst@ejldigital.com + num_commits: 1 + first_commit: 2023-02-23 20:40:15 + github: ernstluring +- name: Felix Kühnl + email: xileF1337@users.noreply.github.com + num_commits: 2 + first_commit: 2023-03-07 22:37:07 + github: xileF1337 +- name: T Coxon + email: 97948946+tttc3@users.noreply.github.com + num_commits: 1 + first_commit: 2023-02-24 01:34:03 + github: tttc3 +- name: Peet Whittaker + email: peet.whittaker@gmail.com + num_commits: 1 + first_commit: 2023-02-24 01:35:43 + github: peetw +- name: Johnny + email: johnnync13@gmail.com + num_commits: 1 + first_commit: 2023-03-22 00:34:22 + github: johnnynunez diff --git a/.mailmap b/.mailmap index 6d590c020a..95320e0be9 100644 --- a/.mailmap +++ b/.mailmap @@ -84,8 +84,10 @@ Ed Campbell Eli Rykoff erykoff Elliot Hughes Eric Dill +Ernst Luring Evan Hubinger Evan Klitzke +Felix Kühnl Filipe Fernandes ocefpaf Floris Bruynooghe Gabriel Reis @@ -125,6 +127,7 @@ John Kirkham jakirkham John Kirkham John Kirkham John Kirkham John Kirkham John Omotani +Johnny Jonathan J. Helmus Jonathan Helmus Joseph Crail Joseph Hunkeler @@ -186,6 +189,7 @@ Oleksandr Pavlyk Patrick Snape Patrick Snape Patrick Sodré Paul Madden +Peet Whittaker Peter Williams Phil Elson pelson Phil Reinhold @@ -227,12 +231,14 @@ Sophia Castellarin sophia soapy1 Sophia Castellarin soapy1 Sophian Guidara +Srivas Venkatesh <110486050+sven6002@users.noreply.github.com> Stas Bekman Stefan Scherfke Stefan Zimmermann Stephan Hoyer Stephen Palmroth spalmrot-tic Stuart Berg +T Coxon <97948946+tttc3@users.noreply.github.com> Tadeu Manoel Tadeu Manoel Takafumi Arakaki Teake Nutma Teake Nutma diff --git a/AUTHORS.md b/AUTHORS.md index 36b300279f..60a06dd9fe 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -67,8 +67,10 @@ Authors are sorted alphabetically. * Eli Rykoff * Elliot Hughes * Eric Dill +* Ernst Luring * Evan Hubinger * Evan Klitzke +* Felix Kühnl * Filipe Fernandes * Floris Bruynooghe * Gabriel Reis @@ -104,6 +106,7 @@ Authors are sorted alphabetically. * John Blischak * John Kirkham * John Omotani +* Johnny * Jonathan J. Helmus * Joseph Crail * Joseph Hunkeler @@ -156,6 +159,7 @@ Authors are sorted alphabetically. * Patrick Snape * Patrick Sodré * Paul Madden +* Peet Whittaker * Peter Williams * Phil Elson * Phil Reinhold @@ -187,12 +191,14 @@ Authors are sorted alphabetically. 
* Siu Kwan Lam * Sophia Castellarin * Sophian Guidara +* Srivas Venkatesh * Stas Bekman * Stefan Scherfke * Stefan Zimmermann * Stephan Hoyer * Stephen Palmroth * Stuart Berg +* T Coxon * Tadeu Manoel * Takafumi Arakaki * Teake Nutma diff --git a/CHANGELOG.md b/CHANGELOG.md index 394447e77c..cdcbe6e6f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,53 @@ [//]: # (current developments) +## 3.24.0 (2023-03-22) + +### Bug fixes + +* Fix the failing `git clone` when source has LFS files. (#4318) +* Fix many false-positives during the detection of Perl core modules in `conda skeleton cpan`. (#4592) +* `conda skeleton cpan` now correctly adds a C compiler as dependency if the distribution contains an `.xs` file. (#4599) +* Install downstream packages in correct subdir. (#4763, #4803) +* Update supported Python version in setup.py. (#4804) + +### Deprecations + +* Removed conda <4.13 logic. (#4677) +* `conda_build.conda_interface.CrossPlatformStLink` is pending deprecation in favor of using `os.stat().st_nlink`. (#4728) +* Drop Python 3.7 support. (#4796) + +### Docs + +* Updated broken links to example conda recipes and updated link to the now archived conda-recipes, with additional links to AnacondaRecipes aggregated feedstocks and conda-forge feedstocks. (#4580) +* Replaced two instances of "Anaconda Cloud" with "anaconda.org". (#4719) + +### Other + +* Update test matrix to run tests on all supported Python versions on Linux. Only run tests on lower & upper Python bounds for Windows and macOS. (#4691) +* Re-enable code coverage reporting to `codecov`. (#4767) +* Eliminate test setup's manual clone of https://github.com/conda/conda_build_test_recipe in favor of a session fixture. (#4781) +* Use `tomllib` (Python 3.11+) or `tomli` for `.toml` support. (#4783) + +### Contributors + +* @beeankha +* @conda-bot +* @dbast +* @dholth +* @ernstluring made their first contribution in https://github.com/conda/conda-build/pull/4318 +* @xileF1337 made their first contribution in https://github.com/conda/conda-build/pull/4592 +* @jezdez +* @jakirkham +* @johnnynunez made their first contribution in https://github.com/conda/conda-build/pull/4804 +* @kathatherine +* @kenodegard +* @minrk +* @peetw made their first contribution in https://github.com/conda/conda-build/pull/4662 +* @sven6002 made their first contribution in https://github.com/conda/conda-build/pull/4621 +* @tttc3 made their first contribution in https://github.com/conda/conda-build/pull/4580 +* @dependabot[bot] +* @pre-commit-ci[bot] + ## 3.23.3 (2022-12-06) ### Bug fixes @@ -18,8 +66,6 @@ * @kenodegard * @mbargull - - ## 3.23.2 (2022-11-30) ### Bug fixes @@ -32,8 +78,6 @@ * @mbargull * @pre-commit-ci[bot] - - ## 3.23.1 (2022-11-17) ### Bug fixes diff --git a/news/4580-update-broken-links b/news/4580-update-broken-links deleted file mode 100644 index 0c580fd993..0000000000 --- a/news/4580-update-broken-links +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Updated broken links to example conda recipes and updated link to the now archived conda-recipes, with additional links to AnacondaRecipes aggregated feedstocks and conda-forge feedstocks. 
(#4580) - - -### Other - -* diff --git a/news/4592-perl-core-module-detection b/news/4592-perl-core-module-detection deleted file mode 100644 index 222844d97f..0000000000 --- a/news/4592-perl-core-module-detection +++ /dev/null @@ -1,20 +0,0 @@ - -### Enhancements - -* - -### Bug fixes - -* fix many false-positives during the detection of Perl core modules in `conda skeleton cpan` - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4599-add-cpan-xs-detection b/news/4599-add-cpan-xs-detection deleted file mode 100644 index c0938e4785..0000000000 --- a/news/4599-add-cpan-xs-detection +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* conda skeleton cpan now correctly adds a C compiler as dependency if the distribution contains an `.xs` file - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4677-remove-pre-conda413-code b/news/4677-remove-pre-conda413-code deleted file mode 100644 index 27eb7f9aa3..0000000000 --- a/news/4677-remove-pre-conda413-code +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Removed conda <4.13 logic. (#4677) - -### Docs - -* - -### Other - -* diff --git a/news/4691-update-test-matrix b/news/4691-update-test-matrix deleted file mode 100644 index d8fd6fdba5..0000000000 --- a/news/4691-update-test-matrix +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Update test matrix to run tests on all supported Python versions on Linux. Only run tests on lower & upper Python bounds for Windows and macOS. (#4691) diff --git a/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs b/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs deleted file mode 100644 index e46a165c11..0000000000 --- a/news/4719-anaconda-cloud-needs-changed-to-anacondaorg-in-conda-build-docs +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Replaced two instances of "Anaconda Cloud" with "anaconda.org". (#4719) - -### Other - -* diff --git a/news/4728-deprecate-CrossPlatformStLink b/news/4728-deprecate-CrossPlatformStLink deleted file mode 100644 index 995757be81..0000000000 --- a/news/4728-deprecate-CrossPlatformStLink +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* `conda_build.conda_interface.CrossPlatformStLink` is pending deprecation in favor of using `os.stat().st_nlink`. (#4728) - -### Docs - -* - -### Other - -* diff --git a/news/4763-subdir b/news/4763-subdir deleted file mode 100644 index 16512f6b8e..0000000000 --- a/news/4763-subdir +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Install downstream packages in correct subdir. (#4763, #4803) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4767-reenable-coverage-reporting b/news/4767-reenable-coverage-reporting deleted file mode 100644 index 8c9b433a90..0000000000 --- a/news/4767-reenable-coverage-reporting +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Re-enable code coverage reporting to codecov. 
(#4767) diff --git a/news/4781-convert-manual-clone-fixture b/news/4781-convert-manual-clone-fixture deleted file mode 100644 index 37289bf87d..0000000000 --- a/news/4781-convert-manual-clone-fixture +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Eliminate test setup's manual clone of https://github.com/conda/conda_build_test_recipe in favor of a session fixture. (#4781) diff --git a/news/4797-drop-python37-support b/news/4797-drop-python37-support deleted file mode 100644 index 7f68466549..0000000000 --- a/news/4797-drop-python37-support +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Drop Python 3.7 support. (#4796) - -### Docs - -* - -### Other - -* diff --git a/news/gh-4318-Fixes-git-lfs-error.rst b/news/gh-4318-Fixes-git-lfs-error.rst deleted file mode 100644 index 9c87811510..0000000000 --- a/news/gh-4318-Fixes-git-lfs-error.rst +++ /dev/null @@ -1,24 +0,0 @@ -Enhancements: -------------- - -* - -Bug fixes: ----------- - -* Fixes the failing `git clone` when source has LFS files. (#4318) - -Deprecations: -------------- - -* - -Docs: ------ - -* - -Other: ------- - -* diff --git a/news/tomllib-tomli b/news/tomllib-tomli deleted file mode 100644 index 0eec19abce..0000000000 --- a/news/tomllib-tomli +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Use tomllib (Python 3.11+) or tomli for .toml support. From 49dd797acebcbaefff89841a9cab81cfdc3fde7c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 23 Mar 2023 17:45:28 +0100 Subject: [PATCH 088/366] Stop using pending deprecation `get_prefix` (#4818) --- conda_build/cli/main_develop.py | 5 +++-- conda_build/cli/main_inspect.py | 36 ++++++++++++++++++++++++--------- conda_build/conda_interface.py | 4 ++-- 3 files changed, 31 insertions(+), 14 deletions(-) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 7fc473725c..8300a03a64 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -3,7 +3,8 @@ import logging import sys -from conda_build.conda_interface import ArgumentParser, add_parser_prefix, get_prefix +from conda.base.context import context, determine_target_prefix +from conda_build.conda_interface import ArgumentParser, add_parser_prefix from conda_build import api @@ -57,7 +58,7 @@ def parse_args(args): def execute(args): _, args = parse_args(args) - prefix = get_prefix(args) + prefix = determine_target_prefix(context, args) api.develop(args.source, prefix=prefix, no_pth_file=args.no_pth_file, build_ext=args.build_ext, clean=args.clean, uninstall=args.uninstall) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 21c09a570b..4f8b133d23 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -5,7 +5,8 @@ from pprint import pprint import sys -from conda_build.conda_interface import ArgumentParser, add_parser_prefix, get_prefix +from conda.base.context import context, determine_target_prefix +from conda_build.conda_interface import ArgumentParser, add_parser_prefix from conda_build import api @@ -187,15 +188,30 @@ def execute(args): parser.error("At least one option (--test-installable) is required.") else: print(api.test_installable(args.channel)) - elif args.subcommand == 'linkages': - print(api.inspect_linkages(args.packages, prefix=get_prefix(args), - untracked=args.untracked, 
all_packages=args.all, - show_files=args.show_files, groupby=args.groupby, - sysroot=expanduser(args.sysroot))) - elif args.subcommand == 'objects': - print(api.inspect_objects(args.packages, prefix=get_prefix(args), groupby=args.groupby)) - elif args.subcommand == 'prefix-lengths': - if not api.inspect_prefix_length(args.packages, min_prefix_length=args.min_prefix_length): + elif args.subcommand == "linkages": + print( + api.inspect_linkages( + args.packages, + prefix=determine_target_prefix(context, args), + untracked=args.untracked, + all_packages=args.all, + show_files=args.show_files, + groupby=args.groupby, + sysroot=expanduser(args.sysroot), + ) + ) + elif args.subcommand == "objects": + print( + api.inspect_objects( + args.packages, + prefix=determine_target_prefix(context, args), + groupby=args.groupby, + ) + ) + elif args.subcommand == "prefix-lengths": + if not api.inspect_prefix_length( + args.packages, min_prefix_length=args.min_prefix_length + ): sys.exit(1) elif args.subcommand == 'hash-inputs': pprint(api.inspect_hash_inputs(args.packages)) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index b21ee1007c..988dce454b 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -84,7 +84,7 @@ from conda.base.context import ( # noqa: F401 non_x86_machines as non_x86_linux_machines, context, - get_prefix as context_get_prefix, + determine_target_prefix, reset_context, ) from conda.models.channel import get_conda_build_local_url # noqa: F401 @@ -101,7 +101,7 @@ create_default_packages = context.create_default_packages get_rc_urls = lambda: list(context.channels) -get_prefix = partial(context_get_prefix, context) +get_prefix = partial(determine_target_prefix, context) cc_conda_build = context.conda_build if hasattr(context, 'conda_build') else {} get_conda_channel = Channel.from_value From af340b66df89308c187a5a161de92ecfa9d1f12c Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Fri, 24 Mar 2023 13:54:25 -0400 Subject: [PATCH 089/366] Patch environment variable to str instead of Path (#4823) * setenv as str not Path * show 16 test durations instead of all of them --- setup.cfg | 2 +- tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 3f6eb0840f..91017e0bac 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,7 +26,7 @@ addopts = --tb native --strict --strict-markers - --durations=0 + --durations=16 log_level = DEBUG env = PYTHONHASHSEED=0 diff --git a/tests/conftest.py b/tests/conftest.py index 7480bf8039..11f1603311 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -250,5 +250,5 @@ def conda_build_test_recipe_envvar( ) -> str: """Exposes the cloned conda_build_test_recipe as an environment variable.""" name = "CONDA_BUILD_TEST_RECIPE_PATH" - monkeypatch.setenv(name, conda_build_test_recipe_path) + monkeypatch.setenv(name, str(conda_build_test_recipe_path)) return name From 424b736079f0c0317f5f5e47940555866d74ee6c Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Fri, 24 Mar 2023 15:07:23 -0400 Subject: [PATCH 090/366] skip troubled skeleton tests; grayskull is available (#4825) * skip troubled skeleton tests; grayskull is available --- tests/test_api_skeleton.py | 2 ++ tests/test_api_skeleton_cran.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 645d01039a..62c09933fc 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -139,6 +139,7 @@ def pylint_metadata(): } 
+@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.parametrize( "prefix, repo, package, version", [ @@ -453,6 +454,7 @@ def test_pypi_section_order_preserved(tmp_path: Path): assert list(v.keys()) == list(recipe[k]) +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.slow @pytest.mark.flaky(rerun=5, reruns_delay=2) @pytest.mark.skipif(on_win, reason="shellcheck is not available on Windows") diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 6a487c3ad3..0522bde3a3 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -13,6 +13,7 @@ from conda_build.skeletons.cran import CRAN_BUILD_SH_SOURCE, CRAN_META +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.slow @pytest.mark.parametrize( "package,license_id,license_family,license_files", @@ -48,6 +49,7 @@ def test_cran_license( } == set(license_files) +@pytest.mark.skip("Use separate grayskull package instead of skeleton.") @pytest.mark.parametrize( "package,skip_text", [ From 8f558b058807a1c4deb5ff00f3c8b6d0c8d94de2 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 28 Mar 2023 14:24:55 +0200 Subject: [PATCH 091/366] Drop `conda < 4.3.2` check (#4831) --- conda_build/build.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/conda_build/build.py b/conda_build/build.py index 0a5cf08c65..5fa2f6ae98 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -35,7 +35,6 @@ from .conda_interface import env_path_backup_var_exists from .conda_interface import prefix_placeholder from .conda_interface import TemporaryDirectory -from .conda_interface import VersionOrder from .conda_interface import PathType, FileMode from .conda_interface import EntityEncoder from .conda_interface import get_rc_urls @@ -1929,9 +1928,6 @@ def create_build_envs(m, notest): m.config._merge_build_host = m.build_is_host if m.is_cross and not m.build_is_host: - if VersionOrder(conda_version) < VersionOrder('4.3.2'): - raise RuntimeError("Non-native subdir support only in conda >= 4.3.2") - host_actions = environ.get_install_actions(m.config.host_prefix, tuple(host_ms_deps), 'host', subdir=m.config.host_subdir, From 17de8b91adb0d4fb7733fe7c0cf947436b1db9e2 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Mar 2023 11:03:35 -0500 Subject: [PATCH 092/366] Update Python auto formatting Replace darker with black. Add isort. 
--- .pre-commit-config.yaml | 49 ++++++++++++++++++++--------------------- news/4836-auto-format | 20 +++++++++++++++++ pyproject.toml | 5 ++++- 3 files changed, 48 insertions(+), 26 deletions(-) create mode 100644 news/4836-auto-format diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c5a2676cb..964d6b446c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ # disable autofixing PRs, commenting "pre-commit.ci autofix" on a pull request triggers a autofix ci: - autofix_prs: false + autofix_prs: false # generally speaking we ignore all vendored code as well as tests data # ignore patches/diffs since slight reformatting can break them exclude: | @@ -17,6 +17,7 @@ exclude: | conda_build/_version.py ) repos: + # generic verification and formatting - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: @@ -36,41 +37,39 @@ repos: ) # catch git merge/rebase problems - id: check-merge-conflict + # Python verification and formatting + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.4.2 + hooks: + # auto inject license blurb + - id: insert-license + files: \.py$ + args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] + exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade rev: v3.3.1 hooks: + # upgrade standard Python codes - id: pyupgrade - args: ["--py37-plus"] - - repo: https://github.com/akaihola/darker - rev: 1.7.0 + args: [--py38-plus] + - repo: https://github.com/pycqa/isort + rev: 5.12.0 hooks: - - id: darker - additional_dependencies: [black==22.10.0] + # auto sort Python imports + - id: isort + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + # auto format Python codes + - id: black - repo: https://github.com/asottile/blacken-docs rev: 1.13.0 hooks: + # auto format Python codes within docstrings - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/PyCQA/flake8 rev: 6.0.0 hooks: + # lint Python codes - id: flake8 - - repo: https://github.com/PyCQA/pylint - rev: v2.17.0 - hooks: - - id: pylint - args: [--exit-zero] - - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 - hooks: - - id: bandit - args: [--exit-zero] - # ignore all tests, not just tests data - exclude: ^tests/ - - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.4.2 - hooks: - - id: insert-license - files: \.py$ - args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] - exclude: ^conda_build/version.py diff --git a/news/4836-auto-format b/news/4836-auto-format new file mode 100644 index 0000000000..60660bdd5e --- /dev/null +++ b/news/4836-auto-format @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Format with black and replaced pre-commit's darker hook with black. (#4836) +* Format with isort and add pre-commit isort hook. 
(#4836) diff --git a/pyproject.toml b/pyproject.toml index 36619ccf54..ac3c265a6c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,5 @@ [tool.black] -target-version = ['py36', 'py37', 'py38'] +target-version = ['py38', 'py39', 'py310'] + +[tool.isort] +profile = "black" From 4131968d4ae902e6419028bf6c5102f3e061a950 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Mar 2023 12:24:00 -0500 Subject: [PATCH 093/366] Auto format --- benchmarks/time_render.py | 27 +- bin/conda-build | 1 + bin/conda-convert | 1 + bin/conda-develop | 1 + bin/conda-index | 1 + bin/conda-inspect | 1 + bin/conda-metapackage | 1 + bin/conda-render | 1 + bin/conda-skeleton | 1 + conda_build/__init__.py | 18 +- conda_build/_link.py | 80 +- conda_build/_load_setup_py_data.py | 89 +- conda_build/api.py | 548 ++- conda_build/bdist_conda.py | 178 +- conda_build/build.py | 3109 +++++++++++------ conda_build/cli/actions.py | 10 +- conda_build/cli/main_build.py | 442 ++- conda_build/cli/main_convert.py | 70 +- conda_build/cli/main_debug.py | 85 +- conda_build/cli/main_develop.py | 81 +- conda_build/cli/main_index.py | 79 +- conda_build/cli/main_inspect.py | 107 +- conda_build/cli/main_metapackage.py | 43 +- conda_build/cli/main_render.py | 152 +- conda_build/cli/main_skeleton.py | 25 +- conda_build/cli/validators.py | 2 +- conda_build/conda_interface.py | 139 +- conda_build/config.py | 662 ++-- conda_build/convert.py | 481 +-- conda_build/create_test.py | 215 +- conda_build/develop.py | 79 +- conda_build/environ.py | 30 +- conda_build/exceptions.py | 58 +- conda_build/features.py | 15 +- conda_build/index.py | 1039 ++++-- conda_build/inspect_pkg.py | 231 +- conda_build/jinja_context.py | 493 ++- conda_build/license_family.py | 60 +- conda_build/metadata.py | 35 +- conda_build/metapackage.py | 33 +- conda_build/noarch_python.py | 84 +- conda_build/os_utils/elf.py | 23 +- conda_build/os_utils/external.py | 51 +- conda_build/os_utils/ldd.py | 64 +- conda_build/os_utils/liefldd.py | 802 +++-- conda_build/os_utils/macho.py | 188 +- conda_build/os_utils/pyldd.py | 716 ++-- conda_build/post.py | 1327 ++++--- conda_build/render.py | 706 ++-- conda_build/skeletons/_example_skeleton.py | 8 +- conda_build/skeletons/cpan.py | 810 +++-- conda_build/skeletons/cran.py | 1374 +++++--- conda_build/skeletons/luarocks.py | 165 +- conda_build/skeletons/pypi.py | 817 +++-- conda_build/skeletons/rpm.py | 773 ++-- conda_build/source.py | 771 ++-- conda_build/tarcheck.py | 64 +- conda_build/utils.py | 92 +- conda_build/variants.py | 386 +- conda_build/version.py | 5 +- conda_build/windows.py | 206 +- docs/scrape_help.py | 159 +- recipe/run_test.py | 2 +- recipe/test_bdist_conda_setup.py | 1 + setup.py | 3 +- tests/bdist-recipe/bin/test-script-setup.py | 2 + .../conda_build_test/manual_entry.py | 4 +- tests/bdist-recipe/setup.py | 7 +- tests/cli/test_main_build.py | 12 +- tests/cli/test_main_debug.py | 7 +- tests/cli/test_main_inspect.py | 3 +- tests/cli/test_main_metapackage.py | 2 +- tests/cli/test_main_render.py | 3 +- tests/cli/test_main_skeleton.py | 1 - tests/cli/test_validators.py | 25 +- tests/conftest.py | 8 +- tests/test_api_build.py | 1231 ++++--- tests/test_api_build_conda_v2.py | 6 +- tests/test_api_consistency.py | 158 +- tests/test_api_convert.py | 295 +- tests/test_api_debug.py | 7 +- tests/test_api_inspect.py | 1 + tests/test_api_render.py | 162 +- tests/test_api_skeleton.py | 173 +- tests/test_api_skeleton_cpan.py | 8 +- tests/test_api_skeleton_cran.py | 10 +- tests/test_api_test.py | 11 +- tests/test_build.py | 226 
+- tests/test_check.py | 3 +- tests/test_conda_interface.py | 6 +- tests/test_config.py | 39 +- tests/test_cpan_skeleton.py | 2 +- tests/test_cran_skeleton.py | 31 +- tests/test_create_test.py | 2 +- tests/test_environ.py | 13 +- tests/test_index.py | 7 +- tests/test_inspect.py | 14 +- tests/test_jinja_context.py | 137 +- tests/test_license_family.py | 7 +- tests/test_metadata.py | 134 +- tests/test_misc.py | 5 +- tests/test_os_utils_external.py | 1 + tests/test_patch.py | 4 +- tests/test_post.py | 74 +- tests/test_published_examples.py | 7 +- tests/test_pypi_skeleton.py | 4 +- tests/test_render.py | 13 +- tests/test_source.py | 201 +- tests/test_subpackages.py | 335 +- tests/test_utils.py | 356 +- tests/test_variants.py | 272 +- tests/utils.py | 4 +- 112 files changed, 13543 insertions(+), 8810 deletions(-) diff --git a/benchmarks/time_render.py b/benchmarks/time_render.py index 5fe76f6f45..41db953c87 100644 --- a/benchmarks/time_render.py +++ b/benchmarks/time_render.py @@ -2,28 +2,37 @@ # SPDX-License-Identifier: BSD-3-Clause import os -from conda_build import api - # god-awful hack to get data from the test recipes import sys + +from conda_build import api + _thisdir = os.path.dirname(__file__) sys.path.append(os.path.dirname(_thisdir)) from tests.utils import metadata_dir # noqa: E402 -variant_dir = os.path.join(metadata_dir, '..', 'variants') + +variant_dir = os.path.join(metadata_dir, "..", "variants") def time_simple_render(): - api.render(os.path.join(metadata_dir, 'python_run'), finalize=False, - bypass_env_check=True) + api.render( + os.path.join(metadata_dir, "python_run"), finalize=False, bypass_env_check=True + ) def time_top_level_variant_render(): - api.render(os.path.join(variant_dir, '02_python_version'), finalize=False, - bypass_env_check=True) + api.render( + os.path.join(variant_dir, "02_python_version"), + finalize=False, + bypass_env_check=True, + ) def time_single_top_level_multi_output(): - api.render(os.path.join(variant_dir, 'test_python_as_subpackage_loop'), - finalize=False, bypass_env_check=True) + api.render( + os.path.join(variant_dir, "test_python_as_subpackage_loop"), + finalize=False, + bypass_env_check=True, + ) diff --git a/bin/conda-build b/bin/conda-build index b203e80e1e..bf14475007 100755 --- a/bin/conda-build +++ b/bin/conda-build @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_build import main sys.exit(main()) diff --git a/bin/conda-convert b/bin/conda-convert index 66cf207213..ca85184ba4 100755 --- a/bin/conda-convert +++ b/bin/conda-convert @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_convert import main sys.exit(main()) diff --git a/bin/conda-develop b/bin/conda-develop index 7fd11d4a1b..657a533493 100755 --- a/bin/conda-develop +++ b/bin/conda-develop @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_develop import main sys.exit(main()) diff --git a/bin/conda-index b/bin/conda-index index 5a83e54d16..b1d0f34958 100755 --- a/bin/conda-index +++ b/bin/conda-index @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_index import main sys.exit(main()) diff --git a/bin/conda-inspect b/bin/conda-inspect index 59f4975780..b8204c2746 100755 --- a/bin/conda-inspect +++ b/bin/conda-inspect @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_inspect import main sys.exit(main()) diff --git a/bin/conda-metapackage b/bin/conda-metapackage index 2e976ef9d3..4a57921b0f 100755 --- a/bin/conda-metapackage +++ 
b/bin/conda-metapackage @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_metapackage import main sys.exit(main()) diff --git a/bin/conda-render b/bin/conda-render index 4a1ddeed18..3372118d73 100755 --- a/bin/conda-render +++ b/bin/conda-render @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_render import main sys.exit(main()) diff --git a/bin/conda-skeleton b/bin/conda-skeleton index 9a66dbe27c..ffc0aa3127 100755 --- a/bin/conda-skeleton +++ b/bin/conda-skeleton @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + from conda_build.cli.main_skeleton import main sys.exit(main()) diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 481c819391..6f2d2f37b6 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -1,17 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from . import _version -__version__ = _version.get_versions()['version'] + +__version__ = _version.get_versions()["version"] # Sub commands added by conda-build to the conda command sub_commands = [ - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'metapackage', - 'render' - 'skeleton', + "build", + "convert", + "develop", + "index", + "inspect", + "metapackage", + "render" "skeleton", ] diff --git a/conda_build/_link.py b/conda_build/_link.py index 5f6d2a4c5a..21ea66aaed 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -7,12 +7,11 @@ from __future__ import annotations import os -from os.path import dirname, exists, isdir, join, normpath -from pathlib import Path import re -import sys import shutil - +import sys +from os.path import dirname, exists, isdir, join, normpath +from pathlib import Path # Silence pyflakes. This variable is added when link.py is written by # conda_build.noarch_python. @@ -21,22 +20,24 @@ THIS_DIR = dirname(__file__) PREFIX = normpath(sys.prefix) -if sys.platform == 'win32': - BIN_DIR = join(PREFIX, 'Scripts') - SITE_PACKAGES = 'Lib/site-packages' +if sys.platform == "win32": + BIN_DIR = join(PREFIX, "Scripts") + SITE_PACKAGES = "Lib/site-packages" else: - BIN_DIR = join(PREFIX, 'bin') - SITE_PACKAGES = 'lib/python%s/site-packages' % sys.version[:3] + BIN_DIR = join(PREFIX, "bin") + SITE_PACKAGES = "lib/python%s/site-packages" % sys.version[:3] # the list of these files is going to be store in info/_files FILES = [] # three capture groups: whole_shebang, executable, options -SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string - br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path - br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA - br'(.*)' # the rest of the line can contain option flags - br')$') # end whole_shebang group +SHEBANG_REGEX = ( + rb"^(#!" # pretty much the whole match string + rb"(?:[ ]*)" # allow spaces between #! 
and beginning of the executable path + rb"(/(?:\\ |[^ \n\r\t])*)" # the executable is the next text block without an escaped space or non-space whitespace character # NOQA + rb"(.*)" # the rest of the line can contain option flags + rb")$" +) # end whole_shebang group def _link(src, dst): @@ -78,48 +79,50 @@ def link_files(src_root, dst_root, files): if exists(dst): _unlink(dst) _link(src, dst) - f = f'{dst_root}/{f}' + f = f"{dst_root}/{f}" FILES.append(f) - if f.endswith('.py'): + if f.endswith(".py"): FILES.append(pyc_f(f)) # yanked from conda def replace_long_shebang(data): # this function only changes a shebang line if it exists and is greater than 127 characters - if hasattr(data, 'encode'): + if hasattr(data, "encode"): data = data.encode() shebang_match = re.match(SHEBANG_REGEX, data, re.MULTILINE) if shebang_match: whole_shebang, executable, options = shebang_match.groups() if len(whole_shebang) > 127: - executable_name = executable.decode('utf-8').split('/')[-1] - new_shebang = '#!/usr/bin/env {}{}'.format(executable_name, options.decode('utf-8')) - data = data.replace(whole_shebang, new_shebang.encode('utf-8')) - if hasattr(data, 'decode'): + executable_name = executable.decode("utf-8").split("/")[-1] + new_shebang = "#!/usr/bin/env {}{}".format( + executable_name, options.decode("utf-8") + ) + data = data.replace(whole_shebang, new_shebang.encode("utf-8")) + if hasattr(data, "decode"): data = data.decode() return data def create_script(fn): - src = join(THIS_DIR, 'python-scripts', fn) + src = join(THIS_DIR, "python-scripts", fn) dst = join(BIN_DIR, fn) - if sys.platform == 'win32': - shutil.copy2(src, dst + '-script.py') - FILES.append('Scripts/%s-script.py' % fn) - shutil.copy2(join(THIS_DIR, - 'cli-%d.exe' % (8 * tuple.__itemsize__)), - dst + '.exe') - FILES.append('Scripts/%s.exe' % fn) + if sys.platform == "win32": + shutil.copy2(src, dst + "-script.py") + FILES.append("Scripts/%s-script.py" % fn) + shutil.copy2( + join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" + ) + FILES.append("Scripts/%s.exe" % fn) else: with open(src) as fi: data = fi.read() - with open(dst, 'w') as fo: - shebang = replace_long_shebang('#!%s\n' % normpath(sys.executable)) + with open(dst, "w") as fo: + shebang = replace_long_shebang("#!%s\n" % normpath(sys.executable)) fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append('bin/%s' % fn) + FILES.append("bin/%s" % fn) def create_scripts(files): @@ -132,15 +135,14 @@ def create_scripts(files): def main(): - create_scripts(DATA['python-scripts']) - link_files('site-packages', SITE_PACKAGES, DATA['site-packages']) - link_files('Examples', 'Examples', DATA['Examples']) + create_scripts(DATA["python-scripts"]) + link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) + link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, 'conda-meta', - '%s.files' % DATA['dist']), 'w') as fo: + with open(join(PREFIX, "conda-meta", "%s.files" % DATA["dist"]), "w") as fo: for f in FILES: - fo.write('%s\n' % f) + fo.write("%s\n" % f) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py index fd4bef91f1..71b96dbaa7 100644 --- a/conda_build/_load_setup_py_data.py +++ b/conda_build/_load_setup_py_data.py @@ -1,18 +1,24 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import logging import os import sys -import logging -def load_setup_py_data(setup_file, from_recipe_dir=False, 
recipe_dir=None, work_dir=None, - permit_undefined_jinja=True): +def load_setup_py_data( + setup_file, + from_recipe_dir=False, + recipe_dir=None, + work_dir=None, + permit_undefined_jinja=True, +): _setuptools_data = {} log = logging.getLogger(__name__) - import setuptools import distutils.core + import setuptools + cd_to_work = False path_backup = sys.path @@ -39,8 +45,10 @@ def _change_cwd(target_dir): if not os.path.isabs(setup_file): setup_file = os.path.join(work_dir, setup_file) else: - message = ("Did not find setup.py file in manually specified location, and source " - "not downloaded yet.") + message = ( + "Did not find setup.py file in manually specified location, and source " + "not downloaded yet." + ) if permit_undefined_jinja: log.debug(message) return {} @@ -58,7 +66,7 @@ def _change_cwd(target_dir): except ImportError: pass # setuptools <30.3.0 cannot read metadata / options from 'setup.cfg' else: - setup_cfg = os.path.join(os.path.dirname(setup_file), 'setup.cfg') + setup_cfg = os.path.join(os.path.dirname(setup_file), "setup.cfg") if os.path.isfile(setup_cfg): # read_configuration returns a dict of dicts. Each dict (keys: 'metadata', # 'options'), if present, provides keyword arguments for the setup function. @@ -77,12 +85,13 @@ def setup(**kw): numpy_setup = None versioneer = None - if 'versioneer' in sys.modules: - versioneer = sys.modules['versioneer'] - del sys.modules['versioneer'] + if "versioneer" in sys.modules: + versioneer = sys.modules["versioneer"] + del sys.modules["versioneer"] try: import numpy.distutils.core + numpy_setup = numpy.distutils.core.setup numpy.distutils.core.setup = setup except ImportError: @@ -90,19 +99,19 @@ def setup(**kw): setuptools.setup = distutils.core.setup = setup ns = { - '__name__': '__main__', - '__doc__': None, - '__file__': setup_file, + "__name__": "__main__", + "__doc__": None, + "__file__": setup_file, } if os.path.isfile(setup_file): with open(setup_file) as f: - code = compile(f.read(), setup_file, 'exec', dont_inherit=1) + code = compile(f.read(), setup_file, "exec", dont_inherit=1) exec(code, ns, ns) else: if not permit_undefined_jinja: - raise TypeError(f'{setup_file} is not a file that can be read') + raise TypeError(f"{setup_file} is not a file that can be read") - sys.modules['versioneer'] = versioneer + sys.modules["versioneer"] = versioneer distutils.core.setup = distutils_setup setuptools.setup = setuptools_setup @@ -116,26 +125,42 @@ def setup(**kw): return _setuptools_data -if __name__ == '__main__': - import json +if __name__ == "__main__": import argparse - parser = argparse.ArgumentParser(description='run setup.py file to obtain metadata') - parser.add_argument('work_dir', help=('path to work dir, where we\'ll write the output data ' - 'json, and potentially also where setup.py should be found')) - parser.add_argument('setup_file', help='path or filename of setup.py file') - parser.add_argument('--from-recipe-dir', help=('look for setup.py file in recipe ' - 'dir (as opposed to work dir)'), - default=False, action="store_true") - parser.add_argument('--recipe-dir', help=('(optional) path to recipe dir, where ' - 'setup.py should be found')) - - parser.add_argument('--permit-undefined-jinja', help=('look for setup.py file in recipe ' - 'dir (as opposed to work dir)'), - default=False, action="store_true") + import json + + parser = argparse.ArgumentParser(description="run setup.py file to obtain metadata") + parser.add_argument( + "work_dir", + help=( + "path to work dir, where we'll write the output data " + 
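The `load_setup_py_data` hunk above captures `setup()` keyword arguments by temporarily monkeypatching `setuptools.setup` (and `distutils.core.setup`) before executing `setup.py`. A minimal sketch of that capture pattern follows; the call is made directly instead of via `exec` so the example stays self-contained, and the metadata values are invented:

import setuptools

captured = {}

def fake_setup(**kwargs):
    # Record whatever the setup script would have passed to setuptools.
    captured.update(kwargs)

real_setup = setuptools.setup
setuptools.setup = fake_setup
try:
    # In the real helper this call happens inside an exec() of setup.py.
    setuptools.setup(name="example", version="1.2.3", install_requires=["requests"])
finally:
    setuptools.setup = real_setup

print(captured["name"], captured["version"], captured["install_requires"])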
"json, and potentially also where setup.py should be found" + ), + ) + parser.add_argument("setup_file", help="path or filename of setup.py file") + parser.add_argument( + "--from-recipe-dir", + help=("look for setup.py file in recipe " "dir (as opposed to work dir)"), + default=False, + action="store_true", + ) + parser.add_argument( + "--recipe-dir", + help=("(optional) path to recipe dir, where " "setup.py should be found"), + ) + + parser.add_argument( + "--permit-undefined-jinja", + help=("look for setup.py file in recipe " "dir (as opposed to work dir)"), + default=False, + action="store_true", + ) args = parser.parse_args() # we get back a dict of the setup data data = load_setup_py_data(**args.__dict__) - with open(os.path.join(args.work_dir, 'conda_build_loaded_setup_py.json'), 'w') as f: + with open( + os.path.join(args.work_dir, "conda_build_loaded_setup_py.json"), "w" + ) as f: # this is lossy. Anything that can't be serialized is either forced to None or # removed completely. json.dump(data, f, skipkeys=True, default=lambda x: None) diff --git a/conda_build/api.py b/conda_build/api.py index 82a6e1bbbd..cc31f6e339 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -9,67 +9,92 @@ but only use those kwargs in config. Config must change to support new features elsewhere. """ +import sys as _sys + # imports are done locally to keep the api clean and limited strictly # to conda-build's functionality. from os.path import dirname, expanduser, join from pathlib import Path -import sys as _sys # make the Config class available in the api namespace -from conda_build.config import (Config, get_or_merge_config, get_channel_urls, - DEFAULT_PREFIX_LENGTH as _prefix_length) +from conda_build.config import DEFAULT_PREFIX_LENGTH as _prefix_length +from conda_build.config import Config, get_channel_urls, get_or_merge_config from conda_build.utils import ensure_list as _ensure_list from conda_build.utils import expand_globs as _expand_globs from conda_build.utils import get_logger as _get_logger -def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variants=True, - finalize=True, bypass_env_check=False, **kwargs): +def render( + recipe_path, + config=None, + variants=None, + permit_unsatisfiable_variants=True, + finalize=True, + bypass_env_check=False, + **kwargs, +): """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2 templates evaluated. 
Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" - from conda_build.render import render_recipe, finalize_metadata - from conda_build.exceptions import DependencyNeedsBuildingError - from conda_build.conda_interface import NoPackagesFoundError from collections import OrderedDict + + from conda_build.conda_interface import NoPackagesFoundError + from conda_build.exceptions import DependencyNeedsBuildingError + from conda_build.render import finalize_metadata, render_recipe + config = get_or_merge_config(config, **kwargs) - metadata_tuples = render_recipe(recipe_path, bypass_env_check=bypass_env_check, - no_download_source=config.no_download_source, - config=config, variants=variants, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) + metadata_tuples = render_recipe( + recipe_path, + bypass_env_check=bypass_env_check, + no_download_source=config.no_download_source, + config=config, + variants=variants, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) output_metas = OrderedDict() for meta, download, render_in_env in metadata_tuples: if not meta.skip() or not config.trim_skip: for od, om in meta.get_output_metadata_set( - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - permit_undefined_jinja=not finalize, - bypass_env_check=bypass_env_check): + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + permit_undefined_jinja=not finalize, + bypass_env_check=bypass_env_check, + ): if not om.skip() or not config.trim_skip: - if 'type' not in od or od['type'] == 'conda': + if "type" not in od or od["type"] == "conda": if finalize and not om.final: try: - om = finalize_metadata(om, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) + om = finalize_metadata( + om, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) except (DependencyNeedsBuildingError, NoPackagesFoundError): if not permit_unsatisfiable_variants: raise # remove outputs section from output objects for simplicity - if not om.path and om.meta.get('outputs'): - om.parent_outputs = om.meta['outputs'] - del om.meta['outputs'] - - output_metas[om.dist(), om.config.variant.get('target_platform'), - tuple((var, om.config.variant[var]) - for var in om.get_used_vars())] = \ - ((om, download, render_in_env)) + if not om.path and om.meta.get("outputs"): + om.parent_outputs = om.meta["outputs"] + del om.meta["outputs"] + + output_metas[ + om.dist(), + om.config.variant.get("target_platform"), + tuple( + (var, om.config.variant[var]) + for var in om.get_used_vars() + ), + ] = (om, download, render_in_env) else: - output_metas[f"{om.type}: {om.name()}", om.config.variant.get('target_platform'), - tuple((var, om.config.variant[var]) - for var in om.get_used_vars())] = \ - ((om, download, render_in_env)) + output_metas[ + f"{om.type}: {om.name()}", + om.config.variant.get("target_platform"), + tuple( + (var, om.config.variant[var]) + for var in om.get_used_vars() + ), + ] = (om, download, render_in_env) return list(output_metas.values()) @@ -77,11 +102,17 @@ def render(recipe_path, config=None, variants=None, permit_unsatisfiable_variant def output_yaml(metadata, file_path=None, suppress_outputs=False): """Save a rendered recipe in its final form to the path given by file_path""" from conda_build.render import output_yaml + return output_yaml(metadata, file_path, suppress_outputs=suppress_outputs) -def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, config=None, - variants=None, **kwargs): +def get_output_file_paths( + 
recipe_path_or_metadata, + no_download_source=False, + config=None, + variants=None, + **kwargs, +): """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, @@ -89,12 +120,17 @@ def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, con """ from conda_build.render import bldpkg_path from conda_build.utils import get_skip_message + config = get_or_merge_config(config, **kwargs) - if hasattr(recipe_path_or_metadata, '__iter__') and not isinstance(recipe_path_or_metadata, - str): - list_of_metas = [hasattr(item[0], 'config') for item in recipe_path_or_metadata - if len(item) == 3] + if hasattr(recipe_path_or_metadata, "__iter__") and not isinstance( + recipe_path_or_metadata, str + ): + list_of_metas = [ + hasattr(item[0], "config") + for item in recipe_path_or_metadata + if len(item) == 3 + ] if list_of_metas and all(list_of_metas): metadata = recipe_path_or_metadata @@ -102,15 +138,22 @@ def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, con raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}") elif isinstance(recipe_path_or_metadata, (str, Path)): # first, render the parent recipe (potentially multiple outputs, depending on variants). - metadata = render(recipe_path_or_metadata, no_download_source=no_download_source, - variants=variants, config=config, finalize=True, **kwargs) + metadata = render( + recipe_path_or_metadata, + no_download_source=no_download_source, + variants=variants, + config=config, + finalize=True, + **kwargs, + ) else: - assert hasattr(recipe_path_or_metadata, 'config'), ("Expecting metadata object - got {}" - .format(recipe_path_or_metadata)) + assert hasattr( + recipe_path_or_metadata, "config" + ), f"Expecting metadata object - got {recipe_path_or_metadata}" metadata = [(recipe_path_or_metadata, None, None)] # Next, loop over outputs that each metadata defines outs = [] - for (m, _, _) in metadata: + for m, _, _ in metadata: if m.skip(): outs.append(get_skip_message(m)) else: @@ -118,20 +161,31 @@ def get_output_file_paths(recipe_path_or_metadata, no_download_source=False, con return sorted(list(set(outs))) -def get_output_file_path(recipe_path_or_metadata, no_download_source=False, config=None, - variants=None, **kwargs): +def get_output_file_path( + recipe_path_or_metadata, + no_download_source=False, + config=None, + variants=None, + **kwargs, +): """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ log = _get_logger(__name__) - log.warn("deprecation warning: this function has been renamed to get_output_file_paths, " - "to reflect that potentially multiple paths are returned. This function will be " - "removed in the conda-build 4.0 release.") - return get_output_file_paths(recipe_path_or_metadata, - no_download_source=no_download_source, - config=config, variants=variants, **kwargs) + log.warn( + "deprecation warning: this function has been renamed to get_output_file_paths, " + "to reflect that potentially multiple paths are returned. This function will be " + "removed in the conda-build 4.0 release." 
+ ) + return get_output_file_paths( + recipe_path_or_metadata, + no_download_source=no_download_source, + config=config, + variants=variants, + **kwargs, + ) def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs): @@ -141,24 +195,39 @@ def check(recipe_path, no_download_source=False, config=None, variants=None, **k valid fields, with some value checking. """ config = get_or_merge_config(config, **kwargs) - metadata = render(recipe_path, no_download_source=no_download_source, - config=config, variants=variants) + metadata = render( + recipe_path, + no_download_source=no_download_source, + config=config, + variants=variants, + ) return all(m[0].check_fields() for m in metadata) -def build(recipe_paths_or_metadata, post=None, need_source_download=True, - build_only=False, notest=False, config=None, variants=None, stats=None, - **kwargs): +def build( + recipe_paths_or_metadata, + post=None, + need_source_download=True, + build_only=False, + notest=False, + config=None, + variants=None, + stats=None, + **kwargs, +): """Run the build step. If recipe paths are provided, renders recipe before building. Tests built packages by default. notest=True to skip test.""" import os + from conda_build.build import build_tree from conda_build.utils import find_recipe - assert post in (None, True, False), ("post must be boolean or None. Remember, you must pass " - "other arguments (config) by keyword.") + assert post in (None, True, False), ( + "post must be boolean or None. Remember, you must pass " + "other arguments (config) by keyword." + ) recipes = [] for recipe in _ensure_list(recipe_paths_or_metadata): @@ -175,7 +244,9 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True, raise ValueError(f"Recipe passed was unrecognized object: {recipe}") if not recipes: - raise ValueError(f'No valid recipes found for input: {recipe_paths_or_metadata}') + raise ValueError( + f"No valid recipes found for input: {recipe_paths_or_metadata}" + ) return build_tree( recipes, @@ -186,18 +257,24 @@ def build(recipe_paths_or_metadata, post=None, need_source_download=True, build_only=build_only, post=post, notest=notest, - variants=variants + variants=variants, ) -def test(recipedir_or_package_or_metadata, move_broken=True, config=None, stats=None, **kwargs): +def test( + recipedir_or_package_or_metadata, + move_broken=True, + config=None, + stats=None, + **kwargs, +): """Run tests on either packages (.tar.bz2 or extracted) or recipe folders For a recipe folder, it renders the recipe enough to know what package to download, and obtains it from your currently configuured channels.""" from conda_build.build import test - if hasattr(recipedir_or_package_or_metadata, 'config'): + if hasattr(recipedir_or_package_or_metadata, "config"): config = recipedir_or_package_or_metadata.config else: config = get_or_merge_config(config, **kwargs) @@ -212,17 +289,23 @@ def test(recipedir_or_package_or_metadata, move_broken=True, config=None, stats= # doesn't already have one. What this means is that if we're # running a test immediately after build, we use the one that the # build already provided - test_result = test(recipedir_or_package_or_metadata, config=config, move_broken=move_broken, - stats=stats) + test_result = test( + recipedir_or_package_or_metadata, + config=config, + move_broken=move_broken, + stats=stats, + ) return test_result def list_skeletons(): """List available skeletons for generating conda recipes from external sources. 
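Taken together, the `api.py` functions in this hunk compose in the obvious order: render a recipe, list the outputs it would produce, then build. A hedged usage sketch based only on the signatures shown here; the recipe path is a placeholder:

from conda_build import api

recipe_dir = "./recipe"  # placeholder: a directory containing meta.yaml

# render() returns a list of (MetaData, needs_download, needs_reparse_in_env) tuples.
metadata_tuples = api.render(recipe_dir, finalize=True, bypass_env_check=False)

# get_output_file_paths() accepts either a recipe path or the rendered metadata tuples.
print(api.get_output_file_paths(metadata_tuples))

# build() also accepts either form; notest=True skips the test phase after packaging.
built_packages = api.build(recipe_dir, notest=True)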
- The returned list is generally the names of supported repositories (pypi, cran, etc.)""" + The returned list is generally the names of supported repositories (pypi, cran, etc.) + """ import pkgutil - modules = pkgutil.iter_modules([join(dirname(__file__), 'skeletons')]) + + modules = pkgutil.iter_modules([join(dirname(__file__), "skeletons")]) files = [] for _, name, _ in modules: if not name.startswith("_"): @@ -230,24 +313,25 @@ def list_skeletons(): return files -def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, - config=None, **kwargs): +def skeletonize( + packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs +): """Generate a conda recipe from an external repo. Translates metadata from external sources into expected conda recipe format.""" version = getattr(config, "version", version) if version: - kwargs.update({'version': version}) + kwargs.update({"version": version}) if recursive: - kwargs.update({'recursive': recursive}) + kwargs.update({"recursive": recursive}) if output_dir != ".": output_dir = expanduser(output_dir) - kwargs.update({'output_dir': output_dir}) + kwargs.update({"output_dir": output_dir}) # here we're dumping all extra kwargs as attributes on the config object. We'll extract # only relevant ones below config = get_or_merge_config(config, **kwargs) - config.compute_build_id('skeleton') + config.compute_build_id("skeleton") packages = _ensure_list(packages) # This is a little bit of black magic. The idea is that for any keyword argument that @@ -255,9 +339,12 @@ def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, # off of the config object, and pass it as a keyword argument. This is sort of the # inverse of what we do in the CLI code - there we take CLI arguments and dangle them # all on the config object as attributes. - module = getattr(__import__("conda_build.skeletons", globals=globals(), locals=locals(), - fromlist=[repo]), - repo) + module = getattr( + __import__( + "conda_build.skeletons", globals=globals(), locals=locals(), fromlist=[repo] + ), + repo, + ) func_args = module.skeletonize.__code__.co_varnames kwargs = {name: getattr(config, name) for name in dir(config) if name in func_args} @@ -267,72 +354,126 @@ def skeletonize(packages, repo, output_dir=".", version=None, recursive=False, if arg in kwargs: del kwargs[arg] with config: - skeleton_return = module.skeletonize(packages, output_dir=output_dir, version=version, - recursive=recursive, config=config, **kwargs) + skeleton_return = module.skeletonize( + packages, + output_dir=output_dir, + version=version, + recursive=recursive, + config=config, + **kwargs, + ) return skeleton_return -def develop(recipe_dir, prefix=_sys.prefix, no_pth_file=False, - build_ext=False, clean=False, uninstall=False): +def develop( + recipe_dir, + prefix=_sys.prefix, + no_pth_file=False, + build_ext=False, + clean=False, + uninstall=False, +): """Install a Python package in 'development mode'. 
-This works by creating a conda.pth file in site-packages.""" + This works by creating a conda.pth file in site-packages.""" from .develop import execute + recipe_dir = _ensure_list(recipe_dir) return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) -def convert(package_file, output_dir=".", show_imports=False, platforms=None, force=False, - dependencies=None, verbose=False, quiet=True, dry_run=False): +def convert( + package_file, + output_dir=".", + show_imports=False, + platforms=None, + force=False, + dependencies=None, + verbose=False, + quiet=True, + dry_run=False, +): """Convert changes a package from one platform to another. It applies only to things that are portable, such as pure python, or header-only C/C++ libraries.""" from .convert import conda_convert + platforms = _ensure_list(platforms) - if package_file.endswith('tar.bz2'): - return conda_convert(package_file, output_dir=output_dir, show_imports=show_imports, - platforms=platforms, force=force, verbose=verbose, quiet=quiet, - dry_run=dry_run, dependencies=dependencies) - elif package_file.endswith('.whl'): - raise RuntimeError('Conversion from wheel packages is not ' - 'implemented yet, stay tuned.') + if package_file.endswith("tar.bz2"): + return conda_convert( + package_file, + output_dir=output_dir, + show_imports=show_imports, + platforms=platforms, + force=force, + verbose=verbose, + quiet=quiet, + dry_run=dry_run, + dependencies=dependencies, + ) + elif package_file.endswith(".whl"): + raise RuntimeError( + "Conversion from wheel packages is not " "implemented yet, stay tuned." + ) else: raise RuntimeError("cannot convert: %s" % package_file) -def test_installable(channel='defaults'): +def test_installable(channel="defaults"): """Check to make sure that packages in channel are installable. 
This is a consistency check for the channel.""" from .inspect_pkg import test_installable + return test_installable(channel) -def inspect_linkages(packages, prefix=_sys.prefix, untracked=False, all_packages=False, - show_files=False, groupby='package', sysroot=''): +def inspect_linkages( + packages, + prefix=_sys.prefix, + untracked=False, + all_packages=False, + show_files=False, + groupby="package", + sysroot="", +): from .inspect_pkg import inspect_linkages + packages = _ensure_list(packages) - return inspect_linkages(packages, prefix=prefix, untracked=untracked, all_packages=all_packages, - show_files=show_files, groupby=groupby, sysroot=sysroot) + return inspect_linkages( + packages, + prefix=prefix, + untracked=untracked, + all_packages=all_packages, + show_files=show_files, + groupby=groupby, + sysroot=sysroot, + ) -def inspect_objects(packages, prefix=_sys.prefix, groupby='filename'): +def inspect_objects(packages, prefix=_sys.prefix, groupby="filename"): from .inspect_pkg import inspect_objects + packages = _ensure_list(packages) return inspect_objects(packages, prefix=prefix, groupby=groupby) def inspect_prefix_length(packages, min_prefix_length=_prefix_length): from conda_build.tarcheck import check_prefix_lengths + config = Config(prefix_length=min_prefix_length) packages = _ensure_list(packages) prefix_lengths = check_prefix_lengths(packages, config) if prefix_lengths: - print("Packages with binary prefixes shorter than %d characters:" - % min_prefix_length) + print( + "Packages with binary prefixes shorter than %d characters:" + % min_prefix_length + ) for fn, length in prefix_lengths.items(): print(f"{fn} ({length} chars)") else: - print("No packages found with binary prefixes shorter than %d characters." - % min_prefix_length) + print( + "No packages found with binary prefixes shorter than %d characters." 
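The inspection helpers in this part of the hunk are thin wrappers that can also be called directly; each takes a list of package names plus an optional environment prefix. A usage sketch based on the signatures shown above (the package name is only an example):

import sys
from conda_build import api

# Group a package's dynamic-library linkages by the package that provides each library.
api.inspect_linkages(["numpy"], prefix=sys.prefix, groupby="package")

# List compiled objects shipped by the package, grouped by filename.
api.inspect_objects(["numpy"], prefix=sys.prefix, groupby="filename")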
+ % min_prefix_length + ) return len(prefix_lengths) == 0 @@ -343,27 +484,63 @@ def inspect_hash_inputs(packages): from the package's info/hash_input.json file """ from .inspect_pkg import get_hash_input + return get_hash_input(packages) -def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, - dependencies=(), home=None, license_name=None, summary=None, - config=None, **kwargs): +def create_metapackage( + name, + version, + entry_points=(), + build_string=None, + build_number=0, + dependencies=(), + home=None, + license_name=None, + summary=None, + config=None, + **kwargs, +): from .metapackage import create_metapackage + config = get_or_merge_config(config, **kwargs) - return create_metapackage(name=name, version=version, entry_points=entry_points, - build_string=build_string, build_number=build_number, - dependencies=dependencies, home=home, - license_name=license_name, summary=summary, config=config) + return create_metapackage( + name=name, + version=version, + entry_points=entry_points, + build_string=build_string, + build_number=build_number, + dependencies=dependencies, + home=home, + license_name=license_name, + summary=summary, + config=config, + ) -def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False, channel_name=None, - subdir=None, threads=None, patch_generator=None, verbose=False, progress=False, - hotfix_source_repo=None, current_index_versions=None, **kwargs): - import yaml +def update_index( + dir_paths, + config=None, + force=False, + check_md5=False, + remove=False, + channel_name=None, + subdir=None, + threads=None, + patch_generator=None, + verbose=False, + progress=False, + hotfix_source_repo=None, + current_index_versions=None, + **kwargs, +): import os + + import yaml + from conda_build.index import update_index from conda_build.utils import ensure_list + dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)] if isinstance(current_index_versions, str): @@ -371,60 +548,95 @@ def update_index(dir_paths, config=None, force=False, check_md5=False, remove=Fa current_index_versions = yaml.safe_load(f) for path in dir_paths: - update_index(path, check_md5=check_md5, channel_name=channel_name, - patch_generator=patch_generator, threads=threads, verbose=verbose, - progress=progress, hotfix_source_repo=hotfix_source_repo, - subdirs=ensure_list(subdir), current_index_versions=current_index_versions, - index_file=kwargs.get('index_file', None)) - - -def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, - output_id=None, config=None, verbose=True, link_source_method='auto', **kwargs): + update_index( + path, + check_md5=check_md5, + channel_name=channel_name, + patch_generator=patch_generator, + threads=threads, + verbose=verbose, + progress=progress, + hotfix_source_repo=hotfix_source_repo, + subdirs=ensure_list(subdir), + current_index_versions=current_index_versions, + index_file=kwargs.get("index_file", None), + ) + + +def debug( + recipe_or_package_path_or_metadata_tuples, + path=None, + test=False, + output_id=None, + config=None, + verbose=True, + link_source_method="auto", + **kwargs, +): """Set up either build/host or test environments, leaving you with a quick tool to debug your package's build or test phase. 
""" - from fnmatch import fnmatch import logging import os import time - from conda_build.build import test as run_test, build as run_build - from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, on_win, LoggingContext + from fnmatch import fnmatch + + from conda_build.build import build as run_build + from conda_build.build import test as run_test + from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win + is_package = False default_config = get_or_merge_config(config, **kwargs) args = {"set_build_id": False} path_is_build_dir = False - workdirs = [os.path.join(recipe_or_package_path_or_metadata_tuples, d) - for d in (os.listdir(recipe_or_package_path_or_metadata_tuples) if - os.path.isdir(recipe_or_package_path_or_metadata_tuples) else []) - if (d.startswith('work') and - os.path.isdir(os.path.join(recipe_or_package_path_or_metadata_tuples, d)))] - metadatas_conda_debug = [os.path.join(f, "metadata_conda_debug.yaml") for f in workdirs - if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml"))] + workdirs = [ + os.path.join(recipe_or_package_path_or_metadata_tuples, d) + for d in ( + os.listdir(recipe_or_package_path_or_metadata_tuples) + if os.path.isdir(recipe_or_package_path_or_metadata_tuples) + else [] + ) + if ( + d.startswith("work") + and os.path.isdir( + os.path.join(recipe_or_package_path_or_metadata_tuples, d) + ) + ) + ] + metadatas_conda_debug = [ + os.path.join(f, "metadata_conda_debug.yaml") + for f in workdirs + if os.path.isfile(os.path.join(f, "metadata_conda_debug.yaml")) + ] metadatas_conda_debug = sorted(metadatas_conda_debug) if len(metadatas_conda_debug): path_is_build_dir = True path = recipe_or_package_path_or_metadata_tuples if not path: path = os.path.join(default_config.croot, f"debug_{int(time.time() * 1000)}") - config = get_or_merge_config(config=default_config, croot=path, verbose=verbose, _prefix_length=10, - **args) + config = get_or_merge_config( + config=default_config, croot=path, verbose=verbose, _prefix_length=10, **args + ) config.channel_urls = get_channel_urls(kwargs) metadata_tuples = [] - best_link_source_method = 'skip' + best_link_source_method = "skip" if isinstance(recipe_or_package_path_or_metadata_tuples, str): if path_is_build_dir: for metadata_conda_debug in metadatas_conda_debug: - best_link_source_method = 'symlink' + best_link_source_method = "symlink" from conda_build.metadata import MetaData + metadata = MetaData(metadata_conda_debug, config, {}) metadata_tuples.append((metadata, False, True)) else: ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] if not ext or not any(ext in _ for _ in CONDA_PACKAGE_EXTENSIONS): - metadata_tuples = render(recipe_or_package_path_or_metadata_tuples, config=config, **kwargs) + metadata_tuples = render( + recipe_or_package_path_or_metadata_tuples, config=config, **kwargs + ) else: # this is a package, we only support testing test = True @@ -436,15 +648,23 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, outputs = get_output_file_paths(metadata_tuples) matched_outputs = outputs if output_id: - matched_outputs = [_ for _ in outputs if fnmatch(os.path.basename(_), output_id)] + matched_outputs = [ + _ for _ in outputs if fnmatch(os.path.basename(_), output_id) + ] if len(matched_outputs) > 1: - raise ValueError("Specified --output-id matches more than one output ({}). 
Please refine your output id so that only " - "a single output is found.".format(matched_outputs)) + raise ValueError( + "Specified --output-id matches more than one output ({}). Please refine your output id so that only " + "a single output is found.".format(matched_outputs) + ) elif not matched_outputs: - raise ValueError(f"Specified --output-id did not match any outputs. Available outputs are: {outputs} Please check it and try again") + raise ValueError( + f"Specified --output-id did not match any outputs. Available outputs are: {outputs} Please check it and try again" + ) if len(matched_outputs) > 1 and not path_is_build_dir: - raise ValueError("More than one output found for this recipe ({}). Please use the --output-id argument to filter down " - "to a single output.".format(outputs)) + raise ValueError( + "More than one output found for this recipe ({}). Please use the --output-id argument to filter down " + "to a single output.".format(outputs) + ) else: matched_outputs = outputs @@ -452,11 +672,18 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, # make sure that none of the _placehold stuff gets added to env paths target_metadata.config.prefix_length = 10 - if best_link_source_method == 'symlink': + if best_link_source_method == "symlink": for metadata, _, _ in metadata_tuples: - debug_source_loc = os.path.join(os.sep + 'usr', 'local', 'src', 'conda', - '{}-{}'.format(metadata.get_value('package/name'), - metadata.get_value('package/version'))) + debug_source_loc = os.path.join( + os.sep + "usr", + "local", + "src", + "conda", + "{}-{}".format( + metadata.get_value("package/name"), + metadata.get_value("package/version"), + ), + ) link_target = os.path.dirname(metadata.meta_path) try: dn = os.path.dirname(debug_source_loc) @@ -468,14 +695,22 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, os.unlink(debug_source_loc) except: pass - print(f"Making debug info source symlink: {debug_source_loc} => {link_target}") + print( + f"Making debug info source symlink: {debug_source_loc} => {link_target}" + ) os.symlink(link_target, debug_source_loc) except PermissionError as e: - raise Exception("You do not have the necessary permissions to create symlinks in {}\nerror: {}" - .format(dn, str(e))) + raise Exception( + "You do not have the necessary permissions to create symlinks in {}\nerror: {}".format( + dn, str(e) + ) + ) except Exception as e: - raise Exception("Unknown error creating symlinks in {}\nerror: {}" - .format(dn, str(e))) + raise Exception( + "Unknown error creating symlinks in {}\nerror: {}".format( + dn, str(e) + ) + ) ext = ".bat" if on_win else ".sh" if verbose: @@ -488,7 +723,10 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=target_metadata.config.work_dir, source="call" if on_win else "source", - activation_file=os.path.join(target_metadata.config.work_dir, activation_file)) + activation_file=os.path.join( + target_metadata.config.work_dir, activation_file + ), + ) elif not test: with log_context: run_build(target_metadata, stats={}, provision_only=True) @@ -496,11 +734,16 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=target_metadata.config.work_dir, source="call" if on_win else "source", - activation_file=os.path.join(target_metadata.config.work_dir, activation_file)) + 
activation_file=os.path.join( + target_metadata.config.work_dir, activation_file + ), + ) else: if not is_package: - raise ValueError("Debugging for test mode is only supported for package files that already exist. " - "Please build your package first, then use it to create the debugging environment.") + raise ValueError( + "Debugging for test mode is only supported for package files that already exist. " + "Please build your package first, then use it to create the debugging environment." + ) else: test_input = recipe_or_package_path_or_metadata_tuples # use the package to create an env and extract the test files. Stop short of running the tests. @@ -511,5 +754,6 @@ def debug(recipe_or_package_path_or_metadata_tuples, path=None, test=False, activation_string = "cd {work_dir} && {source} {activation_file}\n".format( work_dir=config.test_dir, source="call" if on_win else "source", - activation_file=os.path.join(config.test_dir, activation_file)) + activation_file=os.path.join(config.test_dir, activation_file), + ) return activation_string diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index d3b901e3a0..9e9d29e162 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -7,21 +7,17 @@ import sys import time - from collections import defaultdict - from distutils.command.install import install -from distutils.errors import DistutilsOptionError, DistutilsGetoptError from distutils.dist import Distribution +from distutils.errors import DistutilsGetoptError, DistutilsOptionError -from conda_build.conda_interface import StringIO, configparser -from conda_build.conda_interface import spec_from_line -from conda_build.metadata import MetaData from conda_build import api -from conda_build.skeletons import pypi from conda_build.build import handle_anaconda_upload +from conda_build.conda_interface import StringIO, configparser, spec_from_line from conda_build.config import Config - +from conda_build.metadata import MetaData +from conda_build.skeletons import pypi # TODO: Add support for all the options that conda build has @@ -72,6 +68,7 @@ class CondaDistribution(Distribution): setup(), or 0. Overrides any conda_buildnum passed to setup(). """ + # Unfortunately, there's no way to warn the users that they need to use # distclass=CondaDistribution when they try to use a conda option to # setup(). 
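The `debug()` entry point that closes the `api.py` hunk does not drop you into the environment itself; it provisions the build or test prefix and returns a shell snippet of the form "cd <work_dir> && source <activation file>". A minimal usage sketch, with a placeholder recipe path:

from conda_build import api

# Provision (but do not run) the build environment for a recipe, then print the
# command that enters its work directory.
activation = api.debug("./recipe", verbose=False)
print("To enter the debug environment, run:")
print(activation)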
Distribution.__init__ will just print a warning when it sees an @@ -79,14 +76,14 @@ class CondaDistribution(Distribution): # attr: default conda_attrs = { - 'conda_buildnum': 0, - 'conda_buildstr': None, - 'conda_import_tests': True, - 'conda_command_tests': True, - 'conda_binary_relocation': True, - 'conda_preserve_egg_dir': None, - 'conda_features': None, - 'conda_track_features': None, + "conda_buildnum": 0, + "conda_buildstr": None, + "conda_import_tests": True, + "conda_command_tests": True, + "conda_binary_relocation": True, + "conda_preserve_egg_dir": None, + "conda_features": None, + "conda_track_features": None, } def __init__(self, attrs=None): @@ -106,8 +103,9 @@ def __init__(self, attrs=None): class bdist_conda(install): description = "create a conda package" - config = Config(build_id="bdist_conda" + "_" + str(int(time.time() * 1000)), - build_is_host=True) + config = Config( + build_id="bdist_conda" + "_" + str(int(time.time() * 1000)), build_is_host=True + ) def initialize_options(self): super().initialize_options() @@ -115,10 +113,10 @@ def initialize_options(self): self.anaconda_upload = False def finalize_options(self): - opt_dict = self.distribution.get_option_dict('install') + opt_dict = self.distribution.get_option_dict("install") if self.prefix: raise DistutilsOptionError("--prefix is not allowed") - opt_dict['prefix'] = ("bdist_conda", self.config.host_prefix) + opt_dict["prefix"] = ("bdist_conda", self.config.host_prefix) super().finalize_options() def run(self): @@ -130,8 +128,7 @@ def run(self): for attr in CondaDistribution.conda_attrs: if not hasattr(metadata, attr): - setattr(metadata, attr, - CondaDistribution.conda_attrs[attr]) + setattr(metadata, attr, CondaDistribution.conda_attrs[attr]) # The command line takes precedence if self.buildnum is not None: @@ -140,100 +137,109 @@ def run(self): d = defaultdict(dict) # PyPI allows uppercase letters but conda does not, so we fix the # name here. 
- d['package']['name'] = metadata.name.lower() - d['package']['version'] = metadata.version - d['build']['number'] = metadata.conda_buildnum + d["package"]["name"] = metadata.name.lower() + d["package"]["version"] = metadata.version + d["build"]["number"] = metadata.conda_buildnum # MetaData does the auto stuff if the build string is None - d['build']['string'] = metadata.conda_buildstr + d["build"]["string"] = metadata.conda_buildstr - d['build']['binary_relocation'] = metadata.conda_binary_relocation - d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir - d['build']['features'] = metadata.conda_features - d['build']['track_features'] = metadata.conda_track_features + d["build"]["binary_relocation"] = metadata.conda_binary_relocation + d["build"]["preserve_egg_dir"] = metadata.conda_preserve_egg_dir + d["build"]["features"] = metadata.conda_features + d["build"]["track_features"] = metadata.conda_track_features # XXX: I'm not really sure if it is correct to combine requires # and install_requires - d['requirements']['run'] = d['requirements']['build'] = \ - [spec_from_line(i) for i in - (metadata.requires or []) + - (getattr(self.distribution, 'install_requires', []) or - [])] + ['python'] - if hasattr(self.distribution, 'tests_require'): + d["requirements"]["run"] = d["requirements"]["build"] = [ + spec_from_line(i) + for i in (metadata.requires or []) + + (getattr(self.distribution, "install_requires", []) or []) + ] + ["python"] + if hasattr(self.distribution, "tests_require"): # A lot of packages use extras_require['test'], but # tests_require is the one that is officially supported by # setuptools. - d['test']['requires'] = [spec_from_line(i) for i in - self.distribution.tests_require or []] + d["test"]["requires"] = [ + spec_from_line(i) for i in self.distribution.tests_require or [] + ] - d['about']['home'] = metadata.url + d["about"]["home"] = metadata.url # Don't worry about classifiers. This isn't skeleton pypi. We # don't need to make this work with random stuff in the wild. If # someone writes their setup.py wrong and this doesn't work, it's # their fault. 
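The `run()` method above accumulates the generated recipe in a `defaultdict(dict)` so that sections such as `package`, `build`, and `requirements` can be filled in any order without being created first. The same pattern in isolation, with invented field values:

from collections import defaultdict

d = defaultdict(dict)
d["package"]["name"] = "example-pkg"   # conda package names must be lowercase
d["package"]["version"] = "1.2.3"
d["build"]["number"] = 0
d["requirements"]["run"] = ["python", "requests"]

# Convert to a plain dict before handing it to recipe consumers.
recipe = dict(d)
print(recipe)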
- d['about']['license'] = metadata.license - d['about']['summary'] = metadata.description + d["about"]["license"] = metadata.license + d["about"]["summary"] = metadata.description # This is similar logic from conda skeleton pypi - entry_points = getattr(self.distribution, 'entry_points', []) + entry_points = getattr(self.distribution, "entry_points", []) if entry_points: if isinstance(entry_points, str): # makes sure it is left-shifted - newstr = "\n".join(x.strip() for x in - entry_points.splitlines()) + newstr = "\n".join(x.strip() for x in entry_points.splitlines()) c = configparser.ConfigParser() entry_points = {} try: c.read_file(StringIO(newstr)) except Exception as err: # This seems to be the best error here - raise DistutilsGetoptError("ERROR: entry-points not understood: " + - str(err) + "\nThe string was" + newstr) + raise DistutilsGetoptError( + "ERROR: entry-points not understood: " + + str(err) + + "\nThe string was" + + newstr + ) else: for section in c.sections(): - if section in ['console_scripts', 'gui_scripts']: - value = [f'{option}={c.get(section, option)}' - for option in c.options(section)] + if section in ["console_scripts", "gui_scripts"]: + value = [ + f"{option}={c.get(section, option)}" + for option in c.options(section) + ] entry_points[section] = value else: # Make sure setuptools is added as a dependency below entry_points[section] = None if not isinstance(entry_points, dict): - raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" + - entry_points) + raise DistutilsGetoptError( + "ERROR: Could not add entry points. They were:\n" + entry_points + ) else: - rs = entry_points.get('scripts', []) - cs = entry_points.get('console_scripts', []) - gs = entry_points.get('gui_scripts', []) + rs = entry_points.get("scripts", []) + cs = entry_points.get("console_scripts", []) + gs = entry_points.get("gui_scripts", []) # We have *other* kinds of entry-points so we need # setuptools at run-time if not rs and not cs and not gs and len(entry_points) > 1: - d['requirements']['run'].append('setuptools') - d['requirements']['build'].append('setuptools') + d["requirements"]["run"].append("setuptools") + d["requirements"]["build"].append("setuptools") entry_list = rs + cs + gs - if gs and self.config.platform == 'osx': - d['build']['osx_is_app'] = True + if gs and self.config.platform == "osx": + d["build"]["osx_is_app"] = True if len(cs + gs) != 0: - d['build']['entry_points'] = entry_list + d["build"]["entry_points"] = entry_list if metadata.conda_command_tests is True: - d['test']['commands'] = list(map(str, - pypi.make_entry_tests(entry_list))) + d["test"]["commands"] = list( + map(str, pypi.make_entry_tests(entry_list)) + ) - if 'setuptools' in d['requirements']['run']: - d['build']['preserve_egg_dir'] = True + if "setuptools" in d["requirements"]["run"]: + d["build"]["preserve_egg_dir"] = True if metadata.conda_import_tests: if metadata.conda_import_tests is True: - d['test']['imports'] = ((self.distribution.packages or []) + - (self.distribution.py_modules or [])) + d["test"]["imports"] = (self.distribution.packages or []) + ( + self.distribution.py_modules or [] + ) else: - d['test']['imports'] = metadata.conda_import_tests + d["test"]["imports"] = metadata.conda_import_tests - if (metadata.conda_command_tests and not - isinstance(metadata.conda_command_tests, - bool)): - d['test']['commands'] = list(map(str, metadata.conda_command_tests)) + if metadata.conda_command_tests and not isinstance( + metadata.conda_command_tests, bool + ): + 
d["test"]["commands"] = list(map(str, metadata.conda_command_tests)) d = dict(d) self.config.keep_old_work = True @@ -241,8 +247,9 @@ def run(self): # Shouldn't fail, but do you really trust the code above? m.check_fields() m.config.set_build_id = False - m.config.variant['python'] = ".".join((str(sys.version_info.major), - str(sys.version_info.minor))) + m.config.variant["python"] = ".".join( + (str(sys.version_info.major), str(sys.version_info.minor)) + ) api.build(m, build_only=True, notest=True) self.config = m.config # prevent changes in the build ID from here, so that we're working in the same prefix @@ -252,15 +259,20 @@ def run(self): api.test(output, config=m.config) m.config.clean() if self.anaconda_upload: + class args: anaconda_upload = self.anaconda_upload + handle_anaconda_upload(output, args) else: - no_upload_message = """\ + no_upload_message = ( + """\ # If you want to upload this package to anaconda.org later, type: # # $ anaconda upload %s -""" % output +""" + % output + ) print(no_upload_message) @@ -269,12 +281,18 @@ class args: # to keep the options from the superclass (and because I don't feel like # making a metaclass just to make this work). -bdist_conda.user_options.extend([ - ('buildnum=', None, '''The build number of +bdist_conda.user_options.extend( + [ + ( + "buildnum=", + None, + """The build number of the conda package. Defaults to 0, or the conda_buildnum specified in the setup() function. The command line flag overrides the option to - setup().'''), - ('anaconda-upload', None, ("""Upload the finished package to anaconda.org""")), -]) + setup().""", + ), + ("anaconda-upload", None, ("""Upload the finished package to anaconda.org""")), + ] +) -bdist_conda.boolean_options.extend(['anaconda-upload']) +bdist_conda.boolean_options.extend(["anaconda-upload"]) diff --git a/conda_build/build.py b/conda_build/build.py index 5fa2f6ae98..fd596919d0 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1,16 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module that does most of the heavy lifting for the ``conda build`` command. -''' +""" -from collections import deque, OrderedDict +# this is to compensate for a requests idna encoding error. Conda is a better place to fix, +# eventually +# exception is raises: "LookupError: unknown encoding: idna" +# http://stackoverflow.com/a/13057751/1170370 +import encodings.idna # NOQA import fnmatch -import glob2 import json import os -import warnings -from os.path import isdir, isfile, islink, join, dirname import random import re import shutil @@ -19,80 +20,104 @@ import subprocess import sys import time - -# this is to compensate for a requests idna encoding error. 
Conda is a better place to fix, -# eventually -# exception is raises: "LookupError: unknown encoding: idna" -# http://stackoverflow.com/a/13057751/1170370 -import encodings.idna # NOQA - -from bs4 import UnicodeDammit -import yaml +import warnings +from collections import OrderedDict, deque +from os.path import dirname, isdir, isfile, islink, join import conda_package_handling.api +import glob2 +import yaml +from bs4 import UnicodeDammit +from conda import __version__ as conda_version -# used to get version -from .conda_interface import env_path_backup_var_exists -from .conda_interface import prefix_placeholder -from .conda_interface import TemporaryDirectory -from .conda_interface import PathType, FileMode -from .conda_interface import EntityEncoder -from .conda_interface import get_rc_urls -from .conda_interface import url_path -from .conda_interface import root_dir -from .conda_interface import MatchSpec -from .conda_interface import reset_context -from .conda_interface import context -from .conda_interface import UnsatisfiableError -from .conda_interface import NoPackagesFoundError -from .conda_interface import CondaError -from .conda_interface import pkgs_dirs -from .conda_interface import get_conda_channel -from .utils import (CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, env_var, glob, - shutil_move_more_retrying, tmp_chdir) +import conda_build.noarch_python as noarch_python +import conda_build.os_utils.external as external +from conda_build import __version__ as conda_build_version from conda_build import environ, source, tarcheck, utils from conda_build.config import Config +from conda_build.create_test import create_all_test_files +from conda_build.exceptions import CondaBuildException, DependencyNeedsBuildingError from conda_build.index import get_build_index, update_index -from conda_build.render import (output_yaml, bldpkg_path, render_recipe, reparse, distribute_variants, - expand_outputs, try_download, execute_download_actions, - add_upstream_pins) -import conda_build.os_utils.external as external from conda_build.metadata import FIELDS, MetaData -from conda_build.post import (post_process, post_build, - fix_permissions, get_build_metadata) - -from conda_build.exceptions import DependencyNeedsBuildingError, CondaBuildException -from conda_build.variants import (set_language_env_vars, dict_of_lists_to_list_of_dicts, - get_package_variants) -from conda_build.create_test import create_all_test_files - -import conda_build.noarch_python as noarch_python - -from conda import __version__ as conda_version -from conda_build import __version__ as conda_build_version +from conda_build.post import ( + fix_permissions, + get_build_metadata, + post_build, + post_process, +) +from conda_build.render import ( + add_upstream_pins, + bldpkg_path, + distribute_variants, + execute_download_actions, + expand_outputs, + output_yaml, + render_recipe, + reparse, + try_download, +) +from conda_build.variants import ( + dict_of_lists_to_list_of_dicts, + get_package_variants, + set_language_env_vars, +) -if sys.platform == 'win32': +# used to get version +from .conda_interface import ( + CondaError, + EntityEncoder, + FileMode, + MatchSpec, + NoPackagesFoundError, + PathType, + TemporaryDirectory, + UnsatisfiableError, + context, + env_path_backup_var_exists, + get_conda_channel, + get_rc_urls, + pkgs_dirs, + prefix_placeholder, + reset_context, + root_dir, + url_path, +) +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + 
CONDA_PACKAGE_EXTENSIONS, + env_var, + glob, + shutil_move_more_retrying, + tmp_chdir, +) + +if sys.platform == "win32": import conda_build.windows as windows -if 'bsd' in sys.platform: - shell_path = '/bin/sh' +if "bsd" in sys.platform: + shell_path = "/bin/sh" elif utils.on_win: - shell_path = 'bash' + shell_path = "bash" else: - shell_path = '/bin/bash' + shell_path = "/bin/bash" def stats_key(metadata, desc): # get the build string from whatever conda-build makes of the configuration used_loop_vars = metadata.get_used_loop_vars() - build_vars = '-'.join([k + '_' + str(metadata.config.variant[k]) for k in used_loop_vars - if k != 'target_platform']) + build_vars = "-".join( + [ + k + "_" + str(metadata.config.variant[k]) + for k in used_loop_vars + if k != "target_platform" + ] + ) # kind of a special case. Target platform determines a lot of output behavior, but may not be # explicitly listed in the recipe. - tp = metadata.config.variant.get('target_platform') - if tp and tp != metadata.config.subdir and 'target_platform' not in build_vars: - build_vars += '-target_' + tp + tp = metadata.config.variant.get("target_platform") + if tp and tp != metadata.config.subdir and "target_platform" not in build_vars: + build_vars += "-target_" + tp key = [metadata.name(), metadata.version()] if build_vars: key.append(build_vars) @@ -112,10 +137,16 @@ def log_stats(stats_dict, descriptor): " Time elapsed: {elapsed}\n" "\n".format( descriptor=descriptor, - processes=stats_dict.get('processes', 1), - cpu_sys=utils.seconds2human(stats_dict["cpu_sys"]) if stats_dict.get("cpu_sys") else "-", - cpu_user=utils.seconds2human(stats_dict["cpu_user"]) if stats_dict.get("cpu_user") else "-", - memory=utils.bytes2human(stats_dict["rss"]) if stats_dict.get("rss") else "-", + processes=stats_dict.get("processes", 1), + cpu_sys=utils.seconds2human(stats_dict["cpu_sys"]) + if stats_dict.get("cpu_sys") + else "-", + cpu_user=utils.seconds2human(stats_dict["cpu_user"]) + if stats_dict.get("cpu_user") + else "-", + memory=utils.bytes2human(stats_dict["rss"]) + if stats_dict.get("rss") + else "-", disk=utils.bytes2human(stats_dict["disk"]), elapsed=utils.seconds2human(stats_dict["elapsed"]), ) @@ -123,26 +154,28 @@ def log_stats(stats_dict, descriptor): def create_post_scripts(m): - ''' + """ Create scripts to run after build step - ''' - ext = '.bat' if utils.on_win else '.sh' - for tp in 'pre-link', 'post-link', 'pre-unlink': + """ + ext = ".bat" if utils.on_win else ".sh" + for tp in "pre-link", "post-link", "pre-unlink": # To have per-output link scripts they must be prefixed by the output name or be explicitly # specified in the build section - is_output = 'package:' not in m.get_recipe_text() + is_output = "package:" not in m.get_recipe_text() scriptname = tp if is_output: - if m.meta.get('build', {}).get(tp, ''): - scriptname = m.meta['build'][tp] + if m.meta.get("build", {}).get(tp, ""): + scriptname = m.meta["build"][tp] else: - scriptname = m.name() + '-' + tp + scriptname = m.name() + "-" + tp scriptname += ext - dst_name = '.' + m.name() + '-' + tp + ext + dst_name = "." 
+ m.name() + "-" + tp + ext src = join(m.path, scriptname) if isfile(src): - dst_dir = join(m.config.host_prefix, - 'Scripts' if m.config.host_subdir.startswith('win-') else 'bin') + dst_dir = join( + m.config.host_prefix, + "Scripts" if m.config.host_subdir.startswith("win-") else "bin", + ) if not isdir(dst_dir): os.makedirs(dst_dir, 0o775) dst = join(dst_dir, dst_name) @@ -151,9 +184,9 @@ def create_post_scripts(m): def prefix_replacement_excluded(path): - if path.endswith(('.pyc', '.pyo')) or not isfile(path): + if path.endswith((".pyc", ".pyo")) or not isfile(path): return True - if sys.platform != 'darwin' and islink(path): + if sys.platform != "darwin" and islink(path): # OSX does not allow hard-linking symbolic links, so we cannot # skip symbolic links (as we can on Linux) return True @@ -161,24 +194,24 @@ def prefix_replacement_excluded(path): def have_prefix_files(files, prefix): - ''' + """ Yields files that contain the current prefix in them, and modifies them to replace the prefix with a placeholder. :param files: Filenames to check for instances of prefix :type files: list of tuples containing strings (prefix, mode, filename) - ''' + """ prefix_bytes = prefix.encode(utils.codec) prefix_placeholder_bytes = prefix_placeholder.encode(utils.codec) searches = {prefix: prefix_bytes} if utils.on_win: # some windows libraries use unix-style path separators - forward_slash_prefix = prefix.replace('\\', '/') + forward_slash_prefix = prefix.replace("\\", "/") forward_slash_prefix_bytes = forward_slash_prefix.encode(utils.codec) searches[forward_slash_prefix] = forward_slash_prefix_bytes # some windows libraries have double backslashes as escaping - double_backslash_prefix = prefix.replace('\\', '\\\\') + double_backslash_prefix = prefix.replace("\\", "\\\\") double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) searches[double_backslash_prefix] = double_backslash_prefix_bytes searches[prefix_placeholder] = prefix_placeholder_bytes @@ -188,32 +221,40 @@ def have_prefix_files(files, prefix): # Really, ripgrep could be used on its own with a bit more work though. rg_matches = [] prefix_len = len(prefix) + 1 - rg = external.find_executable('rg') + rg = external.find_executable("rg") if rg: for rep_prefix, _ in searches.items(): try: - args = [rg, - '--unrestricted', - '--no-heading', - '--with-filename', - '--files-with-matches', - '--fixed-strings', - '--text', - rep_prefix, - prefix] + args = [ + rg, + "--unrestricted", + "--no-heading", + "--with-filename", + "--files-with-matches", + "--fixed-strings", + "--text", + rep_prefix, + prefix, + ] matches = subprocess.check_output(args) - rg_matches.extend(matches.decode('utf-8').replace('\r\n', '\n').splitlines()) + rg_matches.extend( + matches.decode("utf-8").replace("\r\n", "\n").splitlines() + ) except subprocess.CalledProcessError: continue # HACK: this is basically os.path.relpath, just simpler and faster # NOTE: path normalization needs to be in sync with create_info_files if utils.on_win: - rg_matches = [rg_match.replace('\\', '/')[prefix_len:] for rg_match in rg_matches] + rg_matches = [ + rg_match.replace("\\", "/")[prefix_len:] for rg_match in rg_matches + ] else: rg_matches = [rg_match[prefix_len:] for rg_match in rg_matches] else: - print("WARNING: Detecting which files contain PREFIX is slow, installing ripgrep makes it faster." - " 'conda install ripgrep'") + print( + "WARNING: Detecting which files contain PREFIX is slow, installing ripgrep makes it faster." 
+ " 'conda install ripgrep'" + ) for f in files: if os.path.isabs(f): @@ -230,18 +271,20 @@ def have_prefix_files(files, prefix): continue try: - fi = open(path, 'rb+') + fi = open(path, "rb+") except OSError: log = utils.get_logger(__name__) log.warn("failed to open %s for detecting prefix. Skipping it." % f) continue try: - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) + mm = utils.mmap_mmap( + fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE + ) except OSError: mm = fi.read() - mode = 'binary' if mm.find(b'\x00') != -1 else 'text' - if mode == 'text': + mode = "binary" if mm.find(b"\x00") != -1 else "text" + if mode == "text": # TODO :: Ask why we do not do this on Windows too?! if not utils.on_win and mm.find(prefix_bytes) != -1: # Use the placeholder for maximal backwards compatibility, and @@ -250,9 +293,13 @@ def have_prefix_files(files, prefix): data = mm[:] mm.close() fi.close() - rewrite_file_with_new_prefix(path, data, prefix_bytes, prefix_placeholder_bytes) - fi = open(path, 'rb+') - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) + rewrite_file_with_new_prefix( + path, data, prefix_bytes, prefix_placeholder_bytes + ) + fi = open(path, "rb+") + mm = utils.mmap_mmap( + fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE + ) for rep_prefix, rep_prefix_bytes in searches.items(): if mm.find(rep_prefix_bytes) != -1: yield (rep_prefix, mode, f) @@ -273,36 +320,51 @@ def chunks(line, n): # + 3 incase a shell is used: 1 space and 2 quotes. size = size + len(line[i]) + 3 if i == len(line) - 1: - yield line[start:i + 1] + yield line[start : i + 1] elif size > n: - yield line[start:i + 1] + yield line[start : i + 1] start = i size = 0 def get_bytes_or_text_as_bytes(parent): - if 'bytes' in parent: - return parent['bytes'] - return parent['text'].encode('utf-8') - - -def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, - also_binaries=False, debug_this=False, match_records=OrderedDict()): + if "bytes" in parent: + return parent["bytes"] + return parent["text"].encode("utf-8") + + +def regex_files_rg( + files, + prefix, + tag, + rg, + regex_rg, + replacement_re, + also_binaries=False, + debug_this=False, + match_records=OrderedDict(), +): # If we run out of space for args (could happen) we'll need to either: # 1. Batching the calls. # 2. Call for all (text?) files by passing just 'prefix' then filter out ones we don't care about (slow). # 3. Use a shell prefixed with `cd prefix && ` (could still hit size limits, just later). # I have gone for batching! 
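The batching in `regex_files_rg` above exists because a single command line cannot exceed the operating system's argument-length limit; the hunk budgets 32760 bytes on Windows and 131071 elsewhere, minus the length of the fixed ripgrep arguments. A small sketch of the same size-capped batching, independent of conda-build (the limit and file names are made up for the example):

def batch_by_arg_length(args, limit):
    """Yield runs of args whose combined length stays under the given limit."""
    batch, size = [], 0
    for arg in args:
        cost = len(arg) + 3  # +3 mirrors the hunk: one space plus two quotes per argument
        if batch and size + cost > limit:
            yield batch
            batch, size = [], 0
        batch.append(arg)
        size += cost
    if batch:
        yield batch

files = ["/prefix/lib/file{}.txt".format(i) for i in range(1000)]
batches = list(batch_by_arg_length(files, limit=4096))
print(len(batches), "batches")  # each batch would become one ripgrep invocation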
- args_base = [rg.encode('utf-8'), - b'--unrestricted', - b'--no-heading', - b'--with-filename', - b'--json', - regex_rg] - pu = prefix.encode('utf-8') - prefix_files = [os.path.join(pu, f.replace('/', os.sep).encode('utf-8')) for f in files] - args_len = len(b' '.join(args_base)) - file_lists = list(chunks(prefix_files, (32760 if utils.on_win else 131071) - args_len)) + args_base = [ + rg.encode("utf-8"), + b"--unrestricted", + b"--no-heading", + b"--with-filename", + b"--json", + regex_rg, + ] + pu = prefix.encode("utf-8") + prefix_files = [ + os.path.join(pu, f.replace("/", os.sep).encode("utf-8")) for f in files + ] + args_len = len(b" ".join(args_base)) + file_lists = list( + chunks(prefix_files, (32760 if utils.on_win else 131071) - args_len) + ) for file_list in file_lists: args = args_base[:] + file_list # This will not work now our args are binary strings: @@ -310,9 +372,11 @@ def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, # print(quote_for_shell(args)) try: if utils.on_win: - args = [a.decode('utf-8') for a in args] - matches = subprocess.check_output(args, shell=False).rstrip(b'\n').split(b'\n') - matches = b'[' + b','.join(matches) + b']\n' + args = [a.decode("utf-8") for a in args] + matches = ( + subprocess.check_output(args, shell=False).rstrip(b"\n").split(b"\n") + ) + matches = b"[" + b",".join(matches) + b"]\n" matches = json.loads(matches) except subprocess.CalledProcessError as _: # noqa # Just means rg returned 1 as no matches were found. @@ -320,61 +384,85 @@ def regex_files_rg(files, prefix, tag, rg, regex_rg, replacement_re, except Exception as e: raise e if matches: - stage = 'pre-begin' + stage = "pre-begin" for match in matches: - new_stage = match['type'] - if new_stage == 'begin': + new_stage = match["type"] + if new_stage == "begin": stage = new_stage - match_filename_begin = match['data']['path']['text'][len(prefix) + 1:].replace(os.sep, '/') - match_filename_type = 'unknown' + match_filename_begin = match["data"]["path"]["text"][ + len(prefix) + 1 : + ].replace(os.sep, "/") + match_filename_type = "unknown" # TODO :: Speed this up, and generalise it, the python version does similar. - with open(os.path.join(prefix, match_filename_begin), 'rb') as fh: + with open(os.path.join(prefix, match_filename_begin), "rb") as fh: data = mmap_or_read(fh) - match_filename_type = 'binary' if data.find(b'\x00') != -1 else 'text' - assert match_filename_type != 'unknown' - elif new_stage == 'match': + match_filename_type = ( + "binary" if data.find(b"\x00") != -1 else "text" + ) + assert match_filename_type != "unknown" + elif new_stage == "match": old_stage = stage - assert stage == 'begin' or stage == 'match' or stage == 'end' + assert stage == "begin" or stage == "match" or stage == "end" stage = new_stage - match_filename = match['data']['path']['text'][len(prefix) + 1:].replace(os.sep, '/') + match_filename = match["data"]["path"]["text"][ + len(prefix) + 1 : + ].replace(os.sep, "/") # Get stuff from the 'line' (to be consistent with the python version we ignore this). 
# match_line = get_bytes_or_text_as_bytes(match['data']['lines']) # match_line_number = match['data']['line_number'] # match_absolute_offset = match['data']['absolute_offset'] - if old_stage == 'begin': - assert match_filename_begin == match_filename, '{} != \n {}'\ - .format(match_filename_begin, match_filename) + if old_stage == "begin": + assert ( + match_filename_begin == match_filename + ), f"{match_filename_begin} != \n {match_filename}" if match_filename not in match_records: if debug_this: # We could add: #'line': match_line, 'line_number': match_line_number but it would # break our ability to compare against the python code. - match_records[match_filename] = {'type': match_filename_type, - 'submatches': []} + match_records[match_filename] = { + "type": match_filename_type, + "submatches": [], + } else: - match_records[match_filename] = {'type': match_filename_type, - 'submatches': []} - for submatch in match['data']['submatches']: - submatch_match_text = get_bytes_or_text_as_bytes(submatch['match']) - submatch_start = submatch['start'] + match['data']['absolute_offset'] - submatch_end = submatch['end'] + match['data']['absolute_offset'] + match_records[match_filename] = { + "type": match_filename_type, + "submatches": [], + } + for submatch in match["data"]["submatches"]: + submatch_match_text = get_bytes_or_text_as_bytes( + submatch["match"] + ) + submatch_start = ( + submatch["start"] + match["data"]["absolute_offset"] + ) + submatch_end = ( + submatch["end"] + match["data"]["absolute_offset"] + ) # print("{}({}) :: {}..{} = {}".format( # match_filename, match_line_number, # submatch_start, submatch_end, submatch_match_text)) - submatch_record = {'tag': tag, - 'text': submatch_match_text, - 'start': submatch_start, - 'end': submatch_end, - 'regex_re': regex_rg, - 'replacement_re': replacement_re} - if submatch_record not in match_records[match_filename]['submatches']: - match_records[match_filename]['submatches'].append(submatch_record) - elif new_stage == 'end': - assert stage == 'match' + submatch_record = { + "tag": tag, + "text": submatch_match_text, + "start": submatch_start, + "end": submatch_end, + "regex_re": regex_rg, + "replacement_re": replacement_re, + } + if ( + submatch_record + not in match_records[match_filename]["submatches"] + ): + match_records[match_filename]["submatches"].append( + submatch_record + ) + elif new_stage == "end": + assert stage == "match" stage = new_stage - elif new_stage == 'elpased_total': - assert stage == 'end' + elif new_stage == "elpased_total": + assert stage == "end" stage = new_stage - print('ELAPSED TOTAL') + print("ELAPSED TOTAL") return sort_matches(match_records) @@ -386,17 +474,25 @@ def mmap_or_read(fh): return mm -def regex_files_py(files, prefix, tag, regex_re, replacement_re, - also_binaries=False, match_records=OrderedDict()): +def regex_files_py( + files, + prefix, + tag, + regex_re, + replacement_re, + also_binaries=False, + match_records=OrderedDict(), +): import re + re_re = re.compile(regex_re) for file in files: - with open(join(prefix, file), 'rb+') as f: + with open(join(prefix, file), "rb+") as f: if os.fstat(f.fileno()).st_size == 0: continue data = mmap_or_read(f) - type = 'binary' if data.find(b'\x00') != -1 else 'text' - if not also_binaries and type == 'binary': + type = "binary" if data.find(b"\x00") != -1 else "text" + if not also_binaries and type == "binary": continue # data2 = f.read() for match in re.finditer(re_re, data): @@ -404,8 +500,7 @@ def regex_files_py(files, prefix, tag, regex_re, 
replacement_re, # absolute_offset = match.pos if file not in match_records: # Could add 'absolute_offset': absolute_offset, - match_records[file] = {'type': type, - 'submatches': []} + match_records[file] = {"type": type, "submatches": []} # else: # if match_records[file]['absolute_offset'] != absolute_offset: # print("Dropping match.pos() of {}, neq {}".format(absolute_offset, match_records[file]['absolute_offset'])) @@ -420,12 +515,16 @@ def regex_files_py(files, prefix, tag, regex_re, replacement_re, submatch_start = match.start(g_index) submatch_end = match.end(g_index) # print("found {} ({}..{})".format(submatch_match_text, submatch_start, submatch_end)) - match_records[file]['submatches'].append({'tag': tag, - 'text': submatch_match_text, - 'start': submatch_start, - 'end': submatch_end, - 'regex_re': regex_re, - 'replacement_re': replacement_re}) + match_records[file]["submatches"].append( + { + "tag": tag, + "text": submatch_match_text, + "start": submatch_start, + "end": submatch_end, + "regex_re": regex_re, + "replacement_re": replacement_re, + } + ) # assert data2[match.start(g_index):match.end(g_index)] == match_text # print(data2[match.start(g_index):match.end(g_index)]) return sort_matches(match_records) @@ -435,37 +534,48 @@ def regex_matches_tighten_re(match_records, regex_re, tag=None): # Do we need to shrink the matches? if match_records: import re + re_re = re.compile(regex_re) for filename, match in match_records.items(): - for submatch in match['submatches']: - if tag and submatch['tag'] != tag: + for submatch in match["submatches"]: + if tag and submatch["tag"] != tag: continue - match_re = re.match(re_re, submatch['text']) + match_re = re.match(re_re, submatch["text"]) if match_re: groups = match_re.groups() if groups: match_tigher = match_re.group(len(groups)) else: match_tigher = str(match_re) - if match_tigher != submatch['text']: + if match_tigher != submatch["text"]: # Assert we can find submatches correctly at their start and end in the line. - if 'line' in match: - assert (match['line'][submatch['start'] - - match['absolute_offset']:submatch['end'] - - match['absolute_offset']] == submatch['text']) - index = submatch['text'].find(match_tigher) + if "line" in match: + assert ( + match["line"][ + submatch["start"] + - match["absolute_offset"] : submatch["end"] + - match["absolute_offset"] + ] + == submatch["text"] + ) + index = submatch["text"].find(match_tigher) assert index != -1 - submatch['start'] += index - submatch['end'] = submatch['start'] + len(match_tigher) + submatch["start"] += index + submatch["end"] = submatch["start"] + len(match_tigher) # print("from {} to {} (index={})".format(submatch['text'], match_tigher, index)) - submatch['text'] = match_tigher + submatch["text"] = match_tigher # Assert we can still find submatches correctly at their start and end in the line. - if 'line' in match: - assert (match['line'][submatch['start'] - - match['absolute_offset']:submatch['end'] - - match['absolute_offset']] == submatch['text']) + if "line" in match: + assert ( + match["line"][ + submatch["start"] + - match["absolute_offset"] : submatch["end"] + - match["absolute_offset"] + ] + == submatch["text"] + ) # Even if the match was not tighter we overwrite the regex. 
- submatch['regex_re'] = regex_re + submatch["regex_re"] = regex_re else: print("ERROR :: Tighter regex_re does not match") return sort_matches(match_records) @@ -475,7 +585,7 @@ def regex_matches_tighten_re(match_records, regex_re, tag=None): def sort_matches(match_records): match_records_o = OrderedDict(sorted(match_records.items())) for file, match in match_records_o.items(): - match['submatches'] = sorted(match['submatches'], key=lambda x: x['start']) + match["submatches"] = sorted(match["submatches"], key=lambda x: x["start"]) return match_records_o @@ -483,19 +593,36 @@ def check_matches(prefix, match_records): print("::CHECKING MATCHES::") for file, match in match_records.items(): data = None - with open(join(prefix, file), 'rb+') as f: + with open(join(prefix, file), "rb+") as f: data = f.read() if data: - for submatch in match['submatches']: - file_content = data[submatch['start']:submatch['end']] - if file_content != submatch['text']: - print("ERROR :: file_content {} != submatch {}".format(file_content, submatch['text'])) - print("{} :: ({}..{}) = {}".format(file, submatch['start'], submatch['end'], submatch['text'])) + for submatch in match["submatches"]: + file_content = data[submatch["start"] : submatch["end"]] + if file_content != submatch["text"]: + print( + "ERROR :: file_content {} != submatch {}".format( + file_content, submatch["text"] + ) + ) + print( + "{} :: ({}..{}) = {}".format( + file, submatch["start"], submatch["end"], submatch["text"] + ) + ) -def have_regex_files(files, prefix, tag, regex_re, replacement_re, - also_binaries=False, match_records={}, regex_rg=None, debug=False): - ''' +def have_regex_files( + files, + prefix, + tag, + regex_re, + replacement_re, + also_binaries=False, + match_records={}, + regex_rg=None, + debug=False, +): + """ :param files: Filenames to check for instances of regex_re :param prefix: Prefix in which to search for these files :param regex_re: The regex to use @@ -509,44 +636,61 @@ def have_regex_files(files, prefix, tag, regex_re, replacement_re, decision. 
:param match_records: A dictionary of previous results should you wish to augment it :return: input match_records augmented with matches - ''' + """ if not len(files): return match_records import copy - match_records_rg, match_records_re = copy.deepcopy(match_records), copy.deepcopy(match_records) + + match_records_rg, match_records_re = copy.deepcopy(match_records), copy.deepcopy( + match_records + ) if not isinstance(regex_re, (bytes, bytearray)): - regex_re = regex_re.encode('utf-8') + regex_re = regex_re.encode("utf-8") if regex_rg and not isinstance(regex_rg, (bytes, bytearray)): - regex_rg = regex_rg.encode('utf-8') - rg = external.find_executable('rg') + regex_rg = regex_rg.encode("utf-8") + rg = external.find_executable("rg") if rg: - match_records_rg = regex_files_rg(files, prefix, tag, - rg, - regex_rg if regex_rg else regex_re, - replacement_re, - also_binaries=also_binaries, - debug_this=debug, - match_records=match_records_rg) + match_records_rg = regex_files_rg( + files, + prefix, + tag, + rg, + regex_rg if regex_rg else regex_re, + replacement_re, + also_binaries=also_binaries, + debug_this=debug, + match_records=match_records_rg, + ) if regex_rg and regex_re: match_records_rg = regex_matches_tighten_re(match_records_rg, regex_re, tag) if not rg or debug: - match_records_re = regex_files_py(files, prefix, tag, - regex_re if regex_re else regex_rg, - replacement_re, - also_binaries=also_binaries, - match_records=match_records_re) + match_records_re = regex_files_py( + files, + prefix, + tag, + regex_re if regex_re else regex_rg, + replacement_re, + also_binaries=also_binaries, + match_records=match_records_re, + ) if debug: check_matches(prefix, match_records_rg) check_matches(prefix, match_records_re) if match_records_rg != match_records_re: - for (k, v), (k2, v2) in zip(match_records_rg.items(), match_records_re.items()): + for (k, v), (k2, v2) in zip( + match_records_rg.items(), match_records_re.items() + ): if k != k2: print(f"File Mismatch:\n{k}\n{k2}") elif v != v2: print(f"Match Mismatch ({v}):\n{v2}\n{k}") - for submatch, submatch2 in zip(v['submatches'], v2['submatches']): + for submatch, submatch2 in zip( + v["submatches"], v2["submatches"] + ): if submatch != submatch2: - print(f"Submatch Mismatch ({submatch}):\n{submatch2}\n{k}") + print( + f"Submatch Mismatch ({submatch}):\n{submatch2}\n{k}" + ) return match_records_rg if rg else match_records_re @@ -556,7 +700,7 @@ def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix): st = os.stat(path) data = data.replace(old_prefix, new_prefix) # Save as - with open(path, 'wb') as fo: + with open(path, "wb") as fo: fo.write(data) os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w return data @@ -565,61 +709,73 @@ def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix): def perform_replacements(matches, prefix, verbose=False, diff=None): for file, match in matches.items(): filename = os.path.join(prefix, file) - filename_tmp = filename + '.cbpatch.tmp' + filename_tmp = filename + ".cbpatch.tmp" if os.path.exists(filename_tmp): os.unlink() shutil.copy2(filename, filename_tmp) - filename_short = filename.replace(prefix + os.sep, '') - print("Patching '{}' in {} {}".format(filename_short, - len(match['submatches']), - 'places' if len(match['submatches']) > 1 else 'place')) - with open(filename_tmp, 'wb+') as file_tmp: + filename_short = filename.replace(prefix + os.sep, "") + print( + "Patching '{}' in {} {}".format( + filename_short, + len(match["submatches"]), + "places" if 
len(match["submatches"]) > 1 else "place", + ) + ) + with open(filename_tmp, "wb+") as file_tmp: file_tmp.truncate() - with open(filename, 'rb') as file: + with open(filename, "rb") as file: last_index = 0 - for submatch in match['submatches']: - length = submatch['start'] - last_index + for submatch in match["submatches"]: + length = submatch["start"] - last_index data = file.read(length) assert len(data) == length file_tmp.write(data) - original = submatch['text'] + original = submatch["text"] # Ideally you wouldn't pass to this function any submatches with replacement_re of None, # Still, it's easily handled. - if submatch['replacement_re']: - replacement_re = submatch['replacement_re'] + if submatch["replacement_re"]: + replacement_re = submatch["replacement_re"] if not isinstance(replacement_re, (bytes, bytearray)): - replacement_re = replacement_re.encode('utf-8') - new_string = re.sub(submatch['regex_re'], replacement_re, original) + replacement_re = replacement_re.encode("utf-8") + new_string = re.sub( + submatch["regex_re"], replacement_re, original + ) else: new_string = original - if match['type'] == 'binary': + if match["type"] == "binary": if len(original) < len(new_string): - print("ERROR :: Cannot replace {} with {} in binary file {}".format(original, - new_string, - filename)) - new_string = new_string.ljust(len(original), b'\0') + print( + "ERROR :: Cannot replace {} with {} in binary file {}".format( + original, new_string, filename + ) + ) + new_string = new_string.ljust(len(original), b"\0") assert len(new_string) == len(original) file_tmp.write(new_string) # discarded (but also verified) actual_original = file.read(len(original)) - if match['type'] == 'binary': + if match["type"] == "binary": assert actual_original == original last_index += length + len(original) - if submatch == match['submatches'][len(match['submatches']) - 1]: + if submatch == match["submatches"][len(match["submatches"]) - 1]: # Write the remainder. data = file.read() file_tmp.write(data) # Could assert the lengths of binaries are the same here for extra safety. if os.path.exists(filename_tmp): - if diff and match['type'] == 'text': + if diff and match["type"] == "text": diffo = f"Diff returned no difference after patching {filename_short}" # Always expect an exception. try: - diffo = subprocess.check_output([diff, '-urN', filename, filename_tmp], stderr=subprocess.PIPE) - print(f'WARNING :: Non-deferred patching of "{filename}" did not change it') + diffo = subprocess.check_output( + [diff, "-urN", filename, filename_tmp], stderr=subprocess.PIPE + ) + print( + f'WARNING :: Non-deferred patching of "{filename}" did not change it' + ) except subprocess.CalledProcessError as e: diffo = e.output - print(diffo.decode('utf-8')) + print(diffo.decode("utf-8")) if os.path.exists(filename): os.unlink(filename) shutil.move(filename_tmp, filename) @@ -627,7 +783,7 @@ def perform_replacements(matches, prefix, verbose=False, diff=None): def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None): files = utils.rec_glob(path, "*") - file_paths = sorted(f.replace(path + os.sep, '') for f in files) + file_paths = sorted(f.replace(path + os.sep, "") for f in files) # when this actually has a value, we're copying the top-level recipe into a subdirectory, # so that we have record of what parent recipe produced subpackages. 
@@ -635,36 +791,47 @@ def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None): dest_dir = join(dest_dir, destination_subdir) else: # exclude meta.yaml because the json dictionary captures its content - file_paths = [f for f in file_paths if not (f == 'meta.yaml' or - f == 'conda_build_config.yaml')] + file_paths = [ + f + for f in file_paths + if not (f == "meta.yaml" or f == "conda_build_config.yaml") + ] file_paths = utils.filter_files(file_paths, path) for f in file_paths: - utils.copy_into(join(path, f), join(dest_dir, f), - timeout=config.timeout, - locking=config.locking, clobber=True) + utils.copy_into( + join(path, f), + join(dest_dir, f), + timeout=config.timeout, + locking=config.locking, + clobber=True, + ) def _copy_output_recipe(m, dest_dir): - _copy_top_level_recipe(m.path, m.config, dest_dir, 'parent') + _copy_top_level_recipe(m.path, m.config, dest_dir, "parent") this_output = m.get_rendered_output(m.name()) or {} - install_script = this_output.get('script') + install_script = this_output.get("script") build_inputs = [] inputs = [install_script] + build_inputs file_paths = [script for script in inputs if script] file_paths = utils.filter_files(file_paths, m.path) for f in file_paths: - utils.copy_into(join(m.path, f), join(dest_dir, f), - timeout=m.config.timeout, - locking=m.config.locking, clobber=True) + utils.copy_into( + join(m.path, f), + join(dest_dir, f), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) def copy_recipe(m): if m.config.include_recipe and m.include_recipe(): # store the rendered meta.yaml file, plus information about where it came from # and what version of conda-build created it - recipe_dir = join(m.config.info_dir, 'recipe') + recipe_dir = join(m.config.info_dir, "recipe") try: os.makedirs(recipe_dir) except: @@ -680,27 +847,28 @@ def copy_recipe(m): output_metadata = m.copy() # hard code the build string, so that tests don't get it mixed up - build = output_metadata.meta.get('build', {}) - build['string'] = output_metadata.build_id() - output_metadata.meta['build'] = build + build = output_metadata.meta.get("build", {}) + build["string"] = output_metadata.build_id() + output_metadata.meta["build"] = build # just for lack of confusion, don't show outputs in final rendered recipes - if 'outputs' in output_metadata.meta: - del output_metadata.meta['outputs'] - if 'parent_recipe' in output_metadata.meta.get('extra', {}): - del output_metadata.meta['extra']['parent_recipe'] + if "outputs" in output_metadata.meta: + del output_metadata.meta["outputs"] + if "parent_recipe" in output_metadata.meta.get("extra", {}): + del output_metadata.meta["extra"]["parent_recipe"] - utils.sort_list_in_nested_structure(output_metadata.meta, - ('build/script', 'test/commands')) + utils.sort_list_in_nested_structure( + output_metadata.meta, ("build/script", "test/commands") + ) rendered = output_yaml(output_metadata) if original_recipe: - with open(original_recipe, 'rb') as f: + with open(original_recipe, "rb") as f: original_recipe_text = UnicodeDammit(f.read()).unicode_markup if not original_recipe or not original_recipe_text == rendered: - with open(join(recipe_dir, "meta.yaml"), 'w') as f: + with open(join(recipe_dir, "meta.yaml"), "w") as f: f.write(f"# This file created by conda-build {conda_build_version}\n") if original_recipe: f.write("# meta.yaml template originally from:\n") @@ -708,16 +876,21 @@ def copy_recipe(m): f.write("# ------------------------------------------------\n\n") f.write(rendered) if 
original_recipe: - utils.copy_into(original_recipe, os.path.join(recipe_dir, 'meta.yaml.template'), - timeout=m.config.timeout, locking=m.config.locking, clobber=True) + utils.copy_into( + original_recipe, + os.path.join(recipe_dir, "meta.yaml.template"), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) # dump the full variant in use for this package to the recipe folder - with open(os.path.join(recipe_dir, 'conda_build_config.yaml'), 'w') as f: + with open(os.path.join(recipe_dir, "conda_build_config.yaml"), "w") as f: yaml.dump(m.config.variant, f) def copy_readme(m): - readme = m.get_value('about/readme') + readme = m.get_value("about/readme") if readme: src = join(m.config.work_dir, readme) if not isfile(src): @@ -725,8 +898,11 @@ def copy_readme(m): dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: - print("WARNING: anaconda.org only recognizes about/readme " - "as README.md and README.rst", file=sys.stderr) + print( + "WARNING: anaconda.org only recognizes about/readme " + "as README.md and README.rst", + file=sys.stderr, + ) def jsonify_info_yamls(m): @@ -739,17 +915,22 @@ def jsonify_info_yamls(m): for file in files: file = join(root, file) bn, ext = os.path.splitext(os.path.basename(file)) - if ext == '.yaml': - dst = join(m.config.info_dir, ijd, bn + '.json') + if ext == ".yaml": + dst = join(m.config.info_dir, ijd, bn + ".json") try: os.makedirs(os.path.dirname(dst)) except: pass - with open(file) as i, open(dst, 'w') as o: + with open(file) as i, open(dst, "w") as o: import yaml + yaml = yaml.full_load(i) - json.dump(yaml, o, sort_keys=True, indent=2, separators=(',', ': ')) - res.append(join(os.path.basename(m.config.info_dir), ijd, bn + '.json')) + json.dump( + yaml, o, sort_keys=True, indent=2, separators=(",", ": ") + ) + res.append( + join(os.path.basename(m.config.info_dir), ijd, bn + ".json") + ) return res @@ -762,7 +943,7 @@ def copy_license(m): def generic_copy(m, name, field): - all_files = utils.ensure_list(m.get_value(f'about/{field}', [])) + all_files = utils.ensure_list(m.get_value(f"about/{field}", [])) if not all_files: return count = 0 @@ -792,8 +973,9 @@ def generic_copy(m, name, field): filename = single_file utils.copy_into( src_file, - join(m.config.info_dir, f'{name}s', filename), m.config.timeout, - locking=m.config.locking + join(m.config.info_dir, f"{name}s", filename), + m.config.timeout, + locking=m.config.locking, ) else: raise ValueError( @@ -807,34 +989,39 @@ def generic_copy(m, name, field): def copy_recipe_log(m): # the purpose of this file is to capture some change history metadata that may tell people # why a given build was changed the way that it was - log_file = m.get_value('about/recipe_log_file') or "recipe_log.json" + log_file = m.get_value("about/recipe_log_file") or "recipe_log.json" # look in recipe folder first src_file = os.path.join(m.path, log_file) if not os.path.isfile(src_file): src_file = join(m.config.work_dir, log_file) if os.path.isfile(src_file): - utils.copy_into(src_file, - join(m.config.info_dir, 'recipe_log.json'), m.config.timeout, - locking=m.config.locking) + utils.copy_into( + src_file, + join(m.config.info_dir, "recipe_log.json"), + m.config.timeout, + locking=m.config.locking, + ) def copy_test_source_files(m, destination): - src_dir = '' + src_dir = "" if os.listdir(m.config.work_dir): src_dir = m.config.work_dir - elif hasattr(m.config, 'recipe_dir') and 
m.config.recipe_dir: - src_dir = os.path.join(m.config.recipe_dir, 'info', 'test') + elif hasattr(m.config, "recipe_dir") and m.config.recipe_dir: + src_dir = os.path.join(m.config.recipe_dir, "info", "test") src_dirs = [src_dir] - if os.path.isdir(os.path.join(src_dir, 'parent')): - src_dirs.append(os.path.join(src_dir, 'parent')) + if os.path.isdir(os.path.join(src_dir, "parent")): + src_dirs.append(os.path.join(src_dir, "parent")) for src_dir in src_dirs: if src_dir and os.path.isdir(src_dir) and src_dir != destination: - for pattern in utils.ensure_list(m.get_value('test/source_files', [])): - if utils.on_win and '\\' in pattern: - raise RuntimeError("test/source_files paths must use / " - "as the path delimiter on Windows") + for pattern in utils.ensure_list(m.get_value("test/source_files", [])): + if utils.on_win and "\\" in pattern: + raise RuntimeError( + "test/source_files paths must use / " + "as the path delimiter on Windows" + ) files = glob(join(src_dir, pattern)) if not files: msg = "Did not find any source_files for test with pattern {0}" @@ -843,17 +1030,25 @@ def copy_test_source_files(m, destination): try: # disable locking to avoid locking a temporary directory (the extracted # test folder) - utils.copy_into(f, f.replace(src_dir, destination), m.config.timeout, - locking=False, clobber=True) + utils.copy_into( + f, + f.replace(src_dir, destination), + m.config.timeout, + locking=False, + clobber=True, + ) except OSError as e: log = utils.get_logger(__name__) - log.warn("Failed to copy {} into test files. Error was: {}".format(f, - str(e))) - for ext in '.pyc', '.pyo': + log.warn( + "Failed to copy {} into test files. Error was: {}".format( + f, str(e) + ) + ) + for ext in ".pyc", ".pyo": for f in utils.get_ext_files(destination, ext): os.remove(f) - recipe_test_files = m.get_value('test/files') + recipe_test_files = m.get_value("test/files") if recipe_test_files: orig_recipe_dir = m.path for pattern in recipe_test_files: @@ -861,17 +1056,21 @@ def copy_test_source_files(m, destination): for f in files: basedir = orig_recipe_dir if not os.path.isfile(f): - basedir = os.path.join(orig_recipe_dir, 'parent') + basedir = os.path.join(orig_recipe_dir, "parent") dest = f.replace(basedir, destination) if f != dest: - utils.copy_into(f, f.replace(basedir, destination), - timeout=m.config.timeout, locking=m.config.locking, - clobber=True) + utils.copy_into( + f, + f.replace(basedir, destination), + timeout=m.config.timeout, + locking=m.config.locking, + clobber=True, + ) def write_hash_input(m): recipe_input = m.get_hash_contents() - with open(os.path.join(m.config.info_dir, 'hash_input.json'), 'w') as f: + with open(os.path.join(m.config.info_dir, "hash_input.json"), "w") as f: json.dump(recipe_input, f, indent=2) @@ -889,24 +1088,33 @@ def get_all_replacements(variant): if isinstance(variant, Config): variant = variant.variant - if not variant or 'replacements' not in variant: + if not variant or "replacements" not in variant: # short circuit if no variant or no replacements keyword return [] - repl = variant['replacements'] - assert isinstance(repl, dict), f"Found 'replacements' ({repl}), but it is not a dict" - assert 'all_replacements' in repl, f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'" - - repl = repl['all_replacements'] - assert isinstance(repl, list), f"Found 'all_replacements' ({repl}), but it is not a list" + repl = variant["replacements"] + assert isinstance( + repl, dict + ), f"Found 'replacements' ({repl}), but it is not a dict" + 
assert ( + "all_replacements" in repl + ), f"Found 'replacements' ({repl}), but it doesn't contain 'all_replacements'" + + repl = repl["all_replacements"] + assert isinstance( + repl, list + ), f"Found 'all_replacements' ({repl}), but it is not a list" if repl: - assert isinstance(repl[0], dict), f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict" + assert isinstance( + repl[0], dict + ), f"Found 'all_replacements[0]' ({repl[0]}), but it is not a dict" return repl def get_files_with_prefix(m, replacements, files_in, prefix): import time + start = time.time() # It is nonsensical to replace anything in a symlink. files = sorted(f for f in files_in if not os.path.islink(os.path.join(prefix, f))) @@ -916,82 +1124,121 @@ def get_files_with_prefix(m, replacements, files_in, prefix): if ignore_files is True: ignore_types.update((FileMode.text.name, FileMode.binary.name)) ignore_files = [] - if (not m.get_value('build/detect_binary_files_with_prefix', True if not utils.on_win else False) and - not m.get_value('build/binary_has_prefix_files', None)): + if not m.get_value( + "build/detect_binary_files_with_prefix", True if not utils.on_win else False + ) and not m.get_value("build/binary_has_prefix_files", None): ignore_types.update((FileMode.binary.name,)) - files_with_prefix = [(None, FileMode.binary.name if - open(os.path.join(prefix, f), 'rb+').read().find(b'\x00') != -1 else - FileMode.text.name, f) for f in files] + files_with_prefix = [ + ( + None, + FileMode.binary.name + if open(os.path.join(prefix, f), "rb+").read().find(b"\x00") != -1 + else FileMode.text.name, + f, + ) + for f in files + ] ignore_files.extend( - f[2] for f in files_with_prefix if (f[1] in ignore_types and - f[2] not in ignore_files) or prefix_replacement_excluded(os.path.join(prefix, f[2]))) + f[2] + for f in files_with_prefix + if (f[1] in ignore_types and f[2] not in ignore_files) + or prefix_replacement_excluded(os.path.join(prefix, f[2])) + ) files_with_prefix = [f for f in files_with_prefix if f[2] not in ignore_files] - prefix_u = prefix.replace('\\', '/') if utils.on_win else prefix + prefix_u = prefix.replace("\\", "/") if utils.on_win else prefix # If we've cross compiled on Windows to unix, chances are many files will refer to Windows # paths. - if utils.on_win or m.config.subdir.startswith('win'): + if utils.on_win or m.config.subdir.startswith("win"): # TODO :: Should we also handle MSYS2 paths (/c/blah) here? Probably! - pfx_variants = [prefix[0].upper() + prefix[1:], - prefix[0].lower() + prefix[1:], - prefix_u, - prefix_placeholder.replace('\\', '\''), - prefix_placeholder.replace('/', '\\')] + pfx_variants = [ + prefix[0].upper() + prefix[1:], + prefix[0].lower() + prefix[1:], + prefix_u, + prefix_placeholder.replace("\\", "'"), + prefix_placeholder.replace("/", "\\"), + ] # some python/json files store an escaped version of prefix - pfx_variants.extend([pfx.replace('\\', '\\\\') for pfx in pfx_variants]) + pfx_variants.extend([pfx.replace("\\", "\\\\") for pfx in pfx_variants]) else: pfx_variants = (prefix, prefix_placeholder) # replacing \ with \\ here is for regex escaping - re_test = b'(' + b'|'.join(v.encode('utf-8').replace(b'\\', b'\\\\') for v in pfx_variants) + b')' - pfx_matches = have_regex_files([f[2] for f in files_with_prefix], prefix=prefix, - tag='prefix', - regex_re=re_test, - # We definitely do not want this as a replacement_re as it'd replace - # /opt/anaconda1anaconda2anaconda3 with the prefix. As it happens we - # do not do any replacement at all here. 
- # replacement_re=prefix.encode('utf-8').replace(b'\\', b'\\\\'), - replacement_re=None, - also_binaries=True, - match_records={}, - debug=m.config.debug) + re_test = ( + b"(" + + b"|".join(v.encode("utf-8").replace(b"\\", b"\\\\") for v in pfx_variants) + + b")" + ) + pfx_matches = have_regex_files( + [f[2] for f in files_with_prefix], + prefix=prefix, + tag="prefix", + regex_re=re_test, + # We definitely do not want this as a replacement_re as it'd replace + # /opt/anaconda1anaconda2anaconda3 with the prefix. As it happens we + # do not do any replacement at all here. + # replacement_re=prefix.encode('utf-8').replace(b'\\', b'\\\\'), + replacement_re=None, + also_binaries=True, + match_records={}, + debug=m.config.debug, + ) prefixes_for_file = {} # This is for Windows mainly, though we may want to allow multiple searches at once in a file on # all OSes some-day. It is harmless to do this on all systems anyway. for filename, match in pfx_matches.items(): - prefixes_for_file[filename] = {sm['text'] for sm in match['submatches']} + prefixes_for_file[filename] = {sm["text"] for sm in match["submatches"]} files_with_prefix_new = [] - for (_, mode, filename) in files_with_prefix: + for _, mode, filename in files_with_prefix: np = filename if np in prefixes_for_file and np in pfx_matches: for pfx in prefixes_for_file[np]: - files_with_prefix_new.append((pfx.decode('utf-8'), mode, filename)) + files_with_prefix_new.append((pfx.decode("utf-8"), mode, filename)) files_with_prefix = files_with_prefix_new all_matches = {} # variant = m.config.variant if 'replacements' in m.config.variant else m.config.variants - replacement_tags = '' + replacement_tags = "" if len(replacements): last = len(replacements) - 1 for index, replacement in enumerate(replacements): - all_matches = have_regex_files(files=[f for f in files if any( - glob2.fnmatch.fnmatch(f, r) for r in replacement['glob_patterns'])], - prefix=prefix, - tag=replacement['tag'], - regex_re=replacement['regex_re'], - replacement_re=replacement['replacement_re'], - match_records=all_matches, - regex_rg=replacement['regex_rg'] if 'regex_rg' in replacement else None, - debug=m.config.debug) - replacement_tags = replacement_tags + '"' + replacement['tag'] + ('"' if - index == last else '", ') + all_matches = have_regex_files( + files=[ + f + for f in files + if any( + glob2.fnmatch.fnmatch(f, r) + for r in replacement["glob_patterns"] + ) + ], + prefix=prefix, + tag=replacement["tag"], + regex_re=replacement["regex_re"], + replacement_re=replacement["replacement_re"], + match_records=all_matches, + regex_rg=replacement["regex_rg"] if "regex_rg" in replacement else None, + debug=m.config.debug, + ) + replacement_tags = ( + replacement_tags + + '"' + + replacement["tag"] + + ('"' if index == last else '", ') + ) perform_replacements(all_matches, prefix) end = time.time() - total_replacements = sum(map(lambda i: len(all_matches[i]['submatches']), all_matches)) - print("INFO :: Time taken to mark (prefix){}\n" - " {} replacements in {} files was {:.2f} seconds".format( - f" and mark+peform ({replacement_tags})" if replacement_tags else '', - total_replacements, len(all_matches), end - start)) - ''' + total_replacements = sum( + map(lambda i: len(all_matches[i]["submatches"]), all_matches) + ) + print( + "INFO :: Time taken to mark (prefix){}\n" + " {} replacements in {} files was {:.2f} seconds".format( + f" and mark+peform ({replacement_tags})" if replacement_tags else "", + total_replacements, + len(all_matches), + end - start, + ) + ) + """ # 
Keeping this around just for a while. files_with_prefix2 = sorted(have_prefix_files(files_in, prefix)) end = time.time() @@ -1015,12 +1262,11 @@ def get_files_with_prefix(m, replacements, files_in, prefix): files1 = set([f for _, _, f in files_with_prefix]) files2 = set([f for _, _, f in files_with_prefix2]) assert not (files2 - files1), "New ripgrep prefix search missed the following files:\n{}\n".format(files2 - files1) - ''' + """ return sorted(files_with_prefix) def record_prefix_files(m, files_with_prefix): - filtered = [] if not files_with_prefix: return filtered @@ -1043,42 +1289,61 @@ def record_prefix_files(m, files_with_prefix): # Don't do it everywhere because paths on Unix can contain quotes, # and we don't have a good method of escaping, and because older # versions of conda don't support quotes in has_prefix - fmt_str = '%s %s %s\n' + fmt_str = "%s %s %s\n" print("Files containing CONDA_PREFIX") print("-----------------------------") - detect_binary_files_with_prefix = m.get_value('build/detect_binary_files_with_prefix', - not len_binary_has_prefix_files and not utils.on_win) - with open(join(m.config.info_dir, 'has_prefix'), 'w') as fo: + detect_binary_files_with_prefix = m.get_value( + "build/detect_binary_files_with_prefix", + not len_binary_has_prefix_files and not utils.on_win, + ) + with open(join(m.config.info_dir, "has_prefix"), "w") as fo: for pfix, mode, fn in files_with_prefix: ignored_because = None - if (fn in binary_has_prefix_files or ((not len_binary_has_prefix_files or - detect_binary_files_with_prefix) and mode == 'binary')): + if fn in binary_has_prefix_files or ( + (not len_binary_has_prefix_files or detect_binary_files_with_prefix) + and mode == "binary" + ): if fn in binary_has_prefix_files: - if mode != 'binary': - mode = 'binary' - elif fn in binary_has_prefix_files and detect_binary_files_with_prefix: - print("File {} force-identified as 'binary', " - "But it is 'binary' anyway, suggest removing it from " - "`build/binary_has_prefix_files`".format(fn)) + if mode != "binary": + mode = "binary" + elif ( + fn in binary_has_prefix_files + and detect_binary_files_with_prefix + ): + print( + "File {} force-identified as 'binary', " + "But it is 'binary' anyway, suggest removing it from " + "`build/binary_has_prefix_files`".format(fn) + ) if fn in binary_has_prefix_files: binary_has_prefix_files.remove(fn) - elif (fn in text_has_prefix_files or (not len_text_has_prefix_files and mode == 'text') or - os.path.dirname(fn) == 'python-scripts'): - if mode != 'text': - mode = 'text' + elif ( + fn in text_has_prefix_files + or (not len_text_has_prefix_files and mode == "text") + or os.path.dirname(fn) == "python-scripts" + ): + if mode != "text": + mode = "text" elif fn in text_has_prefix_files and not len_text_has_prefix_files: - print("File {} force-identified as 'text', " - "But it is 'text' anyway, suggest removing it from " - "`build/has_prefix_files`".format(fn)) + print( + "File {} force-identified as 'text', " + "But it is 'text' anyway, suggest removing it from " + "`build/has_prefix_files`".format(fn) + ) if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: ignored_because = " (not in build/%s_has_prefix_files)" % (mode) - print("{fn} ({mode}): {action}{reason}".format(fn=fn, mode=mode, - action="Ignoring" if ignored_because else "Patching", - reason=ignored_because if ignored_because else "")) + print( + "{fn} ({mode}): {action}{reason}".format( + fn=fn, + mode=mode, + action="Ignoring" if ignored_because else "Patching", + 
reason=ignored_because if ignored_because else "", + ) + ) if ignored_because is None: fo.write(fmt_str % (pfix, mode, fn)) filtered.append((pfix, mode, fn)) @@ -1088,7 +1353,9 @@ def record_prefix_files(m, files_with_prefix): for f in text_has_prefix_files: errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f for f in binary_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + errstr += ( + "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + ) if errstr: raise RuntimeError(errstr) @@ -1096,41 +1363,41 @@ def record_prefix_files(m, files_with_prefix): def sanitize_channel(channel): - return get_conda_channel(channel).urls(with_credentials=False, subdirs=[''])[0] + return get_conda_channel(channel).urls(with_credentials=False, subdirs=[""])[0] def write_info_files_file(m, files): - entry_point_scripts = m.get_value('build/entry_points') + entry_point_scripts = m.get_value("build/entry_points") entry_point_script_names = get_entry_point_script_names(entry_point_scripts) - mode_dict = {'mode': 'w', 'encoding': 'utf-8'} - with open(join(m.config.info_dir, 'files'), **mode_dict) as fo: - if m.noarch == 'python': + mode_dict = {"mode": "w", "encoding": "utf-8"} + with open(join(m.config.info_dir, "files"), **mode_dict) as fo: + if m.noarch == "python": for f in sorted(files): if f.find("site-packages") >= 0: - fo.write(f[f.find("site-packages"):] + '\n') + fo.write(f[f.find("site-packages") :] + "\n") elif f.startswith("bin") and (f not in entry_point_script_names): - fo.write(f.replace("bin", "python-scripts") + '\n') + fo.write(f.replace("bin", "python-scripts") + "\n") elif f.startswith("Scripts") and (f not in entry_point_script_names): - fo.write(f.replace("Scripts", "python-scripts") + '\n') + fo.write(f.replace("Scripts", "python-scripts") + "\n") else: - fo.write(f + '\n') + fo.write(f + "\n") else: for f in sorted(files): - fo.write(f + '\n') + fo.write(f + "\n") def write_link_json(m): package_metadata = OrderedDict() - noarch_type = m.get_value('build/noarch') + noarch_type = m.get_value("build/noarch") if noarch_type: noarch_type_str = str(noarch_type) noarch_dict = OrderedDict(type=noarch_type_str) if noarch_type_str.lower() == "python": - entry_points = m.get_value('build/entry_points') + entry_points = m.get_value("build/entry_points") if entry_points: - noarch_dict['entry_points'] = entry_points - package_metadata['noarch'] = noarch_dict + noarch_dict["entry_points"] = entry_points + package_metadata["noarch"] = noarch_dict preferred_env = m.get_value("build/preferred_env") if preferred_env: @@ -1144,41 +1411,45 @@ def write_link_json(m): # now changed it to info/link.json. Still, we must indefinitely keep the key name # package_metadata_version, or we break conda. 
package_metadata["package_metadata_version"] = 1 - with open(os.path.join(m.config.info_dir, "link.json"), 'w') as fh: - fh.write(json.dumps(package_metadata, sort_keys=True, indent=2, separators=(',', ': '))) + with open(os.path.join(m.config.info_dir, "link.json"), "w") as fh: + fh.write( + json.dumps( + package_metadata, sort_keys=True, indent=2, separators=(",", ": ") + ) + ) def write_about_json(m): - with open(join(m.config.info_dir, 'about.json'), 'w') as fo: + with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value('about/%s' % key) + value = m.get_value("about/%s" % key) if value: d[key] = value if default is list: d[key] = utils.ensure_list(value) # for sake of reproducibility, record some conda info - d['conda_version'] = conda_version - d['conda_build_version'] = conda_build_version + d["conda_version"] = conda_version + d["conda_build_version"] = conda_build_version # conda env will be in most, but not necessarily all installations. # Don't die if we don't see it. stripped_channels = [] for channel in get_rc_urls() + list(m.config.channel_urls): stripped_channels.append(sanitize_channel(channel)) - d['channels'] = stripped_channels - evars = ['CIO_TEST'] + d["channels"] = stripped_channels + evars = ["CIO_TEST"] - d['env_vars'] = {ev: os.getenv(ev, '') for ev in evars} + d["env_vars"] = {ev: os.getenv(ev, "") for ev in evars} # Adding this to extra since its arbitrary info - extra = m.get_section('extra') + extra = m.get_section("extra") # Add burn-in information to extra if m.config.extra_meta: extra.update(m.config.extra_meta) env = environ.Environment(root_dir) - d['root_pkgs'] = env.package_specs() + d["root_pkgs"] = env.package_specs() # Include the extra section of the metadata in the about.json - d['extra'] = extra + d["extra"] = extra json.dump(d, fo, indent=2, sort_keys=True) @@ -1186,12 +1457,12 @@ def write_info_json(m): info_index = m.info_index() if m.pin_depends: # Wtih 'strict' depends, we will have pinned run deps during rendering - if m.pin_depends == 'strict': - runtime_deps = m.meta.get('requirements', {}).get('run', []) - info_index['depends'] = runtime_deps + if m.pin_depends == "strict": + runtime_deps = m.meta.get("requirements", {}).get("run", []) + info_index["depends"] = runtime_deps else: - runtime_deps = environ.get_pinned_deps(m, 'run') - with open(join(m.config.info_dir, 'requires'), 'w') as fo: + runtime_deps = environ.get_pinned_deps(m, "run") + with open(join(m.config.info_dir, "requires"), "w") as fo: fo.write( "# This file as created when building:\n" "#\n" @@ -1203,29 +1474,29 @@ def write_info_json(m): m.config.build_subdir, ) ) - for dist in sorted(runtime_deps + [' '.join(m.dist().rsplit('-', 2))]): - fo.write('%s\n' % '='.join(dist.split())) + for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): + fo.write("%s\n" % "=".join(dist.split())) - mode_dict = {'mode': 'w', 'encoding': 'utf-8'} - with open(join(m.config.info_dir, 'index.json'), **mode_dict) as fo: + mode_dict = {"mode": "w", "encoding": "utf-8"} + with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: json.dump(info_index, fo, indent=2, sort_keys=True) def write_no_link(m, files): - no_link = m.get_value('build/no_link') + no_link = m.get_value("build/no_link") if no_link: if not isinstance(no_link, list): no_link = [no_link] - with open(join(m.config.info_dir, 'no_link'), 'w') as fo: + with open(join(m.config.info_dir, "no_link"), "w") as fo: for f in files: if 
any(fnmatch.fnmatch(f, p) for p in no_link): - fo.write(f + '\n') + fo.write(f + "\n") def get_entry_point_script_names(entry_point_scripts): scripts = [] for entry_point in entry_point_scripts: - cmd = entry_point[:entry_point.find("=")].strip() + cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: scripts.append("Scripts\\%s-script.py" % cmd) scripts.append("Scripts\\%s.exe" % cmd) @@ -1235,11 +1506,11 @@ def get_entry_point_script_names(entry_point_scripts): def write_run_exports(m): - run_exports = m.meta.get('build', {}).get('run_exports', {}) + run_exports = m.meta.get("build", {}).get("run_exports", {}) if run_exports: - with open(os.path.join(m.config.info_dir, 'run_exports.json'), 'w') as f: - if not hasattr(run_exports, 'keys'): - run_exports = {'weak': run_exports} + with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f: + if not hasattr(run_exports, "keys"): + run_exports = {"weak": run_exports} for k in utils.RUN_EXPORTS_TYPES: if k in run_exports: run_exports[k] = utils.ensure_list(run_exports[k]) @@ -1247,17 +1518,17 @@ def write_run_exports(m): def create_info_files(m, replacements, files, prefix): - ''' + """ Creates the metadata files that will be stored in the built package. :param m: Package metadata :type m: Metadata :param files: Paths to files to include in package :type files: list of str - ''' + """ if utils.on_win: # make sure we use '/' path separators in metadata - files = [_f.replace('\\', '/') for _f in files] + files = [_f.replace("\\", "/") for _f in files] if m.config.filename_hashing: write_hash_input(m) @@ -1273,47 +1544,63 @@ def create_info_files(m, replacements, files, prefix): copy_recipe_log(m) files.extend(jsonify_info_yamls(m)) - create_all_test_files(m, test_dir=join(m.config.info_dir, 'test')) + create_all_test_files(m, test_dir=join(m.config.info_dir, "test")) if m.config.copy_test_source_files: - copy_test_source_files(m, join(m.config.info_dir, 'test')) + copy_test_source_files(m, join(m.config.info_dir, "test")) write_info_files_file(m, files) files_with_prefix = get_files_with_prefix(m, replacements, files, prefix) files_with_prefix = record_prefix_files(m, files_with_prefix) - checksums = create_info_files_json_v1(m, m.config.info_dir, prefix, files, files_with_prefix) + checksums = create_info_files_json_v1( + m, m.config.info_dir, prefix, files, files_with_prefix + ) write_no_link(m, files) - sources = m.get_section('source') - if hasattr(sources, 'keys'): + sources = m.get_section("source") + if hasattr(sources, "keys"): sources = [sources] - with open(join(m.config.info_dir, 'git'), 'w', encoding='utf-8') as fo: + with open(join(m.config.info_dir, "git"), "w", encoding="utf-8") as fo: for src in sources: - if src.get('git_url'): - source.git_info(os.path.join(m.config.work_dir, src.get('folder', '')), - m.config.build_prefix, git=None, verbose=m.config.verbose, fo=fo) - - if m.get_value('app/icon'): - utils.copy_into(join(m.path, m.get_value('app/icon')), - join(m.config.info_dir, 'icon.png'), - m.config.timeout, locking=m.config.locking) + if src.get("git_url"): + source.git_info( + os.path.join(m.config.work_dir, src.get("folder", "")), + m.config.build_prefix, + git=None, + verbose=m.config.verbose, + fo=fo, + ) + + if m.get_value("app/icon"): + utils.copy_into( + join(m.path, m.get_value("app/icon")), + join(m.config.info_dir, "icon.png"), + m.config.timeout, + locking=m.config.locking, + ) return checksums def get_short_path(m, target_file): - if m.noarch == 'python': - entry_point_script_names = 
get_entry_point_script_names(m.get_value('build/entry_points')) + if m.noarch == "python": + entry_point_script_names = get_entry_point_script_names( + m.get_value("build/entry_points") + ) if target_file.find("site-packages") >= 0: - return target_file[target_file.find("site-packages"):] - elif target_file.startswith("bin") and (target_file not in entry_point_script_names): + return target_file[target_file.find("site-packages") :] + elif target_file.startswith("bin") and ( + target_file not in entry_point_script_names + ): return target_file.replace("bin", "python-scripts") - elif target_file.startswith("Scripts") and (target_file not in entry_point_script_names): + elif target_file.startswith("Scripts") and ( + target_file not in entry_point_script_names + ): return target_file.replace("Scripts", "python-scripts") else: return target_file - elif m.get_value('build/noarch_python', None): + elif m.get_value("build/noarch_python", None): return None else: return target_file @@ -1339,8 +1626,11 @@ def get_inode(file): def get_inode_paths(files, target_short_path, prefix): utils.ensure_list(files) target_short_path_inode = get_inode(join(prefix, target_short_path)) - hardlinked_files = [sp for sp in files - if os.lstat(join(prefix, sp)).st_ino == target_short_path_inode] + hardlinked_files = [ + sp + for sp in files + if os.lstat(join(prefix, sp)).st_ino == target_short_path_inode + ] return sorted(hardlinked_files) @@ -1377,14 +1667,14 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn('file %s is a symlink with no target' % path, UserWarning) + warnings.warn("file %s is a symlink with no target" % path, UserWarning) return 0 return 0 def build_info_files_json_v1(m, prefix, files, files_with_prefix): - no_link_files = m.get_value('build/no_link') + no_link_files = m.get_value("build/no_link") files_json = [] files_inodes = get_inodes(files, prefix) for fi in sorted(files): @@ -1392,7 +1682,7 @@ def build_info_files_json_v1(m, prefix, files, files_with_prefix): path = os.path.join(prefix, fi) short_path = get_short_path(m, fi) if short_path: - short_path = short_path.replace('\\', '/').replace('\\\\', '/') + short_path = short_path.replace("\\", "/").replace("\\\\", "/") file_info = { "_path": short_path, "sha256": utils.sha256_checksum(path), @@ -1415,7 +1705,11 @@ def build_info_files_json_v1(m, prefix, files, files_with_prefix): and os.stat(path).st_nlink > 1 ): target_short_path_inode = get_inode(path) - inode_paths = [files[index] for index, ino in enumerate(files_inodes) if ino == target_short_path_inode] + inode_paths = [ + files[index] + for index, ino in enumerate(files_inodes) + if ino == target_short_path_inode + ] file_info["inode_paths"] = inode_paths files_json.append(file_info) return files_json @@ -1432,20 +1726,26 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix): # don't create info/paths.json file if this is an old noarch package if not m.noarch_python: - with open(join(info_dir, 'paths.json'), "w") as files_json: - json.dump(files_json_info, files_json, sort_keys=True, indent=2, separators=(',', ': '), - cls=EntityEncoder) + with open(join(info_dir, "paths.json"), "w") as files_json: + json.dump( + files_json_info, + files_json, + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) # Return a dict of file: sha1sum. 
We could (but currently do not) # use this to detect overlap and mutated overlap. checksums = dict() for file in files_json_files: - checksums[file['_path']] = file['sha256'] + checksums[file["_path"]] = file["sha256"] return checksums def post_process_files(m, initial_prefix_files): - package_name = m.get_value('package/name') + package_name = m.get_value("package/name") host_prefix = m.config.host_prefix missing = [] for f in initial_prefix_files: @@ -1453,33 +1753,41 @@ def post_process_files(m, initial_prefix_files): missing.append(f) if len(missing): log = utils.get_logger(__name__) - log.warning("The install/build script(s) for {} deleted the following " - "files (from dependencies) from the prefix:\n{}\n" - "This will cause the post-link checks to mis-report. Please " - "try not to delete and files (DSOs in particular) from the " - "prefix".format(package_name, missing)) + log.warning( + "The install/build script(s) for {} deleted the following " + "files (from dependencies) from the prefix:\n{}\n" + "This will cause the post-link checks to mis-report. Please " + "try not to delete and files (DSOs in particular) from the " + "prefix".format(package_name, missing) + ) get_build_metadata(m) create_post_scripts(m) # this is new-style noarch, with a value of 'python' - if m.noarch != 'python': - utils.create_entry_points(m.get_value('build/entry_points'), config=m.config) + if m.noarch != "python": + utils.create_entry_points(m.get_value("build/entry_points"), config=m.config) current_prefix_files = utils.prefix_files(prefix=host_prefix) - python = (m.config.build_python if os.path.isfile(m.config.build_python) else - m.config.host_python) - post_process(package_name, m.get_value('package/version'), - sorted(current_prefix_files - initial_prefix_files), - prefix=host_prefix, - config=m.config, - preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')), - noarch=m.get_value('build/noarch'), - skip_compile_pyc=m.get_value('build/skip_compile_pyc')) + python = ( + m.config.build_python + if os.path.isfile(m.config.build_python) + else m.config.host_python + ) + post_process( + package_name, + m.get_value("package/version"), + sorted(current_prefix_files - initial_prefix_files), + prefix=host_prefix, + config=m.config, + preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")), + noarch=m.get_value("build/noarch"), + skip_compile_pyc=m.get_value("build/skip_compile_pyc"), + ) # The post processing may have deleted some files (like easy-install.pth) current_prefix_files = utils.prefix_files(prefix=host_prefix) new_files = sorted(current_prefix_files - initial_prefix_files) - ''' + """ if m.noarch == 'python' and m.config.subdir == 'win-32': # Delete any PIP-created .exe launchers and fix entry_points.txt # .. but we need to provide scripts instead here. @@ -1488,12 +1796,13 @@ def post_process_files(m, initial_prefix_files): for ff in exes: os.unlink(os.path.join(m.config.host_prefix, ff)) new_files.remove(ff) - ''' + """ new_files = utils.filter_files(new_files, prefix=host_prefix) meta_dir = m.config.meta_dir if any(meta_dir in join(host_prefix, f) for f in new_files): - meta_files = (tuple(f for f in new_files if m.config.meta_dir in - join(host_prefix, f)),) + meta_files = ( + tuple(f for f in new_files if m.config.meta_dir in join(host_prefix, f)), + ) sys.exit( "Error: Untracked file(s) {} found in conda-meta directory. This error usually comes " "from using conda in the build script. 
Avoid doing this, as it can lead to packages " @@ -1503,18 +1812,22 @@ def post_process_files(m, initial_prefix_files): ) post_build(m, new_files, build_python=python) - entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points')) - if m.noarch == 'python': + entry_point_script_names = get_entry_point_script_names( + m.get_value("build/entry_points") + ) + if m.noarch == "python": pkg_files = [fi for fi in new_files if fi not in entry_point_script_names] else: pkg_files = new_files # the legacy noarch - if m.get_value('build/noarch_python'): + if m.get_value("build/noarch_python"): noarch_python.transform(m, new_files, host_prefix) # new way: build/noarch: python - elif m.noarch == 'python': - noarch_python.populate_files(m, pkg_files, host_prefix, entry_point_script_names) + elif m.noarch == "python": + noarch_python.populate_files( + m, pkg_files, host_prefix, entry_point_script_names + ) current_prefix_files = utils.prefix_files(prefix=host_prefix) new_files = current_prefix_files - initial_prefix_files @@ -1525,9 +1838,9 @@ def post_process_files(m, initial_prefix_files): def bundle_conda(output, metadata, env, stats, **kw): log = utils.get_logger(__name__) - log.info('Packaging %s', metadata.dist()) + log.info("Packaging %s", metadata.dist()) get_all_replacements(metadata.config) - files = output.get('files', []) + files = output.get("files", []) # this is because without any requirements at all, we still need to have the host prefix exist try: @@ -1536,114 +1849,145 @@ def bundle_conda(output, metadata, env, stats, **kw): pass # Use script from recipe? - script = utils.ensure_list(metadata.get_value('build/script', None)) + script = utils.ensure_list(metadata.get_value("build/script", None)) # need to treat top-level stuff specially. build/script in top-level stuff should not be # re-run for an output with a similar name to the top-level recipe - is_output = 'package:' not in metadata.get_recipe_text() + is_output = "package:" not in metadata.get_recipe_text() # metadata.get_top_level_recipe_without_outputs is destructive to replacements. 
replacements = get_all_replacements(metadata.config) - top_build = metadata.get_top_level_recipe_without_outputs().get('build', {}) or {} + top_build = metadata.get_top_level_recipe_without_outputs().get("build", {}) or {} activate_script = metadata.activate_build_script - if (script and not output.get('script')) and (is_output or not top_build.get('script')): + if (script and not output.get("script")) and ( + is_output or not top_build.get("script") + ): # do add in activation, but only if it's not disabled activate_script = metadata.config.activate - script = '\n'.join(script) + script = "\n".join(script) suffix = "bat" if utils.on_win else "sh" - script_fn = output.get('script') or f'output_script.{suffix}' - with open(os.path.join(metadata.config.work_dir, script_fn), 'w') as f: - f.write('\n') + script_fn = output.get("script") or f"output_script.{suffix}" + with open(os.path.join(metadata.config.work_dir, script_fn), "w") as f: + f.write("\n") f.write(script) - f.write('\n') - output['script'] = script_fn + f.write("\n") + output["script"] = script_fn - if output.get('script'): + if output.get("script"): env = environ.get_dict(m=metadata) - interpreter = output.get('script_interpreter') + interpreter = output.get("script_interpreter") if not interpreter: - interpreter_and_args = guess_interpreter(output['script']) - interpreter_and_args[0] = external.find_executable(interpreter_and_args[0], - metadata.config.build_prefix) + interpreter_and_args = guess_interpreter(output["script"]) + interpreter_and_args[0] = external.find_executable( + interpreter_and_args[0], metadata.config.build_prefix + ) if not interpreter_and_args[0]: - log.error("Did not find an interpreter to run {}, looked for {}".format( - output['script'], interpreter_and_args[0])) - if 'system32' in interpreter_and_args[0] and 'bash' in interpreter_and_args[0]: - print("ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" - " use MSYS2 packages. Add `m2-base` and more (depending on what your" - " script needs) to `requirements/build` instead.") + log.error( + "Did not find an interpreter to run {}, looked for {}".format( + output["script"], interpreter_and_args[0] + ) + ) + if ( + "system32" in interpreter_and_args[0] + and "bash" in interpreter_and_args[0] + ): + print( + "ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" + " use MSYS2 packages. Add `m2-base` and more (depending on what your" + " script needs) to `requirements/build` instead." 
+ ) sys.exit(1) else: - interpreter_and_args = interpreter.split(' ') + interpreter_and_args = interpreter.split(" ") initial_files = utils.prefix_files(metadata.config.host_prefix) env_output = env.copy() - env_output['TOP_PKG_NAME'] = env['PKG_NAME'] - env_output['TOP_PKG_VERSION'] = env['PKG_VERSION'] - env_output['PKG_VERSION'] = metadata.version() - env_output['PKG_NAME'] = metadata.get_value('package/name') - env_output['RECIPE_DIR'] = metadata.path - env_output['MSYS2_PATH_TYPE'] = 'inherit' - env_output['CHERE_INVOKING'] = '1' - for var in utils.ensure_list(metadata.get_value('build/script_env')): - if '=' in var: - val = var.split('=', 1)[1] - var = var.split('=', 1)[0] + env_output["TOP_PKG_NAME"] = env["PKG_NAME"] + env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"] + env_output["PKG_VERSION"] = metadata.version() + env_output["PKG_NAME"] = metadata.get_value("package/name") + env_output["RECIPE_DIR"] = metadata.path + env_output["MSYS2_PATH_TYPE"] = "inherit" + env_output["CHERE_INVOKING"] = "1" + for var in utils.ensure_list(metadata.get_value("build/script_env")): + if "=" in var: + val = var.split("=", 1)[1] + var = var.split("=", 1)[0] elif var not in os.environ: - raise ValueError("env var '{}' specified in script_env, but is not set." - .format(var)) + raise ValueError( + f"env var '{var}' specified in script_env, but is not set." + ) else: val = os.environ[var] env_output[var] = val - dest_file = os.path.join(metadata.config.work_dir, output['script']) - utils.copy_into(os.path.join(metadata.path, output['script']), dest_file) + dest_file = os.path.join(metadata.config.work_dir, output["script"]) + utils.copy_into(os.path.join(metadata.path, output["script"]), dest_file) from os import stat + st = stat(dest_file) os.chmod(dest_file, st.st_mode | 0o200) if activate_script: _write_activation_text(dest_file, metadata) bundle_stats = {} - utils.check_call_env(interpreter_and_args + [dest_file], - cwd=metadata.config.work_dir, env=env_output, stats=bundle_stats) + utils.check_call_env( + interpreter_and_args + [dest_file], + cwd=metadata.config.work_dir, + env=env_output, + stats=bundle_stats, + ) log_stats(bundle_stats, f"bundling {metadata.name()}") if stats is not None: - stats[stats_key(metadata, f'bundle_{metadata.name()}')] = bundle_stats + stats[stats_key(metadata, f"bundle_{metadata.name()}")] = bundle_stats if files: # Files is specified by the output # we exclude the list of files that we want to keep, so post-process picks them up as "new" - keep_files = {os.path.normpath(pth) - for pth in utils.expand_globs(files, metadata.config.host_prefix)} + keep_files = { + os.path.normpath(pth) + for pth in utils.expand_globs(files, metadata.config.host_prefix) + } pfx_files = set(utils.prefix_files(metadata.config.host_prefix)) - initial_files = {item for item in (pfx_files - keep_files) - if not any(keep_file.startswith(item + os.path.sep) - for keep_file in keep_files)} - elif not output.get('script'): + initial_files = { + item + for item in (pfx_files - keep_files) + if not any( + keep_file.startswith(item + os.path.sep) for keep_file in keep_files + ) + } + elif not output.get("script"): if not metadata.always_include_files(): - log.warn("No files or script found for output {}".format(output.get('name'))) - build_deps = metadata.get_value('requirements/build') - host_deps = metadata.get_value('requirements/host') + log.warn( + "No files or script found for output {}".format(output.get("name")) + ) + build_deps = metadata.get_value("requirements/build") + host_deps = 
metadata.get_value("requirements/host") build_pkgs = [pkg.split()[0] for pkg in build_deps] host_pkgs = [pkg.split()[0] for pkg in host_deps] - dangerous_double_deps = {'python': 'PYTHON', 'r-base': 'R'} + dangerous_double_deps = {"python": "PYTHON", "r-base": "R"} for dep, env_var_name in dangerous_double_deps.items(): if all(dep in pkgs_list for pkgs_list in (build_pkgs, host_pkgs)): - raise CondaBuildException("Empty package; {0} present in build and host deps. " - "You probably picked up the build environment's {0} " - " executable. You need to alter your recipe to " - " use the {1} env var in your recipe to " - "run that executable.".format(dep, env_var_name)) - elif (dep in build_pkgs and metadata.uses_new_style_compiler_activation): - link = ("https://conda.io/docs/user-guide/tasks/build-packages/" - "define-metadata.html#host") - raise CondaBuildException("Empty package; {0} dep present in build but not " - "host requirements. You need to move your {0} dep " - "to the host requirements section. See {1} for more " - "info." .format(dep, link)) + raise CondaBuildException( + "Empty package; {0} present in build and host deps. " + "You probably picked up the build environment's {0} " + " executable. You need to alter your recipe to " + " use the {1} env var in your recipe to " + "run that executable.".format(dep, env_var_name) + ) + elif dep in build_pkgs and metadata.uses_new_style_compiler_activation: + link = ( + "https://conda.io/docs/user-guide/tasks/build-packages/" + "define-metadata.html#host" + ) + raise CondaBuildException( + "Empty package; {0} dep present in build but not " + "host requirements. You need to move your {0} dep " + "to the host requirements section. See {1} for more " + "info.".format(dep, link) + ) initial_files = set(utils.prefix_files(metadata.config.host_prefix)) for pat in metadata.always_include_files(): @@ -1657,36 +2001,48 @@ def bundle_conda(output, metadata, env, stats, **kw): log.warn("Glob %s from always_include_files does not match any files", pat) files = post_process_files(metadata, initial_files) - if output.get('name') and output.get('name') != 'conda': - assert 'bin/conda' not in files and 'Scripts/conda.exe' not in files, ("Bug in conda-build " + if output.get("name") and output.get("name") != "conda": + assert "bin/conda" not in files and "Scripts/conda.exe" not in files, ( + "Bug in conda-build " "has included conda binary in package. Please report this on the conda-build issue " - "tracker.") + "tracker." 
+ ) # first filter is so that info_files does not pick up ignored files files = utils.filter_files(files, prefix=metadata.config.host_prefix) # this is also copying things like run_test.sh into info/recipe - utils.rm_rf(os.path.join(metadata.config.info_dir, 'test')) + utils.rm_rf(os.path.join(metadata.config.info_dir, "test")) with tmp_chdir(metadata.config.host_prefix): - output['checksums'] = create_info_files(metadata, replacements, files, prefix=metadata.config.host_prefix) + output["checksums"] = create_info_files( + metadata, replacements, files, prefix=metadata.config.host_prefix + ) # here we add the info files into the prefix, so we want to re-collect the files list prefix_files = set(utils.prefix_files(metadata.config.host_prefix)) - files = utils.filter_files(prefix_files - initial_files, prefix=metadata.config.host_prefix) + files = utils.filter_files( + prefix_files - initial_files, prefix=metadata.config.host_prefix + ) - basename = '-'.join([output['name'], metadata.version(), metadata.build_id()]) + basename = "-".join([output["name"], metadata.version(), metadata.build_id()]) tmp_archives = [] final_outputs = [] cph_kwargs = {} ext = CONDA_PACKAGE_EXTENSION_V1 - if (output.get('type') == 'conda_v2' or metadata.config.conda_pkg_format == "2"): + if output.get("type") == "conda_v2" or metadata.config.conda_pkg_format == "2": ext = CONDA_PACKAGE_EXTENSION_V2 cph_kwargs["compression_tuple"] = ( - '.tar.zst', 'zstd', f'zstd:compression-level={metadata.config.zstd_compression_level}' + ".tar.zst", + "zstd", + f"zstd:compression-level={metadata.config.zstd_compression_level}", ) with TemporaryDirectory() as tmp: conda_package_handling.api.create( - metadata.config.host_prefix, files, basename + ext, out_folder=tmp, **cph_kwargs + metadata.config.host_prefix, + files, + basename + ext, + out_folder=tmp, + **cph_kwargs, ) tmp_archives = [os.path.join(tmp, basename + ext)] @@ -1701,38 +2057,57 @@ def bundle_conda(output, metadata, env, stats, **kw): from conda_verify.verify import Verify except ImportError: Verify = None - log.warn("Importing conda-verify failed. Please be sure to test your packages. " - "conda install conda-verify to make this message go away.") + log.warn( + "Importing conda-verify failed. Please be sure to test your packages. " + "conda install conda-verify to make this message go away." + ) if getattr(metadata.config, "verify", False) and Verify: verifier = Verify() - checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) + - metadata.ignore_verify_codes()) + checks_to_ignore = ( + utils.ensure_list(metadata.config.ignore_verify_codes) + + metadata.ignore_verify_codes() + ) try: - verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore, - exit_on_error=metadata.config.exit_on_verify_error) + verifier.verify_package( + path_to_package=tmp_path, + checks_to_ignore=checks_to_ignore, + exit_on_error=metadata.config.exit_on_verify_error, + ) except KeyError as e: - log.warn("Package doesn't have necessary files. It might be too old to inspect." - "Legacy noarch packages are known to fail. Full message was {}".format(e)) + log.warn( + "Package doesn't have necessary files. It might be too old to inspect." + "Legacy noarch packages are known to fail. 
Full message was {}".format( + e + ) + ) try: crossed_subdir = metadata.config.target_subdir except AttributeError: crossed_subdir = metadata.config.host_subdir - subdir = ('noarch' if (metadata.noarch or metadata.noarch_python) - else crossed_subdir) + subdir = ( + "noarch" + if (metadata.noarch or metadata.noarch_python) + else crossed_subdir + ) if metadata.config.output_folder: output_folder = os.path.join(metadata.config.output_folder, subdir) else: - output_folder = os.path.join(os.path.dirname(metadata.config.bldpkgs_dir), subdir) + output_folder = os.path.join( + os.path.dirname(metadata.config.bldpkgs_dir), subdir + ) final_output = os.path.join(output_folder, output_filename) if os.path.isfile(final_output): utils.rm_rf(final_output) # disable locking here. It's just a temp folder getting locked. Removing it proved to be # a major bottleneck. - utils.copy_into(tmp_path, final_output, metadata.config.timeout, - locking=False) + utils.copy_into( + tmp_path, final_output, metadata.config.timeout, locking=False + ) final_outputs.append(final_output) - update_index(os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1) + update_index( + os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1 + ) # clean out host prefix so that this output's files don't interfere with other outputs # We have a backup of how things were before any output scripts ran. That's @@ -1740,9 +2115,10 @@ def bundle_conda(output, metadata, env, stats, **kw): if metadata.config.keep_old_work: prefix = metadata.config.host_prefix - dest = os.path.join(os.path.dirname(prefix), - '_'.join(('_h_env_moved', metadata.dist(), - metadata.config.host_subdir))) + dest = os.path.join( + os.path.dirname(prefix), + "_".join(("_h_env_moved", metadata.dist(), metadata.config.host_subdir)), + ) shutil_move_more_retrying(prefix, dest, "host env") else: utils.rm_rf(metadata.config.host_prefix) @@ -1753,35 +2129,43 @@ def bundle_conda(output, metadata, env, stats, **kw): def bundle_wheel(output, metadata, env, stats): ext = ".bat" if utils.on_win else ".sh" with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir): - dest_file = os.path.join(metadata.config.work_dir, 'wheel_output' + ext) - with open(dest_file, 'w') as f: - f.write('\n') - f.write(f'pip wheel --wheel-dir {tmpdir} --no-deps .') - f.write('\n') + dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext) + with open(dest_file, "w") as f: + f.write("\n") + f.write(f"pip wheel --wheel-dir {tmpdir} --no-deps .") + f.write("\n") if metadata.config.activate: _write_activation_text(dest_file, metadata) # run the appropriate script env = environ.get_dict(m=metadata).copy() - env['TOP_PKG_NAME'] = env['PKG_NAME'] - env['TOP_PKG_VERSION'] = env['PKG_VERSION'] - env['PKG_VERSION'] = metadata.version() - env['PKG_NAME'] = metadata.get_value('package/name') + env["TOP_PKG_NAME"] = env["PKG_NAME"] + env["TOP_PKG_VERSION"] = env["PKG_VERSION"] + env["PKG_VERSION"] = metadata.version() + env["PKG_NAME"] = metadata.get_value("package/name") interpreter_and_args = guess_interpreter(dest_file) bundle_stats = {} - utils.check_call_env(interpreter_and_args + [dest_file], - cwd=metadata.config.work_dir, env=env, stats=bundle_stats) + utils.check_call_env( + interpreter_and_args + [dest_file], + cwd=metadata.config.work_dir, + env=env, + stats=bundle_stats, + ) log_stats(bundle_stats, f"bundling wheel {metadata.name()}") if stats is not None: - stats[stats_key(metadata, f'bundle_wheel_{metadata.name()}')] = 
bundle_stats + stats[stats_key(metadata, f"bundle_wheel_{metadata.name()}")] = bundle_stats wheel_files = glob(os.path.join(tmpdir, "*.whl")) if not wheel_files: - raise RuntimeError("Wheel creation failed. Please see output above to debug.") + raise RuntimeError( + "Wheel creation failed. Please see output above to debug." + ) wheel_file = wheel_files[0] if metadata.config.output_folder: - output_folder = os.path.join(metadata.config.output_folder, metadata.config.subdir) + output_folder = os.path.join( + metadata.config.output_folder, metadata.config.subdir + ) else: output_folder = metadata.config.bldpkgs_dir utils.copy_into(wheel_file, output_folder, locking=metadata.config.locking) @@ -1789,22 +2173,22 @@ def bundle_wheel(output, metadata, env, stats): def scan_metadata(path): - ''' + """ Scan all json files in 'path' and return a dictionary with their contents. Files are assumed to be in 'index.json' format. - ''' + """ installed = dict() - for filename in glob(os.path.join(path, '*.json')): + for filename in glob(os.path.join(path, "*.json")): with open(filename) as file: data = json.load(file) - installed[data['name']] = data + installed[data["name"]] = data return installed bundlers = { - 'conda': bundle_conda, - 'conda_v2': bundle_conda, - 'wheel': bundle_wheel, + "conda": bundle_conda, + "conda_v2": bundle_conda, + "wheel": bundle_wheel, } @@ -1812,7 +2196,7 @@ def _write_sh_activation_text(file_handle, m): cygpath_prefix = "$(cygpath -u " if utils.on_win else "" cygpath_suffix = " )" if utils.on_win else "" - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' + py_flags = "-I -m" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "-m" file_handle.write( f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" ) @@ -1834,35 +2218,36 @@ def _write_sh_activation_text(file_handle, m): # exists to identify a valid conda environment # conda 4.6 changes this one final time, by adding a '--stack' flag to the 'activate' # command, and 'activate' does not stack environments by default without that flag - history_file = join(m.config.host_prefix, 'conda-meta', 'history') + history_file = join(m.config.host_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() - host_prefix_path = ''.join((cygpath_prefix, - m.config.host_prefix.replace('\\', '\\\\'), - cygpath_suffix)) - file_handle.write(f"conda activate \"{host_prefix_path}\"\n") + open(history_file, "a").close() + host_prefix_path = "".join( + (cygpath_prefix, m.config.host_prefix.replace("\\", "\\\\"), cygpath_suffix) + ) + file_handle.write(f'conda activate "{host_prefix_path}"\n') # Write build prefix activation AFTER host prefix, so that its executables come first - build_prefix_path = ''.join((cygpath_prefix, - m.config.build_prefix.replace('\\', '\\\\'), - cygpath_suffix)) + build_prefix_path = "".join( + (cygpath_prefix, m.config.build_prefix.replace("\\", "\\\\"), cygpath_suffix) + ) # Do not stack against base env when not cross. 
- stack = '--stack' if m.is_cross else '' - file_handle.write(f"conda activate {stack} \"{build_prefix_path}\"\n") + stack = "--stack" if m.is_cross else "" + file_handle.write(f'conda activate {stack} "{build_prefix_path}"\n') from conda_build.os_utils.external import find_executable - ccache = find_executable('ccache', m.config.build_prefix, False) + + ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: if isinstance(ccache, list): ccache = ccache[0] ccache_methods = {} - ccache_methods['env_vars'] = False - ccache_methods['symlinks'] = False - ccache_methods['native'] = False - if hasattr(m.config, 'ccache_method'): + ccache_methods["env_vars"] = False + ccache_methods["symlinks"] = False + ccache_methods["native"] = False + if hasattr(m.config, "ccache_method"): ccache_methods[m.config.ccache_method] = True done_necessary_env = False for method, value in ccache_methods.items(): @@ -1872,130 +2257,180 @@ def _write_sh_activation_text(file_handle, m): # 'export CCACHE_SLOPPINESS="pch_defines,time_macros${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n') # file_handle.write('export CCACHE_CPP2=true\n') done_necessary_env = True - if method == 'symlinks': - dirname_ccache_ln_bin = join(m.config.build_prefix, 'ccache-ln-bin') - file_handle.write(f'mkdir {dirname_ccache_ln_bin}\n') - file_handle.write(f'pushd {dirname_ccache_ln_bin}\n') + if method == "symlinks": + dirname_ccache_ln_bin = join(m.config.build_prefix, "ccache-ln-bin") + file_handle.write(f"mkdir {dirname_ccache_ln_bin}\n") + file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") file_handle.write('if [ -n "$CC" ]; then\n') - file_handle.write(' [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n'.format(ccache=ccache)) - file_handle.write('fi\n') + file_handle.write( + " [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n".format( + ccache=ccache + ) + ) + file_handle.write("fi\n") file_handle.write('if [ -n "$CXX" ]; then\n') - file_handle.write(' [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n'.format(ccache=ccache)) - file_handle.write('fi\n') - file_handle.write('popd\n') + file_handle.write( + " [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n".format( + ccache=ccache + ) + ) + file_handle.write("fi\n") + file_handle.write("popd\n") # We really don't want to be doing this. 
file_handle.write(f'export "PATH={dirname_ccache_ln_bin}:$PATH"\n') - elif method == 'env_vars': + elif method == "env_vars": file_handle.write(f'export CC="{ccache} $CC"\n') file_handle.write(f'export CXX="{ccache} $CXX"\n') file_handle.write(f'export LD="{ccache} $LD"\n') - elif method == 'native': + elif method == "native": pass else: print("ccache method {} not implemented") # conda 4.4 requires a conda-meta/history file for a valid conda prefix - history_file = join(m.config.build_prefix, 'conda-meta', 'history') + history_file = join(m.config.build_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() + open(history_file, "a").close() def _write_activation_text(script_path, m): - with open(script_path, 'r+') as fh: + with open(script_path, "r+") as fh: data = fh.read() fh.seek(0) if os.path.splitext(script_path)[1].lower() == ".bat": - if m.config.build_subdir.startswith('win'): + if m.config.build_subdir.startswith("win"): from conda_build.utils import write_bat_activation_text write_bat_activation_text(fh, m) elif os.path.splitext(script_path)[1].lower() == ".sh": _write_sh_activation_text(fh, m) else: log = utils.get_logger(__name__) - log.warn("not adding activation to {} - I don't know how to do so for " - "this file type".format(script_path)) + log.warn( + "not adding activation to {} - I don't know how to do so for " + "this file type".format(script_path) + ) fh.write(data) def create_build_envs(m, notest): - build_ms_deps = m.ms_depends('build') + build_ms_deps = m.ms_depends("build") build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps] - host_ms_deps = m.ms_depends('host') + host_ms_deps = m.ms_depends("host") host_ms_deps = [utils.ensure_valid_spec(spec) for spec in host_ms_deps] m.config._merge_build_host = m.build_is_host if m.is_cross and not m.build_is_host: - host_actions = environ.get_install_actions(m.config.host_prefix, - tuple(host_ms_deps), 'host', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.host_prefix, host_actions, env='host', config=m.config, - subdir=m.config.host_subdir, is_cross=m.is_cross, - is_conda=m.name() == 'conda') + host_actions = environ.get_install_actions( + m.config.host_prefix, + tuple(host_ms_deps), + "host", + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.host_prefix, + host_actions, + env="host", + config=m.config, + subdir=m.config.host_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) if m.build_is_host: build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions(m.config.build_prefix, - tuple(build_ms_deps), 'build', - subdir=m.config.build_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - 
disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + build_actions = environ.get_install_actions( + m.config.build_prefix, + tuple(build_ms_deps), + "build", + subdir=m.config.build_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) try: if not notest: - utils.insert_variant_versions(m.meta.get('requirements', {}), - m.config.variant, 'run') - test_run_ms_deps = utils.ensure_list(m.get_value('test/requires', [])) + \ - utils.ensure_list(m.get_value('requirements/run', [])) + utils.insert_variant_versions( + m.meta.get("requirements", {}), m.config.variant, "run" + ) + test_run_ms_deps = utils.ensure_list( + m.get_value("test/requires", []) + ) + utils.ensure_list(m.get_value("requirements/run", [])) # make sure test deps are available before taking time to create build env - environ.get_install_actions(m.config.test_prefix, - tuple(test_run_ms_deps), 'test', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + environ.get_install_actions( + m.config.test_prefix, + tuple(test_run_ms_deps), + "test", + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) except DependencyNeedsBuildingError as e: # subpackages are not actually missing. We just haven't built them yet. 
from .conda_interface import MatchSpec - other_outputs = (m.other_outputs.values() if hasattr(m, 'other_outputs') else - m.get_output_metadata_set(permit_undefined_jinja=True)) - missing_deps = {MatchSpec(pkg).name for pkg in e.packages} - {out.name() for _, out in other_outputs} + other_outputs = ( + m.other_outputs.values() + if hasattr(m, "other_outputs") + else m.get_output_metadata_set(permit_undefined_jinja=True) + ) + missing_deps = {MatchSpec(pkg).name for pkg in e.packages} - { + out.name() for _, out in other_outputs + } if missing_deps: e.packages = missing_deps raise e - if (not m.config.dirty or not os.path.isdir(m.config.build_prefix) or not os.listdir(m.config.build_prefix)): - environ.create_env(m.config.build_prefix, build_actions, env='build', - config=m.config, subdir=m.config.build_subdir, - is_cross=m.is_cross, is_conda=m.name() == 'conda') + if ( + not m.config.dirty + or not os.path.isdir(m.config.build_prefix) + or not os.listdir(m.config.build_prefix) + ): + environ.create_env( + m.config.build_prefix, + build_actions, + env="build", + config=m.config, + subdir=m.config.build_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) -def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=False, - built_packages=None, notest=False, provision_only=False): - ''' +def build( + m, + stats, + post=None, + need_source_download=True, + need_reparse_in_env=False, + built_packages=None, + notest=False, + provision_only=False, +): + """ Build the package with the specified metadata. :param m: Package metadata @@ -2004,7 +2439,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa post only. False means stop just before the post. :type need_source_download: bool: if rendering failed to download source (due to missing tools), retry here after build env is populated - ''' + """ default_return = {} if not built_packages: built_packages = {} @@ -2041,7 +2476,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa top_level_pkg = m top_level_needs_finalizing = True for _, om in output_metas: - if om.skip() or (m.config.skip_existing and is_package_built(om, 'host')): + if om.skip() or (m.config.skip_existing and is_package_built(om, "host")): skipped.append(bldpkg_path(om)) else: package_locations.append(bldpkg_path(om)) @@ -2049,63 +2484,89 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa top_level_pkg = om top_level_needs_finalizing = False if not package_locations: - print("Packages for ", m.path or m.name(), "with variant {} " - "are already built and available from your configured channels " - "(including local) or are otherwise specified to be skipped." 
- .format(m.get_hash_contents())) + print( + "Packages for ", + m.path or m.name(), + "with variant {} " + "are already built and available from your configured channels " + "(including local) or are otherwise specified to be skipped.".format( + m.get_hash_contents() + ), + ) return default_return if not provision_only: printed_fns = [] for pkg in package_locations: - if (os.path.splitext(pkg)[1] and any( - os.path.splitext(pkg)[1] in ext for ext in CONDA_PACKAGE_EXTENSIONS)): + if os.path.splitext(pkg)[1] and any( + os.path.splitext(pkg)[1] in ext for ext in CONDA_PACKAGE_EXTENSIONS + ): printed_fns.append(os.path.basename(pkg)) else: printed_fns.append(pkg) print("BUILD START:", printed_fns) - environ.remove_existing_packages([m.config.bldpkgs_dir], - [pkg for pkg in package_locations if pkg not in built_packages], m.config) + environ.remove_existing_packages( + [m.config.bldpkgs_dir], + [pkg for pkg in package_locations if pkg not in built_packages], + m.config, + ) - specs = [ms.spec for ms in m.ms_depends('build')] - if any(out.get('type') == 'wheel' for out in m.meta.get('outputs', [])): - specs.extend(['pip', 'wheel']) + specs = [ms.spec for ms in m.ms_depends("build")] + if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])): + specs.extend(["pip", "wheel"]) # TODO :: This is broken. It does not respect build/script for example and also if you need git # you should add it as s build dep manually. vcs_source = m.uses_vcs_in_build if vcs_source and vcs_source not in specs: vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source - has_vcs_available = os.path.isfile(external.find_executable(vcs_executable, - m.config.build_prefix) or "") + has_vcs_available = os.path.isfile( + external.find_executable(vcs_executable, m.config.build_prefix) or "" + ) if not has_vcs_available: - if (vcs_source != "mercurial" or not any(spec.startswith('python') and "3." in spec for spec in specs)): + if vcs_source != "mercurial" or not any( + spec.startswith("python") and "3." in spec for spec in specs + ): specs.append(vcs_source) - log.warn("Your recipe depends on %s at build time (for templates), " - "but you have not listed it as a build dependency. Doing " - "so for this build.", vcs_source) + log.warn( + "Your recipe depends on %s at build time (for templates), " + "but you have not listed it as a build dependency. Doing " + "so for this build.", + vcs_source, + ) else: - raise ValueError("Your recipe uses mercurial in build, but mercurial" - " does not yet support Python 3. Please handle all of " - "your mercurial actions outside of your build script.") + raise ValueError( + "Your recipe uses mercurial in build, but mercurial" + " does not yet support Python 3. Please handle all of " + "your mercurial actions outside of your build script." 
+ ) if top_level_needs_finalizing: utils.insert_variant_versions( - top_level_pkg.meta.get('requirements', {}), top_level_pkg.config.variant, 'build') + top_level_pkg.meta.get("requirements", {}), + top_level_pkg.config.variant, + "build", + ) utils.insert_variant_versions( - top_level_pkg.meta.get('requirements', {}), top_level_pkg.config.variant, 'host') + top_level_pkg.meta.get("requirements", {}), + top_level_pkg.config.variant, + "host", + ) exclude_pattern = None - excludes = set(top_level_pkg.config.variant.get('ignore_version', [])) + excludes = set(top_level_pkg.config.variant.get("ignore_version", [])) if excludes: - for key in top_level_pkg.config.variant.get('pin_run_as_build', {}).keys(): + for key in top_level_pkg.config.variant.get( + "pin_run_as_build", {} + ).keys(): if key in excludes: excludes.remove(key) if excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in excludes)) + exclude_pattern = re.compile( + r"|".join(rf"(?:^{exc}(?:\s|$|\Z))" for exc in excludes) + ) add_upstream_pins(m, False, exclude_pattern) create_build_envs(top_level_pkg, notest) @@ -2126,7 +2587,7 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # Write out metadata for `conda debug`, making it obvious that this is what it is, must be done # after try_download() - output_yaml(m, os.path.join(m.config.work_dir, 'metadata_conda_debug.yaml')) + output_yaml(m, os.path.join(m.config.work_dir, "metadata_conda_debug.yaml")) # get_dir here might be just work, or it might be one level deeper, # dependening on the source. @@ -2141,65 +2602,83 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa utils.rm_rf(m.config.info_dir) files1 = utils.prefix_files(prefix=m.config.host_prefix) - with open(join(m.config.build_folder, 'prefix_files.txt'), 'w') as f: - f.write('\n'.join(sorted(list(files1)))) - f.write('\n') + with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f: + f.write("\n".join(sorted(list(files1)))) + f.write("\n") # Use script from recipe? - script = utils.ensure_list(m.get_value('build/script', None)) + script = utils.ensure_list(m.get_value("build/script", None)) if script: - script = '\n'.join(script) + script = "\n".join(script) if isdir(src_dir): build_stats = {} if utils.on_win: - build_file = join(m.path, 'bld.bat') + build_file = join(m.path, "bld.bat") if script: - build_file = join(src_dir, 'bld.bat') + build_file = join(src_dir, "bld.bat") import codecs - with codecs.getwriter('utf-8')(open(build_file, 'wb')) as bf: + + with codecs.getwriter("utf-8")(open(build_file, "wb")) as bf: bf.write(script) - windows.build(m, build_file, stats=build_stats, provision_only=provision_only) + windows.build( + m, build_file, stats=build_stats, provision_only=provision_only + ) else: - build_file = join(m.path, 'build.sh') + build_file = join(m.path, "build.sh") if isfile(build_file) and script: - raise CondaBuildException("Found a build.sh script and a build/script section " - "inside meta.yaml. Either remove the build.sh script " - "or remove the build/script section in meta.yaml.") + raise CondaBuildException( + "Found a build.sh script and a build/script section " + "inside meta.yaml. Either remove the build.sh script " + "or remove the build/script section in meta.yaml." + ) # There is no sense in trying to run an empty build script. 
if isfile(build_file) or script: work_file, _ = write_build_scripts(m, script, build_file) if not provision_only: - cmd = [shell_path] + (['-x'] if m.config.debug else []) + ['-o', 'errexit', work_file] + cmd = ( + [shell_path] + + (["-x"] if m.config.debug else []) + + ["-o", "errexit", work_file] + ) # rewrite long paths in stdout back to their env variables if m.config.debug or m.config.no_rewrite_stdout_env: rewrite_env = None else: - rewrite_vars = ['PREFIX', 'SRC_DIR'] + rewrite_vars = ["PREFIX", "SRC_DIR"] if not m.build_is_host: - rewrite_vars.insert(1, 'BUILD_PREFIX') - rewrite_env = { - k: env[k] - for k in rewrite_vars if k in env - } + rewrite_vars.insert(1, "BUILD_PREFIX") + rewrite_env = {k: env[k] for k in rewrite_vars if k in env} for k, v in rewrite_env.items(): - print('{} {}={}' - .format('set' if build_file.endswith('.bat') else 'export', k, v)) + print( + "{} {}={}".format( + "set" + if build_file.endswith(".bat") + else "export", + k, + v, + ) + ) # clear this, so that the activate script will get run as necessary - del env['CONDA_BUILD'] + del env["CONDA_BUILD"] # this should raise if any problems occur while building - utils.check_call_env(cmd, env=env, rewrite_stdout_env=rewrite_env, - cwd=src_dir, stats=build_stats) + utils.check_call_env( + cmd, + env=env, + rewrite_stdout_env=rewrite_env, + cwd=src_dir, + stats=build_stats, + ) utils.remove_pycache_from_scripts(m.config.host_prefix) if build_stats and not provision_only: log_stats(build_stats, f"building {m.name()}") if stats is not None: - stats[stats_key(m, 'build')] = build_stats + stats[stats_key(m, "build")] = build_stats - prefix_file_list = join(m.config.build_folder, 'prefix_files.txt') + prefix_file_list = join(m.config.build_folder, "prefix_files.txt") initial_files = set() if os.path.isfile(prefix_file_list): with open(prefix_file_list) as f: @@ -2208,13 +2687,15 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa new_pkgs = default_return if not provision_only and post in [True, None]: - outputs = output_metas or m.get_output_metadata_set(permit_unsatisfiable_variants=False) + outputs = output_metas or m.get_output_metadata_set( + permit_unsatisfiable_variants=False + ) get_all_replacements(outputs[0][1].config) top_level_meta = m # this is the old, default behavior: conda package, with difference between start # set of files and end set of files - prefix_file_list = join(m.config.build_folder, 'prefix_files.txt') + prefix_file_list = join(m.config.build_folder, "prefix_files.txt") if os.path.isfile(prefix_file_list): with open(prefix_file_list) as f: initial_files = set(f.read().splitlines()) @@ -2222,22 +2703,27 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa initial_files = set() # subdir needs to always be some real platform - so ignore noarch. 
- subdir = (m.config.host_subdir if m.config.host_subdir != 'noarch' else - m.config.subdir) + subdir = ( + m.config.host_subdir + if m.config.host_subdir != "noarch" + else m.config.subdir + ) with TemporaryDirectory() as prefix_files_backup: # back up new prefix files, because we wipe the prefix before each output build for f in new_prefix_files: - utils.copy_into(os.path.join(m.config.host_prefix, f), - os.path.join(prefix_files_backup, f), - symlinks=True) + utils.copy_into( + os.path.join(m.config.host_prefix, f), + os.path.join(prefix_files_backup, f), + symlinks=True, + ) # this is the inner loop, where we loop over any vars used only by # outputs (not those used by the top-level recipe). The metadata # objects here are created by the m.get_output_metadata_set, which # is distributing the matrix of used variables. - for (output_d, m) in outputs: + for output_d, m in outputs: get_all_replacements(m.config.variants) get_all_replacements(m.config.variant) if m.skip(): @@ -2246,44 +2732,60 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa # TODO: should we check both host and build envs? These are the same, except when # cross compiling - if m.config.skip_existing and is_package_built(m, 'host'): + if m.config.skip_existing and is_package_built(m, "host"): print(utils.get_skip_message(m)) new_pkgs[bldpkg_path(m)] = output_d, m continue - if (top_level_meta.name() == output_d.get('name') and not (output_d.get('files') or - output_d.get('script'))): - output_d['files'] = (utils.prefix_files(prefix=m.config.host_prefix) - - initial_files) + if top_level_meta.name() == output_d.get("name") and not ( + output_d.get("files") or output_d.get("script") + ): + output_d["files"] = ( + utils.prefix_files(prefix=m.config.host_prefix) - initial_files + ) # ensure that packaging scripts are copied over into the workdir - if 'script' in output_d: - utils.copy_into(os.path.join(m.path, output_d['script']), m.config.work_dir) + if "script" in output_d: + utils.copy_into( + os.path.join(m.path, output_d["script"]), m.config.work_dir + ) # same thing, for test scripts - test_script = output_d.get('test', {}).get('script') + test_script = output_d.get("test", {}).get("script") if test_script: if not os.path.isfile(os.path.join(m.path, test_script)): - raise ValueError("test script specified as {} does not exist. Please " - "check for typos or create the file and try again." - .format(test_script)) - utils.copy_into(os.path.join(m.path, test_script), - os.path.join(m.config.work_dir, test_script)) - - assert output_d.get('type') != 'conda' or m.final, ( - f"output metadata for {m.dist()} is not finalized") + raise ValueError( + "test script specified as {} does not exist. Please " + "check for typos or create the file and try again.".format( + test_script + ) + ) + utils.copy_into( + os.path.join(m.path, test_script), + os.path.join(m.config.work_dir, test_script), + ) + + assert ( + output_d.get("type") != "conda" or m.final + ), f"output metadata for {m.dist()} is not finalized" pkg_path = bldpkg_path(m) if pkg_path not in built_packages and pkg_path not in new_pkgs: log.info(f"Packaging {m.name()}") # for more than one output, we clear and rebuild the environment before each # package. We also do this for single outputs that present their own # build reqs. 
- if not (m.is_output or - (os.path.isdir(m.config.host_prefix) and - len(os.listdir(m.config.host_prefix)) <= 1)): + if not ( + m.is_output + or ( + os.path.isdir(m.config.host_prefix) + and len(os.listdir(m.config.host_prefix)) <= 1 + ) + ): # This log message contradicts both the not (m.is_output or ..) check above # and also the comment "For more than one output, ..." - log.debug('Not creating new env for output - already exists from top-level') + log.debug( + "Not creating new env for output - already exists from top-level" + ) else: m.config._merge_build_host = m.build_is_host @@ -2291,95 +2793,136 @@ def build(m, stats, post=None, need_source_download=True, need_reparse_in_env=Fa utils.rm_rf(m.config.build_prefix) utils.rm_rf(m.config.test_prefix) - host_ms_deps = m.ms_depends('host') - sub_build_ms_deps = m.ms_depends('build') + host_ms_deps = m.ms_depends("host") + sub_build_ms_deps = m.ms_depends("build") if m.is_cross and not m.build_is_host: - host_actions = environ.get_install_actions(m.config.host_prefix, - tuple(host_ms_deps), 'host', - subdir=m.config.host_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.host_prefix, host_actions, env='host', - config=m.config, subdir=subdir, is_cross=m.is_cross, - is_conda=m.name() == 'conda') + host_actions = environ.get_install_actions( + m.config.host_prefix, + tuple(host_ms_deps), + "host", + subdir=m.config.host_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.host_prefix, + host_actions, + env="host", + config=m.config, + subdir=subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) else: # When not cross-compiling, the build deps aggregate 'build' and 'host'. 
sub_build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions(m.config.build_prefix, - tuple(sub_build_ms_deps), 'build', - subdir=m.config.build_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - environ.create_env(m.config.build_prefix, build_actions, env='build', - config=m.config, subdir=m.config.build_subdir, - is_cross=m.is_cross, - is_conda=m.name() == 'conda') + build_actions = environ.get_install_actions( + m.config.build_prefix, + tuple(sub_build_ms_deps), + "build", + subdir=m.config.build_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + environ.create_env( + m.config.build_prefix, + build_actions, + env="build", + config=m.config, + subdir=m.config.build_subdir, + is_cross=m.is_cross, + is_conda=m.name() == "conda", + ) to_remove = set() - for f in output_d.get('files', []): - if f.startswith('conda-meta'): + for f in output_d.get("files", []): + if f.startswith("conda-meta"): to_remove.add(f) # This is wrong, files has not been expanded at this time and could contain # wildcards. Also well, I just do not understand this, because when this # does contain wildcards, the files in to_remove will slip back in. - if 'files' in output_d: - output_d['files'] = set(output_d['files']) - to_remove + if "files" in output_d: + output_d["files"] = set(output_d["files"]) - to_remove # copies the backed-up new prefix files into the newly created host env for f in new_prefix_files: - utils.copy_into(os.path.join(prefix_files_backup, f), - os.path.join(m.config.host_prefix, f), - symlinks=True) + utils.copy_into( + os.path.join(prefix_files_backup, f), + os.path.join(m.config.host_prefix, f), + symlinks=True, + ) # we must refresh the environment variables because our env for each package # can be different from the env for the top level build. with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) - pkg_type = 'conda' if not hasattr(m, 'type') else m.type + pkg_type = "conda" if not hasattr(m, "type") else m.type newly_built_packages = bundlers[pkg_type](output_d, m, env, stats) # warn about overlapping files. 
- if 'checksums' in output_d: - for file, csum in output_d['checksums'].items(): + if "checksums" in output_d: + for file, csum in output_d["checksums"].items(): for _, prev_om in new_pkgs.items(): prev_output_d, _ = prev_om - if file in prev_output_d.get('checksums', {}): - prev_csum = prev_output_d['checksums'][file] - nature = 'Exact' if csum == prev_csum else 'Inexact' - log.warning("{} overlap between {} in packages {} and {}" - .format(nature, file, output_d['name'], - prev_output_d['name'])) + if file in prev_output_d.get("checksums", {}): + prev_csum = prev_output_d["checksums"][file] + nature = "Exact" if csum == prev_csum else "Inexact" + log.warning( + "{} overlap between {} in packages {} and {}".format( + nature, + file, + output_d["name"], + prev_output_d["name"], + ) + ) for built_package in newly_built_packages: new_pkgs[built_package] = (output_d, m) # must rebuild index because conda has no way to incrementally add our last # package to the index. - subdir = ('noarch' if (m.noarch or m.noarch_python) - else m.config.host_subdir) + subdir = ( + "noarch" + if (m.noarch or m.noarch_python) + else m.config.host_subdir + ) if m.is_cross: - get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout, clear_cache=True) - get_build_index(subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout, clear_cache=True) + get_build_index( + subdir=subdir, + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + channel_urls=m.config.channel_urls, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + clear_cache=True, + ) + get_build_index( + subdir=subdir, + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + channel_urls=m.config.channel_urls, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + clear_cache=True, + ) else: if not provision_only: print("STOPPING BUILD BEFORE POST:", m.dist()) @@ -2393,48 +2936,60 @@ def guess_interpreter(script_filename): # Since the MSYS2 installation is probably a set of conda packages we do not # need to worry about system environmental pollution here. For that reason I # do not pass -l on other OSes. - extensions_to_run_commands = {'.sh': ['bash.exe', '-el'] if utils.on_win else ['bash', '-e'], - '.bat': [os.environ.get('COMSPEC', 'cmd.exe'), '/d', '/c'], - '.ps1': ['powershell', '-executionpolicy', 'bypass', '-File'], - '.py': ['python']} + extensions_to_run_commands = { + ".sh": ["bash.exe", "-el"] if utils.on_win else ["bash", "-e"], + ".bat": [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c"], + ".ps1": ["powershell", "-executionpolicy", "bypass", "-File"], + ".py": ["python"], + } file_ext = os.path.splitext(script_filename)[1] for ext, command in extensions_to_run_commands.items(): if file_ext.lower().startswith(ext): interpreter_command = command break else: - raise NotImplementedError("Don't know how to run {} file. Please specify " - "script_interpreter for {} output".format(file_ext, - script_filename)) + raise NotImplementedError( + "Don't know how to run {} file. 
Please specify " + "script_interpreter for {} output".format(file_ext, script_filename) + ) return interpreter_command def warn_on_use_of_SRC_DIR(metadata): - test_files = glob(os.path.join(metadata.path, 'run_test*')) + test_files = glob(os.path.join(metadata.path, "run_test*")) for f in test_files: with open(f) as _f: contents = _f.read() - if ("SRC_DIR" in contents and 'source_files' not in metadata.get_section('test') and - metadata.config.remove_work_dir): - raise ValueError("In conda-build 2.1+, the work dir is removed by default before the " - "test scripts run. You are using the SRC_DIR variable in your test " - "script, but these files have been deleted. Please see the " - " documentation regarding the test/source_files meta.yaml section, " - "or pass the --no-remove-work-dir flag.") + if ( + "SRC_DIR" in contents + and "source_files" not in metadata.get_section("test") + and metadata.config.remove_work_dir + ): + raise ValueError( + "In conda-build 2.1+, the work dir is removed by default before the " + "test scripts run. You are using the SRC_DIR variable in your test " + "script, but these files have been deleted. Please see the " + " documentation regarding the test/source_files meta.yaml section, " + "or pass the --no-remove-work-dir flag." + ) def _construct_metadata_for_test_from_recipe(recipe_dir, config): config.need_cleanup = False config.recipe_dir = None hash_input = {} - metadata = expand_outputs(render_recipe(recipe_dir, config=config, reset_build_id=False))[0][1] + metadata = expand_outputs( + render_recipe(recipe_dir, config=config, reset_build_id=False) + )[0][1] log = utils.get_logger(__name__) - log.warn("Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust " - "your code to pass your desired conda package to test instead.") + log.warn( + "Testing based on recipes is deprecated as of conda-build 3.16.0. Please adjust " + "your code to pass your desired conda package to test instead." + ) utils.rm_rf(metadata.config.test_dir) - if metadata.meta.get('test', {}).get('source_files'): + if metadata.meta.get("test", {}).get("source_files"): if not metadata.source_provided: try_download(metadata, no_download_source=False) @@ -2447,16 +3002,16 @@ def _construct_metadata_for_test_from_package(package, config): config.recipe_dir = recipe_dir hash_input = {} - info_dir = os.path.normpath(os.path.join(recipe_dir, 'info')) - with open(os.path.join(info_dir, 'index.json')) as f: + info_dir = os.path.normpath(os.path.join(recipe_dir, "info")) + with open(os.path.join(info_dir, "index.json")) as f: package_data = json.load(f) - if package_data['subdir'] != 'noarch': - config.host_subdir = package_data['subdir'] + if package_data["subdir"] != "noarch": + config.host_subdir = package_data["subdir"] # We may be testing an (old) package built without filename hashing. 
- hash_input = os.path.join(info_dir, 'hash_input.json') + hash_input = os.path.join(info_dir, "hash_input.json") if os.path.isfile(hash_input): - with open(os.path.join(info_dir, 'hash_input.json')) as f: + with open(os.path.join(info_dir, "hash_input.json")) as f: hash_input = json.load(f) else: config.filename_hashing = False @@ -2473,16 +3028,18 @@ def _construct_metadata_for_test_from_package(package, config): # get last part of the path last_element = os.path.basename(local_pkg_location) is_channel = False - for platform in ('win-', 'linux-', 'osx-', 'noarch'): + for platform in ("win-", "linux-", "osx-", "noarch"): if last_element.startswith(platform): is_channel = True if not is_channel: - log.warn("Copying package to conda-build croot. No packages otherwise alongside yours will" - " be available unless you specify -c local. To avoid this warning, your package " - "must reside in a channel structure with platform-subfolders. See more info on " - "what a valid channel is at " - "https://conda.io/docs/user-guide/tasks/create-custom-channels.html") + log.warn( + "Copying package to conda-build croot. No packages otherwise alongside yours will" + " be available unless you specify -c local. To avoid this warning, your package " + "must reside in a channel structure with platform-subfolders. See more info on " + "what a valid channel is at " + "https://conda.io/docs/user-guide/tasks/create-custom-channels.html" + ) local_dir = config.bldpkgs_dir try: @@ -2499,30 +3056,39 @@ def _construct_metadata_for_test_from_package(package, config): update_index(local_channel, verbose=config.debug, threads=1) try: - metadata = render_recipe(os.path.join(info_dir, 'recipe'), config=config, - reset_build_id=False)[0][0] + metadata = render_recipe( + os.path.join(info_dir, "recipe"), config=config, reset_build_id=False + )[0][0] # no recipe in package. Fudge metadata except (OSError, SystemExit): # force the build string to line up - recomputing it would # yield a different result - metadata = MetaData.fromdict({'package': {'name': package_data['name'], - 'version': package_data['version']}, - 'build': {'number': int(package_data['build_number']), - 'string': package_data['build']}, - 'requirements': {'run': package_data['depends']} - }, config=config) + metadata = MetaData.fromdict( + { + "package": { + "name": package_data["name"], + "version": package_data["version"], + }, + "build": { + "number": int(package_data["build_number"]), + "string": package_data["build"], + }, + "requirements": {"run": package_data["depends"]}, + }, + config=config, + ) # HACK: because the recipe is fully baked, detecting "used" variables no longer works. The set # of variables in the hash_input suffices, though. if metadata.noarch: - metadata.config.variant['target_platform'] = "noarch" + metadata.config.variant["target_platform"] = "noarch" metadata.config.used_vars = list(hash_input.keys()) urls = list(utils.ensure_list(metadata.config.channel_urls)) local_path = url_path(local_channel) # replace local with the appropriate real channel. Order is maintained. 
- urls = [url if url != 'local' else local_path for url in urls] + urls = [url if url != "local" else local_path for url in urls] if local_path not in urls: urls.insert(0, local_path) metadata.config.channel_urls = urls @@ -2531,10 +3097,14 @@ def _construct_metadata_for_test_from_package(package, config): def _extract_test_files_from_package(metadata): - recipe_dir = metadata.config.recipe_dir if hasattr(metadata.config, "recipe_dir") else metadata.path + recipe_dir = ( + metadata.config.recipe_dir + if hasattr(metadata.config, "recipe_dir") + else metadata.path + ) if recipe_dir: - info_dir = os.path.normpath(os.path.join(recipe_dir, 'info')) - test_files = os.path.join(info_dir, 'test') + info_dir = os.path.normpath(os.path.join(recipe_dir, "info")) + test_files = os.path.join(info_dir, "test") if os.path.exists(test_files) and os.path.isdir(test_files): # things are re-extracted into the test dir because that's cwd when tests are run, # and provides the most intuitive experience. This is a little @@ -2542,29 +3112,41 @@ def _extract_test_files_from_package(metadata): # work_dir, for legacy behavior where people aren't using # test/source_files. It would be better to change SRC_DIR in # test phase to always point to test_dir. Maybe one day. - utils.copy_into(test_files, metadata.config.test_dir, - metadata.config.timeout, symlinks=True, - locking=metadata.config.locking, clobber=True) - dependencies_file = os.path.join(test_files, 'test_time_dependencies.json') + utils.copy_into( + test_files, + metadata.config.test_dir, + metadata.config.timeout, + symlinks=True, + locking=metadata.config.locking, + clobber=True, + ) + dependencies_file = os.path.join(test_files, "test_time_dependencies.json") test_deps = [] if os.path.isfile(dependencies_file): with open(dependencies_file) as f: test_deps = json.load(f) - test_section = metadata.meta.get('test', {}) - test_section['requires'] = test_deps - metadata.meta['test'] = test_section + test_section = metadata.meta.get("test", {}) + test_section["requires"] = test_deps + metadata.meta["test"] = test_section else: - if metadata.meta.get('test', {}).get('source_files'): + if metadata.meta.get("test", {}).get("source_files"): if not metadata.source_provided: try_download(metadata, no_download_source=False) def construct_metadata_for_test(recipedir_or_package, config): - if os.path.isdir(recipedir_or_package) or os.path.basename(recipedir_or_package) == 'meta.yaml': - m, hash_input = _construct_metadata_for_test_from_recipe(recipedir_or_package, config) + if ( + os.path.isdir(recipedir_or_package) + or os.path.basename(recipedir_or_package) == "meta.yaml" + ): + m, hash_input = _construct_metadata_for_test_from_recipe( + recipedir_or_package, config + ) else: - m, hash_input = _construct_metadata_for_test_from_package(recipedir_or_package, config) + m, hash_input = _construct_metadata_for_test_from_package( + recipedir_or_package, config + ) return m, hash_input @@ -2583,7 +3165,7 @@ def write_build_scripts(m, script, build_file): # Note that pip env "NO" variables are inverted logic. # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. # - env["PIP_NO_BUILD_ISOLATION"] = 'False' + env["PIP_NO_BUILD_ISOLATION"] = "False" # some other env vars to have pip ignore dependencies. # we supply them ourselves instead. env["PIP_NO_DEPENDENCIES"] = True @@ -2593,7 +3175,7 @@ def write_build_scripts(m, script, build_file): # .dist-info directories being created, see gh-3094 # set PIP_CACHE_DIR to a path in the work dir that does not exist. 
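(Aside, not part of the patch: taken in isolation, the pip guards and the exported env file that write_build_scripts produces look roughly like the sketch below; the work dir and values are invented stand-ins, and the real code pulls them from the metadata/config objects.)

import os
import tempfile

work_dir = tempfile.mkdtemp()  # stand-in for m.config.work_dir
env = {
    "PIP_NO_BUILD_ISOLATION": "False",  # per the note above, "False" means: do not use build isolation
    "PIP_NO_DEPENDENCIES": "True",      # conda-build supplies dependencies itself
    "PIP_CACHE_DIR": os.path.join(work_dir, "pip_cache"),  # deliberately points at a non-existent dir
}
env_file = os.path.join(work_dir, "build_env_setup.sh")
with open(env_file, "w") as bf:
    for k, v in env.items():
        if v != "" and v is not None:
            bf.write(f'export {k}="{v}"\n')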
- env['PIP_CACHE_DIR'] = m.config.pip_cache_dir + env["PIP_CACHE_DIR"] = m.config.pip_cache_dir # tell pip to not get anything from PyPI, please. We have everything we need # locally, and if we don't, it's a problem. @@ -2606,18 +3188,18 @@ def write_build_scripts(m, script, build_file): if "replacements" in env: del env["replacements"] - work_file = join(m.config.work_dir, 'conda_build.sh') - env_file = join(m.config.work_dir, 'build_env_setup.sh') - with open(env_file, 'w') as bf: + work_file = join(m.config.work_dir, "conda_build.sh") + env_file = join(m.config.work_dir, "build_env_setup.sh") + with open(env_file, "w") as bf: for k, v in env.items(): - if v != '' and v is not None: + if v != "" and v is not None: bf.write(f'export {k}="{v}"\n') if m.activate_build_script: _write_sh_activation_text(bf, m) - with open(work_file, 'w') as bf: + with open(work_file, "w") as bf: # bf.write('set -ex\n') - bf.write('if [ -z ${CONDA_BUILD+x} ]; then\n') + bf.write("if [ -z ${CONDA_BUILD+x} ]; then\n") bf.write(f" source {env_file}\n") bf.write("fi\n") if script: @@ -2629,46 +3211,73 @@ def write_build_scripts(m, script, build_file): return work_file, env_file -def _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, - lua_files, r_files, shell_files, trace): +def _write_test_run_script( + metadata, + test_run_script, + test_env_script, + py_files, + pl_files, + lua_files, + r_files, + shell_files, + trace, +): log = utils.get_logger(__name__) - with open(test_run_script, 'w') as tf: - tf.write('{source} "{test_env_script}"\n'.format( - source="call" if utils.on_win else "source", - test_env_script=test_env_script)) + with open(test_run_script, "w") as tf: + tf.write( + '{source} "{test_env_script}"\n'.format( + source="call" if utils.on_win else "source", + test_env_script=test_env_script, + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") else: - tf.write(f'set {trace}-e\n') + tf.write(f"set {trace}-e\n") if py_files: test_python = metadata.config.test_python # use pythonw for import tests when osx_is_app is set - if metadata.get_value('build/osx_is_app') and sys.platform == 'darwin': - test_python = test_python + 'w' - tf.write('"{python}" -s "{test_file}"\n'.format( - python=test_python, - test_file=join(metadata.config.test_dir, 'run_test.py'))) + if metadata.get_value("build/osx_is_app") and sys.platform == "darwin": + test_python = test_python + "w" + tf.write( + '"{python}" -s "{test_file}"\n'.format( + python=test_python, + test_file=join(metadata.config.test_dir, "run_test.py"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if pl_files: - tf.write('"{perl}" "{test_file}"\n'.format( - perl=metadata.config.perl_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.pl'))) + tf.write( + '"{perl}" "{test_file}"\n'.format( + perl=metadata.config.perl_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, "run_test.pl"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if lua_files: - tf.write('"{lua}" "{test_file}"\n'.format( - lua=metadata.config.lua_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.lua'))) + tf.write( + '"{lua}" "{test_file}"\n'.format( + lua=metadata.config.lua_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, 
"run_test.lua"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if r_files: - tf.write('"{r}" "{test_file}"\n'.format( - r=metadata.config.rscript_bin(metadata.config.test_prefix, - metadata.config.host_platform), - test_file=join(metadata.config.test_dir, 'run_test.r'))) + tf.write( + '"{r}" "{test_file}"\n'.format( + r=metadata.config.rscript_bin( + metadata.config.test_prefix, metadata.config.host_platform + ), + test_file=join(metadata.config.test_dir, "run_test.r"), + ) + ) if utils.on_win: tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if shell_files: @@ -2678,20 +3287,30 @@ def _write_test_run_script(metadata, test_run_script, test_env_script, py_files, tf.write(f'call "{shell_file}"\n') tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") else: - log.warn("Found sh test file on windows. Ignoring this for now (PRs welcome)") + log.warn( + "Found sh test file on windows. Ignoring this for now (PRs welcome)" + ) elif os.path.splitext(shell_file)[1] == ".sh": # TODO: Run the test/commands here instead of in run_test.py - tf.write('"{shell_path}" {trace}-e "{test_file}"\n'.format(shell_path=shell_path, - test_file=shell_file, - trace=trace)) + tf.write( + '"{shell_path}" {trace}-e "{test_file}"\n'.format( + shell_path=shell_path, test_file=shell_file, trace=trace + ) + ) -def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace=""): - if not metadata.config.activate or metadata.name() == 'conda': +def write_test_scripts( + metadata, env_vars, py_files, pl_files, lua_files, r_files, shell_files, trace="" +): + if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory - env_vars = utils.prepend_bin_path(env_vars, metadata.config.test_prefix, prepend_prefix=True) + env_vars = utils.prepend_bin_path( + env_vars, metadata.config.test_prefix, prepend_prefix=True + ) if utils.on_win: - env_vars['PATH'] = metadata.config.test_prefix + os.pathsep + env_vars['PATH'] + env_vars["PATH"] = ( + metadata.config.test_prefix + os.pathsep + env_vars["PATH"] + ) # set variables like CONDA_PY in the test environment env_vars.update(set_language_env_vars(metadata.config.variant)) @@ -2699,32 +3318,36 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file # Python 2 Windows requires that envs variables be string, not unicode env_vars = {str(key): str(value) for key, value in env_vars.items()} suffix = "bat" if utils.on_win else "sh" - test_env_script = join(metadata.config.test_dir, - f"conda_test_env_vars.{suffix}") - test_run_script = join(metadata.config.test_dir, - f"conda_test_runner.{suffix}") + test_env_script = join(metadata.config.test_dir, f"conda_test_env_vars.{suffix}") + test_run_script = join(metadata.config.test_dir, f"conda_test_runner.{suffix}") - with open(test_env_script, 'w') as tf: + with open(test_env_script, "w") as tf: if not utils.on_win: - tf.write(f'set {trace}-e\n') - if metadata.config.activate and not metadata.name() == 'conda': + tf.write(f"set {trace}-e\n") + if metadata.config.activate and not metadata.name() == "conda": if utils.on_win: tf.write( 'set "CONDA_SHLVL=" ' - '&& @CALL {}\\condabin\\conda_hook.bat {}' - '&& set CONDA_EXE={python_exe}' - '&& set CONDA_PYTHON_EXE={python_exe}' - '&& set _CE_I={}' - '&& set _CE_M=-m' - '&& set _CE_CONDA=conda\n'.format( + "&& @CALL {}\\condabin\\conda_hook.bat {}" + "&& set CONDA_EXE={python_exe}" + "&& set CONDA_PYTHON_EXE={python_exe}" + "&& set _CE_I={}" + "&& set _CE_M=-m" + "&& set 
_CE_CONDA=conda\n".format( sys.prefix, - '--dev' if metadata.config.debug else '', - "-i" if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else "", - python_exe=sys.executable + "--dev" if metadata.config.debug else "", + "-i" + if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") + else "", + python_exe=sys.executable, ) ) else: - py_flags = '-I -m' if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") else '-m' + py_flags = ( + "-I -m" + if os.environ.get("_CONDA_BUILD_ISOLATED_ACTIVATION") + else "-m" + ) tf.write( f"""eval "$('{sys.executable}' {py_flags} conda shell.bash hook)"\n""" ) @@ -2733,41 +3356,59 @@ def write_test_scripts(metadata, env_vars, py_files, pl_files, lua_files, r_file tf.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") # In-case people source this, it's essential errors are not fatal in an interactive shell. if not utils.on_win: - tf.write('set +e\n') - - _write_test_run_script(metadata, test_run_script, test_env_script, py_files, pl_files, - lua_files, r_files, shell_files, trace) + tf.write("set +e\n") + + _write_test_run_script( + metadata, + test_run_script, + test_env_script, + py_files, + pl_files, + lua_files, + r_files, + shell_files, + trace, + ) return test_run_script, test_env_script -def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, provision_only=False): - ''' +def test( + recipedir_or_package_or_metadata, + config, + stats, + move_broken=True, + provision_only=False, +): + """ Execute any test scripts for the given package. :param m: Package's metadata. :type m: Metadata - ''' + """ log = utils.get_logger(__name__) # we want to know if we're dealing with package input. If so, we can move the input on success. hash_input = {} # store this name to keep it consistent. By changing files, we change the hash later. # It matches the build hash now, so let's keep it around. - test_package_name = (recipedir_or_package_or_metadata.dist() - if hasattr(recipedir_or_package_or_metadata, 'dist') - else recipedir_or_package_or_metadata) + test_package_name = ( + recipedir_or_package_or_metadata.dist() + if hasattr(recipedir_or_package_or_metadata, "dist") + else recipedir_or_package_or_metadata + ) if not provision_only: print("TEST START:", test_package_name) - if hasattr(recipedir_or_package_or_metadata, 'config'): + if hasattr(recipedir_or_package_or_metadata, "config"): metadata = recipedir_or_package_or_metadata utils.rm_rf(metadata.config.test_dir) else: - metadata, hash_input = construct_metadata_for_test(recipedir_or_package_or_metadata, - config) + metadata, hash_input = construct_metadata_for_test( + recipedir_or_package_or_metadata, config + ) - trace = '-x ' if metadata.config.debug else '' + trace = "-x " if metadata.config.debug else "" # Must download *after* computing build id, or else computing build id will change # folder destination @@ -2776,43 +3417,62 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it. # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed # I think we can remove this call to clean_pkg_cache(). 
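(Aside, not part of the patch: for a recipe whose only test is run_test.py, the Linux wrapper emitted by _write_test_run_script above amounts to this sketch; the interpreter path is invented, and the real code takes it from config.test_python.)

import os
import tempfile

test_dir = tempfile.mkdtemp()  # stand-in for metadata.config.test_dir
test_python = "/opt/envs/_test/bin/python"  # hypothetical; pythonw is used on macOS when build/osx_is_app is set
test_env_script = os.path.join(test_dir, "conda_test_env_vars.sh")
test_run_script = os.path.join(test_dir, "conda_test_runner.sh")
run_test = os.path.join(test_dir, "run_test.py")

with open(test_run_script, "w") as tf:
    tf.write(f'source "{test_env_script}"\n')
    tf.write("set -e\n")  # becomes "set -x -e" when --debug supplies the trace flag
    tf.write(f'"{test_python}" -s "{run_test}"\n')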
- in_pkg_cache = (not hasattr(recipedir_or_package_or_metadata, 'config') and - os.path.isfile(recipedir_or_package_or_metadata) and - recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) and - os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0]) + in_pkg_cache = ( + not hasattr(recipedir_or_package_or_metadata, "config") + and os.path.isfile(recipedir_or_package_or_metadata) + and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) + and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0] + ) if not in_pkg_cache: environ.clean_pkg_cache(metadata.dist(), metadata.config) copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir - _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(metadata) - if not any([py_files, shell_files, pl_files, lua_files, r_files]) and not metadata.config.test_run_post: + _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files( + metadata + ) + if ( + not any([py_files, shell_files, pl_files, lua_files, r_files]) + and not metadata.config.test_run_post + ): print("Nothing to test for:", test_package_name) return True if metadata.config.remove_work_dir: - for name, prefix in (('host', metadata.config.host_prefix), - ('build', metadata.config.build_prefix)): + for name, prefix in ( + ("host", metadata.config.host_prefix), + ("build", metadata.config.build_prefix), + ): if os.path.isdir(prefix): # move host folder to force hardcoded paths to host env to break during tests # (so that they can be properly addressed by recipe author) - dest = os.path.join(os.path.dirname(prefix), - '_'.join(('%s_prefix_moved' % name, metadata.dist(), - getattr(metadata.config, '%s_subdir' % name)))) + dest = os.path.join( + os.path.dirname(prefix), + "_".join( + ( + "%s_prefix_moved" % name, + metadata.dist(), + getattr(metadata.config, "%s_subdir" % name), + ) + ), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(prefix, dest, f"{prefix} prefix") # nested if so that there's no warning when we just leave the empty workdir in place if metadata.source_provided: - dest = os.path.join(os.path.dirname(metadata.config.work_dir), - '_'.join(('work_moved', metadata.dist(), - metadata.config.host_subdir))) + dest = os.path.join( + os.path.dirname(metadata.config.work_dir), + "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(config.work_dir, dest, "work") else: - log.warn("Not moving work directory after build. Your package may depend on files " - "in the work directory that are not included with your package") + log.warn( + "Not moving work directory after build. 
Your package may depend on files " + "in the work directory that are not included with your package" + ) get_build_metadata(metadata) @@ -2826,55 +3486,82 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov if env_path_backup_var_exists: env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] - if not metadata.config.activate or metadata.name() == 'conda': + if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory - env = utils.prepend_bin_path(env, metadata.config.test_prefix, prepend_prefix=True) + env = utils.prepend_bin_path( + env, metadata.config.test_prefix, prepend_prefix=True + ) if utils.on_win: - env['PATH'] = metadata.config.test_prefix + os.pathsep + env['PATH'] + env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"] - env['PREFIX'] = metadata.config.test_prefix - if 'BUILD_PREFIX' in env: - del env['BUILD_PREFIX'] + env["PREFIX"] = metadata.config.test_prefix + if "BUILD_PREFIX" in env: + del env["BUILD_PREFIX"] # In the future, we will need to support testing cross compiled # packages on physical hardware. until then it is expected that # something like QEMU or Wine will be used on the build machine, # therefore, for now, we use host_subdir. - subdir = ('noarch' if (metadata.noarch or metadata.noarch_python) - else metadata.config.host_subdir) + subdir = ( + "noarch" + if (metadata.noarch or metadata.noarch_python) + else metadata.config.host_subdir + ) # ensure that the test prefix isn't kept between variants utils.rm_rf(metadata.config.test_prefix) try: - actions = environ.get_install_actions(metadata.config.test_prefix, - tuple(specs), 'host', - subdir=subdir, - debug=metadata.config.debug, - verbose=metadata.config.verbose, - locking=metadata.config.locking, - bldpkgs_dirs=tuple(metadata.config.bldpkgs_dirs), - timeout=metadata.config.timeout, - disable_pip=metadata.config.disable_pip, - max_env_retry=metadata.config.max_env_retry, - output_folder=metadata.config.output_folder, - channel_urls=tuple(metadata.config.channel_urls)) - except (DependencyNeedsBuildingError, NoPackagesFoundError, UnsatisfiableError, - CondaError, AssertionError) as exc: - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir, - config=metadata.config) + actions = environ.get_install_actions( + metadata.config.test_prefix, + tuple(specs), + "host", + subdir=subdir, + debug=metadata.config.debug, + verbose=metadata.config.verbose, + locking=metadata.config.locking, + bldpkgs_dirs=tuple(metadata.config.bldpkgs_dirs), + timeout=metadata.config.timeout, + disable_pip=metadata.config.disable_pip, + max_env_retry=metadata.config.max_env_retry, + output_folder=metadata.config.output_folder, + channel_urls=tuple(metadata.config.channel_urls), + ) + except ( + DependencyNeedsBuildingError, + NoPackagesFoundError, + UnsatisfiableError, + CondaError, + AssertionError, + ) as exc: + log.warn( + "failed to get install actions, retrying. exception was: %s", str(exc) + ) + tests_failed( + metadata, + move_broken=move_broken, + broken_dir=metadata.config.broken_dir, + config=metadata.config, + ) raise # upgrade the warning from silently clobbering to warning. If it is preventing, just # keep it that way. 
- conflict_verbosity = ('warn' if str(context.path_conflict) == 'clobber' else - str(context.path_conflict)) - with env_var('CONDA_PATH_CONFLICT', conflict_verbosity, reset_context): - environ.create_env(metadata.config.test_prefix, actions, config=metadata.config, - env='host', subdir=subdir, is_cross=metadata.is_cross, - is_conda=metadata.name() == 'conda') + conflict_verbosity = ( + "warn" + if str(context.path_conflict) == "clobber" + else str(context.path_conflict) + ) + with env_var("CONDA_PATH_CONFLICT", conflict_verbosity, reset_context): + environ.create_env( + metadata.config.test_prefix, + actions, + config=metadata.config, + env="host", + subdir=subdir, + is_cross=metadata.is_cross, + is_conda=metadata.name() == "conda", + ) with utils.path_prepended(metadata.config.test_prefix): env = dict(os.environ.copy()) @@ -2885,8 +3572,9 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov if config.test_run_post: from conda_build.utils import get_installed_packages + installed = get_installed_packages(metadata.config.test_prefix) - files = installed[metadata.meta['package']['name']]['files'] + files = installed[metadata.meta["package"]["name"]]["files"] replacements = get_all_replacements(metadata.config) try_download(metadata, False, True) create_info_files(metadata, replacements, files, metadata.config.test_prefix) @@ -2897,14 +3585,20 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov # intuitive relative path behavior, though, not work_dir, so we need to adjust where # SRC_DIR points. The initial CWD during tests is test_dir. if metadata.config.remove_work_dir: - env['SRC_DIR'] = metadata.config.test_dir + env["SRC_DIR"] = metadata.config.test_dir - test_script, _ = write_test_scripts(metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace) + test_script, _ = write_test_scripts( + metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace + ) if utils.on_win: - cmd = [os.environ.get('COMSPEC', 'cmd.exe'), "/d", "/c", test_script] + cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script] else: - cmd = [shell_path] + (['-x'] if metadata.config.debug else []) + ['-o', 'errexit', test_script] + cmd = ( + [shell_path] + + (["-x"] if metadata.config.debug else []) + + ["-o", "errexit", test_script] + ) try: test_stats = {} if not provision_only: @@ -2912,24 +3606,36 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov if metadata.config.debug or metadata.config.no_rewrite_stdout_env: rewrite_env = None else: - rewrite_env = { - k: env[k] - for k in ['PREFIX', 'SRC_DIR'] if k in env - } + rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env} if metadata.config.verbose: for k, v in rewrite_env.items(): - print('{} {}={}' - .format('set' if test_script.endswith('.bat') else 'export', k, v)) - utils.check_call_env(cmd, env=env, cwd=metadata.config.test_dir, stats=test_stats, rewrite_stdout_env=rewrite_env) + print( + "{} {}={}".format( + "set" if test_script.endswith(".bat") else "export", + k, + v, + ) + ) + utils.check_call_env( + cmd, + env=env, + cwd=metadata.config.test_dir, + stats=test_stats, + rewrite_stdout_env=rewrite_env, + ) log_stats(test_stats, f"testing {metadata.name()}") if stats is not None and metadata.config.variants: - stats[stats_key(metadata, f'test_{metadata.name()}')] = test_stats - if os.path.exists(join(metadata.config.test_dir, 'TEST_FAILED')): - raise subprocess.CalledProcessError(-1, '') + 
stats[stats_key(metadata, f"test_{metadata.name()}")] = test_stats + if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")): + raise subprocess.CalledProcessError(-1, "") print("TEST END:", test_package_name) except subprocess.CalledProcessError as _: # noqa - tests_failed(metadata, move_broken=move_broken, broken_dir=metadata.config.broken_dir, - config=metadata.config) + tests_failed( + metadata, + move_broken=move_broken, + broken_dir=metadata.config.broken_dir, + config=metadata.config, + ) raise if config.need_cleanup and config.recipe_dir is not None and not provision_only: @@ -2939,16 +3645,16 @@ def test(recipedir_or_package_or_metadata, config, stats, move_broken=True, prov def tests_failed(package_or_metadata, move_broken, broken_dir, config): - ''' + """ Causes conda to exit if any of the given package's tests failed. :param m: Package's metadata :type m: Metadata - ''' + """ if not isdir(broken_dir): os.makedirs(broken_dir) - if hasattr(package_or_metadata, 'config'): + if hasattr(package_or_metadata, "config"): pkg = bldpkg_path(package_or_metadata) else: pkg = package_or_metadata @@ -2958,17 +3664,22 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): log = utils.get_logger(__name__) try: shutil.move(pkg, dest) - log.warn('Tests failed for {} - moving package to {}'.format(os.path.basename(pkg), - broken_dir)) + log.warn( + "Tests failed for {} - moving package to {}".format( + os.path.basename(pkg), broken_dir + ) + ) except OSError: pass - update_index(os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1) + update_index( + os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1 + ) sys.exit("TESTS FAILED: " + os.path.basename(pkg)) def check_external(): - if sys.platform.startswith('linux'): - patchelf = external.find_executable('patchelf') + if sys.platform.startswith("linux"): + patchelf = external.find_executable("patchelf") if patchelf is None: sys.exit( "Error:\n" @@ -2981,13 +3692,14 @@ def check_external(): ) -def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None): - +def build_tree( + recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None +): to_build_recursive = [] recipe_list = deque(recipe_list) if utils.on_win: - trash_dir = os.path.join(os.path.dirname(sys.executable), 'pkgs', '.trash') + trash_dir = os.path.join(os.path.dirname(sys.executable), "pkgs", ".trash") if os.path.isdir(trash_dir): # We don't really care if this does a complete job. # Cleaning up some files is better than none. 
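(Aside, not part of the patch: the patchelf probe in check_external above can be reproduced with the stdlib alone; conda_build.external.find_executable is what the real code uses.)

import shutil
import sys

if sys.platform.startswith("linux") and shutil.which("patchelf") is None:
    sys.exit("Error: patchelf is necessary for conda-build to work on Linux.")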
@@ -3017,11 +3729,13 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # This loop recursively builds dependencies if recipes exist try: recipe = recipe_list.popleft() - name = recipe.name() if hasattr(recipe, 'name') else recipe - if hasattr(recipe, 'config'): + name = recipe.name() if hasattr(recipe, "name") else recipe + if hasattr(recipe, "config"): metadata = recipe cfg = metadata.config - cfg.anaconda_upload = config.anaconda_upload # copy over anaconda_upload setting + cfg.anaconda_upload = ( + config.anaconda_upload + ) # copy over anaconda_upload setting # this code is duplicated below because we need to be sure that the build id is set # before downloading happens - or else we lose where downloads are @@ -3031,15 +3745,19 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F to_build_recursive.append(metadata.name()) if not metadata.final: - variants_ = (dict_of_lists_to_list_of_dicts(variants) if variants else - get_package_variants(metadata)) + variants_ = ( + dict_of_lists_to_list_of_dicts(variants) + if variants + else get_package_variants(metadata) + ) # This is where reparsing happens - we need to re-evaluate the meta.yaml for any # jinja2 templating - metadata_tuples = distribute_variants(metadata, variants_, - permit_unsatisfiable_variants=False) + metadata_tuples = distribute_variants( + metadata, variants_, permit_unsatisfiable_variants=False + ) else: - metadata_tuples = ((metadata, False, False), ) + metadata_tuples = ((metadata, False, False),) else: cfg = config @@ -3050,10 +3768,14 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # each tuple is: # metadata, need_source_download, need_reparse_in_env = # We get one tuple per variant - metadata_tuples = render_recipe(recipe, config=cfg, variants=variants, - permit_unsatisfiable_variants=False, - reset_build_id=not cfg.dirty, - bypass_env_check=True) + metadata_tuples = render_recipe( + recipe, + config=cfg, + variants=variants, + permit_unsatisfiable_variants=False, + reset_build_id=not cfg.dirty, + bypass_env_check=True, + ) # restrict to building only one variant for bdist_conda. The way it splits the build # job breaks variants horribly. if post in (True, False): @@ -3062,82 +3784,132 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # This is the "TOP LEVEL" loop. Only vars used in the top-level # recipe are looped over here. 
- for (metadata, need_source_download, need_reparse_in_env) in metadata_tuples: + for metadata, need_source_download, need_reparse_in_env in metadata_tuples: get_all_replacements(metadata.config.variant) if post is None: utils.rm_rf(metadata.config.host_prefix) utils.rm_rf(metadata.config.build_prefix) utils.rm_rf(metadata.config.test_prefix) if metadata.name() not in metadata.config.build_folder: - metadata.config.compute_build_id(metadata.name(), metadata.version(), reset=True) - - packages_from_this = build(metadata, stats, - post=post, - need_source_download=need_source_download, - need_reparse_in_env=need_reparse_in_env, - built_packages=built_packages, - notest=notest, - ) + metadata.config.compute_build_id( + metadata.name(), metadata.version(), reset=True + ) + + packages_from_this = build( + metadata, + stats, + post=post, + need_source_download=need_source_download, + need_reparse_in_env=need_reparse_in_env, + built_packages=built_packages, + notest=notest, + ) if not notest: for pkg, dict_and_meta in packages_from_this.items(): - if pkg.endswith(CONDA_PACKAGE_EXTENSIONS) and os.path.isfile(pkg): + if pkg.endswith(CONDA_PACKAGE_EXTENSIONS) and os.path.isfile( + pkg + ): # we only know how to test conda packages test(pkg, config=metadata.config.copy(), stats=stats) _, meta = dict_and_meta - downstreams = meta.meta.get('test', {}).get('downstreams') + downstreams = meta.meta.get("test", {}).get("downstreams") if downstreams: - channel_urls = tuple(utils.ensure_list(metadata.config.channel_urls) + - [utils.path2url(os.path.abspath(os.path.dirname( - os.path.dirname(pkg))))]) + channel_urls = tuple( + utils.ensure_list(metadata.config.channel_urls) + + [ + utils.path2url( + os.path.abspath( + os.path.dirname(os.path.dirname(pkg)) + ) + ) + ] + ) log = utils.get_logger(__name__) # downstreams can be a dict, for adding capability for worker labels - if hasattr(downstreams, 'keys'): + if hasattr(downstreams, "keys"): downstreams = list(downstreams.keys()) - log.warn("Dictionary keys for downstreams are being " - "ignored right now. Coming soon...") + log.warn( + "Dictionary keys for downstreams are being " + "ignored right now. Coming soon..." + ) else: downstreams = utils.ensure_list(downstreams) for dep in downstreams: log.info(f"Testing downstream package: {dep}") # resolve downstream packages to a known package - r_string = ''.join(random.choice( - string.ascii_uppercase + string.digits) for _ in range(10)) - specs = meta.ms_depends('run') + [MatchSpec(dep), - MatchSpec(' '.join(meta.dist().rsplit('-', 2)))] - specs = [utils.ensure_valid_spec(spec) for spec in specs] + r_string = "".join( + random.choice( + string.ascii_uppercase + string.digits + ) + for _ in range(10) + ) + specs = meta.ms_depends("run") + [ + MatchSpec(dep), + MatchSpec(" ".join(meta.dist().rsplit("-", 2))), + ] + specs = [ + utils.ensure_valid_spec(spec) for spec in specs + ] try: - with TemporaryDirectory(prefix="_", suffix=r_string) as tmpdir: + with TemporaryDirectory( + prefix="_", suffix=r_string + ) as tmpdir: actions = environ.get_install_actions( - tmpdir, specs, env='run', + tmpdir, + specs, + env="run", subdir=meta.config.host_subdir, bldpkgs_dirs=meta.config.bldpkgs_dirs, - channel_urls=channel_urls) - except (UnsatisfiableError, DependencyNeedsBuildingError) as e: - log.warn("Skipping downstream test for spec {}; was " - "unsatisfiable. 
Error was {}".format(dep, e)) + channel_urls=channel_urls, + ) + except ( + UnsatisfiableError, + DependencyNeedsBuildingError, + ) as e: + log.warn( + "Skipping downstream test for spec {}; was " + "unsatisfiable. Error was {}".format(dep, e) + ) continue # make sure to download that package to the local cache if not there - local_file = execute_download_actions(meta, actions, 'host', - package_subset=dep, - require_files=True) + local_file = execute_download_actions( + meta, + actions, + "host", + package_subset=dep, + require_files=True, + ) # test that package, using the local channel so that our new # upstream dep gets used - test(list(local_file.values())[0][0], - config=meta.config.copy(), stats=stats) + test( + list(local_file.values())[0][0], + config=meta.config.copy(), + stats=stats, + ) built_packages.update({pkg: dict_and_meta}) else: built_packages.update(packages_from_this) - if (os.path.exists(metadata.config.work_dir) and not - (metadata.config.dirty or metadata.config.keep_old_work or - metadata.get_value('build/no_move_top_level_workdir_loops'))): + if os.path.exists(metadata.config.work_dir) and not ( + metadata.config.dirty + or metadata.config.keep_old_work + or metadata.get_value("build/no_move_top_level_workdir_loops") + ): # force the build string to include hashes as necessary metadata.final = True - dest = os.path.join(os.path.dirname(metadata.config.work_dir), - '_'.join(('work_moved', metadata.dist(), - metadata.config.host_subdir, "main_build_loop"))) + dest = os.path.join( + os.path.dirname(metadata.config.work_dir), + "_".join( + ( + "work_moved", + metadata.dist(), + metadata.config.host_subdir, + "main_build_loop", + ) + ), + ) # Needs to come after create_files in case there's test/source_files shutil_move_more_retrying(metadata.config.work_dir, dest, "work") @@ -3150,23 +3922,33 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F # os.unlink(os.path.join(metadata.config.work_dir, 'metadata_conda_debug.yaml')) except DependencyNeedsBuildingError as e: - skip_names = ['python', 'r', 'r-base', 'mro-base', 'perl', 'lua'] + skip_names = ["python", "r", "r-base", "mro-base", "perl", "lua"] built_package_paths = [entry[1][1].path for entry in built_packages.items()] add_recipes = [] # add the failed one back in at the beginning - but its deps may come before it recipe_list.extendleft([recipe]) for pkg, matchspec in zip(e.packages, e.matchspecs): - pkg_name = pkg.split(' ')[0].split('=')[0] + pkg_name = pkg.split(" ")[0].split("=")[0] # if we hit missing dependencies at test time, the error we get says that our # package that we just built needs to be built. Very confusing. Bomb out # if any of our output metadatas are in the exception list of pkgs. 
- if metadata and any(pkg_name == output_meta.name() for (_, output_meta) in - metadata.get_output_metadata_set(permit_undefined_jinja=True)): + if metadata and any( + pkg_name == output_meta.name() + for (_, output_meta) in metadata.get_output_metadata_set( + permit_undefined_jinja=True + ) + ): raise if pkg in to_build_recursive: cfg.clean(remove_folders=False) - raise RuntimeError("Can't build {} due to environment creation error:\n" - .format(recipe) + str(e.message) + "\n" + extra_help) + raise RuntimeError( + "Can't build {} due to environment creation error:\n".format( + recipe + ) + + str(e.message) + + "\n" + + extra_help + ) if pkg in skip_names: to_build_recursive.append(pkg) @@ -3181,22 +3963,34 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F recipe_glob = glob(os.path.join(recipe_parent_dir, pkg_name)) # conda-forge style. meta.yaml lives one level deeper. if not recipe_glob: - recipe_glob = glob(os.path.join(recipe_parent_dir, '..', pkg_name)) - feedstock_glob = glob(os.path.join(recipe_parent_dir, pkg_name + '-feedstock')) + recipe_glob = glob(os.path.join(recipe_parent_dir, "..", pkg_name)) + feedstock_glob = glob( + os.path.join(recipe_parent_dir, pkg_name + "-feedstock") + ) if not feedstock_glob: - feedstock_glob = glob(os.path.join(recipe_parent_dir, '..', - pkg_name + '-feedstock')) + feedstock_glob = glob( + os.path.join(recipe_parent_dir, "..", pkg_name + "-feedstock") + ) available = False if recipe_glob or feedstock_glob: for recipe_dir in recipe_glob + feedstock_glob: - if not any(path.startswith(recipe_dir) for path in built_package_paths): - dep_metas = render_recipe(recipe_dir, config=metadata.config) + if not any( + path.startswith(recipe_dir) for path in built_package_paths + ): + dep_metas = render_recipe( + recipe_dir, config=metadata.config + ) for dep_meta in dep_metas: - if utils.match_peer_job(MatchSpec(matchspec), dep_meta[0], - metadata): - print(("Missing dependency {0}, but found" + - " recipe directory, so building " + - "{0} first").format(pkg)) + if utils.match_peer_job( + MatchSpec(matchspec), dep_meta[0], metadata + ): + print( + ( + "Missing dependency {0}, but found" + + " recipe directory, so building " + + "{0} first" + ).format(pkg) + ) add_recipes.append(recipe_dir) available = True if not available: @@ -3204,33 +3998,43 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F raise # if we failed to render due to unsatisfiable dependencies, we should only bail out # if we've already retried this recipe. - if (not metadata and retried_recipes.count(recipe) and - retried_recipes.count(recipe) >= len(metadata.ms_depends('build'))): + if ( + not metadata + and retried_recipes.count(recipe) + and retried_recipes.count(recipe) >= len(metadata.ms_depends("build")) + ): cfg.clean(remove_folders=False) - raise RuntimeError("Can't build {} due to environment creation error:\n" - .format(recipe) + str(e.message) + "\n" + extra_help) + raise RuntimeError( + f"Can't build {recipe} due to environment creation error:\n" + + str(e.message) + + "\n" + + extra_help + ) retried_recipes.append(os.path.basename(name)) recipe_list.extendleft(add_recipes) tarballs = [f for f in built_packages if f.endswith(CONDA_PACKAGE_EXTENSIONS)] if post in [True, None]: # TODO: could probably use a better check for pkg type than this... 
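(Aside, not part of the patch: the neighbouring-recipe search a few lines up, which looks for a plain recipe directory and then a conda-forge style *-feedstock checkout, reduces to the globs below. The inputs are invented; in the real code each fallback glob is only tried when the previous one came up empty, and glob is imported from glob2, which behaves the same for these patterns.)

import os
from glob import glob

recipe_parent_dir = "/src/recipes/mypkg"  # hypothetical parent of the failing recipe
pkg_name = "mydep"                        # hypothetical missing dependency

candidates = (
    glob(os.path.join(recipe_parent_dir, pkg_name))
    + glob(os.path.join(recipe_parent_dir, "..", pkg_name))                  # fall back one directory up
    + glob(os.path.join(recipe_parent_dir, pkg_name + "-feedstock"))
    + glob(os.path.join(recipe_parent_dir, "..", pkg_name + "-feedstock"))
)
print(candidates)  # any hit is rendered and, if it matches the missing spec, built first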
- wheels = [f for f in built_packages if f.endswith('.whl')] + wheels = [f for f in built_packages if f.endswith(".whl")] handle_anaconda_upload(tarballs, config=config) handle_pypi_upload(wheels, config=config) # Print the variant information for each package because it is very opaque and never printed. from conda_build.inspect_pkg import get_hash_input + hash_inputs = get_hash_input(tarballs) - print("\nINFO :: The inputs making up the hashes for the built packages are as follows:") + print( + "\nINFO :: The inputs making up the hashes for the built packages are as follows:" + ) print(json.dumps(hash_inputs, sort_keys=True, indent=2)) print("\n") total_time = time.time() - initial_time - max_memory_used = max([step.get('rss') for step in stats.values()] or [0]) - total_disk = sum([step.get('disk') for step in stats.values()] or [0]) - total_cpu_sys = sum([step.get('cpu_sys') for step in stats.values()] or [0]) - total_cpu_user = sum([step.get('cpu_user') for step in stats.values()] or [0]) + max_memory_used = max([step.get("rss") for step in stats.values()] or [0]) + total_disk = sum([step.get("disk") for step in stats.values()] or [0]) + total_cpu_sys = sum([step.get("cpu_sys") for step in stats.values()] or [0]) + total_cpu_user = sum([step.get("cpu_user") for step in stats.values()] or [0]) print( "{bar}\n" @@ -3249,14 +4053,14 @@ def build_tree(recipe_list, config, stats, build_only=False, post=None, notest=F ) ) - stats['total'] = { - 'time': total_time, - 'memory': max_memory_used, - 'disk': total_disk, + stats["total"] = { + "time": total_time, + "memory": max_memory_used, + "disk": total_disk, } if config.stats_file: - with open(config.stats_file, 'w') as f: + with open(config.stats_file, "w") as f: json.dump(stats, f) return list(built_packages.keys()) @@ -3278,19 +4082,18 @@ def handle_anaconda_upload(paths, config): else: upload = True - anaconda = find_executable('anaconda') + anaconda = find_executable("anaconda") - no_upload_message = '' - if not utils.on_win or 'MSYSTEM' in os.environ: + no_upload_message = "" + if not utils.on_win or "MSYSTEM" in os.environ: joiner = " \\\n " - prompter = '' + prompter = "" else: joiner = " ^\n " - prompter = '$ ' + prompter = "$ " if not upload or anaconda is None: no_upload_message = ( - "# If you want to upload package(s) to anaconda.org later, type:\n" - "\n" + "# If you want to upload package(s) to anaconda.org later, type:\n" "\n" ) no_upload_message += ( "\n" @@ -3310,17 +4113,19 @@ def handle_anaconda_upload(paths, config): "# Try:\n" "# {}conda install anaconda-client".format(prompter) ) - cmd = [anaconda, ] + cmd = [ + anaconda, + ] if config.token: - cmd.extend(['--token', config.token]) - cmd.append('upload') + cmd.extend(["--token", config.token]) + cmd.append("upload") if config.force_upload: - cmd.append('--force') + cmd.append("--force") if config.user: - cmd.extend(['--user', config.user]) + cmd.extend(["--user", config.user]) for label in config.labels: - cmd.extend(['--label', label]) + cmd.extend(["--label", label]) for package in paths: try: print(f"Uploading {os.path.basename(package)} to anaconda.org") @@ -3331,19 +4136,26 @@ def handle_anaconda_upload(paths, config): def handle_pypi_upload(wheels, config): - args = ['twine', 'upload', '--sign-with', config.sign_with, '--repository', config.repository] + args = [ + "twine", + "upload", + "--sign-with", + config.sign_with, + "--repository", + config.repository, + ] if config.user: - args.extend(['--user', config.user]) + args.extend(["--user", config.user]) if 
config.password: - args.extend(['--password', config.password]) + args.extend(["--password", config.password]) if config.sign: - args.extend(['--sign']) + args.extend(["--sign"]) if config.identity: - args.extend(['--identity', config.identity]) + args.extend(["--identity", config.identity]) if config.config_file: - args.extend(['--config-file', config.config_file]) + args.extend(["--config-file", config.config_file]) if config.repository: - args.extend(['--repository', config.repository]) + args.extend(["--repository", config.repository]) wheels = utils.ensure_list(wheels) @@ -3353,8 +4165,10 @@ def handle_pypi_upload(wheels, config): try: utils.check_call_env(args + [f]) except: - utils.get_logger(__name__).warn("wheel upload failed - is twine installed?" - " Is this package registered?") + utils.get_logger(__name__).warn( + "wheel upload failed - is twine installed?" + " Is this package registered?" + ) utils.get_logger(__name__).warn(f"Wheel file left in {f}") else: @@ -3363,7 +4177,7 @@ def handle_pypi_upload(wheels, config): def print_build_intermediate_warning(config): print("\n") - print('#' * 84) + print("#" * 84) print("Source and build intermediates have been left in " + config.croot + ".") build_folders = utils.get_build_folders(config.croot) print(f"There are currently {len(build_folders)} accumulated.") @@ -3382,14 +4196,17 @@ def is_package_built(metadata, env, include_local=True): if not os.path.isdir(d): os.makedirs(d) update_index(d, verbose=metadata.config.debug, warn=False, threads=1) - subdir = getattr(metadata.config, f'{env}_subdir') + subdir = getattr(metadata.config, f"{env}_subdir") - urls = [url_path(metadata.config.output_folder), 'local'] if include_local else [] + urls = [url_path(metadata.config.output_folder), "local"] if include_local else [] urls += get_rc_urls() if metadata.config.channel_urls: urls.extend(metadata.config.channel_urls) - spec = MatchSpec(name=metadata.name(), version=metadata.version(), build=metadata.build_id()) + spec = MatchSpec( + name=metadata.name(), version=metadata.version(), build=metadata.build_id() + ) from conda.api import SubdirData + return bool(SubdirData.query_all(spec, channels=urls, subdirs=(subdir, "noarch"))) diff --git a/conda_build/cli/actions.py b/conda_build/cli/actions.py index 0067f370de..93281d9b62 100644 --- a/conda_build/cli/actions.py +++ b/conda_build/cli/actions.py @@ -8,11 +8,11 @@ def __call__(self, parser, namespace, items, option_string=None): setattr(namespace, self.dest, dict()) for item in items: - key, value = item.split('=') + key, value = item.split("=") if key in getattr(namespace, self.dest): raise KeyError( - f"Key {key} cannot be overwritten. " - "It's likely that the key you've used " - "is already in use by conda-build." - ) + f"Key {key} cannot be overwritten. " + "It's likely that the key you've used " + "is already in use by conda-build." 
+ ) getattr(namespace, self.dest)[key] = value diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 316ff998b1..ca3bb8a3cf 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -1,28 +1,31 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import argparse -from glob2 import glob -from itertools import chain import logging -from os.path import abspath, expanduser, expandvars -from pathlib import Path import sys import warnings +from itertools import chain +from os.path import abspath, expanduser, expandvars +from pathlib import Path +import filelock from conda.auxlib.ish import dals from conda.common.io import dashlist -import filelock +from glob2 import glob import conda_build.api as api import conda_build.build as build +import conda_build.source as source import conda_build.utils as utils -from conda_build.conda_interface import (add_parser_channels, binstar_upload, - cc_conda_build) -from conda_build.cli.main_render import get_render_parser from conda_build.cli.actions import KeyValueAction -import conda_build.source as source +from conda_build.cli.main_render import get_render_parser +from conda_build.conda_interface import ( + add_parser_channels, + binstar_upload, + cc_conda_build, +) +from conda_build.config import Config, get_channel_urls, zstd_compression_level_default from conda_build.utils import LoggingContext -from conda_build.config import Config, zstd_compression_level_default, get_channel_urls def parse_args(args): @@ -45,331 +48,426 @@ def parse_args(args): "--no-anaconda-upload", action="store_false", help="Do not ask to upload the package to anaconda.org.", - dest='anaconda_upload', + dest="anaconda_upload", default=binstar_upload, ) p.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, - dest='anaconda_upload', + dest="anaconda_upload", default=binstar_upload, ) p.add_argument( "--no-include-recipe", action="store_false", help="Don't include the recipe inside the built package.", - dest='include_recipe', - default=cc_conda_build.get('include_recipe', 'true').lower() == 'true', + dest="include_recipe", + default=cc_conda_build.get("include_recipe", "true").lower() == "true", ) p.add_argument( - '-s', "--source", + "-s", + "--source", action="store_true", help="Only obtain the source (but don't build).", ) p.add_argument( - '-t', "--test", + "-t", + "--test", action="store_true", help="Test package (assumes package is already built). RECIPE_DIR argument must be a " "path to built package .tar.bz2 file.", ) p.add_argument( - '--no-test', - action='store_true', - dest='notest', + "--no-test", + action="store_true", + dest="notest", help="Do not test the package.", ) p.add_argument( - '-b', '--build-only', + "-b", + "--build-only", action="store_true", help="""Only run the build, without any post processing or testing. Implies --no-test and --no-anaconda-upload.""", ) p.add_argument( - '-p', '--post', + "-p", + "--post", action="store_true", help="Run the post-build logic. Implies --no-anaconda-upload.", ) p.add_argument( - '-p', '--test-run-post', + "-p", + "--test-run-post", action="store_true", help="Run the post-build logic during testing.", ) p.add_argument( - 'recipe', - metavar='RECIPE_PATH', - nargs='+', + "recipe", + metavar="RECIPE_PATH", + nargs="+", help="Path to recipe directory. Pass 'purge' here to clean the " "work and test intermediates. 
Pass 'purge-all' to also remove " "previously built packages.", ) p.add_argument( - '--skip-existing', - action='store_true', - help=("Skip recipes for which there already exists an existing build " - "(locally or in the channels)."), - default=cc_conda_build.get('skip_existing', 'false').lower() == 'true', + "--skip-existing", + action="store_true", + help=( + "Skip recipes for which there already exists an existing build " + "(locally or in the channels)." + ), + default=cc_conda_build.get("skip_existing", "false").lower() == "true", ) p.add_argument( - '--keep-old-work', - action='store_true', - dest='keep_old_work', + "--keep-old-work", + action="store_true", + dest="keep_old_work", help="Do not remove anything from environment, even after successful " - "build and test." + "build and test.", ) p.add_argument( - '--dirty', - action='store_true', - help='Do not remove work directory or _build environment, ' - 'to speed up debugging. Does not apply patches or download source.' + "--dirty", + action="store_true", + help="Do not remove work directory or _build environment, " + "to speed up debugging. Does not apply patches or download source.", ) p.add_argument( - '-q', "--quiet", + "-q", + "--quiet", action="store_true", help="do not display progress bar", - default=cc_conda_build.get('quiet', 'false').lower() == 'true', + default=cc_conda_build.get("quiet", "false").lower() == "true", ) p.add_argument( - '--debug', + "--debug", action="store_true", help="Show debug output from source checkouts and conda", ) p.add_argument( - '--token', + "--token", help="Token to pass through to anaconda upload", - default=cc_conda_build.get('anaconda_token'), + default=cc_conda_build.get("anaconda_token"), ) p.add_argument( - '--user', + "--user", help="User/organization to upload packages to on anaconda.org or pypi", - default=cc_conda_build.get('user'), + default=cc_conda_build.get("user"), ) p.add_argument( - '--label', action='append', dest='labels', default=[], + "--label", + action="append", + dest="labels", + default=[], help="Label argument to pass through to anaconda upload", ) p.add_argument( - '--no-force-upload', + "--no-force-upload", help="Disable force upload to anaconda.org, preventing overwriting any existing packages", - dest='force_upload', + dest="force_upload", default=True, - action='store_false', + action="store_false", ) p.add_argument( "--zstd-compression-level", - help=("When building v2 packages, set the compression level used by " - "conda-package-handling. " - f"Defaults to {zstd_compression_level_default}."), + help=( + "When building v2 packages, set the compression level used by " + "conda-package-handling. " + f"Defaults to {zstd_compression_level_default}." 
+ ), type=int, choices=range(1, 23), - default=cc_conda_build.get('zstd_compression_level', zstd_compression_level_default), + default=cc_conda_build.get( + "zstd_compression_level", zstd_compression_level_default + ), ) pypi_grp = p.add_argument_group("PyPI upload parameters (twine)") pypi_grp.add_argument( - '--password', + "--password", help="password to use when uploading packages to pypi", ) pypi_grp.add_argument( - '--sign', default=False, - help="sign files when uploading to pypi" + "--sign", default=False, help="sign files when uploading to pypi" ) pypi_grp.add_argument( - '--sign-with', default='gpg', dest='sign_with', - help="program to use to sign files when uploading to pypi" + "--sign-with", + default="gpg", + dest="sign_with", + help="program to use to sign files when uploading to pypi", ) pypi_grp.add_argument( - '--identity', - help="GPG identity to use to sign files when uploading to pypi" + "--identity", help="GPG identity to use to sign files when uploading to pypi" ) pypi_grp.add_argument( - '--config-file', + "--config-file", help="path to .pypirc file to use when uploading to pypi", - default=(abspath(expanduser(expandvars(cc_conda_build.get('pypirc')))) - if cc_conda_build.get('pypirc') - else cc_conda_build.get('pypirc')), + default=( + abspath(expanduser(expandvars(cc_conda_build.get("pypirc")))) + if cc_conda_build.get("pypirc") + else cc_conda_build.get("pypirc") + ), ) pypi_grp.add_argument( - '--repository', '-r', help="PyPI repository to upload to", - default=cc_conda_build.get('pypi_repository', 'pypitest'), + "--repository", + "-r", + help="PyPI repository to upload to", + default=cc_conda_build.get("pypi_repository", "pypitest"), ) p.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", - dest='activate', - default=cc_conda_build.get('activate', 'true').lower() == 'true', + dest="activate", + default=cc_conda_build.get("activate", "true").lower() == "true", ) p.add_argument( "--no-build-id", action="store_false", - help=("do not generate unique build folder names. Use if having issues with " - "paths being too long. Deprecated, please use --build-id-pat='' instead"), - dest='set_build_id', + help=( + "do not generate unique build folder names. Use if having issues with " + "paths being too long. Deprecated, please use --build-id-pat='' instead" + ), + dest="set_build_id", # note: inverted - dest stores positive logic - default=cc_conda_build.get('set_build_id', 'true').lower() == 'true', + default=cc_conda_build.get("set_build_id", "true").lower() == "true", ) p.add_argument( "--build-id-pat", - help=("specify a templated pattern to use as build folder names. Use if having issues with " - "paths being too long."), - dest='build_id_pat', - default=cc_conda_build.get('build_id_pat', '{n}_{t}'), + help=( + "specify a templated pattern to use as build folder names. Use if having issues with " + "paths being too long." + ), + dest="build_id_pat", + default=cc_conda_build.get("build_id_pat", "{n}_{t}"), ) p.add_argument( "--croot", - help=("Build root folder. Equivalent to CONDA_BLD_PATH, but applies only " - "to this call of conda-build.") + help=( + "Build root folder. Equivalent to CONDA_BLD_PATH, but applies only " + "to this call of conda-build." 
+ ), ) p.add_argument( "--verify", action="store_true", help="run verification on recipes or packages when building", - default=cc_conda_build.get('verify', 'true').lower() == 'true', + default=cc_conda_build.get("verify", "true").lower() == "true", ) p.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", - default=cc_conda_build.get('verify', 'true').lower() == 'true', + default=cc_conda_build.get("verify", "true").lower() == "true", ) p.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", - default=cc_conda_build.get('exit_on_verify_error', 'false').lower() == 'true', + default=cc_conda_build.get("exit_on_verify_error", "false").lower() == "true", ) p.add_argument( "--output-folder", - help=("folder to dump output package to. Package are moved here if build or test succeeds." - " Destination folder must exist prior to using this."), - default=cc_conda_build.get('output_folder') + help=( + "folder to dump output package to. Package are moved here if build or test succeeds." + " Destination folder must exist prior to using this." + ), + default=cc_conda_build.get("output_folder"), ) p.add_argument( - "--no-prefix-length-fallback", dest='prefix_length_fallback', + "--no-prefix-length-fallback", + dest="prefix_length_fallback", action="store_false", - help=("Disable fallback to older 80 character prefix length if environment creation" - " fails due to insufficient prefix length in dependency packages"), + help=( + "Disable fallback to older 80 character prefix length if environment creation" + " fails due to insufficient prefix length in dependency packages" + ), default=True, ) p.add_argument( - "--prefix-length-fallback", dest='prefix_length_fallback', + "--prefix-length-fallback", + dest="prefix_length_fallback", action="store_true", - help=("Disable fallback to older 80 character prefix length if environment creation" - " fails due to insufficient prefix length in dependency packages"), + help=( + "Disable fallback to older 80 character prefix length if environment creation" + " fails due to insufficient prefix length in dependency packages" + ), # this default will change to false in the future, when we deem that the community has # had enough time to build long-prefix length packages. default=True, ) p.add_argument( - "--prefix-length", dest='_prefix_length', - help=("length of build prefix. For packages with binaries that embed the path, this is" - " critical to ensuring that your package can run as many places as possible. Note" - "that this value can be altered by the OS below conda-build (e.g. encrypted " - "filesystems on Linux), and you should prefer to set --croot to a non-encrypted " - "location instead, so that you maintain a known prefix length."), + "--prefix-length", + dest="_prefix_length", + help=( + "length of build prefix. For packages with binaries that embed the path, this is" + " critical to ensuring that your package can run as many places as possible. Note" + "that this value can be altered by the OS below conda-build (e.g. encrypted " + "filesystems on Linux), and you should prefer to set --croot to a non-encrypted " + "location instead, so that you maintain a known prefix length." + ), # this default will change to false in the future, when we deem that the community has # had enough time to build long-prefix length packages. 
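(Aside, not part of the patch: nearly every flag in this parser follows the same precedence, the condarc value if present, otherwise a hard-coded default, with the command line overriding both. A minimal sketch with invented values:)

import argparse

cc_conda_build = {}                  # stand-in for the condarc "conda_build:" section
zstd_compression_level_default = 19  # illustrative default

p = argparse.ArgumentParser()
p.add_argument(
    "--zstd-compression-level",
    type=int,
    choices=range(1, 23),
    default=cc_conda_build.get("zstd_compression_level", zstd_compression_level_default),
)
print(p.parse_args([]).zstd_compression_level)                                  # 19: condarc empty, default wins
print(p.parse_args(["--zstd-compression-level", "5"]).zstd_compression_level)   # 5: the CLI always overrides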
- default=255, type=int, + default=255, + type=int, ) p.add_argument( - "--no-locking", dest='locking', default=True, action="store_false", - help=("Disable locking, to avoid unresolved race condition issues. Unsafe to run multiple " - "builds at once on one system with this set.") + "--no-locking", + dest="locking", + default=True, + action="store_false", + help=( + "Disable locking, to avoid unresolved race condition issues. Unsafe to run multiple " + "builds at once on one system with this set." + ), + ) + p.add_argument( + "--no-remove-work-dir", + dest="remove_work_dir", + default=True, + action="store_false", + help=( + "Disable removal of the work dir before testing. Be careful using this option, as" + " you package may depend on files that are not included in the package, and may pass " + "tests, but ultimately fail on installed systems." + ), ) p.add_argument( - "--no-remove-work-dir", dest='remove_work_dir', default=True, action="store_false", - help=("Disable removal of the work dir before testing. Be careful using this option, as" - " you package may depend on files that are not included in the package, and may pass " - "tests, but ultimately fail on installed systems.") + "--error-overlinking", + dest="error_overlinking", + action="store_true", + help=( + "Enable error when shared libraries from transitive dependencies are directly " + "linked to any executables or shared libraries in built packages. This is disabled " + "by default, but will be enabled by default in conda-build 4.0." + ), + default=cc_conda_build.get("error_overlinking", "false").lower() == "true", ) p.add_argument( - "--error-overlinking", dest='error_overlinking', action="store_true", - help=("Enable error when shared libraries from transitive dependencies are directly " - "linked to any executables or shared libraries in built packages. This is disabled " - "by default, but will be enabled by default in conda-build 4.0."), - default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true', + "--no-error-overlinking", + dest="error_overlinking", + action="store_false", + help=( + "Disable error when shared libraries from transitive dependencies are directly " + "linked to any executables or shared libraries in built packages. This is currently " + "the default behavior, but will change in conda-build 4.0." + ), + default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + ) + p.add_argument( + "--error-overdepending", + dest="error_overdepending", + action="store_true", + help=( + "Enable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package." + ), + default=cc_conda_build.get("error_overdepending", "false").lower() == "true", ) p.add_argument( - "--no-error-overlinking", dest='error_overlinking', action="store_false", - help=("Disable error when shared libraries from transitive dependencies are directly " - "linked to any executables or shared libraries in built packages. This is currently " - "the default behavior, but will change in conda-build 4.0."), - default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true', + "--no-error-overdepending", + dest="error_overdepending", + action="store_false", + help=( + "Disable error when packages with names beginning `lib` or which have " + "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " + "any of the files in this package." 
+ ), + default=cc_conda_build.get("error_overdepending", "false").lower() == "true", ) p.add_argument( - "--error-overdepending", dest='error_overdepending', action="store_true", - help=("Enable error when packages with names beginning `lib` or which have " - "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " - "any of the files in this package."), - default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', + "--long-test-prefix", + action="store_true", + help=( + "Use a long prefix for the test prefix, as well as the build prefix. Affects only " + "Linux and Mac. Prefix length matches the --prefix-length flag. This is on by " + "default in conda-build 3.0+" + ), + default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", ) p.add_argument( - "--no-error-overdepending", dest='error_overdepending', action="store_false", - help=("Disable error when packages with names beginning `lib` or which have " - "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " - "any of the files in this package."), - default=cc_conda_build.get('error_overdepending', 'false').lower() == 'true', + "--no-long-test-prefix", + dest="long_test_prefix", + action="store_false", + help=( + "Do not use a long prefix for the test prefix, as well as the build prefix." + " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. " + ), + default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", ) p.add_argument( - "--long-test-prefix", action="store_true", - help=("Use a long prefix for the test prefix, as well as the build prefix. Affects only " - "Linux and Mac. Prefix length matches the --prefix-length flag. This is on by " - "default in conda-build 3.0+"), - default=cc_conda_build.get('long_test_prefix', 'true').lower() == 'true', + "--keep-going", + "-k", + action="store_true", + help=( + "When running tests, keep going after each failure. Default is to stop on the first " + "failure." + ), ) p.add_argument( - "--no-long-test-prefix", dest="long_test_prefix", action="store_false", - help=("Do not use a long prefix for the test prefix, as well as the build prefix." - " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. "), - default=cc_conda_build.get('long_test_prefix', 'true').lower() == 'true', + "--cache-dir", + help=( + "Path to store the source files (archives, git clones, etc.) during the build." + ), + default=( + abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) + if cc_conda_build.get("cache_dir") + else cc_conda_build.get("cache_dir") + ), ) p.add_argument( - '--keep-going', '-k', action='store_true', - help=("When running tests, keep going after each failure. Default is to stop on the first " - "failure.") + "--no-copy-test-source-files", + dest="copy_test_source_files", + action="store_false", + default=cc_conda_build.get("copy_test_source_files", "true").lower() == "true", + help=( + "Disables copying the files necessary for testing the package into " + "the info/test folder. Passing this argument means it may not be possible " + "to test the package without internet access. There is also a danger that " + "the source archive(s) containing the files could become unavailable sometime " + "in the future." + ), ) p.add_argument( - '--cache-dir', - help=('Path to store the source files (archives, git clones, etc.) 
during the build.'), - default=(abspath(expanduser(expandvars(cc_conda_build.get('cache_dir')))) - if cc_conda_build.get('cache_dir') - else cc_conda_build.get('cache_dir')), + "--merge-build-host", + action="store_true", + help=( + "Merge the build and host directories, even when host section or compiler " + "jinja2 is present" + ), + default=cc_conda_build.get("merge_build_host", "false").lower() == "true", ) p.add_argument( - "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", - default=cc_conda_build.get('copy_test_source_files', 'true').lower() == 'true', - help=("Disables copying the files necessary for testing the package into " - "the info/test folder. Passing this argument means it may not be possible " - "to test the package without internet access. There is also a danger that " - "the source archive(s) containing the files could become unavailable sometime " - "in the future.") + "--stats-file", + help=("File path to save build statistics to. Stats are " "in JSON format"), ) p.add_argument( - '--merge-build-host', action="store_true", - help=('Merge the build and host directories, even when host section or compiler ' - 'jinja2 is present'), - default=cc_conda_build.get('merge_build_host', 'false').lower() == 'true', + "--extra-deps", + nargs="+", + help=( + "Extra dependencies to add to all environment creation steps. This " + "is only enabled for testing with the -t or --test flag. Change " + "meta.yaml or use templates otherwise." + ), ) - p.add_argument('--stats-file', help=('File path to save build statistics to. Stats are ' - 'in JSON format'), ) - p.add_argument('--extra-deps', - nargs='+', - help=('Extra dependencies to add to all environment creation steps. This ' - 'is only enabled for testing with the -t or --test flag. Change ' - 'meta.yaml or use templates otherwise.'), ) p.add_argument( - '--extra-meta', - nargs='*', + "--extra-meta", + nargs="*", action=KeyValueAction, help="Key value pairs of metadata to add to about.json. Should be " "defined as Key=Value with a space separating each pair.", metavar="KEY=VALUE", ) - p.add_argument('--suppress-variables', - action='store_true', - help=("Do not display value of environment variables specified in build.script_env."), ) + p.add_argument( + "--suppress-variables", + action="store_true", + help=( + "Do not display value of environment variables specified in build.script_env." + ), + ) add_parser_channels(p) args = p.parse_args(args) @@ -393,7 +491,7 @@ def check_recipe(path_list): "It should be a path to a folder.\n" "Forcing conda-build to use the recipe file." 
), - UserWarning + UserWarning, ) @@ -402,13 +500,13 @@ def output_action(recipe, config): config.verbose = False config.debug = False paths = api.get_output_file_paths(recipe, config=config) - print('\n'.join(sorted(paths))) + print("\n".join(sorted(paths))) def source_action(recipe, config): metadata = api.render(recipe, config=config)[0][0] source.provide(metadata) - print('Source tree in:', metadata.config.work_dir) + print("Source tree in:", metadata.config.work_dir) def test_action(recipe, config): @@ -430,11 +528,11 @@ def execute(args): config.override_channels = args.override_channels config.verbose = not args.quiet or args.debug - if 'purge' in args.recipe: + if "purge" in args.recipe: build.clean_build(config) return - if 'purge-all' in args.recipe: + if "purge-all" in args.recipe: build.clean_build(config) config.clean_pkgs() return @@ -497,7 +595,9 @@ def main(): print(str(e)) sys.exit(1) except filelock.Timeout as e: - print("File lock on {} could not be obtained. You might need to try fewer builds at once." - " Otherwise, run conda clean --lock".format(e.lock_file)) + print( + "File lock on {} could not be obtained. You might need to try fewer builds at once." + " Otherwise, run conda clean --lock".format(e.lock_file) + ) sys.exit(1) return diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index e1ada47bf9..43006a1e3b 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -1,12 +1,11 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import logging -from os.path import abspath, expanduser import sys - -from conda_build.conda_interface import ArgumentParser +from os.path import abspath, expanduser from conda_build import api +from conda_build.conda_interface import ArgumentParser logging.basicConfig(level=logging.INFO) @@ -45,52 +44,65 @@ def parse_args(args): ) # TODO: Factor this into a subcommand, since it's python package specific + p.add_argument("files", nargs="+", help="Package files to convert.") p.add_argument( - 'files', - nargs='+', - help="Package files to convert." - ) - p.add_argument( - '-p', "--platform", - dest='platforms', + "-p", + "--platform", + dest="platforms", action="append", - choices=['osx-64', 'osx-arm64', - 'linux-32', 'linux-64', 'linux-ppc64', 'linux-ppc64le', - 'linux-s390x', 'linux-armv6l', 'linux-armv7l', 'linux-aarch64', - 'win-32', 'win-64', 'win-arm64', 'all'], + choices=[ + "osx-64", + "osx-arm64", + "linux-32", + "linux-64", + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + "win-32", + "win-64", + "win-arm64", + "all", + ], help="Platform to convert the packages to.", - default=None + default=None, ) p.add_argument( - "--dependencies", "-d", - nargs='*', + "--dependencies", + "-d", + nargs="*", help="""Additional (besides python) dependencies of the converted package. To specify a version restriction for a dependency, wrap the dependency in quotes, like 'package >=2.0'.""", ) p.add_argument( - '--show-imports', - action='store_true', + "--show-imports", + action="store_true", default=False, help="Show Python imports for compiled parts of the package.", ) p.add_argument( - '-f', "--force", + "-f", + "--force", action="store_true", help="Force convert, even when a package has compiled C extensions.", ) p.add_argument( - '-o', '--output-dir', - default='.', + "-o", + "--output-dir", + default=".", help="""Directory to write the output files. 
The packages will be organized in platform/ subdirectories, e.g., - win-32/package-1.0-py27_0.tar.bz2.""" + win-32/package-1.0-py27_0.tar.bz2.""", ) p.add_argument( - '-v', '--verbose', + "-v", + "--verbose", default=False, - action='store_true', - help="Print verbose output." + action="store_true", + help="Print verbose output.", ) p.add_argument( "--dry-run", @@ -98,9 +110,7 @@ def parse_args(args): help="Only display what would have been done.", ) p.add_argument( - "-q", "--quiet", - action="store_true", - help="Don't print as much output." + "-q", "--quiet", action="store_true", help="Don't print as much output." ) args = p.parse_args(args) @@ -110,7 +120,7 @@ def parse_args(args): def execute(args): _, args = parse_args(args) files = args.files - del args.__dict__['files'] + del args.__dict__["files"] for f in files: f = abspath(expanduser(f)) diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 22cdf2cb73..702fbfc798 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -5,11 +5,12 @@ from argparse import ArgumentParser from conda_build import api -from conda_build.utils import on_win +from conda_build.cli import validators as valid + # we extend the render parser because we basically need to render the recipe before # we can say what env to create. This is not really true for debugging tests, but meh... from conda_build.cli.main_render import get_render_parser -from conda_build.cli import validators as valid +from conda_build.utils import on_win logging.basicConfig(level=logging.INFO) @@ -24,29 +25,65 @@ def get_parser() -> ArgumentParser: """ # we do this one separately because we only allow one entry to conda render p.add_argument( - 'recipe_or_package_file_path', - help=("Path to recipe directory or package file to use for dependency and source information. " - "If you use a recipe, you get the build/host env and source work directory. If you use " - "a package file, you get the test environments and the test_tmp folder."), - type=valid.validate_is_conda_pkg_or_recipe_dir + "recipe_or_package_file_path", + help=( + "Path to recipe directory or package file to use for dependency and source information. " + "If you use a recipe, you get the build/host env and source work directory. If you use " + "a package file, you get the test environments and the test_tmp folder." + ), + type=valid.validate_is_conda_pkg_or_recipe_dir, + ) + p.add_argument( + "-p", + "--path", + help=( + "root path in which to place envs, source and activation script. Defaults to a " + "standard conda-build work folder (packagename_timestamp) in your conda-bld folder." + ), + ) + p.add_argument( + "-o", + "--output-id", + help=( + "fnmatch pattern that is associated with the output that you want to create an env for. " + "Must match only one file, as we don't support creating envs for more than one output at a time. " + "The top-level recipe can be specified by passing 'TOPLEVEL' here" + ), + ) + p.add_argument( + "-a", + "--activate-string-only", + action="store_true", + help="Output only the string to the used generated activation script. Use this for creating envs in scripted " + "environments.", ) - p.add_argument("-p", "--path", - help=("root path in which to place envs, source and activation script. Defaults to a " - "standard conda-build work folder (packagename_timestamp) in your conda-bld folder.")) - p.add_argument("-o", "--output-id", - help=("fnmatch pattern that is associated with the output that you want to create an env for. 
" - "Must match only one file, as we don't support creating envs for more than one output at a time. " - "The top-level recipe can be specified by passing 'TOPLEVEL' here")) - p.add_argument("-a", "--activate-string-only", action="store_true", - help="Output only the string to the used generated activation script. Use this for creating envs in scripted " - "environments.") # cut out some args from render that don't make sense here # https://stackoverflow.com/a/32809642/1170370 - p._handle_conflict_resolve(None, [('--output', [_ for _ in p._actions if _.option_strings == ['--output']][0])]) - p._handle_conflict_resolve(None, [('--bootstrap', [_ for _ in p._actions if _.option_strings == ['--bootstrap']][0])]) - p._handle_conflict_resolve(None, [('--old-build-string', [_ for _ in p._actions if - _.option_strings == ['--old-build-string']][0])]) + p._handle_conflict_resolve( + None, + [("--output", [_ for _ in p._actions if _.option_strings == ["--output"]][0])], + ) + p._handle_conflict_resolve( + None, + [ + ( + "--bootstrap", + [_ for _ in p._actions if _.option_strings == ["--bootstrap"]][0], + ) + ], + ) + p._handle_conflict_resolve( + None, + [ + ( + "--old-build-string", + [_ for _ in p._actions if _.option_strings == ["--old-build-string"]][ + 0 + ], + ) + ], + ) return p @@ -58,7 +95,7 @@ def execute(): activation_string = api.debug( args.recipe_or_package_file_path, verbose=(not args.activate_string_only), - **args.__dict__ + **args.__dict__, ) if not args.activate_string_only: @@ -76,7 +113,9 @@ def execute(): print("#" * 80) except ValueError as e: - print(f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr) + print( + f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr + ) sys.exit(1) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 8300a03a64..ec33555748 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -4,9 +4,9 @@ import sys from conda.base.context import context, determine_target_prefix -from conda_build.conda_interface import ArgumentParser, add_parser_prefix -from conda_build import api +from conda_build import api +from conda_build.conda_interface import ArgumentParser, add_parser_prefix logging.basicConfig(level=logging.INFO) @@ -22,32 +22,49 @@ def parse_args(args): ) p.add_argument( - 'source', - metavar='PATH', - nargs='+', - help="Path to the source directory." + "source", metavar="PATH", nargs="+", help="Path to the source directory." + ) + p.add_argument( + "-npf", + "--no-pth-file", + action="store_true", + help=( + "Relink compiled extension dependencies against " + "libraries found in current conda env. " + "Do not add source to conda.pth." + ), + ) + p.add_argument( + "-b", + "--build_ext", + action="store_true", + help=( + "Build extensions inplace, invoking: " + "python setup.py build_ext --inplace; " + "add to conda.pth; relink runtime libraries to " + "environment's lib/." + ), + ) + p.add_argument( + "-c", + "--clean", + action="store_true", + help=( + "Invoke clean on setup.py: " + "python setup.py clean " + "use with build_ext to clean before building." + ), + ) + p.add_argument( + "-u", + "--uninstall", + action="store_true", + help=( + "Removes package if installed in 'development mode' " + "by deleting path from conda.pth file. 
Ignore other " + "options - just uninstall and exit" + ), ) - p.add_argument('-npf', '--no-pth-file', - action='store_true', - help=("Relink compiled extension dependencies against " - "libraries found in current conda env. " - "Do not add source to conda.pth.")) - p.add_argument('-b', '--build_ext', - action='store_true', - help=("Build extensions inplace, invoking: " - "python setup.py build_ext --inplace; " - "add to conda.pth; relink runtime libraries to " - "environment's lib/.")) - p.add_argument('-c', '--clean', - action='store_true', - help=("Invoke clean on setup.py: " - "python setup.py clean " - "use with build_ext to clean before building.")) - p.add_argument('-u', '--uninstall', - action='store_true', - help=("Removes package if installed in 'development mode' " - "by deleting path from conda.pth file. Ignore other " - "options - just uninstall and exit")) add_parser_prefix(p) p.set_defaults(func=execute) @@ -59,8 +76,14 @@ def parse_args(args): def execute(args): _, args = parse_args(args) prefix = determine_target_prefix(context, args) - api.develop(args.source, prefix=prefix, no_pth_file=args.no_pth_file, - build_ext=args.build_ext, clean=args.clean, uninstall=args.uninstall) + api.develop( + args.source, + prefix=prefix, + no_pth_file=args.no_pth_file, + build_ext=args.build_ext, + clean=args.clean, + uninstall=args.uninstall, + ) def main(): diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index 9d3f4009b9..fe504d4dc4 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -4,9 +4,8 @@ import os import sys -from conda_build.conda_interface import ArgumentParser - from conda_build import api +from conda_build.conda_interface import ArgumentParser from conda_build.index import MAX_THREADS_DEFAULT from conda_build.utils import DEFAULT_SUBDIRS @@ -15,55 +14,63 @@ def parse_args(args): p = ArgumentParser( - description="Update package index metadata files in given directories.") + description="Update package index metadata files in given directories." + ) p.add_argument( - 'dir', - help='Directory that contains an index to be updated.', - nargs='*', + "dir", + help="Directory that contains an index to be updated.", + nargs="*", default=[os.getcwd()], ) p.add_argument( - '-c', "--check-md5", + "-c", + "--check-md5", action="store_true", help="""Use hash values instead of file modification times for determining if a package's metadata needs to be updated.""", ) p.add_argument( - "-n", "--channel-name", + "-n", + "--channel-name", help="Customize the channel name listed in each channel's index.html.", ) p.add_argument( - '-s', '--subdir', - action='append', - help='Optional. The subdir to index. Can be given multiple times. If not provided, will ' - 'default to all of %s. If provided, will not create channeldata.json for the channel.' - '' % ', '.join(DEFAULT_SUBDIRS), + "-s", + "--subdir", + action="append", + help="Optional. The subdir to index. Can be given multiple times. If not provided, will " + "default to all of %s. If provided, will not create channeldata.json for the channel." 
+ "" % ", ".join(DEFAULT_SUBDIRS), ) p.add_argument( - '-t', '--threads', + "-t", + "--threads", default=MAX_THREADS_DEFAULT, type=int, ) p.add_argument( - "-p", "--patch-generator", - help='Path to Python file that outputs metadata patch instructions from its ' - '_patch_repodata function or a .tar.bz2/.conda file which contains a ' - 'patch_instructions.json file for each subdir' + "-p", + "--patch-generator", + help="Path to Python file that outputs metadata patch instructions from its " + "_patch_repodata function or a .tar.bz2/.conda file which contains a " + "patch_instructions.json file for each subdir", ) p.add_argument( "--hotfix-source-repo", - help="Deprecated, will be removed in a future version of conda build" + help="Deprecated, will be removed in a future version of conda build", ) + p.add_argument("--verbose", help="show extra debugging info", action="store_true") p.add_argument( - "--verbose", help="show extra debugging info", action="store_true" + "--no-progress", + help="Hide progress bars", + action="store_false", + dest="progress", ) p.add_argument( - "--no-progress", help="Hide progress bars", action="store_false", dest="progress" - ) - p.add_argument( - "--current-index-versions-file", "-m", + "--current-index-versions-file", + "-m", help=""" YAML file containing name of package as key, and list of versions as values. The current_index.json will contain the newest from this series of versions. For example: @@ -73,12 +80,13 @@ def parse_args(args): - 3.6 will keep python 2.7.X and 3.6.Y in the current_index.json, instead of only the very latest python version. - """ + """, ) p.add_argument( - "-f", "--file", + "-f", + "--file", help="A file that contains a new line separated list of packages to add to repodata.", - action="store" + action="store", ) args = p.parse_args(args) @@ -88,10 +96,19 @@ def parse_args(args): def execute(args): _, args = parse_args(args) - api.update_index(args.dir, check_md5=args.check_md5, channel_name=args.channel_name, - threads=args.threads, subdir=args.subdir, patch_generator=args.patch_generator, - verbose=args.verbose, progress=args.progress, hotfix_source_repo=args.hotfix_source_repo, - current_index_versions=args.current_index_versions_file, index_file=args.file) + api.update_index( + args.dir, + check_md5=args.check_md5, + channel_name=args.channel_name, + threads=args.threads, + subdir=args.subdir, + patch_generator=args.patch_generator, + verbose=args.verbose, + progress=args.progress, + hotfix_source_repo=args.hotfix_source_repo, + current_index_versions=args.current_index_versions_file, + index_file=args.file, + ) def main(): diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 4f8b133d23..9bdded9128 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -1,29 +1,28 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import logging +import sys from os.path import expanduser from pprint import pprint -import sys from conda.base.context import context, determine_target_prefix -from conda_build.conda_interface import ArgumentParser, add_parser_prefix from conda_build import api +from conda_build.conda_interface import ArgumentParser, add_parser_prefix logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( - description='Tools for inspecting conda packages.', + description="Tools for inspecting conda packages.", epilog=""" Run --help on the subcommands like 'conda inspect linkages --help' to see the options available. 
""", - ) subcommand = p.add_subparsers( - dest='subcommand', + dest="subcommand", ) linkages_help = """ @@ -40,39 +39,39 @@ def parse_args(args): description=linkages_help, ) linkages.add_argument( - 'packages', - action='store', - nargs='*', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="*", + help="Conda packages to inspect.", ) linkages.add_argument( - '--untracked', - action='store_true', + "--untracked", + action="store_true", help="""Inspect the untracked files in the environment. This is useful when used in conjunction with conda build --build-only.""", ) linkages.add_argument( - '--show-files', + "--show-files", action="store_true", help="Show the files in the package that link to each library", ) linkages.add_argument( - '--groupby', - action='store', - default='package', - choices=('package', 'dependency'), + "--groupby", + action="store", + default="package", + choices=("package", "dependency"), help="""Attribute to group by (default: %(default)s). Useful when used in conjunction with --all.""", ) linkages.add_argument( - '--sysroot', - action='store', - help='System root in which to look for system libraries.', - default='', + "--sysroot", + action="store", + help="System root in which to look for system libraries.", + default="", ) linkages.add_argument( - '--all', - action='store_true', + "--all", + action="store_true", help="Generate a report for all packages in the environment.", ) add_parser_prefix(linkages) @@ -89,28 +88,28 @@ def parse_args(args): description=objects_help, ) objects.add_argument( - 'packages', - action='store', - nargs='*', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="*", + help="Conda packages to inspect.", ) objects.add_argument( - '--untracked', - action='store_true', + "--untracked", + action="store_true", help="""Inspect the untracked files in the environment. This is useful when used in conjunction with conda build --build-only.""", ) # TODO: Allow groupby to include the package (like for --all) objects.add_argument( - '--groupby', - action='store', - default='filename', - choices=('filename', 'filetype', 'rpath'), - help='Attribute to group by (default: %(default)s).', + "--groupby", + action="store", + default="filename", + choices=("filename", "filetype", "rpath"), + help="Attribute to group by (default: %(default)s).", ) objects.add_argument( - '--all', - action='store_true', + "--all", + action="store_true", help="Generate a report for all packages in the environment.", ) add_parser_prefix(objects) @@ -124,22 +123,23 @@ def parse_args(args): description=channels_help, ) channels.add_argument( - '--verbose', - action='store_true', + "--verbose", + action="store_true", help="""Show verbose output. Note that error output to stderr will always be shown regardless of this flag. """, ) channels.add_argument( - '--test-installable', '-t', - action='store_true', + "--test-installable", + "-t", + action="store_true", help="""Test every package in the channel to see if it is installable by conda.""", ) channels.add_argument( "channel", - nargs='?', + nargs="?", default="defaults", - help="The channel to test. The default is %(default)s." + help="The channel to test. 
The default is %(default)s.", ) prefix_lengths = subcommand.add_parser( @@ -149,14 +149,15 @@ def parse_args(args): description=linkages_help, ) prefix_lengths.add_argument( - 'packages', - action='store', - nargs='+', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="+", + help="Conda packages to inspect.", ) prefix_lengths.add_argument( - '--min-prefix-length', '-m', - help='Minimum length. Only packages with prefixes below this are shown.', + "--min-prefix-length", + "-m", + help="Minimum length. Only packages with prefixes below this are shown.", default=api.Config().prefix_length, type=int, ) @@ -167,10 +168,10 @@ def parse_args(args): description="Show data used to compute hash identifier (h????) for package", ) hash_inputs.add_argument( - 'packages', - action='store', - nargs='*', - help='Conda packages to inspect.', + "packages", + action="store", + nargs="*", + help="Conda packages to inspect.", ) args = p.parse_args(args) return p, args @@ -183,7 +184,7 @@ def execute(args): parser.print_help() exit() - elif args.subcommand == 'channels': + elif args.subcommand == "channels": if not args.test_installable: parser.error("At least one option (--test-installable) is required.") else: @@ -213,7 +214,7 @@ def execute(args): args.packages, min_prefix_length=args.min_prefix_length ): sys.exit(1) - elif args.subcommand == 'hash-inputs': + elif args.subcommand == "hash-inputs": pprint(api.inspect_hash_inputs(args.packages)) else: raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 5a9613c273..7657432fdc 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -4,17 +4,19 @@ import logging import sys -from conda_build.conda_interface import binstar_upload -from conda_build.conda_interface import ArgumentParser -from conda_build.conda_interface import add_parser_channels from conda_build import api +from conda_build.conda_interface import ( + ArgumentParser, + add_parser_channels, + binstar_upload, +) logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( - description=''' + description=""" Tool for building conda metapackages. A metapackage is a package with no files, only metadata. They are typically used to collect several packages together into a single package via dependencies. @@ -22,33 +24,32 @@ def parse_args(args): NOTE: Metapackages can also be created by creating a recipe with the necessary metadata in the meta.yaml, but a metapackage can be created entirely from the command line with the conda metapackage command. 
-''', +""", ) p.add_argument( "--no-anaconda-upload", action="store_false", help="Do not ask to upload the package to anaconda.org.", - dest='anaconda_upload', + dest="anaconda_upload", default=binstar_upload, ) p.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, - dest='anaconda_upload', + dest="anaconda_upload", default=binstar_upload, ) + p.add_argument("--token", help="Token to pass through to anaconda upload") p.add_argument( - '--token', - help="Token to pass through to anaconda upload" + "--user", help="User/organization to upload packages to on anaconda.org" ) p.add_argument( - '--user', - help="User/organization to upload packages to on anaconda.org" - ) - p.add_argument( - '--label', action='append', dest='labels', default=[], + "--label", + action="append", + dest="labels", + default=[], help="Label argument to pass through to anaconda upload", ) p.add_argument( @@ -71,8 +72,9 @@ def parse_args(args): help="Build string for the package (default is automatically generated).", ) p.add_argument( - "--dependencies", "-d", - nargs='*', + "--dependencies", + "-d", + nargs="*", default=(), help="""The dependencies of the package. To specify a version restriction for a dependency, wrap the dependency in quotes, like 'package >=2.0'.""", @@ -80,12 +82,9 @@ def parse_args(args): p.add_argument( "--home", help="The homepage for the metapackage.", - ) p.add_argument( - "--license", - help="The license of the metapackage.", - dest='license_name' + "--license", help="The license of the metapackage.", dest="license_name" ) p.add_argument( "--summary", @@ -96,7 +95,7 @@ def parse_args(args): ) p.add_argument( "--entry-points", - nargs='*', + nargs="*", default=(), help="""Python entry points to create automatically. They should use the same syntax as in the meta.yaml of a recipe, e.g., --entry-points @@ -111,7 +110,7 @@ def parse_args(args): def execute(args): _, args = parse_args(args) - channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or () + channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or () api.create_metapackage(channel_urls=channel_urls, **args.__dict__) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 72973a1802..5aa7b8f3fd 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -8,16 +8,17 @@ import yaml from yaml.parser import ParserError -from conda_build.conda_interface import (ArgumentParser, add_parser_channels, - cc_conda_build) - from conda_build import __version__, api - -from conda_build.config import get_or_merge_config, get_channel_urls -from conda_build.variants import get_package_variants, set_language_env_vars +from conda_build.conda_interface import ( + ArgumentParser, + add_parser_channels, + cc_conda_build, +) +from conda_build.config import get_channel_urls, get_or_merge_config from conda_build.utils import LoggingContext +from conda_build.variants import get_package_variants, set_language_env_vars -on_win = (sys.platform == 'win32') +on_win = sys.platform == "win32" log = logging.getLogger(__name__) @@ -30,11 +31,15 @@ def __call__(self, parser, namespace, values, option_string=None): try: my_dict = yaml.load(values[0], Loader=yaml.BaseLoader) if not isinstance(my_dict, dict): - raise RuntimeError(f"The argument of {option_string} is not a YAML dictionary.") + raise RuntimeError( + f"The argument of {option_string} is not a YAML dictionary." 
+ ) setattr(namespace, self.dest, my_dict) except ParserError as e: - raise RuntimeError(f'The argument of {option_string} is not a valid YAML. The parser error was: \n\n{str(e)}') + raise RuntimeError( + f"The argument of {option_string} is not a valid YAML. The parser error was: \n\n{str(e)}" + ) def get_render_parser(): @@ -45,16 +50,18 @@ def get_render_parser(): other components. conda keeps track of dependencies between packages and platform specifics, making it simple to create working environments from different sets of packages.""", - conflict_handler='resolve' + conflict_handler="resolve", ) p.add_argument( - '-V', '--version', - action='version', - help='Show the conda-build version number and exit.', - version='conda-build %s' % __version__, + "-V", + "--version", + action="version", + help="Show the conda-build version number and exit.", + version="conda-build %s" % __version__, ) p.add_argument( - '-n', "--no-source", + "-n", + "--no-source", action="store_true", help="When templating can't be completed, do not obtain the \ source to try fill in related template variables.", @@ -62,88 +69,100 @@ def get_render_parser(): p.add_argument( "--output", action="store_true", - help="Output the conda package filename which would have been " - "created", + help="Output the conda package filename which would have been " "created", ) p.add_argument( - '--python', + "--python", action="append", help="Set the Python version used by conda build.", ) p.add_argument( - '--perl', + "--perl", action="append", help="Set the Perl version used by conda build.", ) p.add_argument( - '--numpy', + "--numpy", action="append", help="Set the NumPy version used by conda build.", ) p.add_argument( - '--R', + "--R", action="append", help="""Set the R version used by conda build.""", - dest="r_base" + dest="r_base", ) p.add_argument( - '--lua', + "--lua", action="append", help="Set the Lua version used by conda build.", ) p.add_argument( - '--bootstrap', + "--bootstrap", help="""Provide initial configuration in addition to recipe. Can be a path to or name of an environment, which will be emulated in the package.""", ) p.add_argument( - '--append-file', + "--append-file", help="""Append data in meta.yaml with fields from this file. Jinja2 is not done on appended fields""", - dest='append_sections_file', + dest="append_sections_file", ) p.add_argument( - '--clobber-file', + "--clobber-file", help="""Clobber data in meta.yaml with fields from this file. Jinja2 is not done on clobbered fields.""", - dest='clobber_sections_file', + dest="clobber_sections_file", ) p.add_argument( - '-m', '--variant-config-files', + "-m", + "--variant-config-files", action="append", help="""Additional variant config files to add. These yaml files can contain - keys such as `c_compiler` and `target_platform` to form a build matrix.""" + keys such as `c_compiler` and `target_platform` to form a build matrix.""", ) p.add_argument( - '-e', '--exclusive-config-files', '--exclusive-config-file', + "-e", + "--exclusive-config-files", + "--exclusive-config-file", action="append", help="""Exclusive variant config files to add. Providing files here disables searching in your home directory and in cwd. The files specified here come at the start of the order, as opposed to the end with --variant-config-files. 
Any config files in recipes and any config files specified with --variant-config-files will - override values from these files.""" + override values from these files.""", + ) + p.add_argument( + "--old-build-string", + dest="filename_hashing", + action="store_false", + default=cc_conda_build.get("filename_hashing", "true").lower() == "true", + help=( + "Disable hash additions to filenames to distinguish package " + "variants from one another. NOTE: any filename collisions are " + "yours to handle. Any variants with overlapping names within a " + "build will clobber each other." + ), ) p.add_argument( - "--old-build-string", dest="filename_hashing", action="store_false", - default=cc_conda_build.get('filename_hashing', 'true').lower() == 'true', - help=("Disable hash additions to filenames to distinguish package " - "variants from one another. NOTE: any filename collisions are " - "yours to handle. Any variants with overlapping names within a " - "build will clobber each other.") + "--use-channeldata", + action="store_true", + dest="use_channeldata", + help=( + "Use channeldata, if available, to determine run_exports. Otherwise packages " + "are downloaded to determine this information" + ), ) p.add_argument( - '--use-channeldata', - action='store_true', - dest='use_channeldata', - help=("Use channeldata, if available, to determine run_exports. Otherwise packages " - "are downloaded to determine this information") + "--variants", + nargs=1, + action=ParseYAMLArgument, + help=( + "Variants to extend the build matrix. Must be a valid YAML instance, " + 'such as "{python: [3.8, 3.9]}"' + ), ) - p.add_argument('--variants', - nargs=1, - action=ParseYAMLArgument, - help=('Variants to extend the build matrix. Must be a valid YAML instance, ' - 'such as "{python: [3.8, 3.9]}"')) add_parser_channels(p) return p @@ -151,21 +170,22 @@ def get_render_parser(): def parse_args(args): p = get_render_parser() p.add_argument( - '-f', '--file', + "-f", + "--file", help="write YAML to file, given as argument here.\ - Overwrites existing files." + Overwrites existing files.", ) # we do this one separately because we only allow one entry to conda render p.add_argument( - 'recipe', - metavar='RECIPE_PATH', + "recipe", + metavar="RECIPE_PATH", help="Path to recipe directory.", ) # this is here because we have a different default than build p.add_argument( - '--verbose', - action='store_true', - help='Enable verbose output from download tools and progress updates', + "--verbose", + action="store_true", + help="Enable verbose output from download tools and progress updates", ) args, _ = p.parse_known_args(args) return p, args @@ -178,6 +198,7 @@ def execute(args, print_results=True): variants = get_package_variants(args.recipe, config, variants=args.variants) from conda_build.build import get_all_replacements + get_all_replacements(variants) set_language_env_vars(variants) @@ -189,25 +210,30 @@ def execute(args, print_results=True): config.verbose = False config.debug = False - metadata_tuples = api.render(args.recipe, config=config, - no_download_source=args.no_source, - variants=args.variants) + metadata_tuples = api.render( + args.recipe, + config=config, + no_download_source=args.no_source, + variants=args.variants, + ) if args.file and len(metadata_tuples) > 1: - log.warning("Multiple variants rendered. " - "Only one will be written to the file you specified ({}).".format(args.file)) + log.warning( + "Multiple variants rendered. 
" + "Only one will be written to the file you specified ({}).".format(args.file) + ) if print_results: if args.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples, config=config) - print('\n'.join(sorted(paths))) + print("\n".join(sorted(paths))) if args.file: m = metadata_tuples[-1][0] api.output_yaml(m, args.file, suppress_outputs=True) else: logging.basicConfig(level=logging.INFO) - for (m, _, _) in metadata_tuples: + for m, _, _ in metadata_tuples: print("--------------") print("Hash contents:") print("--------------") @@ -224,5 +250,5 @@ def main(): return execute(sys.argv[1:]) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 993d3f3ee6..7642bc14da 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -6,9 +6,8 @@ import pkgutil import sys -from conda_build.conda_interface import ArgumentParser - import conda_build.api as api +from conda_build.conda_interface import ArgumentParser from conda_build.config import Config thisdir = os.path.dirname(os.path.abspath(__file__)) @@ -27,12 +26,12 @@ def parse_args(args): """, ) - repos = p.add_subparsers( - dest="repo" - ) + repos = p.add_subparsers(dest="repo") - skeletons = [name for _, name, _ in - pkgutil.iter_modules([os.path.join(thisdir, '../skeletons')])] + skeletons = [ + name + for _, name, _ in pkgutil.iter_modules([os.path.join(thisdir, "../skeletons")]) + ] for skeleton in skeletons: if skeleton.startswith("_"): continue @@ -51,13 +50,19 @@ def execute(args): parser.print_help() sys.exit() - api.skeletonize(args.packages, args.repo, output_dir=args.output_dir, recursive=args.recursive, - version=args.version, config=config) + api.skeletonize( + args.packages, + args.repo, + output_dir=args.output_dir, + recursive=args.recursive, + version=args.version, + config=config, + ) def main(): return execute(sys.argv[1:]) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/conda_build/cli/validators.py b/conda_build/cli/validators.py index fdebdcba5b..b1c1144662 100644 --- a/conda_build/cli/validators.py +++ b/conda_build/cli/validators.py @@ -5,8 +5,8 @@ import os from argparse import ArgumentError -from conda_build.utils import CONDA_PACKAGE_EXTENSIONS from conda_build import utils +from conda_build.utils import CONDA_PACKAGE_EXTENSIONS CONDA_PKG_OR_RECIPE_ERROR_MESSAGE = ( "\nUnable to parse provided recipe directory or package file.\n\n" diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 988dce454b..93996332ca 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -2,92 +2,81 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from functools import partial +import configparser # noqa: F401 import os -from importlib import import_module # noqa: F401 import warnings +from functools import partial +from importlib import import_module # noqa: F401 from conda import __version__ as CONDA_VERSION # noqa: F401 - -from conda.exports import ( # noqa: F401 - Channel, - display_actions, - execute_actions, - execute_plan, - install_actions, -) - -from conda.exports import _toposort # noqa: F401 - from conda.auxlib.packaging import ( # noqa: F401 _get_version_from_git_tag as get_version_from_git_tag, ) - -from conda.exports import TmpDownload, download, handle_proxy_407 # noqa: F401 -from conda.exports import untracked, walk_prefix # noqa: F401 -from 
conda.exports import ( # noqa: F401 - MatchSpec, - NoPackagesFound, - Resolve, - Unsatisfiable, - normalized_version, -) -from conda.exports import ( # noqa: F401 - human_bytes, - hashsum_file, - md5_file, - memoized, - unix_path_to_win, - win_path_to_unix, - url_path, +from conda.base.context import context, determine_target_prefix +from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 +from conda.base.context import reset_context +from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 +from conda.exceptions import ( # noqa: F401 + CondaError, + CondaHTTPError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, ) +from conda.exports import ArgumentParser # noqa: F401 +from conda.exports import CondaSession # noqa: F401 +from conda.exports import EntityEncoder # noqa: F401 +from conda.exports import VersionOrder # noqa: F401 +from conda.exports import _toposort # noqa: F401 from conda.exports import get_index # noqa: F401 from conda.exports import ( # noqa: F401 + Channel, Completer, + FileMode, InstalledPackages, + MatchSpec, + NoPackagesFound, + PathType, + Resolve, + StringIO, + TemporaryDirectory, + TmpDownload, + Unsatisfiable, add_parser_channels, add_parser_prefix, - specs_from_args, - spec_from_line, - specs_from_url, -) -from conda.exports import ArgumentParser # noqa: F401 -from conda.exports import ( # noqa: F401 + display_actions, + download, + execute_actions, + execute_plan, + handle_proxy_407, + hashsum_file, + human_bytes, + input, + install_actions, is_linked, + lchmod, linked, linked_data, + md5_file, + memoized, + normalized_version, + package_cache, prefix_placeholder, rm_rf, + spec_from_line, + specs_from_args, + specs_from_url, symlink_conda, - package_cache, -) -from conda.exports import CondaSession # noqa: F401 -from conda.exports import StringIO, input, lchmod, TemporaryDirectory # noqa: F401 -from conda.exports import VersionOrder # noqa: F401 - -from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 -from conda.models.dist import Dist, IndexRecord # noqa: F401 - -import configparser # noqa: F401 - -from conda.exports import FileMode, PathType # noqa: F401 -from conda.exports import EntityEncoder # noqa: F401 -from conda.exceptions import ( # noqa: F401 - CondaError, - CondaHTTPError, - LinkError, - LockError, - NoPackagesFoundError, - PaddingError, - UnsatisfiableError, -) -from conda.base.context import ( # noqa: F401 - non_x86_machines as non_x86_linux_machines, - context, - determine_target_prefix, - reset_context, + unix_path_to_win, + untracked, + url_path, + walk_prefix, + win_path_to_unix, ) from conda.models.channel import get_conda_build_local_url # noqa: F401 +from conda.models.dist import Dist, IndexRecord # noqa: F401 # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload @@ -102,12 +91,12 @@ get_rc_urls = lambda: list(context.channels) get_prefix = partial(determine_target_prefix, context) -cc_conda_build = context.conda_build if hasattr(context, 'conda_build') else {} +cc_conda_build = context.conda_build if hasattr(context, "conda_build") else {} get_conda_channel = Channel.from_value # Disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. -os.environ['CONDA_ALLOW_SOFTLINKS'] = 'false' +os.environ["CONDA_ALLOW_SOFTLINKS"] = "false" reset_context() @@ -137,13 +126,14 @@ def which_package(path): only one package. 
""" from os.path import abspath, join + path = abspath(path) prefix = which_prefix(path) if prefix is None: raise RuntimeError("could not determine conda prefix from: %s" % path) for dist in linked(prefix): meta = is_linked(prefix, dist) - if any(abspath(join(prefix, f)) == path for f in meta['files']): + if any(abspath(join(prefix, f)) == path for f in meta["files"]): yield dist @@ -152,11 +142,12 @@ def which_prefix(path): Given the path (to a (presumably) conda installed file) return the environment prefix in which the file in located """ - from os.path import abspath, join, isdir, dirname + from os.path import abspath, dirname, isdir, join + prefix = abspath(path) iteration = 0 while iteration < 20: - if isdir(join(prefix, 'conda-meta')): + if isdir(join(prefix, "conda-meta")): # we found it, so let's return it break if prefix == dirname(prefix): @@ -174,16 +165,20 @@ def get_installed_version(prefix, pkgs): a package needs to be updated """ from conda_build.utils import ensure_list + pkgs = ensure_list(pkgs) linked_pkgs = linked(prefix) versions = {} for pkg in pkgs: - vers_inst = [dist.split('::', 1)[-1].rsplit('-', 2)[1] for dist in linked_pkgs - if dist.split('::', 1)[-1].rsplit('-', 2)[0] == pkg] + vers_inst = [ + dist.split("::", 1)[-1].rsplit("-", 2)[1] + for dist in linked_pkgs + if dist.split("::", 1)[-1].rsplit("-", 2)[0] == pkg + ] versions[pkg] = vers_inst[0] if len(vers_inst) == 1 else None return versions # When deactivating envs (e.g. switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. -env_path_backup_var_exists = os.environ.get('CONDA_PATH_BACKUP', None) +env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None) diff --git a/conda_build/config.py b/conda_build/config.py index db11b5d309..50d6505ce0 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -1,29 +1,33 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module to store conda build settings. 
-''' +""" import copy -from collections import namedtuple import math import os -from os.path import abspath, expanduser, join, expandvars import re import shutil import sys import time - -from .conda_interface import root_dir, root_writable -from .conda_interface import binstar_upload +from collections import namedtuple +from os.path import abspath, expanduser, expandvars, join + +from .conda_interface import ( + binstar_upload, + cc_conda_build, + cc_platform, + root_dir, + root_writable, + subdir, + url_path, +) +from .utils import get_build_folders, get_conda_operation_locks, get_logger, rm_rf from .variants import get_default_variant -from .conda_interface import cc_platform, cc_conda_build, subdir, url_path - -from .utils import get_build_folders, rm_rf, get_logger, get_conda_operation_locks - -on_win = (sys.platform == 'win32') -invocation_time = '' +on_win = sys.platform == "win32" +invocation_time = "" def set_invocation_time(): @@ -40,13 +44,13 @@ def set_invocation_time(): conda_build = "conda-build" -filename_hashing_default = 'true' +filename_hashing_default = "true" _src_cache_root_default = None -error_overlinking_default = 'false' -error_overdepending_default = 'false' +error_overlinking_default = "false" +error_overdepending_default = "false" noarch_python_build_age_default = 0 -enable_static_default = 'true' -no_rewrite_stdout_env_default = 'false' +enable_static_default = "true" +no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] exit_on_verify_error_default = False conda_pkg_format_default = None @@ -55,7 +59,9 @@ def set_invocation_time(): # Python2 silliness: def python2_fs_encode(strin): - return strin.decode(sys.getfilesystemencoding()) if hasattr(strin, 'decode') else strin + return ( + strin.decode(sys.getfilesystemencoding()) if hasattr(strin, "decode") else strin + ) def _ensure_dir(path): @@ -74,13 +80,13 @@ def _ensure_dir(path): # translate our internal more meaningful subdirs to the ones that conda understands SUBDIR_ALIASES = { - 'linux-cos5-x86_64': 'linux-64', - 'linux-cos6-x86_64': 'linux-64', - 'linux-cos5-x86': 'linux-32', - 'linux-cos6-x86': 'linux-32', - 'osx-109-x86_64': 'osx-64', - 'win-x86_64': 'win-64', - 'win-x86': 'win-32', + "linux-cos5-x86_64": "linux-64", + "linux-cos6-x86_64": "linux-64", + "linux-cos5-x86": "linux-32", + "linux-cos6-x86": "linux-32", + "osx-109-x86_64": "osx-64", + "win-x86_64": "win-64", + "win-x86": "win-32", } @@ -88,156 +94,176 @@ def _ensure_dir(path): def _get_default_settings(): - return [Setting('activate', True), - Setting('anaconda_upload', binstar_upload), - Setting('force_upload', True), - Setting('channel_urls', []), - Setting('dirty', False), - Setting('include_recipe', True), - Setting('no_download_source', False), - Setting('override_channels', False), - Setting('skip_existing', False), - Setting('token', None), - Setting('user', None), - Setting('labels', []), - Setting('verbose', True), - - Setting('debug', False), - Setting('timeout', 900), - Setting('set_build_id', True), - Setting('disable_pip', False), - Setting('_output_folder', None), - Setting('prefix_length_fallback', True), - Setting('_prefix_length', DEFAULT_PREFIX_LENGTH), - Setting('long_test_prefix', True), - Setting('locking', True), - Setting('max_env_retry', 3), - Setting('remove_work_dir', True), - Setting('_host_platform', None), - Setting('_host_arch', None), - Setting('test_run_post', False), - Setting('filename_hashing', cc_conda_build.get('filename_hashing', - filename_hashing_default).lower() == 'true'), - 
Setting('keep_old_work', False), - Setting('_src_cache_root', abspath(expanduser(expandvars( - cc_conda_build.get('cache_dir')))) if cc_conda_build.get('cache_dir') else _src_cache_root_default), - Setting('copy_test_source_files', True), - - # should rendering cut out any skipped metadata? - Setting('trim_skip', True), - - # Use channeldata.json for run_export information during rendering. - # Falls back to downloading packages if False or channeldata does - # not exist for the channel. - Setting('use_channeldata', False), - - # Disable the overlinking test for this package. This test checks that transitive DSOs - # are not referenced by DSOs in the package being built. When this happens something - # has gone wrong with: - # 1. Linker flags not being passed, or not working correctly: - # (GNU ld: -as-needed, Apple ld64: -dead_strip_dylibs -no_implicit_dylibs) - # 2. A missing package in reqs/run (maybe that package is missing run_exports?) - # 3. A missing (or broken) CDT package in reqs/build or (on systems without CDTs) - # 4. .. a missing value in the hard-coded but metadata-augmentable library whitelist - # It is important that packages do not suffer from 2 because uninstalling that missing - # package leads to an inability to run this package. - # - # default to not erroring with overlinking for now. We have specified in - # cli/main_build.py that this default will switch in conda-build 4.0. - Setting('error_overlinking', cc_conda_build.get('error_overlinking', - error_overlinking_default).lower() == 'true'), - Setting('error_overdepending', cc_conda_build.get('error_overdepending', - error_overdepending_default).lower() == 'true'), - Setting('noarch_python_build_age', cc_conda_build.get('noarch_python_build_age', - noarch_python_build_age_default)), - Setting('enable_static', cc_conda_build.get('enable_static', - enable_static_default).lower() == 'true'), - Setting('no_rewrite_stdout_env', cc_conda_build.get('no_rewrite_stdout_env', - no_rewrite_stdout_env_default).lower() == 'true'), - - - Setting('index', None), - # support legacy recipes where only build is specified and expected to be the - # folder that packaging is done on - Setting('build_is_host', False), - - # these are primarily for testing. They override the native build platform/arch, - # which is useful in tests, but makes little sense on actual systems. - Setting('_platform', None), - Setting('_arch', None), - Setting('_target_subdir', None), - - # variants - Setting('variant_config_files', []), - # these files preclude usage of any system-wide or cwd config files. - # Config files in recipes are still respected, and they override this file. - Setting('exclusive_config_files', []), - Setting('ignore_system_variants', False), - Setting('hash_length', 7), - - # append/clobber metadata section data (for global usage. Can also add files to - # recipe.) - Setting('append_sections_file', None), - Setting('clobber_sections_file', None), - Setting('bootstrap', None), - Setting('extra_meta', {}), - - # source provisioning. 
- Setting('git_commits_since_tag', 0), - - # pypi upload settings (twine) - Setting('password', None), - Setting('sign', False), - Setting('sign_with', 'gpg'), - Setting('identity', None), - Setting('config_file', None), - Setting('repository', 'pypitest'), - - Setting('verify', True), - Setting('ignore_verify_codes', - cc_conda_build.get('ignore_verify_codes', ignore_verify_codes_default)), - Setting('exit_on_verify_error', - cc_conda_build.get('exit_on_verify_error', exit_on_verify_error_default)), - - # Recipes that have no host section, only build, should bypass the build/host line. - # This is to make older recipes still work with cross-compiling. True cross-compiling - # involving compilers (not just python) will still require recipe modification to have - # distinct host and build sections, but simple python stuff should work without. - Setting('merge_build_host', False), - # this one is the state that can be set elsewhere, which affects how - # the "build_prefix" works. The one above is a setting. - Setting('_merge_build_host', False), - - # path to output build statistics to - Setting('stats_file', None), - - # extra deps to add to test env creation - Setting('extra_deps', []), - - # customize this so pip doesn't look in places we don't want. Per-build path by default. - Setting('_pip_cache_dir', None), - - Setting('zstd_compression_level', - cc_conda_build.get('zstd_compression_level', zstd_compression_level_default)), - - # this can be set to different values (currently only 2 means anything) to use package formats - Setting('conda_pkg_format', cc_conda_build.get('pkg_format', conda_pkg_format_default)), - - Setting('suppress_variables', False), - - Setting('build_id_pat', cc_conda_build.get('build_id_pat', - '{n}_{t}')), - - ] + return [ + Setting("activate", True), + Setting("anaconda_upload", binstar_upload), + Setting("force_upload", True), + Setting("channel_urls", []), + Setting("dirty", False), + Setting("include_recipe", True), + Setting("no_download_source", False), + Setting("override_channels", False), + Setting("skip_existing", False), + Setting("token", None), + Setting("user", None), + Setting("labels", []), + Setting("verbose", True), + Setting("debug", False), + Setting("timeout", 900), + Setting("set_build_id", True), + Setting("disable_pip", False), + Setting("_output_folder", None), + Setting("prefix_length_fallback", True), + Setting("_prefix_length", DEFAULT_PREFIX_LENGTH), + Setting("long_test_prefix", True), + Setting("locking", True), + Setting("max_env_retry", 3), + Setting("remove_work_dir", True), + Setting("_host_platform", None), + Setting("_host_arch", None), + Setting("test_run_post", False), + Setting( + "filename_hashing", + cc_conda_build.get("filename_hashing", filename_hashing_default).lower() + == "true", + ), + Setting("keep_old_work", False), + Setting( + "_src_cache_root", + abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) + if cc_conda_build.get("cache_dir") + else _src_cache_root_default, + ), + Setting("copy_test_source_files", True), + # should rendering cut out any skipped metadata? + Setting("trim_skip", True), + # Use channeldata.json for run_export information during rendering. + # Falls back to downloading packages if False or channeldata does + # not exist for the channel. + Setting("use_channeldata", False), + # Disable the overlinking test for this package. This test checks that transitive DSOs + # are not referenced by DSOs in the package being built. When this happens something + # has gone wrong with: + # 1. 
Linker flags not being passed, or not working correctly: + # (GNU ld: -as-needed, Apple ld64: -dead_strip_dylibs -no_implicit_dylibs) + # 2. A missing package in reqs/run (maybe that package is missing run_exports?) + # 3. A missing (or broken) CDT package in reqs/build or (on systems without CDTs) + # 4. .. a missing value in the hard-coded but metadata-augmentable library whitelist + # It is important that packages do not suffer from 2 because uninstalling that missing + # package leads to an inability to run this package. + # + # default to not erroring with overlinking for now. We have specified in + # cli/main_build.py that this default will switch in conda-build 4.0. + Setting( + "error_overlinking", + cc_conda_build.get("error_overlinking", error_overlinking_default).lower() + == "true", + ), + Setting( + "error_overdepending", + cc_conda_build.get( + "error_overdepending", error_overdepending_default + ).lower() + == "true", + ), + Setting( + "noarch_python_build_age", + cc_conda_build.get( + "noarch_python_build_age", noarch_python_build_age_default + ), + ), + Setting( + "enable_static", + cc_conda_build.get("enable_static", enable_static_default).lower() + == "true", + ), + Setting( + "no_rewrite_stdout_env", + cc_conda_build.get( + "no_rewrite_stdout_env", no_rewrite_stdout_env_default + ).lower() + == "true", + ), + Setting("index", None), + # support legacy recipes where only build is specified and expected to be the + # folder that packaging is done on + Setting("build_is_host", False), + # these are primarily for testing. They override the native build platform/arch, + # which is useful in tests, but makes little sense on actual systems. + Setting("_platform", None), + Setting("_arch", None), + Setting("_target_subdir", None), + # variants + Setting("variant_config_files", []), + # these files preclude usage of any system-wide or cwd config files. + # Config files in recipes are still respected, and they override this file. + Setting("exclusive_config_files", []), + Setting("ignore_system_variants", False), + Setting("hash_length", 7), + # append/clobber metadata section data (for global usage. Can also add files to + # recipe.) + Setting("append_sections_file", None), + Setting("clobber_sections_file", None), + Setting("bootstrap", None), + Setting("extra_meta", {}), + # source provisioning. + Setting("git_commits_since_tag", 0), + # pypi upload settings (twine) + Setting("password", None), + Setting("sign", False), + Setting("sign_with", "gpg"), + Setting("identity", None), + Setting("config_file", None), + Setting("repository", "pypitest"), + Setting("verify", True), + Setting( + "ignore_verify_codes", + cc_conda_build.get("ignore_verify_codes", ignore_verify_codes_default), + ), + Setting( + "exit_on_verify_error", + cc_conda_build.get("exit_on_verify_error", exit_on_verify_error_default), + ), + # Recipes that have no host section, only build, should bypass the build/host line. + # This is to make older recipes still work with cross-compiling. True cross-compiling + # involving compilers (not just python) will still require recipe modification to have + # distinct host and build sections, but simple python stuff should work without. + Setting("merge_build_host", False), + # this one is the state that can be set elsewhere, which affects how + # the "build_prefix" works. The one above is a setting. 
+ Setting("_merge_build_host", False), + # path to output build statistics to + Setting("stats_file", None), + # extra deps to add to test env creation + Setting("extra_deps", []), + # customize this so pip doesn't look in places we don't want. Per-build path by default. + Setting("_pip_cache_dir", None), + Setting( + "zstd_compression_level", + cc_conda_build.get( + "zstd_compression_level", zstd_compression_level_default + ), + ), + # this can be set to different values (currently only 2 means anything) to use package formats + Setting( + "conda_pkg_format", + cc_conda_build.get("pkg_format", conda_pkg_format_default), + ), + Setting("suppress_variables", False), + Setting("build_id_pat", cc_conda_build.get("build_id_pat", "{n}_{t}")), + ] def print_function_deprecation_warning(func): def func_wrapper(*args, **kw): log = get_logger(__name__) - log.warn("WARNING: attribute {} is deprecated and will be removed in conda-build 4.0. " - "Please update your code - file issues on the conda-build issue tracker " - "if you need help.".format(func.__name__)) + log.warn( + "WARNING: attribute {} is deprecated and will be removed in conda-build 4.0. " + "Please update your code - file issues on the conda-build issue tracker " + "if you need help.".format(func.__name__) + ) return func(*args, **kw) + return func_wrapper @@ -255,7 +281,9 @@ def __init__(self, variant=None, **kwargs): self._src_cache_root = os.path.expanduser(self._src_cache_root) def _set_attribute_from_kwargs(self, kwargs, attr, default): - value = kwargs.get(attr, getattr(self, attr) if hasattr(self, attr) else default) + value = kwargs.get( + attr, getattr(self, attr) if hasattr(self, attr) else default + ) setattr(self, attr, value) if attr in kwargs: del kwargs[attr] @@ -265,13 +293,13 @@ def env(lang, default): version = kwargs.pop(lang, None) if not version: # Hooray for corner cases. - if lang == 'python': - lang = 'py' - elif lang == 'numpy': - lang = 'npy' - elif lang == 'r_base': - lang = 'r' - var = 'CONDA_' + lang.upper() + if lang == "python": + lang = "py" + elif lang == "numpy": + lang = "npy" + elif lang == "r_base": + lang = "r" + var = "CONDA_" + lang.upper() version = os.getenv(var) if os.getenv(var) else default elif isinstance(version, list) and len(version) == 1: version = version[0] @@ -280,27 +308,28 @@ def env(lang, default): def set_lang(variant, lang): value = env(lang, self.variant.get(lang)) if value: - if '.' not in str(value): - value = '.'.join((value[0], value[1:])) + if "." 
not in str(value): + value = ".".join((value[0], value[1:])) variant[lang] = value # this is where we override any variant config files with the legacy CONDA_* vars # or CLI params - for lang in ('perl', 'lua', 'python', 'numpy', 'r_base'): + for lang in ("perl", "lua", "python", "numpy", "r_base"): set_lang(self.variant, lang) - self._build_id = kwargs.pop('build_id', getattr(self, '_build_id', "")) - source_cache = kwargs.pop('cache_dir', None) - croot = kwargs.pop('croot', None) + self._build_id = kwargs.pop("build_id", getattr(self, "_build_id", "")) + source_cache = kwargs.pop("cache_dir", None) + croot = kwargs.pop("croot", None) if source_cache: - self._src_cache_root = os.path.abspath(os.path.normpath( - os.path.expanduser(source_cache))) + self._src_cache_root = os.path.abspath( + os.path.normpath(os.path.expanduser(source_cache)) + ) if croot: self._croot = os.path.abspath(os.path.normpath(os.path.expanduser(croot))) else: # set default value (not actually None) - self._croot = getattr(self, '_croot', None) + self._croot = getattr(self, "_croot", None) # handle known values better than unknown (allow defaults) for value in _get_default_settings(): @@ -314,43 +343,52 @@ def set_lang(variant, lang): def arch(self): """Always the native (build system) arch, except when pretending to be some other platform""" - return self._arch or subdir.rsplit('-', 1)[1] + return self._arch or subdir.rsplit("-", 1)[1] @arch.setter def arch(self, value): log = get_logger(__name__) - log.warn("Setting build arch. This is only useful when pretending to be on another " - "arch, such as for rendering necessary dependencies on a non-native arch. " - "I trust that you know what you're doing.") + log.warn( + "Setting build arch. This is only useful when pretending to be on another " + "arch, such as for rendering necessary dependencies on a non-native arch. " + "I trust that you know what you're doing." + ) self._arch = str(value) @property def platform(self): """Always the native (build system) OS, except when pretending to be some other platform""" - return self._platform or subdir.rsplit('-', 1)[0] + return self._platform or subdir.rsplit("-", 1)[0] @platform.setter def platform(self, value): log = get_logger(__name__) - log.warn("Setting build platform. This is only useful when " - "pretending to be on another platform, such as " - "for rendering necessary dependencies on a non-native " - "platform. I trust that you know what you're doing.") - if value == 'noarch': - raise ValueError("config platform should never be noarch. Set host_platform instead.") + log.warn( + "Setting build platform. This is only useful when " + "pretending to be on another platform, such as " + "for rendering necessary dependencies on a non-native " + "platform. I trust that you know what you're doing." + ) + if value == "noarch": + raise ValueError( + "config platform should never be noarch. Set host_platform instead." + ) self._platform = value @property def build_subdir(self): """Determines channel to download build env packages from. - Should generally be the native platform. Does not preclude packages from noarch.""" - return '-'.join((self.platform, self.arch)) + Should generally be the native platform. Does not preclude packages from noarch. 
+ """ + return "-".join((self.platform, self.arch)) @property def host_arch(self): try: - variant_arch = self.variant.get('target_platform', self.build_subdir).split('-', 1)[1] + variant_arch = self.variant.get("target_platform", self.build_subdir).split( + "-", 1 + )[1] except IndexError: variant_arch = 64 return self._host_arch or variant_arch @@ -361,7 +399,7 @@ def host_arch(self, value): @property def noarch(self): - return self.host_platform == 'noarch' + return self.host_platform == "noarch" def reset_platform(self): if not self.platform == cc_platform: @@ -373,8 +411,10 @@ def subdir(self): @property def host_platform(self): - return (self._host_platform or - self.variant.get('target_platform', self.build_subdir).split('-', 1)[0]) + return ( + self._host_platform + or self.variant.get("target_platform", self.build_subdir).split("-", 1)[0] + ) @host_platform.setter def host_platform(self, value): @@ -382,8 +422,8 @@ def host_platform(self, value): @property def host_subdir(self): - subdir = self.variant.get('target_platform', self.build_subdir) - if self.host_platform == 'noarch': + subdir = self.variant.get("target_platform", self.build_subdir) + if self.host_platform == "noarch": subdir = self.host_platform elif subdir != "-".join([self.host_platform, str(self.host_arch)]): subdir = "-".join([self.host_platform, str(self.host_arch)]) @@ -392,7 +432,7 @@ def host_subdir(self): @host_subdir.setter def host_subdir(self, value): value = SUBDIR_ALIASES.get(value, value) - values = value.rsplit('-', 1) + values = value.rsplit("-", 1) self.host_platform = values[0] if len(values) > 1: self.host_arch = values[1] @@ -415,8 +455,9 @@ def exclusive_config_file(self): def exclusive_config_file(self, value): if len(self.exclusive_config_files) > 1: raise ValueError( - 'Cannot set singular exclusive_config_file ' - 'if multiple exclusive_config_files are present.') + "Cannot set singular exclusive_config_file " + "if multiple exclusive_config_files are present." 
+ ) if value is None: self.exclusive_config_files = [] else: @@ -434,16 +475,16 @@ def src_cache_root(self, value): def croot(self): """This is where source caches and work folders live""" if not self._croot: - _bld_root_env = os.getenv('CONDA_BLD_PATH') - _bld_root_rc = cc_conda_build.get('root-dir') + _bld_root_env = os.getenv("CONDA_BLD_PATH") + _bld_root_rc = cc_conda_build.get("root-dir") if _bld_root_env: self._croot = abspath(expanduser(_bld_root_env)) elif _bld_root_rc: self._croot = abspath(expanduser(expandvars(_bld_root_rc))) elif root_writable: - self._croot = join(root_dir, 'conda-bld') + self._croot = join(root_dir, "conda-bld") else: - self._croot = abspath(expanduser('~/conda-bld')) + self._croot = abspath(expanduser("~/conda-bld")) return python2_fs_encode(self._croot) @croot.setter @@ -469,144 +510,147 @@ def build_folder(self): @property @print_function_deprecation_warning def CONDA_LUA(self): - return self.variant.get('lua', get_default_variant(self)['lua']) + return self.variant.get("lua", get_default_variant(self)["lua"]) @CONDA_LUA.setter @print_function_deprecation_warning def CONDA_LUA(self, value): - self.variant['lua'] = value + self.variant["lua"] = value @property @print_function_deprecation_warning def CONDA_PY(self): - value = self.variant.get('python', get_default_variant(self)['python']) - return int(''.join(value.split('.'))) + value = self.variant.get("python", get_default_variant(self)["python"]) + return int("".join(value.split("."))) @CONDA_PY.setter @print_function_deprecation_warning def CONDA_PY(self, value): value = str(value) - self.variant['python'] = '.'.join((value[0], value[1:])) + self.variant["python"] = ".".join((value[0], value[1:])) @property @print_function_deprecation_warning def CONDA_NPY(self): - value = self.variant.get('numpy', get_default_variant(self)['numpy']) - return int(''.join(value.split('.'))) + value = self.variant.get("numpy", get_default_variant(self)["numpy"]) + return int("".join(value.split("."))) @CONDA_NPY.setter @print_function_deprecation_warning def CONDA_NPY(self, value): value = str(value) - self.variant['numpy'] = '.'.join((value[0], value[1:])) + self.variant["numpy"] = ".".join((value[0], value[1:])) @property @print_function_deprecation_warning def CONDA_PERL(self): - return self.variant.get('perl', get_default_variant(self)['perl']) + return self.variant.get("perl", get_default_variant(self)["perl"]) @CONDA_PERL.setter @print_function_deprecation_warning def CONDA_PERL(self, value): - self.variant['perl'] = value + self.variant["perl"] = value @property @print_function_deprecation_warning def CONDA_R(self): - - return self.variant.get('r_base', get_default_variant(self)['r_base']) + return self.variant.get("r_base", get_default_variant(self)["r_base"]) @CONDA_R.setter @print_function_deprecation_warning def CONDA_R(self, value): - self.variant['r_base'] = value + self.variant["r_base"] = value def _get_python(self, prefix, platform): - if platform.startswith('win') or (platform == "noarch" and sys.platform == "win32"): - if os.path.isfile(os.path.join(prefix, 'python_d.exe')): - res = join(prefix, 'python_d.exe') + if platform.startswith("win") or ( + platform == "noarch" and sys.platform == "win32" + ): + if os.path.isfile(os.path.join(prefix, "python_d.exe")): + res = join(prefix, "python_d.exe") else: - res = join(prefix, 'python.exe') + res = join(prefix, "python.exe") else: - res = join(prefix, 'bin/python') + res = join(prefix, "bin/python") return res def _get_perl(self, prefix, platform): - if 
platform.startswith('win'): - res = join(prefix, 'Library', 'bin', 'perl.exe') + if platform.startswith("win"): + res = join(prefix, "Library", "bin", "perl.exe") else: - res = join(prefix, 'bin/perl') + res = join(prefix, "bin/perl") return res # TODO: This is probably broken on Windows, but no one has a lua package on windows to test. def _get_lua(self, prefix, platform): - lua_ver = self.variant.get('lua', get_default_variant(self)['lua']) + lua_ver = self.variant.get("lua", get_default_variant(self)["lua"]) binary_name = "luajit" if (lua_ver and lua_ver[0] == "2") else "lua" - if platform.startswith('win'): - res = join(prefix, 'Library', 'bin', f'{binary_name}.exe') + if platform.startswith("win"): + res = join(prefix, "Library", "bin", f"{binary_name}.exe") else: - res = join(prefix, f'bin/{binary_name}') + res = join(prefix, f"bin/{binary_name}") return res def _get_r(self, prefix, platform): - if platform.startswith('win') or (platform == "noarch" and sys.platform == 'win32'): - res = join(prefix, 'Scripts', 'R.exe') + if platform.startswith("win") or ( + platform == "noarch" and sys.platform == "win32" + ): + res = join(prefix, "Scripts", "R.exe") # MRO test: if not os.path.exists(res): - res = join(prefix, 'bin', 'R.exe') + res = join(prefix, "bin", "R.exe") else: - res = join(prefix, 'bin', 'R') + res = join(prefix, "bin", "R") return res def _get_rscript(self, prefix, platform): - if platform.startswith('win'): - res = join(prefix, 'Scripts', 'Rscript.exe') + if platform.startswith("win"): + res = join(prefix, "Scripts", "Rscript.exe") # MRO test: if not os.path.exists(res): - res = join(prefix, 'bin', 'Rscript.exe') + res = join(prefix, "bin", "Rscript.exe") else: - res = join(prefix, 'bin', 'Rscript') + res = join(prefix, "bin", "Rscript") return res - def compute_build_id(self, package_name, package_version='0', reset=False): - time_re = r'([_-])([0-9]{13})' - pat_dict = {'n': package_name, - 'v': str(package_version), - 't': '{t}'} + def compute_build_id(self, package_name, package_version="0", reset=False): + time_re = r"([_-])([0-9]{13})" + pat_dict = {"n": package_name, "v": str(package_version), "t": "{t}"} # Use the most recent build with matching recipe name, or else the recipe name. build_folders = [] if not self.dirty: if reset: set_invocation_time() else: - old_build_id_t = self.build_id_pat if self.build_id_pat else '{n}-{v}_{t}' + old_build_id_t = self.build_id_pat if self.build_id_pat else "{n}-{v}_{t}" old_build_id_t = old_build_id_t.format(**pat_dict) build_folders_all = get_build_folders(self.croot) for folder_full in build_folders_all: folder = os.path.basename(folder_full) - untimed_folder = re.sub(time_re, r'\g<1>{t}', folder, flags=re.UNICODE) + untimed_folder = re.sub(time_re, r"\g<1>{t}", folder, flags=re.UNICODE) if untimed_folder == old_build_id_t: build_folders.append(folder_full) prev_build_id = None if build_folders: # Use the most recent build with matching recipe name prev_build_id = os.path.basename(build_folders[-1]) - old_dir = os.path.join(build_folders[-1], 'work') + old_dir = os.path.join(build_folders[-1], "work") else: # Maybe call set_invocation_time() here? 
- pat_dict['t'] = invocation_time + pat_dict["t"] = invocation_time test_old_dir = self.work_dir old_dir = test_old_dir if os.path.exists(test_old_dir) else None if self.set_build_id and (not self._build_id or reset): - assert not os.path.isabs(package_name), ("package name should not be a absolute path, " - "to preserve croot during path joins") + assert not os.path.isabs(package_name), ( + "package name should not be a absolute path, " + "to preserve croot during path joins" + ) if self.dirty and prev_build_id: old_dir = self.work_dir if len(os.listdir(self.work_dir)) > 0 else None self._build_id = prev_build_id else: # important: this is recomputing prefixes and determines where work folders are. - build_id = self.build_id_pat if self.build_id_pat else '{n}-{v}_{t}' + build_id = self.build_id_pat if self.build_id_pat else "{n}-{v}_{t}" self._build_id = build_id.format(**pat_dict) if old_dir: work_dir = self.work_dir @@ -624,8 +668,10 @@ def build_id(self): @build_id.setter def build_id(self, _build_id): _build_id = _build_id.rstrip("/").rstrip("\\") - assert not os.path.isabs(_build_id), ("build_id should not be an absolute path, " - "to preserve croot during path joins") + assert not os.path.isabs(_build_id), ( + "build_id should not be an absolute path, " + "to preserve croot during path joins" + ) self._build_id = python2_fs_encode(_build_id) @property @@ -638,14 +684,16 @@ def prefix_length(self, length): @property def _short_host_prefix(self): - return join(self.build_folder, '_h_env') + return join(self.build_folder, "_h_env") @property def _long_host_prefix(self): placeholder_length = self.prefix_length - len(self._short_host_prefix) - placeholder = '_placehold' + placeholder = "_placehold" repeats = int(math.ceil(placeholder_length / len(placeholder)) + 1) - placeholder = (self._short_host_prefix + repeats * placeholder)[:self.prefix_length] + placeholder = (self._short_host_prefix + repeats * placeholder)[ + : self.prefix_length + ] return max(self._short_host_prefix, placeholder) @property @@ -656,7 +704,7 @@ def build_prefix(self): if self._merge_build_host: prefix = self.host_prefix else: - prefix = join(self.build_folder, '_build_env') + prefix = join(self.build_folder, "_build_env") return prefix @property @@ -669,13 +717,13 @@ def host_prefix(self): @property def _short_test_prefix(self): - return join(self.build_folder, '_test_env') + return join(self.build_folder, "_test_env") def _long_prefix(self, base_prefix): placeholder_length = self.prefix_length - len(base_prefix) - placeholder = '_placehold' + placeholder = "_placehold" repeats = int(math.ceil(placeholder_length / len(placeholder)) + 1) - placeholder = (base_prefix + repeats * placeholder)[:self.prefix_length] + placeholder = (base_prefix + repeats * placeholder)[: self.prefix_length] return max(base_prefix, placeholder) @property @@ -717,7 +765,7 @@ def rscript_bin(self, prefix, platform): @property def info_dir(self): """Path to the info dir in the build prefix, where recipe metadata is stored""" - path = join(self.host_prefix, 'info') + path = join(self.host_prefix, "info") _ensure_dir(path) return path @@ -725,7 +773,7 @@ def info_dir(self): def meta_dir(self): """Path to the conda-meta dir in the build prefix, where package index json files are stored""" - path = join(self.host_prefix, 'conda-meta') + path = join(self.host_prefix, "conda-meta") _ensure_dir(path) return path @@ -738,57 +786,60 @@ def broken_dir(self): @property def bldpkgs_dir(self): - """ Dir where the package is saved. 
""" + """Dir where the package is saved.""" path = join(self.croot, self.host_subdir) _ensure_dir(path) return path @property def bldpkgs_dirs(self): - """ Dirs where previous build packages might be. """ + """Dirs where previous build packages might be.""" # The first two *might* be the same, but might not, depending on if this is a cross-compile. # subdir should be the native platform, while self.subdir would be the host platform. - return {join(self.croot, self.host_subdir), join(self.croot, subdir), - join(self.croot, "noarch"), } + return { + join(self.croot, self.host_subdir), + join(self.croot, subdir), + join(self.croot, "noarch"), + } @property def src_cache(self): """Where tarballs and zip files are downloaded and stored""" - path = join(self.src_cache_root, 'src_cache') + path = join(self.src_cache_root, "src_cache") _ensure_dir(path) return path @property def git_cache(self): """Where local clones of git sources are stored""" - path = join(self.src_cache_root, 'git_cache') + path = join(self.src_cache_root, "git_cache") _ensure_dir(path) return path @property def hg_cache(self): """Where local clones of hg sources are stored""" - path = join(self.src_cache_root, 'hg_cache') + path = join(self.src_cache_root, "hg_cache") _ensure_dir(path) return path @property def svn_cache(self): """Where local checkouts of svn sources are stored""" - path = join(self.src_cache_root, 'svn_cache') + path = join(self.src_cache_root, "svn_cache") _ensure_dir(path) return path @property def work_dir(self): """Where the source for the build is extracted/copied to.""" - path = join(self.build_folder, 'work') + path = join(self.build_folder, "work") _ensure_dir(path) return path @property def pip_cache_dir(self): - path = self._pip_cache_dir or join(self.build_folder, 'pip_cache') + path = self._pip_cache_dir or join(self.build_folder, "pip_cache") _ensure_dir(path) return path @@ -799,7 +850,7 @@ def pip_cache_dir(self, path): @property def test_dir(self): """The temporary folder where test files are copied to, and where tests start execution""" - path = join(self.build_folder, 'test_tmp') + path = join(self.build_folder, "test_tmp") _ensure_dir(path) return path @@ -810,24 +861,37 @@ def subdirs_same(self): def clean(self, remove_folders=True): # build folder is the whole burrito containing envs and source folders # It will only exist if we download source, or create a build or test environment - if remove_folders and not getattr(self, 'dirty') and not getattr(self, 'keep_old_work'): + if ( + remove_folders + and not getattr(self, "dirty") + and not getattr(self, "keep_old_work") + ): if self.build_id: if os.path.isdir(self.build_folder): rm_rf(self.build_folder) else: - for path in [self.work_dir, self.test_dir, self.build_prefix, self.test_prefix]: + for path in [ + self.work_dir, + self.test_dir, + self.build_prefix, + self.test_prefix, + ]: if os.path.isdir(path): rm_rf(path) - if os.path.isfile(os.path.join(self.build_folder, 'prefix_files')): - rm_rf(os.path.join(self.build_folder, 'prefix_files')) + if os.path.isfile(os.path.join(self.build_folder, "prefix_files")): + rm_rf(os.path.join(self.build_folder, "prefix_files")) else: - print("\nLeaving build/test directories:" - "\n Work:\n", self.work_dir, - "\n Test:\n", self.test_dir, - "\nLeaving build/test environments:" - "\n Test:\nsource activate ", self.test_prefix, - "\n Build:\nsource activate ", self.build_prefix, - "\n\n") + print( + "\nLeaving build/test directories:" "\n Work:\n", + self.work_dir, + "\n Test:\n", + self.test_dir, + 
"\nLeaving build/test environments:" "\n Test:\nsource activate ", + self.test_prefix, + "\n Build:\nsource activate ", + self.build_prefix, + "\n\n", + ) for lock in get_conda_operation_locks(self.locking, self.bldpkgs_dirs): if os.path.isfile(lock.lock_file): @@ -840,7 +904,7 @@ def clean_pkgs(self): def copy(self): new = copy.copy(self) new.variant = copy.deepcopy(self.variant) - if hasattr(self, 'variants'): + if hasattr(self, "variants"): new.variants = copy.deepcopy(self.variants) return new @@ -849,9 +913,15 @@ def __enter__(self): pass def __exit__(self, e_type, e_value, traceback): - if not getattr(self, 'dirty') and e_type is None and not getattr(self, 'keep_old_work'): - get_logger(__name__).info("--dirty flag and --keep-old-work not specified. " - "Removing build/test folder after successful build/test.\n") + if ( + not getattr(self, "dirty") + and e_type is None + and not getattr(self, "keep_old_work") + ): + get_logger(__name__).info( + "--dirty flag and --keep-old-work not specified. " + "Removing build/test folder after successful build/test.\n" + ) self.clean() else: self.clean(remove_folders=False) @@ -873,7 +943,7 @@ def get_or_merge_config(config, variant=None, **kwargs): def get_channel_urls(args): - channel_urls = args.get('channel') or args.get('channels') or () + channel_urls = args.get("channel") or args.get("channels") or () final_channel_urls = [] for url in channel_urls: diff --git a/conda_build/convert.py b/conda_build/convert.py index 9915ca799a..5c283cb98d 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -4,15 +4,15 @@ Tools for converting conda packages """ import glob -import json import hashlib +import json import os -from pathlib import Path import re import shutil import sys import tarfile import tempfile +from pathlib import Path from conda_build.utils import filter_info_files, walk @@ -31,14 +31,17 @@ def retrieve_c_extensions(file_path, show_imports=False): show_imports (bool) -- output the C extensions included in the package """ c_extension_pattern = re.compile( - r'(Lib\/|lib\/python\d\.\d\/|lib\/)(site-packages\/|lib-dynload)?(.*)') + r"(Lib\/|lib\/python\d\.\d\/|lib\/)(site-packages\/|lib-dynload)?(.*)" + ) imports = [] with tarfile.open(file_path) as tar: for filename in tar.getnames(): - if filename.endswith(('.pyd', '.so')): + if filename.endswith((".pyd", ".so")): filename_match = c_extension_pattern.match(filename) - import_name = 'import {}' .format(filename_match.group(3).replace('/', '.')) + import_name = "import {}".format( + filename_match.group(3).replace("/", ".") + ) imports.append(import_name) return imports @@ -51,23 +54,23 @@ def retrieve_package_platform(file_path): file_path (str) -- the file path to the source package tar file """ with tarfile.open(file_path) as tar: - index = json.loads(tar.extractfile('info/index.json').read().decode('utf-8')) + index = json.loads(tar.extractfile("info/index.json").read().decode("utf-8")) - platform = index['platform'] + platform = index["platform"] - if index.get('arch') == 'x86_64': - architecture = '64' - elif index.get('arch') == 'x86': - architecture = '32' + if index.get("arch") == "x86_64": + architecture = "64" + elif index.get("arch") == "x86": + architecture = "32" else: - architecture = index.get('arch') + architecture = index.get("arch") - if platform.startswith('linux') or platform.startswith('osx'): - return ('unix', platform, architecture) - elif index['platform'].startswith('win'): - return ('win', platform, architecture) + if platform.startswith("linux") or 
platform.startswith("osx"): + return ("unix", platform, architecture) + elif index["platform"].startswith("win"): + return ("win", platform, architecture) else: - raise RuntimeError('Package platform not recognized.') + raise RuntimeError("Package platform not recognized.") def retrieve_python_version(file_path): @@ -93,21 +96,26 @@ def retrieve_python_version(file_path): return matched.group(0) else: - if file_path.endswith(('.tar.bz2', '.tar')): + if file_path.endswith((".tar.bz2", ".tar")): with tarfile.open(file_path) as tar: - index = json.loads(tar.extractfile('info/index.json').read().decode('utf-8')) + index = json.loads( + tar.extractfile("info/index.json").read().decode("utf-8") + ) else: - path_file = os.path.join(file_path, 'info/index.json') + path_file = os.path.join(file_path, "info/index.json") with open(path_file) as index_file: index = json.load(index_file) - build_version_number = re.search(r'(.*)?(py)(\d\d)(.*)?', index['build']).group(3) - build_version = re.sub(r'\A.*py\d\d.*\Z', 'python', index['build']) + build_version_number = re.search(r"(.*)?(py)(\d\d)(.*)?", index["build"]).group( + 3 + ) + build_version = re.sub(r"\A.*py\d\d.*\Z", "python", index["build"]) - return '{}{}.{}' .format(build_version, - build_version_number[0], build_version_number[1]) + return "{}{}.{}".format( + build_version, build_version_number[0], build_version_number[1] + ) def extract_temporary_directory(file_path): @@ -167,41 +175,51 @@ def update_index_file(temp_dir, target_platform, dependencies, verbose): dependencies (List[str]) -- the dependencies passed from the command line verbose (bool) -- show output of items that are updated """ - index_file = os.path.join(temp_dir, 'info/index.json') + index_file = os.path.join(temp_dir, "info/index.json") with open(index_file) as file: index = json.load(file) - platform, architecture = target_platform.split('-') - other_platforms = ['linux-ppc64', 'linux-ppc64le', 'linux-s390x', - 'linux-armv6l', 'linux-armv7l', 'linux-aarch64'] + platform, architecture = target_platform.split("-") + other_platforms = [ + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + ] if target_platform in other_platforms: source_architecture = architecture - elif index.get('arch') == 'x86_64': - source_architecture = '64' + elif index.get("arch") == "x86_64": + source_architecture = "64" else: - source_architecture = '32' + source_architecture = "32" if verbose: - print('Updating platform from {} to {}' .format(index['platform'], platform)) - print('Updating subdir from {} to {}' .format(index['subdir'], target_platform)) - print('Updating architecture from {} to {}' .format(source_architecture, architecture)) - - index['platform'] = platform - index['subdir'] = target_platform - - if architecture == '64': - index['arch'] = 'x86_64' - elif architecture == '32': - index['arch'] = 'x86' + print("Updating platform from {} to {}".format(index["platform"], platform)) + print("Updating subdir from {} to {}".format(index["subdir"], target_platform)) + print( + "Updating architecture from {} to {}".format( + source_architecture, architecture + ) + ) + + index["platform"] = platform + index["subdir"] = target_platform + + if architecture == "64": + index["arch"] = "x86_64" + elif architecture == "32": + index["arch"] = "x86" else: - index['arch'] = architecture + index["arch"] = architecture if dependencies: - index['depends'] = update_dependencies(dependencies, index['depends']) + index["depends"] = 
update_dependencies(dependencies, index["depends"]) - with open(index_file, 'w') as file: + with open(index_file, "w") as file: json.dump(index, file, indent=2) return index_file @@ -221,14 +239,16 @@ def update_lib_path(path, target_platform, temp_dir=None): temp_dir (str) -- the file path to the temporary directory that contains the source package's extracted contents """ - if target_platform == 'win': + if target_platform == "win": python_version = retrieve_python_version(path) - renamed_lib_path = re.sub(r'\Alib', 'Lib', path).replace(python_version, '') + renamed_lib_path = re.sub(r"\Alib", "Lib", path).replace(python_version, "") - elif target_platform == 'unix': + elif target_platform == "unix": python_version = retrieve_python_version(temp_dir) - lib_python_version = os.path.join('lib', python_version).replace('\\', '\\\\') - renamed_lib_path = re.sub(r'\ALib', lib_python_version, path.replace('\\', '\\\\')) + lib_python_version = os.path.join("lib", python_version).replace("\\", "\\\\") + renamed_lib_path = re.sub( + r"\ALib", lib_python_version, path.replace("\\", "\\\\") + ) return os.path.normpath(renamed_lib_path) @@ -249,28 +269,29 @@ def update_lib_contents(lib_directory, temp_dir, target_platform, file_path): target_platform (str) -- the platform to target: 'unix' or win' file_path (str) -- the file path to the source package tar file """ - if target_platform == 'win': + if target_platform == "win": try: - for lib_file in glob.iglob('{}/python*/**' .format(lib_directory)): - if 'site-packages' in lib_file: + for lib_file in glob.iglob(f"{lib_directory}/python*/**"): + if "site-packages" in lib_file: new_site_packages_path = os.path.join( - temp_dir, os.path.join('lib', 'site-packages')) + temp_dir, os.path.join("lib", "site-packages") + ) os.renames(lib_file, new_site_packages_path) else: if retrieve_python_version(lib_file) is not None: python_version = retrieve_python_version(lib_file) - os.renames(lib_file, lib_file.replace(python_version, '')) + os.renames(lib_file, lib_file.replace(python_version, "")) except OSError: pass try: - shutil.rmtree(glob.glob('{}/python*' .format(lib_directory))[0]) + shutil.rmtree(glob.glob(f"{lib_directory}/python*")[0]) except IndexError: pass - shutil.move(os.path.join(temp_dir, 'lib'), os.path.join(temp_dir, 'Lib')) + shutil.move(os.path.join(temp_dir, "lib"), os.path.join(temp_dir, "Lib")) - elif target_platform == 'unix': + elif target_platform == "unix": temp_dir = Path(temp_dir) src_dir = temp_dir / "Lib" dst_dir = temp_dir / "lib" @@ -302,16 +323,18 @@ def update_executable_path(temp_dir, file_path, target_platform): file_path (str) -- the file path to the executable to rename in paths.json target_platform (str) -- the platform to target: 'unix' or 'win' """ - if target_platform == 'win': - if os.path.basename(file_path).startswith('.') or is_binary_file(temp_dir, file_path): - renamed_executable_path = re.sub(r'\Abin', 'Scripts', file_path) + if target_platform == "win": + if os.path.basename(file_path).startswith(".") or is_binary_file( + temp_dir, file_path + ): + renamed_executable_path = re.sub(r"\Abin", "Scripts", file_path) else: - renamed_path = os.path.splitext(re.sub(r'\Abin', 'Scripts', file_path))[0] - renamed_executable_path = '{}-script.py' .format(renamed_path) + renamed_path = os.path.splitext(re.sub(r"\Abin", "Scripts", file_path))[0] + renamed_executable_path = f"{renamed_path}-script.py" - elif target_platform == 'unix': - renamed_path = re.sub(r'\AScripts', 'bin', file_path) - renamed_executable_path = 
renamed_path.replace('-script.py', '') + elif target_platform == "unix": + renamed_path = re.sub(r"\AScripts", "bin", file_path) + renamed_executable_path = renamed_path.replace("-script.py", "") return renamed_executable_path @@ -323,7 +346,7 @@ def update_executable_sha(package_directory, executable_path): script files which requires to update the sha. """ - with open(os.path.join(package_directory, executable_path), 'rb') as script_file: + with open(os.path.join(package_directory, executable_path), "rb") as script_file: script_file_contents = script_file.read() return hashlib.sha256(script_file_contents).hexdigest() @@ -353,14 +376,16 @@ def add_new_windows_path(executable_directory, executable): executable_directory (str) -- the file path to temporary directory's 'Scripts' directory executable (str) -- the filename of the script to add to paths.json """ - with open(os.path.join(executable_directory, executable), 'rb') as script_file: + with open(os.path.join(executable_directory, executable), "rb") as script_file: script_file_contents = script_file.read() - new_path = {"_path": "Scripts/{}" .format(executable), - "path_type": "hardlink", - "sha256": hashlib.sha256(script_file_contents).hexdigest(), - "size_in_bytes": os.path.getsize( - os.path.join(executable_directory, executable)) - } + new_path = { + "_path": f"Scripts/{executable}", + "path_type": "hardlink", + "sha256": hashlib.sha256(script_file_contents).hexdigest(), + "size_in_bytes": os.path.getsize( + os.path.join(executable_directory, executable) + ), + } return new_path @@ -372,46 +397,56 @@ def update_paths_file(temp_dir, target_platform): package's extracted contents target_platform (str) -- the platform to target: 'unix' or 'win' """ - paths_file = os.path.join(temp_dir, 'info/paths.json') + paths_file = os.path.join(temp_dir, "info/paths.json") if os.path.isfile(paths_file): with open(paths_file) as file: paths = json.load(file) - if target_platform == 'win': - for path in paths['paths']: - if path['_path'].startswith('lib'): - path['_path'] = update_lib_path(path['_path'], 'win') + if target_platform == "win": + for path in paths["paths"]: + if path["_path"].startswith("lib"): + path["_path"] = update_lib_path(path["_path"], "win") - elif path['_path'].startswith('bin'): - path['_path'] = update_executable_path(temp_dir, path['_path'], 'win') - path['sha256'] = update_executable_sha(temp_dir, path['_path']) - path['size_in_bytes'] = update_executable_size(temp_dir, path['_path']) + elif path["_path"].startswith("bin"): + path["_path"] = update_executable_path( + temp_dir, path["_path"], "win" + ) + path["sha256"] = update_executable_sha(temp_dir, path["_path"]) + path["size_in_bytes"] = update_executable_size( + temp_dir, path["_path"] + ) - path['_path'] = path['_path'].replace('\\', '/').replace('\\\\', '/') + path["_path"] = path["_path"].replace("\\", "/").replace("\\\\", "/") - script_directory = os.path.join(temp_dir, 'Scripts') + script_directory = os.path.join(temp_dir, "Scripts") if os.path.isdir(script_directory): for script in os.listdir(script_directory): - if script.endswith('.exe'): - paths['paths'].append(add_new_windows_path(script_directory, script)) - - elif target_platform == 'unix': - for path in paths['paths']: - if path['_path'].startswith('Lib'): - path['_path'] = update_lib_path(path['_path'], 'unix', temp_dir) - - elif path['_path'].startswith('Scripts'): - path['_path'] = update_executable_path(temp_dir, path['_path'], 'unix') - path['sha256'] = update_executable_sha(temp_dir, path['_path']) - 
path['size_in_bytes'] = update_executable_size(temp_dir, path['_path']) - - path['_path'] = path['_path'].replace('\\', '/').replace('\\\\', '/') - - if path['_path'].endswith(('.bat', '.exe')): - paths['paths'].remove(path) - - with open(paths_file, 'w') as file: + if script.endswith(".exe"): + paths["paths"].append( + add_new_windows_path(script_directory, script) + ) + + elif target_platform == "unix": + for path in paths["paths"]: + if path["_path"].startswith("Lib"): + path["_path"] = update_lib_path(path["_path"], "unix", temp_dir) + + elif path["_path"].startswith("Scripts"): + path["_path"] = update_executable_path( + temp_dir, path["_path"], "unix" + ) + path["sha256"] = update_executable_sha(temp_dir, path["_path"]) + path["size_in_bytes"] = update_executable_size( + temp_dir, path["_path"] + ) + + path["_path"] = path["_path"].replace("\\", "/").replace("\\\\", "/") + + if path["_path"].endswith((".bat", ".exe")): + paths["paths"].remove(path) + + with open(paths_file, "w") as file: json.dump(paths, file, indent=2) @@ -442,11 +477,12 @@ def is_binary_file(directory, executable): file_path = os.path.join(directory, executable) if os.path.isfile(file_path): - with open(file_path, 'rb') as buffered_file: + with open(file_path, "rb") as buffered_file: file_contents = buffered_file.read(1024) - text_characters = bytearray({7, 8, 9, 10, 12, 13, 27}.union( - set(range(0x20, 0x100)) - {0x7f})) + text_characters = bytearray( + {7, 8, 9, 10, 12, 13, 27}.union(set(range(0x20, 0x100)) - {0x7F}) + ) return bool(file_contents.translate(None, text_characters)) @@ -468,31 +504,33 @@ def rename_executable(directory, executable, target_platform): """ old_executable_path = os.path.join(directory, executable) - if target_platform == 'win': - new_executable_path = os.path.join(directory, '{}-script.py' .format( - retrieve_executable_name(executable))) + if target_platform == "win": + new_executable_path = os.path.join( + directory, f"{retrieve_executable_name(executable)}-script.py" + ) with open(old_executable_path) as script_file_in: lines = script_file_in.read().splitlines() - with open(old_executable_path, 'w') as script_file_out: + with open(old_executable_path, "w") as script_file_out: for line in lines[1:]: - script_file_out.write(line + '\n') + script_file_out.write(line + "\n") os.renames(old_executable_path, new_executable_path) else: - if old_executable_path.endswith('.py'): - - new_executable_path = old_executable_path.replace('-script.py', '') + if old_executable_path.endswith(".py"): + new_executable_path = old_executable_path.replace("-script.py", "") with open(old_executable_path) as script_file_in: lines = script_file_in.read().splitlines() - with open(old_executable_path, 'w') as script_file_out: - script_file_out.write('#!/opt/anaconda1anaconda2anaconda3/bin/python' + '\n') + with open(old_executable_path, "w") as script_file_out: + script_file_out.write( + "#!/opt/anaconda1anaconda2anaconda3/bin/python" + "\n" + ) for line in lines: - script_file_out.write(line + '\n') + script_file_out.write(line + "\n") os.renames(old_executable_path, new_executable_path) @@ -507,7 +545,7 @@ def remove_executable(directory, executable): directory (str) -- the file path to the 'Scripts' directory executable (str) -- the filename of the executable to remove """ - if executable.endswith(('.exe', '.bat')): + if executable.endswith((".exe", ".bat")): script = os.path.join(directory, executable) os.remove(script) @@ -522,13 +560,13 @@ def create_exe_file(directory, executable, target_platform): """ 
exe_directory = os.path.dirname(__file__) - if target_platform.endswith('32'): - executable_file = os.path.join(exe_directory, 'cli-32.exe') + if target_platform.endswith("32"): + executable_file = os.path.join(exe_directory, "cli-32.exe") else: - executable_file = os.path.join(exe_directory, 'cli-64.exe') + executable_file = os.path.join(exe_directory, "cli-64.exe") - renamed_executable_file = os.path.join(directory, '{}.exe' .format(executable)) + renamed_executable_file = os.path.join(directory, f"{executable}.exe") shutil.copyfile(executable_file, renamed_executable_file) @@ -544,9 +582,9 @@ def update_prefix_file(temp_dir, prefixes): package's extracted contents prefixes (List[str])-- the prefixes to write to 'has_prefix' """ - has_prefix_file = os.path.join(temp_dir, 'info/has_prefix') + has_prefix_file = os.path.join(temp_dir, "info/has_prefix") - with open(has_prefix_file, 'w+') as prefix_file: + with open(has_prefix_file, "w+") as prefix_file: for prefix in prefixes: prefix_file.write(prefix) @@ -562,20 +600,20 @@ def update_files_file(temp_dir, verbose): package's extracted contents verbose (bool) -- show output of items that are updated """ - files_file = os.path.join(temp_dir, 'info/files') + files_file = os.path.join(temp_dir, "info/files") - with open(files_file, 'w') as files: + with open(files_file, "w") as files: file_paths = [] for dirpath, dirnames, filenames in walk(temp_dir): relative_dir = os.path.relpath(dirpath, temp_dir) filenames = [os.path.join(relative_dir, f) for f in filenames] - for filename in filter_info_files(filenames, ''): - file_paths.append(filename.replace('\\', '/').replace('\\\\', '/')) + for filename in filter_info_files(filenames, ""): + file_paths.append(filename.replace("\\", "/").replace("\\\\", "/")) if verbose: - print('Updating {}' .format(filename)) + print(f"Updating {filename}") for file_path in sorted(file_paths): - files.write(file_path + '\n') + files.write(file_path + "\n") def create_target_archive(file_path, temp_dir, platform, output_dir): @@ -595,7 +633,7 @@ def create_target_archive(file_path, temp_dir, platform, output_dir): destination = os.path.join(output_directory, os.path.basename(file_path)) - with tarfile.open(destination, 'w:bz2') as target: + with tarfile.open(destination, "w:bz2") as target: for dirpath, dirnames, filenames in walk(temp_dir): relative_dir = os.path.relpath(dirpath, temp_dir) filenames = [os.path.join(relative_dir, f) for f in filenames] @@ -603,7 +641,9 @@ def create_target_archive(file_path, temp_dir, platform, output_dir): target.add(os.path.join(temp_dir, filename), arcname=filename) -def convert_between_unix_platforms(file_path, output_dir, platform, dependencies, verbose): +def convert_between_unix_platforms( + file_path, output_dir, platform, dependencies, verbose +): """Convert package between unix platforms. Positional arguments: @@ -623,8 +663,9 @@ def convert_between_unix_platforms(file_path, output_dir, platform, dependencies shutil.rmtree(temp_dir) -def convert_between_windows_architechtures(file_path, output_dir, platform, - dependencies, verbose): +def convert_between_windows_architechtures( + file_path, output_dir, platform, dependencies, verbose +): """Convert package between windows architectures. 
Positional arguments: @@ -644,7 +685,9 @@ def convert_between_windows_architechtures(file_path, output_dir, platform, shutil.rmtree(temp_dir) -def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, verbose): +def convert_from_unix_to_windows( + file_path, output_dir, platform, dependencies, verbose +): """Convert a package from a unix platform to windows. Positional arguments: @@ -660,27 +703,33 @@ def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, for entry in os.listdir(temp_dir): directory = os.path.join(temp_dir, entry) - if os.path.isdir(directory) and entry.strip(os.sep) == 'lib': - update_lib_contents(directory, temp_dir, 'win', file_path) + if os.path.isdir(directory) and entry.strip(os.sep) == "lib": + update_lib_contents(directory, temp_dir, "win", file_path) - if os.path.isdir(directory) and entry.strip(os.sep) == 'bin': + if os.path.isdir(directory) and entry.strip(os.sep) == "bin": for script in os.listdir(directory): - if (os.path.isfile(os.path.join(directory, script)) and - not is_binary_file(directory, script) and - not script.startswith('.')): - rename_executable(directory, script, 'win') - create_exe_file(directory, retrieve_executable_name(script), - platform) - - prefixes.add('/opt/anaconda1anaconda2anaconda3 text Scripts/{}-script.py\n' - .format(retrieve_executable_name(script))) - - new_bin_path = os.path.join(temp_dir, 'Scripts') + if ( + os.path.isfile(os.path.join(directory, script)) + and not is_binary_file(directory, script) + and not script.startswith(".") + ): + rename_executable(directory, script, "win") + create_exe_file( + directory, retrieve_executable_name(script), platform + ) + + prefixes.add( + "/opt/anaconda1anaconda2anaconda3 text Scripts/{}-script.py\n".format( + retrieve_executable_name(script) + ) + ) + + new_bin_path = os.path.join(temp_dir, "Scripts") os.renames(directory, new_bin_path) update_index_file(temp_dir, platform, dependencies, verbose) update_prefix_file(temp_dir, prefixes) - update_paths_file(temp_dir, target_platform='win') + update_paths_file(temp_dir, target_platform="win") update_files_file(temp_dir, verbose) create_target_archive(file_path, temp_dir, platform, output_dir) @@ -688,7 +737,9 @@ def convert_from_unix_to_windows(file_path, output_dir, platform, dependencies, shutil.rmtree(temp_dir) -def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, verbose): +def convert_from_windows_to_unix( + file_path, output_dir, platform, dependencies, verbose +): """Convert a package from windows to a unix platform. 
Positional arguments: @@ -705,24 +756,27 @@ def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, for entry in os.listdir(temp_dir): directory = os.path.join(temp_dir, entry) - if os.path.isdir(directory) and 'Lib' in directory: - update_lib_contents(directory, temp_dir, 'unix', file_path) + if os.path.isdir(directory) and "Lib" in directory: + update_lib_contents(directory, temp_dir, "unix", file_path) - if os.path.isdir(directory) and 'Scripts' in directory: + if os.path.isdir(directory) and "Scripts" in directory: for script in os.listdir(directory): - if not is_binary_file(directory, script) and not script.startswith('.'): - rename_executable(directory, script, 'unix') + if not is_binary_file(directory, script) and not script.startswith("."): + rename_executable(directory, script, "unix") remove_executable(directory, script) - prefixes.add('/opt/anaconda1anaconda2anaconda3 text bin/{}\n' - .format(retrieve_executable_name(script))) + prefixes.add( + "/opt/anaconda1anaconda2anaconda3 text bin/{}\n".format( + retrieve_executable_name(script) + ) + ) - new_bin_path = os.path.join(temp_dir, 'bin') + new_bin_path = os.path.join(temp_dir, "bin") os.renames(directory, new_bin_path) update_index_file(temp_dir, platform, dependencies, verbose) update_prefix_file(temp_dir, prefixes) - update_paths_file(temp_dir, target_platform='unix') + update_paths_file(temp_dir, target_platform="unix") update_files_file(temp_dir, verbose) create_target_archive(file_path, temp_dir, platform, output_dir) @@ -730,8 +784,17 @@ def convert_from_windows_to_unix(file_path, output_dir, platform, dependencies, shutil.rmtree(temp_dir) -def conda_convert(file_path, output_dir=".", show_imports=False, platforms=None, force=False, - dependencies=None, verbose=False, quiet=False, dry_run=False): +def conda_convert( + file_path, + output_dir=".", + show_imports=False, + platforms=None, + force=False, + dependencies=None, + verbose=False, + quiet=False, + dry_run=False, +): """Convert a conda package between different platforms and architectures. Positional arguments: @@ -750,52 +813,74 @@ def conda_convert(file_path, output_dir=".", show_imports=False, platforms=None, if show_imports: imports = retrieve_c_extensions(file_path) if len(imports) == 0: - print('No imports found.') + print("No imports found.") else: for c_extension in imports: print(c_extension) sys.exit() if not show_imports and len(platforms) == 0: - sys.exit('Error: --platform option required for conda package conversion.') + sys.exit("Error: --platform option required for conda package conversion.") if len(retrieve_c_extensions(file_path)) > 0 and not force: - sys.exit('WARNING: Package {} contains C extensions; skipping conversion. ' - 'Use -f to force conversion.' .format(os.path.basename(file_path))) - - conversion_platform, source_platform, architecture = retrieve_package_platform(file_path) - source_platform_architecture = '{}-{}' .format(source_platform, architecture) - - if 'all' in platforms: - platforms = ['osx-64', 'osx-arm64', - 'linux-32', 'linux-64', 'linux-ppc64', 'linux-ppc64le', - 'linux-s390x', 'linux-armv6l', 'linux-armv7l', 'linux-aarch64', - 'win-32', 'win-64', 'win-arm64', - ] + sys.exit( + "WARNING: Package {} contains C extensions; skipping conversion. 
" + "Use -f to force conversion.".format(os.path.basename(file_path)) + ) + + conversion_platform, source_platform, architecture = retrieve_package_platform( + file_path + ) + source_platform_architecture = f"{source_platform}-{architecture}" + + if "all" in platforms: + platforms = [ + "osx-64", + "osx-arm64", + "linux-32", + "linux-64", + "linux-ppc64", + "linux-ppc64le", + "linux-s390x", + "linux-armv6l", + "linux-armv7l", + "linux-aarch64", + "win-32", + "win-64", + "win-arm64", + ] for platform in platforms: - if platform == source_platform_architecture: - print("Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion." .format(source_platform_architecture, platform)) + print( + "Source platform '{}' and target platform '{}' are identical. " + "Skipping conversion.".format(source_platform_architecture, platform) + ) continue if not quiet: - print('Converting {} from {} to {}' .format( - os.path.basename(file_path), source_platform_architecture, platform)) - - if platform.startswith(('osx', 'linux')) and conversion_platform == 'unix': - convert_between_unix_platforms(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith('win') and conversion_platform == 'unix': - convert_from_unix_to_windows(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith(('osx', 'linux')) and conversion_platform == 'win': - convert_from_windows_to_unix(file_path, output_dir, platform, - dependencies, verbose) - - elif platform.startswith('win') and conversion_platform == 'win': - convert_between_windows_architechtures(file_path, output_dir, platform, - dependencies, verbose) + print( + "Converting {} from {} to {}".format( + os.path.basename(file_path), source_platform_architecture, platform + ) + ) + + if platform.startswith(("osx", "linux")) and conversion_platform == "unix": + convert_between_unix_platforms( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith("win") and conversion_platform == "unix": + convert_from_unix_to_windows( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith(("osx", "linux")) and conversion_platform == "win": + convert_from_windows_to_unix( + file_path, output_dir, platform, dependencies, verbose + ) + + elif platform.startswith("win") and conversion_platform == "win": + convert_between_windows_architechtures( + file_path, output_dir, platform, dependencies, verbose + ) diff --git a/conda_build/create_test.py b/conda_build/create_test.py index afd6bfbcb9..b3dc6e11a4 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -1,13 +1,13 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Module to handle generating test files. 
-''' +""" -import os -from os.path import join, exists import json +import os +from os.path import exists, join from conda_build.utils import copy_into, ensure_list, glob, on_win, rm_rf @@ -26,12 +26,17 @@ def create_files(m, test_dir=None): if not os.path.isdir(test_dir): os.makedirs(test_dir) - for pattern in ensure_list(m.get_value('test/files', [])): + for pattern in ensure_list(m.get_value("test/files", [])): has_files = True - files = glob(join(m.path, pattern.replace('/', os.sep))) + files = glob(join(m.path, pattern.replace("/", os.sep))) for f in files: - copy_into(f, f.replace(m.path, test_dir), m.config.timeout, locking=False, - clobber=True) + copy_into( + f, + f.replace(m.path, test_dir), + m.config.timeout, + locking=False, + clobber=True, + ) return has_files @@ -39,14 +44,14 @@ def _get_output_script_name(m, win_status): # the way this works is that each output needs to explicitly define a test script to run. # They do not automatically pick up run_test.*, but can be pointed at that explicitly. - ext = '.bat' if win_status else '.sh' - dst_name = 'run_test' + ext + ext = ".bat" if win_status else ".sh" + dst_name = "run_test" + ext src_name = dst_name if m.is_output: - src_name = 'no-file' - for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') + src_name = "no-file" + for out in m.meta.get("outputs", []): + if m.name() == out.get("name"): + out_test_script = out.get("test", {}).get("script", "no-file") if os.path.splitext(out_test_script)[1].lower() == ext: src_name = out_test_script break @@ -68,48 +73,52 @@ def create_shell_files(m, test_dir=None): dest_file = join(test_dir, dst_name) if exists(join(m.path, src_name)): # disable locking to avoid locking a temporary directory (the extracted test folder) - copy_into(join(m.path, src_name), dest_file, m.config.timeout, locking=False) - if os.path.basename(test_dir) != 'test_tmp': - commands = ensure_list(m.get_value('test/commands', [])) + copy_into( + join(m.path, src_name), dest_file, m.config.timeout, locking=False + ) + if os.path.basename(test_dir) != "test_tmp": + commands = ensure_list(m.get_value("test/commands", [])) if commands: - with open(join(dest_file), 'a') as f: - f.write('\n\n') + with open(join(dest_file), "a") as f: + f.write("\n\n") if not status: - f.write('set -ex\n\n') - f.write('\n\n') + f.write("set -ex\n\n") + f.write("\n\n") for cmd in commands: f.write(cmd) - f.write('\n') + f.write("\n") if status: f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") if status: - f.write('exit /B 0\n') + f.write("exit /B 0\n") else: - f.write('exit 0\n') + f.write("exit 0\n") if os.path.isfile(dest_file): shell_files.append(dest_file) return shell_files -def _create_test_files(m, test_dir, ext, comment_char='# '): - name = 'run_test' + ext +def _create_test_files(m, test_dir, ext, comment_char="# "): + name = "run_test" + ext if m.is_output: - name = '' + name = "" # the way this works is that each output needs to explicitly define a test script to run # They do not automatically pick up run_test.*, but can be pointed at that explicitly. 
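        # Illustrative sketch only (hypothetical recipe snippet, not part of this
        # patch): an output opts into a script test by naming the script in its
        # own section of meta.yaml, which is what the lookup below matches on, e.g.
        #
        #   outputs:
        #     - name: my-subpackage            # hypothetical output name
        #       test:
        #         script: run_subpkg_test.py   # must end in the extension checked below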
- for out in m.meta.get('outputs', []): - if m.name() == out.get('name'): - out_test_script = out.get('test', {}).get('script', 'no-file') + for out in m.meta.get("outputs", []): + if m.name() == out.get("name"): + out_test_script = out.get("test", {}).get("script", "no-file") if out_test_script.endswith(ext): name = out_test_script break - out_file = join(test_dir, 'run_test' + ext) + out_file = join(test_dir, "run_test" + ext) if name: test_file = os.path.join(m.path, name) if os.path.isfile(test_file): - with open(out_file, 'w') as fo: - fo.write(f"{comment_char} tests for {m.dist()} (this is a generated file);\n") + with open(out_file, "w") as fo: + fo.write( + f"{comment_char} tests for {m.dist()} (this is a generated file);\n" + ) fo.write("print('===== testing package: %s =====');\n" % m.dist()) try: @@ -119,120 +128,148 @@ def _create_test_files(m, test_dir, ext, comment_char='# '): fo.write(fi.read()) fo.write(f"{comment_char} --- {name} (end) ---\n") except AttributeError: - fo.write("# tests were not packaged with this module, and cannot be run\n") + fo.write( + "# tests were not packaged with this module, and cannot be run\n" + ) fo.write("\nprint('===== %s OK =====');\n" % m.dist()) - return (out_file, bool(name) and os.path.isfile(out_file) and os.path.basename(test_file) != 'no-file') + return ( + out_file, + bool(name) + and os.path.isfile(out_file) + and os.path.basename(test_file) != "no-file", + ) def create_py_files(m, test_dir=None): if not test_dir: test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.py') + tf, tf_exists = _create_test_files(m, test_dir, ".py") # Ways in which we can mark imports as none python imports # 1. preface package name with r-, lua- or perl- # 2. use list of dicts for test/imports, and have lang set in those dicts pkg_name = m.name() - likely_r_pkg = pkg_name.startswith('r-') - likely_lua_pkg = pkg_name.startswith('lua-') - likely_perl_pkg = pkg_name.startswith('perl-') + likely_r_pkg = pkg_name.startswith("r-") + likely_lua_pkg = pkg_name.startswith("lua-") + likely_perl_pkg = pkg_name.startswith("perl-") likely_non_python_pkg = likely_r_pkg or likely_lua_pkg or likely_perl_pkg if likely_non_python_pkg: imports = [] - for import_item in ensure_list(m.get_value('test/imports', [])): + for import_item in ensure_list(m.get_value("test/imports", [])): # add any imports specifically marked as python - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'python'): - imports.extend(import_item['imports']) + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "python" + ): + imports.extend(import_item["imports"]) else: - imports = ensure_list(m.get_value('test/imports', [])) - imports = [item for item in imports if (not hasattr(item, 'keys') or - 'lang' in item and item['lang'] == 'python')] + imports = ensure_list(m.get_value("test/imports", [])) + imports = [ + item + for item in imports + if ( + not hasattr(item, "keys") or "lang" in item and item["lang"] == "python" + ) + ] if imports: - with open(tf, 'a') as fo: + with open(tf, "a") as fo: for name in imports: fo.write('print("import: %r")\n' % name) - fo.write('import %s\n' % name) - fo.write('\n') + fo.write("import %s\n" % name) + fo.write("\n") return tf if (tf_exists or imports) else False def create_r_files(m, test_dir=None): if not test_dir: test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.r') + tf, tf_exists = _create_test_files(m, test_dir, ".r") 
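    # Illustrative sketch (hypothetical recipe, not part of this patch): the
    # list-of-dicts form of test/imports that the language filtering above and
    # below keys on, e.g.
    #
    #   test:
    #     imports:
    #       - lang: python
    #         imports: [somepymod]     # hypothetical module names
    #       - lang: r
    #         imports: [someRpkg]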
imports = None # two ways we can enable R import tests: # 1. preface package name with r- and just list imports in test/imports # 2. use list of dicts for test/imports, and have lang: 'r' set in one of those dicts - if m.name().startswith('r-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("r-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'r'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "r" + ): + imports = import_item["imports"] break if imports: - with open(tf, 'a') as fo: + with open(tf, "a") as fo: for name in imports: fo.write('print("library(%r)")\n' % name) - fo.write('library(%s)\n' % name) - fo.write('\n') + fo.write("library(%s)\n" % name) + fo.write("\n") return tf if (tf_exists or imports) else False def create_pl_files(m, test_dir=None): if not test_dir: test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.pl') + tf, tf_exists = _create_test_files(m, test_dir, ".pl") imports = None - if m.name().startswith('perl-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("perl-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'perl'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "perl" + ): + imports = import_item["imports"] break if tf_exists or imports: - with open(tf, 'a') as fo: - print(r'my $expected_version = "%s";' % m.version().rstrip('0'), - file=fo) + with open(tf, "a") as fo: + print(r'my $expected_version = "%s";' % m.version().rstrip("0"), file=fo) if imports: for name in imports: print(r'print("import: %s\n");' % name, file=fo) - print('use %s;\n' % name, file=fo) + print("use %s;\n" % name, file=fo) # Don't try to print version for complex imports - if ' ' not in name: - print(("if (defined {0}->VERSION) {{\n" + - "\tmy $given_version = {0}->VERSION;\n" + - "\t$given_version =~ s/0+$//;\n" + - "\tdie('Expected version ' . $expected_version . ' but" + - " found ' . $given_version) unless ($expected_version " + - "eq $given_version);\n" + - "\tprint('\tusing version ' . {0}->VERSION . '\n');\n" + - "\n}}").format(name), file=fo) + if " " not in name: + print( + ( + "if (defined {0}->VERSION) {{\n" + + "\tmy $given_version = {0}->VERSION;\n" + + "\t$given_version =~ s/0+$//;\n" + + "\tdie('Expected version ' . $expected_version . ' but" + + " found ' . $given_version) unless ($expected_version " + + "eq $given_version);\n" + + "\tprint('\tusing version ' . {0}->VERSION . 
'\n');\n" + + "\n}}" + ).format(name), + file=fo, + ) return tf if (tf_exists or imports) else False def create_lua_files(m, test_dir=None): if not test_dir: test_dir = m.config.test_dir - tf, tf_exists = _create_test_files(m, test_dir, '.lua') + tf, tf_exists = _create_test_files(m, test_dir, ".lua") imports = None - if m.name().startswith('lua-'): - imports = ensure_list(m.get_value('test/imports', [])) + if m.name().startswith("lua-"): + imports = ensure_list(m.get_value("test/imports", [])) else: - for import_item in ensure_list(m.get_value('test/imports', [])): - if (hasattr(import_item, 'keys') and 'lang' in import_item and - import_item['lang'] == 'lua'): - imports = import_item['imports'] + for import_item in ensure_list(m.get_value("test/imports", [])): + if ( + hasattr(import_item, "keys") + and "lang" in import_item + and import_item["lang"] == "lua" + ): + imports = import_item["imports"] break if imports: - with open(tf, 'a+') as fo: + with open(tf, "a+") as fo: for name in imports: print(r'print("require \"%s\"\n");' % name, file=fo) print('require "%s"\n' % name, file=fo) @@ -244,9 +281,9 @@ def create_all_test_files(m, test_dir=None): rm_rf(test_dir) os.makedirs(test_dir) # this happens when we're finishing the build. - test_deps = m.meta.get('test', {}).get('requires', []) + test_deps = m.meta.get("test", {}).get("requires", []) if test_deps: - with open(os.path.join(test_dir, 'test_time_dependencies.json'), 'w') as f: + with open(os.path.join(test_dir, "test_time_dependencies.json"), "w") as f: json.dump(test_deps, f) else: # this happens when we're running a package's tests diff --git a/conda_build/develop.py b/conda_build/develop.py index 6c34d63ec5..5e9c892e36 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -1,16 +1,16 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from os.path import join, isdir, abspath, expanduser, exists import shutil import sys +from os.path import abspath, exists, expanduser, isdir, join -from conda_build.post import mk_relative_osx -from conda_build.utils import check_call_env, rec_glob, get_site_packages from conda_build.os_utils.external import find_executable +from conda_build.post import mk_relative_osx +from conda_build.utils import check_call_env, get_site_packages, rec_glob def relink_sharedobjects(pkg_path, build_prefix): - ''' + """ invokes functions in post module to relink to libraries in conda env :param pkg_path: look for shared objects to relink in pkg_path @@ -20,26 +20,26 @@ def relink_sharedobjects(pkg_path, build_prefix): .. note:: develop mode builds the extensions in place and makes a link to package in site-packages/. The build_prefix points to conda environment since runtime libraries should be loaded from environment's lib/. first - ''' + """ # find binaries in package dir and make them relocatable - bin_files = rec_glob(pkg_path, ['.so']) + bin_files = rec_glob(pkg_path, [".so"]) for b_file in bin_files: - if sys.platform == 'darwin': + if sys.platform == "darwin": mk_relative_osx(b_file, build_prefix) else: print("Nothing to do on Linux or Windows.") def write_to_conda_pth(sp_dir, pkg_path): - ''' + """ Append pkg_path to conda.pth in site-packages directory for current environment. Only add path if it doens't already exist. :param sp_dir: path to site-packages/. directory :param pkg_path: the package path to append to site-packes/. dir. 
- ''' - c_file = join(sp_dir, 'conda.pth') - with open(c_file, 'a') as f: + """ + c_file = join(sp_dir, "conda.pth") + with open(c_file, "a") as f: with open(c_file) as cf: # make sure file exists, before we try to read from it hence nested # in append with block @@ -47,17 +47,17 @@ def write_to_conda_pth(sp_dir, pkg_path): pkgs_in_dev_mode = cf.readlines() # only append pkg_path if it doesn't already exist in conda.pth - if pkg_path + '\n' in pkgs_in_dev_mode: + if pkg_path + "\n" in pkgs_in_dev_mode: print("path exists, skipping " + pkg_path) else: - f.write(pkg_path + '\n') + f.write(pkg_path + "\n") print("added " + pkg_path) def get_setup_py(path_): - ''' Return full path to setup.py or exit if not found ''' + """Return full path to setup.py or exit if not found""" # build path points to source dir, builds are placed in the - setup_py = join(path_, 'setup.py') + setup_py = join(path_, "setup.py") if not exists(setup_py): sys.exit(f"No setup.py found in {path_}. Exiting.") @@ -66,21 +66,21 @@ def get_setup_py(path_): def _clean(setup_py): - ''' + """ This invokes: $ python setup.py clean :param setup_py: path to setup.py - ''' + """ # first call setup.py clean - cmd = ['python', setup_py, 'clean'] + cmd = ["python", setup_py, "clean"] check_call_env(cmd) print("Completed: " + " ".join(cmd)) print("===============================================") def _build_ext(setup_py): - ''' + """ Define a develop function - similar to build function todo: need to test on win32 and linux @@ -88,31 +88,31 @@ def _build_ext(setup_py): $ python setup.py build_ext --inplace :param setup_py: path to setup.py - ''' + """ # next call setup.py develop - cmd = ['python', setup_py, 'build_ext', '--inplace'] + cmd = ["python", setup_py, "build_ext", "--inplace"] check_call_env(cmd) print("Completed: " + " ".join(cmd)) print("===============================================") def _uninstall(sp_dir, pkg_path): - ''' + """ Look for pkg_path in conda.pth file in site-packages directory and remove it. If pkg_path is not found in conda.pth, it means package is not installed in 'development mode' via conda develop. :param sp_dir: path to site-packages/. directory :param pkg_path: the package path to be uninstalled. - ''' - o_c_pth = join(sp_dir, 'conda.pth') - n_c_pth = join(sp_dir, 'conda.pth.temp') + """ + o_c_pth = join(sp_dir, "conda.pth") + n_c_pth = join(sp_dir, "conda.pth.temp") found = False - with open(n_c_pth, 'w') as new_c: + with open(n_c_pth, "w") as new_c: with open(o_c_pth) as orig_c: for line in orig_c: - if line != pkg_path + '\n': + if line != pkg_path + "\n": new_c.write(line) else: print("uninstalled: " + pkg_path) @@ -125,21 +125,30 @@ def _uninstall(sp_dir, pkg_path): shutil.move(n_c_pth, o_c_pth) -def execute(recipe_dirs, prefix=sys.prefix, no_pth_file=False, - build_ext=False, clean=False, uninstall=False): - +def execute( + recipe_dirs, + prefix=sys.prefix, + no_pth_file=False, + build_ext=False, + clean=False, + uninstall=False, +): if not isdir(prefix): - sys.exit("""\ + sys.exit( + """\ Error: environment does not exist: %s # # Use 'conda create' to create the environment first. 
-#""" % prefix) +#""" + % prefix + ) - assert find_executable('python', prefix=prefix) + assert find_executable("python", prefix=prefix) # current environment's site-packages directory - sp_dir = get_site_packages(prefix, '.'.join((str(sys.version_info.major), - str(sys.version_info.minor)))) + sp_dir = get_site_packages( + prefix, ".".join((str(sys.version_info.major), str(sys.version_info.minor))) + ) if isinstance(recipe_dirs, str): recipe_dirs = [recipe_dirs] diff --git a/conda_build/environ.py b/conda_build/environ.py index 841b622ee7..853da44e02 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -14,33 +14,33 @@ from glob import glob from os.path import join, normpath +from conda_build import utils +from conda_build.exceptions import BuildLockError, DependencyNeedsBuildingError +from conda_build.features import feature_list +from conda_build.index import get_build_index +from conda_build.os_utils import external +from conda_build.utils import ensure_list, env_var, prepend_bin_path +from conda_build.variants import get_default_variant + from .conda_interface import ( CondaError, LinkError, LockError, NoPackagesFoundError, PaddingError, + TemporaryDirectory, UnsatisfiableError, -) -from .conda_interface import ( + create_default_packages, display_actions, execute_actions, execute_plan, + get_version_from_git_tag, install_actions, + package_cache, + pkgs_dirs, + reset_context, + root_dir, ) -from .conda_interface import package_cache, TemporaryDirectory -from .conda_interface import pkgs_dirs, root_dir, create_default_packages -from .conda_interface import reset_context -from .conda_interface import get_version_from_git_tag - -from conda_build import utils -from conda_build.exceptions import BuildLockError, DependencyNeedsBuildingError -from conda_build.features import feature_list -from conda_build.index import get_build_index -from conda_build.os_utils import external -from conda_build.utils import ensure_list, prepend_bin_path, env_var -from conda_build.variants import get_default_variant - # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index 72d62a377d..d8ed36ff06 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import textwrap + SEPARATOR = "-" * 70 indent = lambda s: textwrap.fill(textwrap.dedent(s)) @@ -20,16 +21,20 @@ def __init__(self, original, *args, **kwargs): self.original = original def error_msg(self): - return "\n".join([ - SEPARATOR, - self.error_body(), - self.indented_exception(), - ]) + return "\n".join( + [ + SEPARATOR, + self.error_body(), + self.indented_exception(), + ] + ) def error_body(self): - return "\n".join([ - "Unable to parse meta.yaml file\n", - ]) + return "\n".join( + [ + "Unable to parse meta.yaml file\n", + ] + ) def indented_exception(self): orig = str(self.original) @@ -39,13 +44,17 @@ def indented_exception(self): class UnableToParseMissingJinja2(UnableToParse): def error_body(self): - return "\n".join([ - super().error_body(), - indent("""\ + return "\n".join( + [ + super().error_body(), + indent( + """\ It appears you are missing jinja2. Please install that package, then attempt to build. 
- """), - ]) + """ + ), + ] + ) class MissingDependency(CondaBuildException): @@ -61,7 +70,9 @@ def __init__(self, error, script, *args): class DependencyNeedsBuildingError(CondaBuildException): - def __init__(self, conda_exception=None, packages=None, subdir=None, *args, **kwargs): + def __init__( + self, conda_exception=None, packages=None, subdir=None, *args, **kwargs + ): self.subdir = subdir self.matchspecs = [] if packages: @@ -69,23 +80,26 @@ def __init__(self, conda_exception=None, packages=None, subdir=None, *args, **kw else: self.packages = packages or [] for line in str(conda_exception).splitlines(): - if not line.startswith(' - ') and (':' in line or ' -> ' not in line): + if not line.startswith(" - ") and (":" in line or " -> " not in line): continue - pkg = line.lstrip(' - ').split(' -> ')[-1] + pkg = line.lstrip(" - ").split(" -> ")[-1] self.matchspecs.append(pkg) - pkg = pkg.strip().split(' ')[0].split('=')[0].split('[')[0] + pkg = pkg.strip().split(" ")[0].split("=")[0].split("[")[0] self.packages.append(pkg) if not self.packages: - raise RuntimeError("failed to parse packages from exception:" - " {}".format(str(conda_exception))) + raise RuntimeError( + "failed to parse packages from exception:" + " {}".format(str(conda_exception)) + ) def __str__(self): return self.message @property def message(self): - return "Unsatisfiable dependencies for platform {}: {}".format(self.subdir, - set(self.matchspecs)) + return "Unsatisfiable dependencies for platform {}: {}".format( + self.subdir, set(self.matchspecs) + ) class RecipeError(CondaBuildException): @@ -93,7 +107,7 @@ class RecipeError(CondaBuildException): class BuildLockError(CondaBuildException): - """ Raised when we failed to acquire a lock. """ + """Raised when we failed to acquire a lock.""" class OverLinkingError(RuntimeError): diff --git a/conda_build/features.py b/conda_build/features.py index 9fe1389e39..4b506cbc80 100644 --- a/conda_build/features.py +++ b/conda_build/features.py @@ -3,11 +3,10 @@ import os import sys - env_vars = [ - 'FEATURE_DEBUG', - 'FEATURE_NOMKL', - 'FEATURE_OPT', + "FEATURE_DEBUG", + "FEATURE_NOMKL", + "FEATURE_OPT", ] # list of features, where each element is a tuple(name, boolean), i.e. 
having @@ -15,7 +14,9 @@ feature_list = [] for key, value in os.environ.items(): if key in env_vars: - if value not in ('0', '1'): - sys.exit("Error: did not expect environment variable '%s' " - "being set to '%s' (not '0' or '1')" % (key, value)) + if value not in ("0", "1"): + sys.exit( + "Error: did not expect environment variable '%s' " + "being set to '%s' (not '0' or '1')" % (key, value) + ) feature_list.append((key[8:].lower(), bool(int(value)))) diff --git a/conda_build/index.py b/conda_build/index.py index 4db7a224c4..088e895e77 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -2,53 +2,75 @@ # SPDX-License-Identifier: BSD-3-Clause import bz2 -from collections import OrderedDict import copy -from datetime import datetime +import fnmatch import functools -from itertools import groupby import json -from numbers import Number +import logging import os -from os.path import abspath, basename, getmtime, getsize, isdir, isfile, join, splitext, dirname import subprocess import sys import time +from collections import OrderedDict +from concurrent.futures import Executor, ProcessPoolExecutor +from datetime import datetime +from functools import partial +from itertools import groupby +from numbers import Number +from os.path import ( + abspath, + basename, + dirname, + getmtime, + getsize, + isdir, + isfile, + join, + splitext, +) from uuid import uuid4 +import conda_package_handling.api +import pytz +import yaml + # Lots of conda internals here. Should refactor to use exports. from conda.common.compat import ensure_binary -import pytz +# BAD BAD BAD - conda internals +from conda.core.subdir_data import SubdirData +from conda.models.channel import Channel +from conda_package_handling.api import InvalidArchiveError from jinja2 import Environment, PackageLoader from tqdm import tqdm -import yaml from yaml.constructor import ConstructorError from yaml.parser import ParserError -from yaml.scanner import ScannerError from yaml.reader import ReaderError - -import fnmatch -from functools import partial -import logging -import conda_package_handling.api -from conda_package_handling.api import InvalidArchiveError - -from concurrent.futures import ProcessPoolExecutor -from concurrent.futures import Executor - -# BAD BAD BAD - conda internals -from conda.core.subdir_data import SubdirData -from conda.models.channel import Channel +from yaml.scanner import ScannerError from conda_build import conda_interface, utils -from .conda_interface import MatchSpec, VersionOrder, human_bytes, context -from .conda_interface import CondaError, CondaHTTPError, get_index, url_path -from .conda_interface import TemporaryDirectory -from .conda_interface import Resolve -from .utils import (CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, FileNotFoundError, - JSONDecodeError, get_logger, glob) + +from .conda_interface import ( + CondaError, + CondaHTTPError, + MatchSpec, + Resolve, + TemporaryDirectory, + VersionOrder, + context, + get_index, + human_bytes, + url_path, +) +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + CONDA_PACKAGE_EXTENSIONS, + FileNotFoundError, + JSONDecodeError, + get_logger, + glob, +) log = get_logger(__name__) @@ -62,7 +84,7 @@ def map(self, func, *iterables): try: - from conda.base.constants import NAMESPACES_MAP, NAMESPACE_PACKAGE_NAMES + from conda.base.constants import NAMESPACE_PACKAGE_NAMES, NAMESPACES_MAP except ImportError: NAMESPACES_MAP = { # base package name, namespace "python": "python", @@ -97,8 
+119,12 @@ def map(self, func, *iterables): # TODO: support for libarchive seems to have broken ability to use multiple threads here. # The new conda format is so much faster that it more than makes up for it. However, it # would be nice to fix this at some point. -MAX_THREADS_DEFAULT = os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 -if sys.platform == 'win32': # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a +MAX_THREADS_DEFAULT = ( + os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 +) +if ( + sys.platform == "win32" +): # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT) LOCK_TIMEOUT_SECS = 3 * 3600 LOCKFILE_NAME = ".lock" @@ -107,9 +133,17 @@ def map(self, func, *iterables): # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" -def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, - omit_defaults=False, channel_urls=None, debug=False, verbose=True, - **kwargs): +def get_build_index( + subdir, + bldpkgs_dir, + output_folder=None, + clear_cache=False, + omit_defaults=False, + channel_urls=None, + debug=False, + verbose=True, + **kwargs, +): global local_index_timestamp global local_subdir global local_output_folder @@ -124,17 +158,18 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, output_folder = dirname(bldpkgs_dir) # check file modification time - this is the age of our local index. - index_file = os.path.join(output_folder, subdir, 'repodata.json') + index_file = os.path.join(output_folder, subdir, "repodata.json") if os.path.isfile(index_file): mtime = os.path.getmtime(index_file) - if (clear_cache or - not os.path.isfile(index_file) or - local_subdir != subdir or - local_output_folder != output_folder or - mtime > local_index_timestamp or - cached_channels != channel_urls): - + if ( + clear_cache + or not os.path.isfile(index_file) + or local_subdir != subdir + or local_output_folder != output_folder + or mtime > local_index_timestamp + or cached_channels != channel_urls + ): # priority: (local as either croot or output_folder IF NOT EXPLICITLY IN CHANNEL ARGS), # then channels passed as args (if local in this, it remains in same order), # then channels from condarc. @@ -146,14 +181,16 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, elif verbose: log_context = partial(utils.LoggingContext, logging.WARN, loggers=loggers) else: - log_context = partial(utils.LoggingContext, logging.CRITICAL + 1, loggers=loggers) + log_context = partial( + utils.LoggingContext, logging.CRITICAL + 1, loggers=loggers + ) with log_context(): # this is where we add the "local" channel. It's a little smarter than conda, because # conda does not know about our output_folder when it is not the default setting. if os.path.isdir(output_folder): local_path = url_path(output_folder) # replace local with the appropriate real channel. Order is maintained. - urls = [url if url != 'local' else local_path for url in urls] + urls = [url if url != "local" else local_path for url in urls] if local_path not in urls: urls.insert(0, local_path) _ensure_valid_channel(output_folder, subdir) @@ -162,24 +199,28 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, # replace noarch with native subdir - this ends up building an index with both the # native content and the noarch content. 
- if subdir == 'noarch': + if subdir == "noarch": subdir = conda_interface.subdir try: - cached_index = get_index(channel_urls=urls, - prepend=not omit_defaults, - use_local=False, - use_cache=context.offline, - platform=subdir) + cached_index = get_index( + channel_urls=urls, + prepend=not omit_defaults, + use_local=False, + use_cache=context.offline, + platform=subdir, + ) # HACK: defaults does not have the many subfolders we support. Omit it and # try again. except CondaHTTPError: - if 'defaults' in urls: - urls.remove('defaults') - cached_index = get_index(channel_urls=urls, - prepend=omit_defaults, - use_local=False, - use_cache=context.offline, - platform=subdir) + if "defaults" in urls: + urls.remove("defaults") + cached_index = get_index( + channel_urls=urls, + prepend=omit_defaults, + use_local=False, + use_cache=context.offline, + platform=subdir, + ) expanded_channels = {rec.channel for rec in cached_index.values()} @@ -190,10 +231,13 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, location = channel.location if utils.on_win: location = location.lstrip("/") - elif (not os.path.isabs(channel.location) and - os.path.exists(os.path.join(os.path.sep, channel.location))): + elif not os.path.isabs(channel.location) and os.path.exists( + os.path.join(os.path.sep, channel.location) + ): location = os.path.join(os.path.sep, channel.location) - channeldata_file = os.path.join(location, channel.name, 'channeldata.json') + channeldata_file = os.path.join( + location, channel.name, "channeldata.json" + ) retry = 0 max_retries = 1 if os.path.isfile(channeldata_file): @@ -209,15 +253,19 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, # download channeldata.json for url if not context.offline: try: - channel_data[channel.name] = utils.download_channeldata(channel.base_url + '/channeldata.json') + channel_data[channel.name] = utils.download_channeldata( + channel.base_url + "/channeldata.json" + ) except CondaHTTPError: continue # collapse defaults metachannel back into one superchannel, merging channeldata - if channel.base_url in context.default_channels and channel_data.get(channel.name): - packages = superchannel.get('packages', {}) + if channel.base_url in context.default_channels and channel_data.get( + channel.name + ): + packages = superchannel.get("packages", {}) packages.update(channel_data[channel.name]) - superchannel['packages'] = packages - channel_data['defaults'] = superchannel + superchannel["packages"] = packages + channel_data["defaults"] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir local_output_folder = output_folder @@ -226,15 +274,27 @@ def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False, def _ensure_valid_channel(local_folder, subdir): - for folder in {subdir, 'noarch'}: + for folder in {subdir, "noarch"}: path = os.path.join(local_folder, folder) if not os.path.isdir(path): os.makedirs(path) -def update_index(dir_path, check_md5=False, channel_name=None, patch_generator=None, threads=MAX_THREADS_DEFAULT, - verbose=False, progress=False, hotfix_source_repo=None, subdirs=None, warn=True, - current_index_versions=None, debug=False, index_file=None): +def update_index( + dir_path, + check_md5=False, + channel_name=None, + patch_generator=None, + threads=MAX_THREADS_DEFAULT, + verbose=False, + progress=False, + hotfix_source_repo=None, + subdirs=None, + warn=True, + current_index_versions=None, + debug=False, + index_file=None, +): """ 
If dir_path contains a directory named 'noarch', the path tree therein is treated as though it's a full channel, with a level of subdirs, each subdir having an update @@ -248,27 +308,43 @@ def update_index(dir_path, check_md5=False, channel_name=None, patch_generator=N base_path, dirname = os.path.split(dir_path) if dirname in utils.DEFAULT_SUBDIRS: if warn: - log.warn("The update_index function has changed to index all subdirs at once. You're pointing it at a single subdir. " - "Please update your code to point it at the channel root, rather than a subdir.") - return update_index(base_path, check_md5=check_md5, channel_name=channel_name, - threads=threads, verbose=verbose, progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions) - return ChannelIndex(dir_path, channel_name, subdirs=subdirs, threads=threads, - deep_integrity_check=check_md5, debug=debug).index( - patch_generator=patch_generator, verbose=verbose, - progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions, - index_file=index_file) + log.warn( + "The update_index function has changed to index all subdirs at once. You're pointing it at a single subdir. " + "Please update your code to point it at the channel root, rather than a subdir." + ) + return update_index( + base_path, + check_md5=check_md5, + channel_name=channel_name, + threads=threads, + verbose=verbose, + progress=progress, + hotfix_source_repo=hotfix_source_repo, + current_index_versions=current_index_versions, + ) + return ChannelIndex( + dir_path, + channel_name, + subdirs=subdirs, + threads=threads, + deep_integrity_check=check_md5, + debug=debug, + ).index( + patch_generator=patch_generator, + verbose=verbose, + progress=progress, + hotfix_source_repo=hotfix_source_repo, + current_index_versions=current_index_versions, + index_file=index_file, + ) def _determine_namespace(info): - if info.get('namespace'): - namespace = info['namespace'] + if info.get("namespace"): + namespace = info["namespace"] else: depends_names = set() - for spec in info.get('depends', []): + for spec in info.get("depends", []): try: depends_names.add(MatchSpec(spec).name) except CondaError: @@ -278,21 +354,23 @@ def _determine_namespace(info): namespace = NAMESPACES_MAP[spaces.pop()] else: namespace = "global" - info['namespace'] = namespace + info["namespace"] = namespace - if not info.get('namespace_in_name') and '-' in info['name']: - namespace_prefix, reduced_name = info['name'].split('-', 1) + if not info.get("namespace_in_name") and "-" in info["name"]: + namespace_prefix, reduced_name = info["name"].split("-", 1) if namespace_prefix == namespace: - info['name_in_channel'] = info['name'] - info['name'] = reduced_name + info["name_in_channel"] = info["name"] + info["name"] = reduced_name - return namespace, info.get('name_in_channel', info['name']), info['name'] + return namespace, info.get("name_in_channel", info["name"]), info["name"] def _make_seconds(timestamp): timestamp = int(timestamp) if timestamp > 253402300799: # 9999-12-31 - timestamp //= 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + timestamp //= ( + 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + ) return timestamp @@ -301,8 +379,8 @@ def _make_seconds(timestamp): REPODATA_VERSION = 1 CHANNELDATA_VERSION = 1 -REPODATA_JSON_FN = 'repodata.json' -REPODATA_FROM_PKGS_JSON_FN = 'repodata_from_packages.json' +REPODATA_JSON_FN = "repodata.json" +REPODATA_FROM_PKGS_JSON_FN = 
"repodata_from_packages.json" CHANNELDATA_FIELDS = ( "description", "dev_url", @@ -339,39 +417,51 @@ def _make_seconds(timestamp): def _clear_newline_chars(record, field_name): if field_name in record: try: - record[field_name] = record[field_name].strip().replace('\n', ' ') + record[field_name] = record[field_name].strip().replace("\n", " ") except AttributeError: # sometimes description gets added as a list instead of just a string - record[field_name] = record[field_name][0].strip().replace('\n', ' ') + record[field_name] = record[field_name][0].strip().replace("\n", " ") def _apply_instructions(subdir, repodata, instructions): repodata.setdefault("removed", []) - utils.merge_or_update_dict(repodata.get('packages', {}), instructions.get('packages', {}), merge=False, - add_missing_keys=False) + utils.merge_or_update_dict( + repodata.get("packages", {}), + instructions.get("packages", {}), + merge=False, + add_missing_keys=False, + ) # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes new_pkg_fixes = { k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get('packages', {}).items() + for k, v in instructions.get("packages", {}).items() } - utils.merge_or_update_dict(repodata.get('packages.conda', {}), new_pkg_fixes, merge=False, - add_missing_keys=False) - utils.merge_or_update_dict(repodata.get('packages.conda', {}), instructions.get('packages.conda', {}), merge=False, - add_missing_keys=False) + utils.merge_or_update_dict( + repodata.get("packages.conda", {}), + new_pkg_fixes, + merge=False, + add_missing_keys=False, + ) + utils.merge_or_update_dict( + repodata.get("packages.conda", {}), + instructions.get("packages.conda", {}), + merge=False, + add_missing_keys=False, + ) - for fn in instructions.get('revoke', ()): - for key in ('packages', 'packages.conda'): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == 'packages.conda': + for fn in instructions.get("revoke", ()): + for key in ("packages", "packages.conda"): + if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) if fn in repodata[key]: - repodata[key][fn]['revoked'] = True - repodata[key][fn]['depends'].append('package_has_been_revoked') + repodata[key][fn]["revoked"] = True + repodata[key][fn]["depends"].append("package_has_been_revoked") - for fn in instructions.get('remove', ()): - for key in ('packages', 'packages.conda'): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == 'packages.conda': + for fn in instructions.get("remove", ()): + for key in ("packages", "packages.conda"): + if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) popped = repodata[key].pop(fn, None) if popped: @@ -394,16 +484,16 @@ def _filter_add_href(text, link, **kwargs): kwargs_list = [f'href="{link}"'] kwargs_list.append(f'alt="{text}"') kwargs_list += [f'{k}="{v}"' for k, v in kwargs.items()] - return '{}'.format(' '.join(kwargs_list), text) + return "{}".format(" ".join(kwargs_list), text) else: return text environment = Environment( - loader=PackageLoader('conda_build', 'templates'), + loader=PackageLoader("conda_build", "templates"), ) - environment.filters['human_bytes'] = human_bytes - environment.filters['strftime'] = _filter_strftime - environment.filters['add_href'] = 
_filter_add_href + environment.filters["human_bytes"] = human_bytes + environment.filters["strftime"] = _filter_strftime + environment.filters["add_href"] = _filter_add_href environment.trim_blocks = True environment.lstrip_blocks = True @@ -413,14 +503,14 @@ def _filter_add_href(text, link, **kwargs): def _maybe_write(path, content, write_newline_end=False, content_is_binary=False): # Create the temp file next "path" so that we can use an atomic move, see # https://github.com/conda/conda-build/issues/3833 - temp_path = f'{path}.{uuid4()}' + temp_path = f"{path}.{uuid4()}" if not content_is_binary: content = ensure_binary(content) - with open(temp_path, 'wb') as fh: + with open(temp_path, "wb") as fh: fh.write(content) if write_newline_end: - fh.write(b'\n') + fh.write(b"\n") if isfile(path): if utils.md5_file(temp_path) == utils.md5_file(path): # No need to change mtimes. The contents already match. @@ -434,7 +524,7 @@ def _maybe_write(path, content, write_newline_end=False, content_is_binary=False def _make_build_string(build, build_number): build_number_as_string = str(build_number) if build.endswith(build_number_as_string): - build = build[:-len(build_number_as_string)] + build = build[: -len(build_number_as_string)] build = build.rstrip("_") build_string = build return build_string @@ -458,7 +548,7 @@ def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): if missing_dependencies: builder = [ "WARNING: The following dependencies do not exist in the channel", - " and are not declared as external dependencies:" + " and are not declared as external dependencies:", ] for dep_name in sorted(missing_dependencies): builder.append(" %s" % dep_name) @@ -470,44 +560,54 @@ def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): patched_repodata["removed"].append(fn) builder.append("The associated packages are being removed from the index.") - builder.append('') + builder.append("") log.warn("\n".join(builder)) def _cache_post_install_details(paths_cache_path, post_install_cache_path): - post_install_details_json = {'binary_prefix': False, 'text_prefix': False, - 'activate.d': False, 'deactivate.d': False, - 'pre_link': False, 'post_link': False, 'pre_unlink': False} + post_install_details_json = { + "binary_prefix": False, + "text_prefix": False, + "activate.d": False, + "deactivate.d": False, + "pre_link": False, + "post_link": False, + "pre_unlink": False, + } if os.path.lexists(paths_cache_path): with open(paths_cache_path) as f: - paths = json.load(f).get('paths', []) + paths = json.load(f).get("paths", []) - # get embedded prefix data from paths.json + # get embedded prefix data from paths.json for f in paths: - if f.get('prefix_placeholder'): - if f.get('file_mode') == 'binary': - post_install_details_json['binary_prefix'] = True - elif f.get('file_mode') == 'text': - post_install_details_json['text_prefix'] = True + if f.get("prefix_placeholder"): + if f.get("file_mode") == "binary": + post_install_details_json["binary_prefix"] = True + elif f.get("file_mode") == "text": + post_install_details_json["text_prefix"] = True # check for any activate.d/deactivate.d scripts - for k in ('activate.d', 'deactivate.d'): - if not post_install_details_json.get(k) and f['_path'].startswith('etc/conda/%s' % k): + for k in ("activate.d", "deactivate.d"): + if not post_install_details_json.get(k) and f["_path"].startswith( + "etc/conda/%s" % k + ): post_install_details_json[k] = True # check for any link scripts - for pat in ('pre-link', 'post-link', 'pre-unlink'): - 
if not post_install_details_json.get(pat) and fnmatch.fnmatch(f['_path'], '*/.*-%s.*' % pat): + for pat in ("pre-link", "post-link", "pre-unlink"): + if not post_install_details_json.get(pat) and fnmatch.fnmatch( + f["_path"], "*/.*-%s.*" % pat + ): post_install_details_json[pat.replace("-", "_")] = True - with open(post_install_cache_path, 'w') as fh: + with open(post_install_cache_path, "w") as fh: json.dump(post_install_details_json, fh) def _cache_recipe(tmpdir, recipe_cache_path): recipe_path_search_order = ( - 'info/recipe/meta.yaml.rendered', - 'info/recipe/meta.yaml', - 'info/meta.yaml', - ) + "info/recipe/meta.yaml.rendered", + "info/recipe/meta.yaml", + "info/meta.yaml", + ) for path in recipe_path_search_order: recipe_path = os.path.join(tmpdir, path) if os.path.lexists(recipe_path): @@ -524,9 +624,9 @@ def _cache_recipe(tmpdir, recipe_cache_path): try: recipe_json_str = json.dumps(recipe_json) except TypeError: - recipe_json.get('requirements', {}).pop('build') + recipe_json.get("requirements", {}).pop("build") recipe_json_str = json.dumps(recipe_json) - with open(recipe_cache_path, 'w') as fh: + with open(recipe_cache_path, "w") as fh: fh.write(recipe_json_str) return recipe_json @@ -534,15 +634,15 @@ def _cache_recipe(tmpdir, recipe_cache_path): def _cache_run_exports(tmpdir, run_exports_cache_path): run_exports = {} try: - with open(os.path.join(tmpdir, 'info', 'run_exports.json')) as f: + with open(os.path.join(tmpdir, "info", "run_exports.json")) as f: run_exports = json.load(f) except (OSError, FileNotFoundError): try: - with open(os.path.join(tmpdir, 'info', 'run_exports.yaml')) as f: + with open(os.path.join(tmpdir, "info", "run_exports.yaml")) as f: run_exports = yaml.safe_load(f) except (OSError, FileNotFoundError): log.debug("%s has no run_exports file (this is OK)" % tmpdir) - with open(run_exports_cache_path, 'w') as fh: + with open(run_exports_cache_path, "w") as fh: json.dump(run_exports, fh) @@ -552,11 +652,11 @@ def _cache_icon(tmpdir, recipe_json, icon_cache_path): # of the icon file as indicated by the meta.yaml `app/icon` key. # apparently right now conda-build renames all icons to 'icon.png' # What happens if it's an ico file, or a svg file, instead of a png? Not sure! 
- app_icon_path = recipe_json.get('app', {}).get('icon') + app_icon_path = recipe_json.get("app", {}).get("icon") if app_icon_path: - icon_path = os.path.join(tmpdir, 'info', 'recipe', app_icon_path) + icon_path = os.path.join(tmpdir, "info", "recipe", app_icon_path) if not os.path.lexists(icon_path): - icon_path = os.path.join(tmpdir, 'info', 'icon.png') + icon_path = os.path.join(tmpdir, "info", "icon.png") if os.path.lexists(icon_path): icon_cache_path += splitext(app_icon_path)[-1] utils.move_with_fallback(icon_path, icon_cache_path) @@ -564,9 +664,9 @@ def _cache_icon(tmpdir, recipe_json, icon_cache_path): def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths): environment = _get_jinja2_environment() - template = environment.get_template('subdir-index.html.j2') + template = environment.get_template("subdir-index.html.j2") rendered_html = template.render( - title="{}/{}".format(channel_name or '', subdir), + title="{}/{}".format(channel_name or "", subdir), packages=repodata_packages, current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), extra_paths=extra_paths, @@ -576,32 +676,40 @@ def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths def _make_channeldata_index_html(channel_name, channeldata): environment = _get_jinja2_environment() - template = environment.get_template('channeldata-index.html.j2') + template = environment.get_template("channeldata-index.html.j2") rendered_html = template.render( title=channel_name, - packages=channeldata['packages'], - subdirs=channeldata['subdirs'], + packages=channeldata["packages"], + subdirs=channeldata["subdirs"], current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), ) return rendered_html def _get_source_repo_git_info(path): - is_repo = subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"], cwd=path) - if is_repo.strip().decode('utf-8') == "true": - output = subprocess.check_output(['git', 'log', - "--pretty=format:'%h|%ad|%an|%s'", - "--date=unix"], cwd=path) + is_repo = subprocess.check_output( + ["git", "rev-parse", "--is-inside-work-tree"], cwd=path + ) + if is_repo.strip().decode("utf-8") == "true": + output = subprocess.check_output( + ["git", "log", "--pretty=format:'%h|%ad|%an|%s'", "--date=unix"], cwd=path + ) commits = [] for line in output.decode("utf-8").strip().splitlines(): _hash, _time, _author, _desc = line.split("|") - commits.append({"hash": _hash, "timestamp": int(_time), - "author": _author, "description": _desc}) + commits.append( + { + "hash": _hash, + "timestamp": int(_time), + "author": _author, + "description": _desc, + } + ) return commits def _cache_info_file(tmpdir, info_fn, cache_path): - info_path = os.path.join(tmpdir, 'info', info_fn) + info_path = os.path.join(tmpdir, "info", info_fn) if os.path.lexists(info_path): utils.move_with_fallback(info_path, cache_path) @@ -609,8 +717,8 @@ def _cache_info_file(tmpdir, info_fn, cache_path): def _alternate_file_extension(fn): cache_fn = fn for ext in CONDA_PACKAGE_EXTENSIONS: - cache_fn = cache_fn.replace(ext, '') - other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, '')} + cache_fn = cache_fn.replace(ext, "") + other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, "")} return cache_fn + next(iter(other_ext)) @@ -637,7 +745,7 @@ def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): "packages.conda": conda_packages, } - channel = Channel('https://conda.anaconda.org/dummy-channel/%s' % subdir) + channel = 
Channel("https://conda.anaconda.org/dummy-channel/%s" % subdir) sd = SubdirData(channel) sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -654,11 +762,11 @@ def _get_newest_versions(r, pins={}): if g_name in pins: matches = [] for pin in pins[g_name]: - version = r.find_matches(MatchSpec(f'{g_name}={pin}'))[0].version - matches.extend(r.find_matches(MatchSpec(f'{g_name}={version}'))) + version = r.find_matches(MatchSpec(f"{g_name}={pin}"))[0].version + matches.extend(r.find_matches(MatchSpec(f"{g_name}={version}"))) else: version = r.groups[g_name][0].version - matches = r.find_matches(MatchSpec(f'{g_name}={version}')) + matches = r.find_matches(MatchSpec(f"{g_name}={version}")) groups[g_name] = matches return [pkg for group in groups.values() for pkg in group] @@ -678,9 +786,11 @@ def _add_missing_deps(new_r, original_r): matches = original_r.find_matches(ms) if matches: version = matches[0].version - expanded_groups[ms.name] = ( - set(expanded_groups.get(ms.name, [])) | - set(original_r.find_matches(MatchSpec(f'{ms.name}={version}')))) + expanded_groups[ms.name] = set( + expanded_groups.get(ms.name, []) + ) | set( + original_r.find_matches(MatchSpec(f"{ms.name}={version}")) + ) seen_specs.add(dep_spec) return [pkg for group in expanded_groups.values() for pkg in group] @@ -700,14 +810,15 @@ def _add_prev_ver_for_features(new_r, orig_r): keep_m = None for i in range(len(orig_r.groups[g_name])): _m = orig_r.groups[g_name][i] - if ( - VersionOrder(str(_m.version)) <= latest_version and - not (_m.track_features or _m.features) + if VersionOrder(str(_m.version)) <= latest_version and not ( + _m.track_features or _m.features ): keep_m = _m break if keep_m is not None: - expanded_groups[g_name] = {keep_m} | set(expanded_groups.get(g_name, [])) + expanded_groups[g_name] = {keep_m} | set( + expanded_groups.get(g_name, []) + ) return [pkg for group in expanded_groups.values() for pkg in group] @@ -726,15 +837,17 @@ def _shard_newest_packages(subdir, r, pins=None): for g_name, g_recs in r.groups.items(): # always do the latest implicitly version = r.groups[g_name][0].version - matches = set(r.find_matches(MatchSpec(f'{g_name}={version}'))) + matches = set(r.find_matches(MatchSpec(f"{g_name}={version}"))) if g_name in pins: for pin_value in pins[g_name]: - version = r.find_matches(MatchSpec(f'{g_name}={pin_value}'))[0].version - matches.update(r.find_matches(MatchSpec(f'{g_name}={version}'))) + version = r.find_matches(MatchSpec(f"{g_name}={pin_value}"))[0].version + matches.update(r.find_matches(MatchSpec(f"{g_name}={version}"))) groups[g_name] = matches # add the deps of the stuff in the index - new_r = _get_resolve_object(subdir, precs=[pkg for group in groups.values() for pkg in group]) + new_r = _get_resolve_object( + subdir, precs=[pkg for group in groups.values() for pkg in group] + ) new_r = _get_resolve_object(subdir, precs=_add_missing_deps(new_r, r)) # now for any pkg with features, add at least one previous version @@ -745,30 +858,42 @@ def _shard_newest_packages(subdir, r, pins=None): def _build_current_repodata(subdir, repodata, pins): r = _get_resolve_object(subdir, repodata=repodata) keep_pkgs = _shard_newest_packages(subdir, r, pins) - new_repodata = {k: repodata[k] for k in set(repodata.keys()) - {'packages', 'packages.conda'}} + new_repodata = { + k: repodata[k] for k in set(repodata.keys()) - {"packages", "packages.conda"} + } packages = {} conda_packages = {} for keep_pkg in keep_pkgs: if keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - 
conda_packages[keep_pkg.fn] = repodata['packages.conda'][keep_pkg.fn] + conda_packages[keep_pkg.fn] = repodata["packages.conda"][keep_pkg.fn] # in order to prevent package churn we consider the md5 for the .tar.bz2 that matches the .conda file # This holds when .conda files contain the same files as .tar.bz2, which is an assumption we'll make # until it becomes more prevalent that people provide only .conda files and just skip .tar.bz2 - counterpart = keep_pkg.fn.replace(CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) - conda_packages[keep_pkg.fn]['legacy_bz2_md5'] = repodata['packages'].get(counterpart, {}).get('md5') + counterpart = keep_pkg.fn.replace( + CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1 + ) + conda_packages[keep_pkg.fn]["legacy_bz2_md5"] = ( + repodata["packages"].get(counterpart, {}).get("md5") + ) elif keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[keep_pkg.fn] = repodata['packages'][keep_pkg.fn] - new_repodata['packages'] = packages - new_repodata['packages.conda'] = conda_packages + packages[keep_pkg.fn] = repodata["packages"][keep_pkg.fn] + new_repodata["packages"] = packages + new_repodata["packages.conda"] = conda_packages return new_repodata class ChannelIndex: - - def __init__(self, channel_root, channel_name, subdirs=None, threads=MAX_THREADS_DEFAULT, - deep_integrity_check=False, debug=False): + def __init__( + self, + channel_root, + channel_name, + subdirs=None, + threads=MAX_THREADS_DEFAULT, + deep_integrity_check=False, + debug=False, + ): self.channel_root = abspath(channel_root) - self.channel_name = channel_name or basename(channel_root.rstrip('/')) + self.channel_name = channel_name or basename(channel_root.rstrip("/")) self._subdirs = subdirs self.thread_executor = ( DummyExecutor() @@ -777,8 +902,15 @@ def __init__(self, channel_root, channel_name, subdirs=None, threads=MAX_THREADS ) self.deep_integrity_check = deep_integrity_check - def index(self, patch_generator, hotfix_source_repo=None, verbose=False, progress=False, - current_index_versions=None, index_file=None): + def index( + self, + patch_generator, + hotfix_source_repo=None, + verbose=False, + progress=False, + current_index_versions=None, + index_file=None, + ): if verbose: level = logging.DEBUG else: @@ -792,40 +924,53 @@ def index(self, patch_generator, hotfix_source_repo=None, verbose=False, progres if subdir.name in utils.DEFAULT_SUBDIRS and subdir.is_dir() } log.debug("found subdirs %s" % detected_subdirs) - self.subdirs = subdirs = sorted(detected_subdirs | {'noarch'}) + self.subdirs = subdirs = sorted(detected_subdirs | {"noarch"}) else: - self.subdirs = subdirs = sorted(set(self._subdirs) | {'noarch'}) + self.subdirs = subdirs = sorted(set(self._subdirs) | {"noarch"}) # Step 1. Lock local channel. - with utils.try_acquire_locks([utils.get_lock(self.channel_root)], timeout=900): + with utils.try_acquire_locks( + [utils.get_lock(self.channel_root)], timeout=900 + ): channel_data = {} - channeldata_file = os.path.join(self.channel_root, 'channeldata.json') + channeldata_file = os.path.join(self.channel_root, "channeldata.json") if os.path.isfile(channeldata_file): with open(channeldata_file) as f: channel_data = json.load(f) # Step 2. 
Collect repodata from packages, save to pkg_repodata.json file - with tqdm(total=len(subdirs), disable=(verbose or not progress), leave=False) as t: + with tqdm( + total=len(subdirs), disable=(verbose or not progress), leave=False + ) as t: for subdir in subdirs: t.set_description("Subdir: %s" % subdir) t.update() - with tqdm(total=8, disable=(verbose or not progress), leave=False) as t2: + with tqdm( + total=8, disable=(verbose or not progress), leave=False + ) as t2: t2.set_description("Gathering repodata") t2.update() _ensure_valid_channel(self.channel_root, subdir) repodata_from_packages = self.index_subdir( - subdir, verbose=verbose, progress=progress, - index_file=index_file) + subdir, + verbose=verbose, + progress=progress, + index_file=index_file, + ) t2.set_description("Writing pre-patch repodata") t2.update() - self._write_repodata(subdir, repodata_from_packages, - REPODATA_FROM_PKGS_JSON_FN) + self._write_repodata( + subdir, + repodata_from_packages, + REPODATA_FROM_PKGS_JSON_FN, + ) # Step 3. Apply patch instructions. t2.set_description("Applying patch instructions") t2.update() patched_repodata, patch_instructions = self._patch_repodata( - subdir, repodata_from_packages, patch_generator) + subdir, repodata_from_packages, patch_generator + ) # Step 4. Save patched and augmented repodata. # If the contents of repodata have changed, write a new repodata.json file. @@ -833,14 +978,21 @@ def index(self, patch_generator, hotfix_source_repo=None, verbose=False, progres t2.set_description("Writing patched repodata") t2.update() - self._write_repodata(subdir, patched_repodata, REPODATA_JSON_FN) + self._write_repodata( + subdir, patched_repodata, REPODATA_JSON_FN + ) t2.set_description("Building current_repodata subset") t2.update() - current_repodata = _build_current_repodata(subdir, patched_repodata, - pins=current_index_versions) + current_repodata = _build_current_repodata( + subdir, patched_repodata, pins=current_index_versions + ) t2.set_description("Writing current_repodata subset") t2.update() - self._write_repodata(subdir, current_repodata, json_filename="current_repodata.json") + self._write_repodata( + subdir, + current_repodata, + json_filename="current_repodata.json", + ) t2.set_description("Writing subdir index HTML") t2.update() @@ -848,7 +1000,9 @@ def index(self, patch_generator, hotfix_source_repo=None, verbose=False, progres t2.set_description("Updating channeldata") t2.update() - self._update_channeldata(channel_data, patched_repodata, subdir) + self._update_channeldata( + channel_data, patched_repodata, subdir + ) # Step 7. Create and write channeldata. 
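            # Rough shape of the channel_data dict written out in this step
            # (the "packages"/"subdirs" keys follow the template usage above;
            # the top-level version key is assumed to mirror CHANNELDATA_VERSION;
            # the package entry is illustrative only):
            #
            #   {
            #     "channeldata_version": 1,
            #     "subdirs": ["linux-64", "noarch"],
            #     "packages": {"some-pkg": {"version": "1.0", ...}},
            #   }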
self._write_channeldata_index_html(channel_data) @@ -864,7 +1018,11 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): # gather conda package filenames in subdir # we'll process these first, because reading their metadata is much faster - fns_in_subdir = {fn for fn in os.listdir(subdir_path) if fn.endswith('.conda') or fn.endswith('.tar.bz2')} + fns_in_subdir = { + fn + for fn in os.listdir(subdir_path) + if fn.endswith(".conda") or fn.endswith(".tar.bz2") + } # load current/old repodata try: @@ -885,7 +1043,7 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): # 'md5': 'abd123', # }, # } - stat_cache_path = join(subdir_path, '.cache', 'stat.json') + stat_cache_path = join(subdir_path, ".cache", "stat.json") try: with open(stat_cache_path) as fh: stat_cache = json.load(fh) or {} @@ -895,7 +1053,7 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): stat_cache_original = stat_cache.copy() remove_set = old_repodata_fns - fns_in_subdir - ignore_set = set(old_repodata.get('removed', [])) + ignore_set = set(old_repodata.get("removed", [])) try: # calculate all the paths and figure out what we're going to do with them # add_set: filenames that aren't in the current/old repodata, but exist in the subdir @@ -903,10 +1061,10 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): with open(index_file) as fin: add_set = set() for line in fin: - fn_subdir, fn = line.strip().split('/') + fn_subdir, fn = line.strip().split("/") if fn_subdir != subdir: continue - if fn.endswith('.conda') or fn.endswith('.tar.bz2'): + if fn.endswith(".conda") or fn.endswith(".tar.bz2"): add_set.add(fn) else: add_set = fns_in_subdir - old_repodata_fns @@ -918,27 +1076,41 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): # not using md5 here because it takes too long. If needing to do full md5 checks, # use the --deep-integrity-check flag / self.deep_integrity_check option. 
update_set = self._calculate_update_set( - subdir, fns_in_subdir, old_repodata_fns, stat_cache, - verbose=verbose, progress=progress + subdir, + fns_in_subdir, + old_repodata_fns, + stat_cache, + verbose=verbose, + progress=progress, ) # unchanged_set: packages in old repodata whose information can carry straight # across to new repodata unchanged_set = set(old_repodata_fns - update_set - remove_set - ignore_set) - assert isinstance(unchanged_set, set) # faster `in` queries + assert isinstance(unchanged_set, set) # faster `in` queries # clean up removed files - removed_set = (old_repodata_fns - fns_in_subdir) + removed_set = old_repodata_fns - fns_in_subdir for fn in removed_set: if fn in stat_cache: del stat_cache[fn] - new_repodata_packages = {k: v for k, v in old_repodata.get('packages', {}).items() if k in unchanged_set} - new_repodata_conda_packages = {k: v for k, v in old_repodata.get('packages.conda', {}).items() if k in unchanged_set} + new_repodata_packages = { + k: v + for k, v in old_repodata.get("packages", {}).items() + if k in unchanged_set + } + new_repodata_conda_packages = { + k: v + for k, v in old_repodata.get("packages.conda", {}).items() + if k in unchanged_set + } for k in sorted(unchanged_set): if not (k in new_repodata_packages or k in new_repodata_conda_packages): - fn, rec = ChannelIndex._load_index_from_cache(self.channel_root, subdir, fn, stat_cache) + fn, rec = ChannelIndex._load_index_from_cache( + self.channel_root, subdir, fn, stat_cache + ) # this is how we pass an exception through. When fn == rec, there's been a problem, # and we need to reload this file if fn == rec: @@ -956,79 +1128,101 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): # Sorting here prioritizes .conda files ('c') over .tar.bz2 files ('b') hash_extract_set = (*add_set, *update_set) - extract_func = functools.partial(ChannelIndex._extract_to_cache, - self.channel_root, subdir) + extract_func = functools.partial( + ChannelIndex._extract_to_cache, self.channel_root, subdir + ) # split up the set by .conda packages first, then .tar.bz2. This avoids race conditions # with execution in parallel that would end up in the same place. - for conda_format in tqdm(CONDA_PACKAGE_EXTENSIONS, desc="File format", - disable=(verbose or not progress), leave=False): + for conda_format in tqdm( + CONDA_PACKAGE_EXTENSIONS, + desc="File format", + disable=(verbose or not progress), + leave=False, + ): for fn, mtime, size, index_json in tqdm( - self.thread_executor.map( - extract_func, - (fn for fn in hash_extract_set if fn.endswith(conda_format))), - desc="hash & extract packages for %s" % subdir, - disable=(verbose or not progress), leave=False): - + self.thread_executor.map( + extract_func, + (fn for fn in hash_extract_set if fn.endswith(conda_format)), + ), + desc="hash & extract packages for %s" % subdir, + disable=(verbose or not progress), + leave=False, + ): # fn can be None if the file was corrupt or no longer there if fn and mtime: - stat_cache[fn] = {'mtime': int(mtime), 'size': size} + stat_cache[fn] = {"mtime": int(mtime), "size": size} if index_json: if fn.endswith(CONDA_PACKAGE_EXTENSION_V2): new_repodata_conda_packages[fn] = index_json else: new_repodata_packages[fn] = index_json else: - log.error("Package at %s did not contain valid index.json data. Please" - " check the file and remove/redownload if necessary to obtain " - "a valid package." % os.path.join(subdir_path, fn)) + log.error( + "Package at %s did not contain valid index.json data. 
Please" + " check the file and remove/redownload if necessary to obtain " + "a valid package." % os.path.join(subdir_path, fn) + ) new_repodata = { - 'packages': new_repodata_packages, - 'packages.conda': new_repodata_conda_packages, - 'info': { - 'subdir': subdir, + "packages": new_repodata_packages, + "packages.conda": new_repodata_conda_packages, + "info": { + "subdir": subdir, }, - 'repodata_version': REPODATA_VERSION, - 'removed': sorted(list(ignore_set)) + "repodata_version": REPODATA_VERSION, + "removed": sorted(list(ignore_set)), } finally: if stat_cache != stat_cache_original: # log.info("writing stat cache to %s", stat_cache_path) - with open(stat_cache_path, 'w') as fh: + with open(stat_cache_path, "w") as fh: json.dump(stat_cache, fh) return new_repodata def _ensure_dirs(self, subdir): # Create all cache directories in the subdir. ensure = lambda path: isdir(path) or os.makedirs(path) - cache_path = join(self.channel_root, subdir, '.cache') + cache_path = join(self.channel_root, subdir, ".cache") ensure(cache_path) - ensure(join(cache_path, 'index')) - ensure(join(cache_path, 'about')) - ensure(join(cache_path, 'paths')) - ensure(join(cache_path, 'recipe')) - ensure(join(cache_path, 'run_exports')) - ensure(join(cache_path, 'post_install')) - ensure(join(cache_path, 'icon')) - ensure(join(self.channel_root, 'icons')) - ensure(join(cache_path, 'recipe_log')) - - def _calculate_update_set(self, subdir, fns_in_subdir, old_repodata_fns, stat_cache, - verbose=False, progress=True): + ensure(join(cache_path, "index")) + ensure(join(cache_path, "about")) + ensure(join(cache_path, "paths")) + ensure(join(cache_path, "recipe")) + ensure(join(cache_path, "run_exports")) + ensure(join(cache_path, "post_install")) + ensure(join(cache_path, "icon")) + ensure(join(self.channel_root, "icons")) + ensure(join(cache_path, "recipe_log")) + + def _calculate_update_set( + self, + subdir, + fns_in_subdir, + old_repodata_fns, + stat_cache, + verbose=False, + progress=True, + ): # Determine the packages that already exist in repodata, but need to be updated. # We're not using md5 here because it takes too long. 
candidate_fns = fns_in_subdir & old_repodata_fns subdir_path = join(self.channel_root, subdir) update_set = set() - for fn in tqdm(iter(candidate_fns), desc="Finding updated files", - disable=(verbose or not progress), leave=False): + for fn in tqdm( + iter(candidate_fns), + desc="Finding updated files", + disable=(verbose or not progress), + leave=False, + ): if fn not in stat_cache: update_set.add(fn) else: stat_result = os.stat(join(subdir_path, fn)) - if (int(stat_result.st_mtime) != int(stat_cache[fn]['mtime']) or - stat_result.st_size != stat_cache[fn]['size']): + if ( + int(stat_result.st_mtime) != int(stat_cache[fn]["mtime"]) + or stat_result.st_size != stat_cache[fn]["size"] + ): update_set.add(fn) return update_set @@ -1052,19 +1246,24 @@ def _extract_to_cache(channel_root, subdir, fn, second_try=False): mtime = stat_result.st_mtime retval = fn, mtime, size, None - index_cache_path = join(subdir_path, '.cache', 'index', cache_fn + '.json') - about_cache_path = join(subdir_path, '.cache', 'about', cache_fn + '.json') - paths_cache_path = join(subdir_path, '.cache', 'paths', cache_fn + '.json') - recipe_cache_path = join(subdir_path, '.cache', 'recipe', cache_fn + '.json') - run_exports_cache_path = join(subdir_path, '.cache', 'run_exports', cache_fn + '.json') - post_install_cache_path = join(subdir_path, '.cache', 'post_install', cache_fn + '.json') - icon_cache_path = join(subdir_path, '.cache', 'icon', cache_fn) + index_cache_path = join(subdir_path, ".cache", "index", cache_fn + ".json") + about_cache_path = join(subdir_path, ".cache", "about", cache_fn + ".json") + paths_cache_path = join(subdir_path, ".cache", "paths", cache_fn + ".json") + recipe_cache_path = join(subdir_path, ".cache", "recipe", cache_fn + ".json") + run_exports_cache_path = join( + subdir_path, ".cache", "run_exports", cache_fn + ".json" + ) + post_install_cache_path = join( + subdir_path, ".cache", "post_install", cache_fn + ".json" + ) + icon_cache_path = join(subdir_path, ".cache", "icon", cache_fn) log.debug("hashing, extracting, and caching %s" % fn) alternate_cache = False - if (not os.path.exists(index_cache_path) and - os.path.exists(index_cache_path.replace(fn, alternate_cache_fn))): + if not os.path.exists(index_cache_path) and os.path.exists( + index_cache_path.replace(fn, alternate_cache_fn) + ): alternate_cache = True try: @@ -1072,46 +1271,61 @@ def _extract_to_cache(channel_root, subdir, fn, second_try=False): # .conda readup is very fast (essentially free), but .conda files come from # converting .tar.bz2 files, which can go wrong. Forcing extraction for # .conda files gives us a check on the validity of that conversion. 
- if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile(index_cache_path): + if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile( + index_cache_path + ): with open(index_cache_path) as f: index_json = json.load(f) - elif not alternate_cache and (second_try or not os.path.exists(index_cache_path)): + elif not alternate_cache and ( + second_try or not os.path.exists(index_cache_path) + ): with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(abs_fn, dest_dir=tmpdir, components="info") - index_file = os.path.join(tmpdir, 'info', 'index.json') + conda_package_handling.api.extract( + abs_fn, dest_dir=tmpdir, components="info" + ) + index_file = os.path.join(tmpdir, "info", "index.json") if not os.path.exists(index_file): return retval with open(index_file) as f: index_json = json.load(f) - _cache_info_file(tmpdir, 'about.json', about_cache_path) - _cache_info_file(tmpdir, 'paths.json', paths_cache_path) - _cache_info_file(tmpdir, 'recipe_log.json', paths_cache_path) + _cache_info_file(tmpdir, "about.json", about_cache_path) + _cache_info_file(tmpdir, "paths.json", paths_cache_path) + _cache_info_file(tmpdir, "recipe_log.json", paths_cache_path) _cache_run_exports(tmpdir, run_exports_cache_path) - _cache_post_install_details(paths_cache_path, post_install_cache_path) + _cache_post_install_details( + paths_cache_path, post_install_cache_path + ) recipe_json = _cache_recipe(tmpdir, recipe_cache_path) _cache_icon(tmpdir, recipe_json, icon_cache_path) # decide what fields to filter out, like has_prefix filter_fields = { - 'arch', - 'has_prefix', - 'mtime', - 'platform', - 'ucs', - 'requires_features', - 'binstar', - 'target-triplet', - 'machine', - 'operatingsystem', + "arch", + "has_prefix", + "mtime", + "platform", + "ucs", + "requires_features", + "binstar", + "target-triplet", + "machine", + "operatingsystem", } for field_name in filter_fields & set(index_json): del index_json[field_name] elif alternate_cache: # we hit the cache of the other file type. Copy files to this name, and replace # the size, md5, and sha256 values - paths = [index_cache_path, about_cache_path, paths_cache_path, recipe_cache_path, - run_exports_cache_path, post_install_cache_path, icon_cache_path] + paths = [ + index_cache_path, + about_cache_path, + paths_cache_path, + recipe_cache_path, + run_exports_cache_path, + post_install_cache_path, + icon_cache_path, + ] bizarro_paths = [_.replace(fn, alternate_cache_fn) for _ in paths] for src, dest in zip(bizarro_paths, paths): if os.path.exists(src): @@ -1133,17 +1347,19 @@ def _extract_to_cache(channel_root, subdir, fn, second_try=False): # info in the cache to avoid confusion. 
index_json.update(conda_package_handling.api.get_pkg_details(abs_fn)) - with open(index_cache_path, 'w') as fh: + with open(index_cache_path, "w") as fh: json.dump(index_json, fh) retval = fn, mtime, size, index_json except (InvalidArchiveError, KeyError, EOFError, JSONDecodeError): if not second_try: - return ChannelIndex._extract_to_cache(channel_root, subdir, fn, second_try=True) + return ChannelIndex._extract_to_cache( + channel_root, subdir, fn, second_try=True + ) return retval @staticmethod def _load_index_from_cache(channel_root, subdir, fn, stat_cache): - index_cache_path = join(channel_root, subdir, '.cache', 'index', fn + '.json') + index_cache_path = join(channel_root, subdir, ".cache", "index", fn + ".json") try: with open(index_cache_path) as fh: index_json = json.load(fh) @@ -1161,16 +1377,26 @@ def _load_all_from_cache(channel_root, subdir, fn): return {} # In contrast to self._load_index_from_cache(), this method reads up pretty much # all of the cached metadata, except for paths. It all gets dumped into a single map. - index_cache_path = join(subdir_path, '.cache', 'index', fn + '.json') - about_cache_path = join(subdir_path, '.cache', 'about', fn + '.json') - recipe_cache_path = join(subdir_path, '.cache', 'recipe', fn + '.json') - run_exports_cache_path = join(subdir_path, '.cache', 'run_exports', fn + '.json') - post_install_cache_path = join(subdir_path, '.cache', 'post_install', fn + '.json') - icon_cache_path_glob = join(subdir_path, '.cache', 'icon', fn + ".*") - recipe_log_path = join(subdir_path, '.cache', 'recipe_log', fn + '.json') + index_cache_path = join(subdir_path, ".cache", "index", fn + ".json") + about_cache_path = join(subdir_path, ".cache", "about", fn + ".json") + recipe_cache_path = join(subdir_path, ".cache", "recipe", fn + ".json") + run_exports_cache_path = join( + subdir_path, ".cache", "run_exports", fn + ".json" + ) + post_install_cache_path = join( + subdir_path, ".cache", "post_install", fn + ".json" + ) + icon_cache_path_glob = join(subdir_path, ".cache", "icon", fn + ".*") + recipe_log_path = join(subdir_path, ".cache", "recipe_log", fn + ".json") data = {} - for path in (recipe_cache_path, about_cache_path, index_cache_path, post_install_cache_path, recipe_log_path): + for path in ( + recipe_cache_path, + about_cache_path, + index_cache_path, + post_install_cache_path, + recipe_log_path, + ): try: if os.path.getsize(path) != 0: with open(path) as fh: @@ -1182,10 +1408,10 @@ def _load_all_from_cache(channel_root, subdir, fn): icon_cache_paths = glob(icon_cache_path_glob) if icon_cache_paths: icon_cache_path = sorted(icon_cache_paths)[-1] - icon_ext = icon_cache_path.rsplit('.', 1)[-1] - channel_icon_fn = "{}.{}".format(data['name'], icon_ext) + icon_ext = icon_cache_path.rsplit(".", 1)[-1] + channel_icon_fn = "{}.{}".format(data["name"], icon_ext) icon_url = "icons/" + channel_icon_fn - icon_channel_path = join(channel_root, 'icons', channel_icon_fn) + icon_channel_path = join(channel_root, "icons", channel_icon_fn) icon_md5 = utils.md5_file(icon_cache_path) icon_hash = f"md5:{icon_md5}:{getsize(icon_cache_path)}" data.update(icon_hash=icon_hash, icon_url=icon_url) @@ -1195,7 +1421,7 @@ def _load_all_from_cache(channel_root, subdir, fn): pass # have to stat again, because we don't have access to the stat cache here - data['mtime'] = mtime + data["mtime"] = mtime source = data.get("source", {}) try: @@ -1203,8 +1429,8 @@ def _load_all_from_cache(channel_root, subdir, fn): except AttributeError: # sometimes source is a list instead of a dict 
pass - _clear_newline_chars(data, 'description') - _clear_newline_chars(data, 'summary') + _clear_newline_chars(data, "description") + _clear_newline_chars(data, "summary") try: with open(run_exports_cache_path) as fh: data["run_exports"] = json.load(fh) @@ -1214,8 +1440,18 @@ def _load_all_from_cache(channel_root, subdir, fn): def _write_repodata(self, subdir, repodata, json_filename): repodata_json_path = join(self.channel_root, subdir, json_filename) - new_repodata_binary = json.dumps(repodata, indent=2, sort_keys=True,).replace("':'", "': '").encode("utf-8") - write_result = _maybe_write(repodata_json_path, new_repodata_binary, write_newline_end=True) + new_repodata_binary = ( + json.dumps( + repodata, + indent=2, + sort_keys=True, + ) + .replace("':'", "': '") + .encode("utf-8") + ) + write_result = _maybe_write( + repodata_json_path, new_repodata_binary, write_newline_end=True + ) if write_result: repodata_bz2_path = repodata_json_path + ".bz2" bz2_content = bz2.compress(new_repodata_binary) @@ -1229,52 +1465,57 @@ def _write_subdir_index_html(self, subdir, repodata): def _add_extra_path(extra_paths, path): if isfile(join(self.channel_root, path)): extra_paths[basename(path)] = { - 'size': getsize(path), - 'timestamp': int(getmtime(path)), - 'sha256': utils.sha256_checksum(path), - 'md5': utils.md5_file(path), + "size": getsize(path), + "timestamp": int(getmtime(path)), + "sha256": utils.sha256_checksum(path), + "md5": utils.md5_file(path), } extra_paths = OrderedDict() _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN + '.bz2')) + _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN + ".bz2")) _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN + '.bz2')) + _add_extra_path( + extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN + ".bz2") + ) # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json")) _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json")) rendered_html = _make_subdir_index_html( self.channel_name, subdir, repodata_packages, extra_paths ) - index_path = join(subdir_path, 'index.html') + index_path = join(subdir_path, "index.html") return _maybe_write(index_path, rendered_html) def _write_channeldata_index_html(self, channeldata): - rendered_html = _make_channeldata_index_html( - self.channel_name, channeldata - ) - index_path = join(self.channel_root, 'index.html') + rendered_html = _make_channeldata_index_html(self.channel_name, channeldata) + index_path = join(self.channel_root, "index.html") _maybe_write(index_path, rendered_html) def _update_channeldata(self, channel_data, repodata, subdir): legacy_packages = repodata["packages"] conda_packages = repodata["packages.conda"] - use_these_legacy_keys = set(legacy_packages.keys()) - {k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys()} + use_these_legacy_keys = set(legacy_packages.keys()) - { + k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys() + } all_packages = conda_packages.copy() all_packages.update({k: legacy_packages[k] for k in use_these_legacy_keys}) - package_data = channel_data.get('packages', {}) + package_data = channel_data.get("packages", {}) def _append_group(groups, candidates): candidate = sorted(candidates, key=lambda x: x[1].get("timestamp", 0))[-1] pkg_dict = candidate[1] - pkg_name = pkg_dict['name'] - - run_exports = package_data.get(pkg_name, 
{}).get('run_exports', {}) - if (pkg_name not in package_data or - subdir not in package_data.get(pkg_name, {}).get('subdirs', []) or - package_data.get(pkg_name, {}).get('timestamp', 0) < - _make_seconds(pkg_dict.get('timestamp', 0)) or - run_exports and pkg_dict['version'] not in run_exports): + pkg_name = pkg_dict["name"] + + run_exports = package_data.get(pkg_name, {}).get("run_exports", {}) + if ( + pkg_name not in package_data + or subdir not in package_data.get(pkg_name, {}).get("subdirs", []) + or package_data.get(pkg_name, {}).get("timestamp", 0) + < _make_seconds(pkg_dict.get("timestamp", 0)) + or run_exports + and pkg_dict["version"] not in run_exports + ): groups.append(candidate) groups = [] @@ -1300,53 +1541,92 @@ def _replace_if_newer_and_present(pd, data, erec, data_newer, k): if groups: fns, fn_dicts = zip(*groups) - load_func = functools.partial(ChannelIndex._load_all_from_cache, - self.channel_root, subdir,) + load_func = functools.partial( + ChannelIndex._load_all_from_cache, + self.channel_root, + subdir, + ) for fn_dict, data in zip(fn_dicts, self.thread_executor.map(load_func, fns)): if data: data.update(fn_dict) - name = data['name'] + name = data["name"] # existing record erec = package_data.get(name, {}) - data_v = data.get('version', '0') - erec_v = erec.get('version', '0') + data_v = data.get("version", "0") + erec_v = erec.get("version", "0") data_newer = VersionOrder(data_v) > VersionOrder(erec_v) package_data[name] = package_data.get(name, {}) # keep newer value for these - for k in ('description', 'dev_url', 'doc_url', 'doc_source_url', 'home', 'license', - 'source_url', 'source_git_url', 'summary', 'icon_url', 'icon_hash', 'tags', - 'identifiers', 'keywords', 'recipe_origin', 'version'): - _replace_if_newer_and_present(package_data[name], data, erec, data_newer, k) + for k in ( + "description", + "dev_url", + "doc_url", + "doc_source_url", + "home", + "license", + "source_url", + "source_git_url", + "summary", + "icon_url", + "icon_hash", + "tags", + "identifiers", + "keywords", + "recipe_origin", + "version", + ): + _replace_if_newer_and_present( + package_data[name], data, erec, data_newer, k + ) # keep any true value for these, since we don't distinguish subdirs - for k in ("binary_prefix", "text_prefix", "activate.d", "deactivate.d", "pre_link", - "post_link", "pre_unlink"): + for k in ( + "binary_prefix", + "text_prefix", + "activate.d", + "deactivate.d", + "pre_link", + "post_link", + "pre_unlink", + ): package_data[name][k] = any((data.get(k), erec.get(k))) - package_data[name]['subdirs'] = sorted(list(set(erec.get('subdirs', []) + [subdir]))) + package_data[name]["subdirs"] = sorted( + list(set(erec.get("subdirs", []) + [subdir])) + ) # keep one run_exports entry per version of the package, since these vary by version - run_exports = erec.get('run_exports', {}) - exports_from_this_version = data.get('run_exports') + run_exports = erec.get("run_exports", {}) + exports_from_this_version = data.get("run_exports") if exports_from_this_version: - run_exports[data_v] = data.get('run_exports') - package_data[name]['run_exports'] = run_exports - package_data[name]['timestamp'] = _make_seconds(max( - data.get('timestamp', 0), channel_data.get(name, {}).get('timestamp', 0))) - - channel_data.update({ - 'channeldata_version': CHANNELDATA_VERSION, - 'subdirs': sorted(list(set(channel_data.get('subdirs', []) + [subdir]))), - 'packages': package_data, - }) + run_exports[data_v] = data.get("run_exports") + package_data[name]["run_exports"] = run_exports + 
package_data[name]["timestamp"] = _make_seconds( + max( + data.get("timestamp", 0), + channel_data.get(name, {}).get("timestamp", 0), + ) + ) + + channel_data.update( + { + "channeldata_version": CHANNELDATA_VERSION, + "subdirs": sorted( + list(set(channel_data.get("subdirs", []) + [subdir])) + ), + "packages": package_data, + } + ) def _write_channeldata(self, channeldata): # trim out commits, as they can take up a ton of space. They're really only for the RSS feed. - for _pkg, pkg_dict in channeldata.get('packages', {}).items(): + for _pkg, pkg_dict in channeldata.get("packages", {}).items(): if "commits" in pkg_dict: - del pkg_dict['commits'] - channeldata_path = join(self.channel_root, 'channeldata.json') - content = json.dumps(channeldata, indent=2, sort_keys=True).replace("':'", "': '") + del pkg_dict["commits"] + channeldata_path = join(self.channel_root, "channeldata.json") + content = json.dumps(channeldata, indent=2, sort_keys=True).replace( + "':'", "': '" + ) _maybe_write(channeldata_path, content, True) def _load_patch_instructions_tarball(self, subdir, patch_generator): @@ -1360,61 +1640,78 @@ def _load_patch_instructions_tarball(self, subdir, patch_generator): return instructions def _create_patch_instructions(self, subdir, repodata, patch_generator=None): - gen_patch_path = patch_generator or join(self.channel_root, 'gen_patch.py') + gen_patch_path = patch_generator or join(self.channel_root, "gen_patch.py") if isfile(gen_patch_path): log.debug(f"using patch generator {gen_patch_path} for {subdir}") # https://stackoverflow.com/a/41595552/2127762 try: - from importlib.util import spec_from_file_location, module_from_spec - spec = spec_from_file_location('a_b', gen_patch_path) + from importlib.util import module_from_spec, spec_from_file_location + + spec = spec_from_file_location("a_b", gen_patch_path) mod = module_from_spec(spec) spec.loader.exec_module(mod) # older pythons except ImportError: import imp - mod = imp.load_source('a_b', gen_patch_path) + + mod = imp.load_source("a_b", gen_patch_path) instructions = mod._patch_repodata(repodata, subdir) - if instructions.get('patch_instructions_version', 0) > 1: + if instructions.get("patch_instructions_version", 0) > 1: raise RuntimeError("Incompatible patch instructions version") return instructions else: if patch_generator: - raise ValueError("Specified metadata patch file '{}' does not exist. Please try an absolute " - "path, or examine your relative path carefully with respect to your cwd." - .format(patch_generator)) + raise ValueError( + "Specified metadata patch file '{}' does not exist. 
Please try an absolute " + "path, or examine your relative path carefully with respect to your cwd.".format( + patch_generator + ) + ) return {} def _write_patch_instructions(self, subdir, instructions): - new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace("':'", "': '") - patch_instructions_path = join(self.channel_root, subdir, 'patch_instructions.json') + new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace( + "':'", "': '" + ) + patch_instructions_path = join( + self.channel_root, subdir, "patch_instructions.json" + ) _maybe_write(patch_instructions_path, new_patch, True) def _load_instructions(self, subdir): - patch_instructions_path = join(self.channel_root, subdir, 'patch_instructions.json') + patch_instructions_path = join( + self.channel_root, subdir, "patch_instructions.json" + ) if isfile(patch_instructions_path): log.debug("using patch instructions %s" % patch_instructions_path) with open(patch_instructions_path) as fh: instructions = json.load(fh) - if instructions.get('patch_instructions_version', 0) > 1: + if instructions.get("patch_instructions_version", 0) > 1: raise RuntimeError("Incompatible patch instructions version") return instructions return {} def _patch_repodata(self, subdir, repodata, patch_generator=None): - if patch_generator and any(patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS): - instructions = self._load_patch_instructions_tarball(subdir, patch_generator) + if patch_generator and any( + patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS + ): + instructions = self._load_patch_instructions_tarball( + subdir, patch_generator + ) else: - instructions = self._create_patch_instructions(subdir, repodata, patch_generator) + instructions = self._create_patch_instructions( + subdir, repodata, patch_generator + ) if instructions: self._write_patch_instructions(subdir, instructions) else: instructions = self._load_instructions(subdir) - if instructions.get('patch_instructions_version', 0) > 1: + if instructions.get("patch_instructions_version", 0) > 1: raise RuntimeError("Incompatible patch instructions version") return _apply_instructions(subdir, repodata, instructions), instructions diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index ea7be064f8..6e5e9a4980 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -1,40 +1,44 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import defaultdict -from itertools import groupby -from functools import lru_cache import json -from operator import itemgetter -from os.path import abspath, join, dirname, exists, basename, normcase import os import re import sys import tempfile +from collections import defaultdict +from functools import lru_cache +from itertools import groupby +from operator import itemgetter +from os.path import abspath, basename, dirname, exists, join, normcase -from conda_build.os_utils.ldd import get_linkages, get_package_obj_files, get_untracked_obj_files +from conda_build.conda_interface import ( + display_actions, + get_index, + install_actions, + is_linked, + linked_data, + specs_from_args, +) +from conda_build.os_utils.ldd import ( + get_linkages, + get_package_obj_files, + get_untracked_obj_files, +) from conda_build.os_utils.liefldd import codefile_type from conda_build.os_utils.macho import get_rpaths, human_filetype from conda_build.utils import ( comma_join, - rm_rf, - package_has_file, - get_logger, ensure_list, + get_logger, + 
package_has_file, + rm_rf, ) -from conda_build.conda_interface import ( - specs_from_args, - is_linked, - linked_data, - get_index, -) -from conda_build.conda_interface import display_actions, install_actions - @lru_cache(maxsize=None) def dist_files(prefix, dist): meta = is_linked(prefix, dist) - return set(meta['files']) if meta else set() + return set(meta["files"]) if meta else set() def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False): @@ -43,8 +47,9 @@ def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False): the conda packages the file came from. Usually the iteration yields only one package. """ - norm_ipp = normcase(in_prefix_path.replace(os.sep, '/')) + norm_ipp = normcase(in_prefix_path.replace(os.sep, "/")) from conda_build.utils import linked_data_no_multichannels + if avoid_canonical_channel_name: fn = linked_data_no_multichannels else: @@ -67,10 +72,10 @@ def print_object_info(info, key): continue if f_info[data] is None: continue - output_string += f' {data}: {f_info[data]}\n' + output_string += f" {data}: {f_info[data]}\n" if len([i for i in f_info if f_info[i] is not None and i != key]) > 1: - output_string += '\n' - output_string += '\n' + output_string += "\n" + output_string += "\n" return output_string @@ -82,15 +87,18 @@ def __str__(self): untracked_package = _untracked_package() -def check_install(packages, platform=None, channel_urls=(), prepend=True, - minimal_hint=False): - prefix = tempfile.mkdtemp('conda') +def check_install( + packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False +): + prefix = tempfile.mkdtemp("conda") try: specs = specs_from_args(packages) - index = get_index(channel_urls=channel_urls, prepend=prepend, - platform=platform, prefix=prefix) - actions = install_actions(prefix, index, specs, pinned=False, - minimal_hint=minimal_hint) + index = get_index( + channel_urls=channel_urls, prepend=prepend, platform=platform, prefix=prefix + ) + actions = install_actions( + prefix, index, specs, pinned=False, minimal_hint=minimal_hint + ) display_actions(actions, index) return actions finally: @@ -102,13 +110,13 @@ def print_linkages(depmap, show_files=False): # Print system and not found last dist_depmap = {} for k, v in depmap.items(): - if hasattr(k, 'dist_name'): + if hasattr(k, "dist_name"): k = k.dist_name dist_depmap[k] = v depmap = dist_depmap - k = sorted(set(depmap.keys()) - {'system', 'not found'}) - all_deps = k if 'not found' not in depmap.keys() else k + ['system', 'not found'] + k = sorted(set(depmap.keys()) - {"system", "not found"}) + all_deps = k if "not found" not in depmap.keys() else k + ["system", "not found"] output_string = "" for dep in all_deps: output_string += "%s:\n" % dep @@ -123,98 +131,116 @@ def print_linkages(depmap, show_files=False): def replace_path(binary, path, prefix): - if sys.platform.startswith('linux'): + if sys.platform.startswith("linux"): return abspath(path) - elif sys.platform.startswith('darwin'): + elif sys.platform.startswith("darwin"): if path == basename(binary): return abspath(join(prefix, binary)) - if '@rpath' in path: + if "@rpath" in path: rpaths = get_rpaths(join(prefix, binary)) if not rpaths: return "NO LC_RPATH FOUND" else: for rpath in rpaths: path1 = path.replace("@rpath", rpath) - path1 = path1.replace('@loader_path', join(prefix, dirname(binary))) + path1 = path1.replace("@loader_path", join(prefix, dirname(binary))) if exists(abspath(join(prefix, path1))): path = path1 break else: - return 'not found' - path = 
path.replace('@loader_path', join(prefix, dirname(binary))) - if path.startswith('/'): + return "not found" + path = path.replace("@loader_path", join(prefix, dirname(binary))) + if path.startswith("/"): return abspath(path) - return 'not found' + return "not found" -def test_installable(channel='defaults'): +def test_installable(channel="defaults"): success = True log = get_logger(__name__) - has_py = re.compile(r'py(\d)(\d)') - for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']: + has_py = re.compile(r"py(\d)(\d)") + for platform in ["osx-64", "linux-32", "linux-64", "win-32", "win-64"]: log.info("######## Testing platform %s ########", platform) channels = [channel] index = get_index(channel_urls=channels, prepend=False, platform=platform) for _, rec in index.items(): # If we give channels at the command line, only look at # packages from those channels (not defaults). - if channel != 'defaults' and rec.get('schannel', 'defaults') == 'defaults': + if channel != "defaults" and rec.get("schannel", "defaults") == "defaults": continue - name = rec['name'] - if name in {'conda', 'conda-build'}: + name = rec["name"] + if name in {"conda", "conda-build"}: # conda can only be installed in the root environment continue - if name.endswith('@'): + if name.endswith("@"): # this is a 'virtual' feature record that conda adds to the index for the solver # and should be ignored here continue # Don't fail just because the package is a different version of Python # than the default. We should probably check depends rather than the # build string. - build = rec['build'] + build = rec["build"] match = has_py.search(build) - assert match if 'py' in build else True, build + assert match if "py" in build else True, build if match: - additional_packages = [f'python={match.group(1)}.{match.group(2)}'] + additional_packages = [f"python={match.group(1)}.{match.group(2)}"] else: additional_packages = [] - version = rec['version'] - log.info('Testing %s=%s', name, version) + version = rec["version"] + log.info("Testing %s=%s", name, version) try: - install_steps = check_install([name + '=' + version] + additional_packages, - channel_urls=channels, prepend=False, - platform=platform) + install_steps = check_install( + [name + "=" + version] + additional_packages, + channel_urls=channels, + prepend=False, + platform=platform, + ) success &= bool(install_steps) except KeyboardInterrupt: raise # sys.exit raises an exception that doesn't subclass from Exception except BaseException as e: success = False - log.error("FAIL: %s %s on %s with %s (%s)", name, version, - platform, additional_packages, e) + log.error( + "FAIL: %s %s on %s with %s (%s)", + name, + version, + platform, + additional_packages, + e, + ) return success def _installed(prefix): installed = linked_data(prefix) - installed = {rec['name']: dist for dist, rec in installed.items()} + installed = {rec["name"]: dist for dist, rec in installed.items()} return installed def _underlined_text(text): - return str(text) + '\n' + '-' * len(str(text)) + '\n\n' - - -def inspect_linkages(packages, prefix=sys.prefix, untracked=False, - all_packages=False, show_files=False, groupby="package", sysroot=""): + return str(text) + "\n" + "-" * len(str(text)) + "\n\n" + + +def inspect_linkages( + packages, + prefix=sys.prefix, + untracked=False, + all_packages=False, + show_files=False, + groupby="package", + sysroot="", +): pkgmap = {} installed = _installed(prefix) if not packages and not untracked and not all_packages: - raise ValueError("At least one 
package or --untracked or --all must be provided") + raise ValueError( + "At least one package or --untracked or --all must be provided" + ) if all_packages: packages = sorted(installed.keys()) @@ -230,8 +256,10 @@ def inspect_linkages(packages, prefix=sys.prefix, untracked=False, else: dist = installed[pkg] - if not sys.platform.startswith(('linux', 'darwin')): - sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") + if not sys.platform.startswith(("linux", "darwin")): + sys.exit( + "Error: conda inspect linkages is only implemented in Linux and OS X" + ) if dist == untracked_package: obj_files = get_untracked_obj_files(prefix) @@ -240,41 +268,48 @@ def inspect_linkages(packages, prefix=sys.prefix, untracked=False, linkages = get_linkages(obj_files, prefix, sysroot) depmap = defaultdict(list) pkgmap[pkg] = depmap - depmap['not found'] = [] - depmap['system'] = [] + depmap["not found"] = [] + depmap["system"] = [] for binary in linkages: for lib, path in linkages[binary]: - path = replace_path(binary, path, prefix) if path not in {'', - 'not found'} else path + path = ( + replace_path(binary, path, prefix) + if path not in {"", "not found"} + else path + ) if path.startswith(prefix): - in_prefix_path = re.sub('^' + prefix + '/', '', path) + in_prefix_path = re.sub("^" + prefix + "/", "", path) deps = list(which_package(in_prefix_path, prefix)) if len(deps) > 1: deps_str = [str(dep) for dep in deps] - get_logger(__name__).warn("Warning: %s comes from multiple " - "packages: %s", path, comma_join(deps_str)) + get_logger(__name__).warn( + "Warning: %s comes from multiple " "packages: %s", + path, + comma_join(deps_str), + ) if not deps: if exists(path): - depmap['untracked'].append((lib, path.split(prefix + - '/', 1)[-1], binary)) + depmap["untracked"].append( + (lib, path.split(prefix + "/", 1)[-1], binary) + ) else: - depmap['not found'].append((lib, path.split(prefix + - '/', 1)[-1], binary)) + depmap["not found"].append( + (lib, path.split(prefix + "/", 1)[-1], binary) + ) for d in deps: - depmap[d].append((lib, path.split(prefix + '/', - 1)[-1], binary)) - elif path == 'not found': - depmap['not found'].append((lib, path, binary)) + depmap[d].append((lib, path.split(prefix + "/", 1)[-1], binary)) + elif path == "not found": + depmap["not found"].append((lib, path, binary)) else: - depmap['system'].append((lib, path, binary)) + depmap["system"].append((lib, path, binary)) output_string = "" - if groupby == 'package': + if groupby == "package": for pkg in packages: output_string += _underlined_text(pkg) output_string += print_linkages(pkgmap[pkg], show_files=show_files) - elif groupby == 'dependency': + elif groupby == "dependency": # {pkg: {dep: [files]}} -> {dep: {pkg: [files]}} inverted_map = defaultdict(lambda: defaultdict(list)) for pkg in pkgmap: @@ -283,19 +318,19 @@ def inspect_linkages(packages, prefix=sys.prefix, untracked=False, inverted_map[dep][pkg] = pkgmap[pkg][dep] # print system and not found last - k = sorted(set(inverted_map.keys()) - {'system', 'not found'}) - for dep in k + ['system', 'not found']: + k = sorted(set(inverted_map.keys()) - {"system", "not found"}) + for dep in k + ["system", "not found"]: output_string += _underlined_text(dep) output_string += print_linkages(inverted_map[dep], show_files=show_files) else: raise ValueError("Unrecognized groupby: %s" % groupby) - if hasattr(output_string, 'decode'): - output_string = output_string.decode('utf-8') + if hasattr(output_string, "decode"): + output_string = 
output_string.decode("utf-8") return output_string -def inspect_objects(packages, prefix=sys.prefix, groupby='package'): +def inspect_objects(packages, prefix=sys.prefix, groupby="package"): installed = _installed(prefix) output_string = "" @@ -309,7 +344,7 @@ def inspect_objects(packages, prefix=sys.prefix, groupby='package'): output_string += _underlined_text(pkg) - if not sys.platform.startswith('darwin'): + if not sys.platform.startswith("darwin"): sys.exit("Error: conda inspect objects is only implemented in OS X") if dist == untracked_package: @@ -322,15 +357,15 @@ def inspect_objects(packages, prefix=sys.prefix, groupby='package'): f_info = {} path = join(prefix, f) filetype = codefile_type(path) - if filetype == 'machofile': - f_info['filetype'] = human_filetype(path, None) - f_info['rpath'] = ':'.join(get_rpaths(path)) - f_info['filename'] = f + if filetype == "machofile": + f_info["filetype"] = human_filetype(path, None) + f_info["rpath"] = ":".join(get_rpaths(path)) + f_info["filename"] = f info.append(f_info) output_string += print_object_info(info, groupby) - if hasattr(output_string, 'decode'): - output_string = output_string.decode('utf-8') + if hasattr(output_string, "decode"): + output_string = output_string.decode("utf-8") return output_string @@ -339,9 +374,9 @@ def get_hash_input(packages): for pkg in ensure_list(packages): pkgname = os.path.basename(pkg)[:-8] hash_inputs[pkgname] = {} - hash_input = package_has_file(pkg, 'info/hash_input.json') + hash_input = package_has_file(pkg, "info/hash_input.json") if hash_input: - hash_inputs[pkgname]['recipe'] = json.loads(hash_input) + hash_inputs[pkgname]["recipe"] = json.loads(hash_input) else: hash_inputs[pkgname] = "" diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index f742153718..e77de5bf8a 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -1,31 +1,39 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from functools import partial -from io import StringIO, TextIOBase +import datetime import json import os import pathlib import re import time -import datetime +from functools import partial +from io import StringIO, TextIOBase from typing import IO, Any, Optional from warnings import warn import jinja2 import yaml + try: import tomllib # Python 3.11 except: import tomli as tomllib +from . import _load_setup_py_data from .environ import get_dict as get_environ -from .utils import get_installed_packages, apply_pin_expressions, get_logger, HashableDict +from .exceptions import CondaBuildException from .render import get_env_dependencies -from .utils import copy_into, check_call_env, rm_rf, ensure_valid_spec +from .utils import ( + HashableDict, + apply_pin_expressions, + check_call_env, + copy_into, + ensure_valid_spec, + get_installed_packages, + get_logger, + rm_rf, +) from .variants import DEFAULT_COMPILERS -from .exceptions import CondaBuildException -from . import _load_setup_py_data - log = get_logger(__name__) @@ -45,20 +53,63 @@ class UndefinedNeverFail(jinja2.Undefined): you can detect which undefined names were used by inspecting that list. Be sure to clear the all_undefined_names list before calling template.render(). 
""" + all_undefined_names = [] - def __init__(self, hint=None, obj=jinja2.runtime.missing, name=None, - exc=jinja2.exceptions.UndefinedError): + def __init__( + self, + hint=None, + obj=jinja2.runtime.missing, + name=None, + exc=jinja2.exceptions.UndefinedError, + ): jinja2.Undefined.__init__(self, hint, obj, name, exc) # Using any of these methods on an Undefined variable # results in another Undefined variable. - __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ - __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ - __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ - __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = \ - __complex__ = __pow__ = __rpow__ = \ - lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) + __add__ = ( + __radd__ + ) = ( + __mul__ + ) = ( + __rmul__ + ) = ( + __div__ + ) = ( + __rdiv__ + ) = ( + __truediv__ + ) = ( + __rtruediv__ + ) = ( + __floordiv__ + ) = ( + __rfloordiv__ + ) = ( + __mod__ + ) = ( + __rmod__ + ) = ( + __pos__ + ) = ( + __neg__ + ) = ( + __call__ + ) = ( + __getitem__ + ) = ( + __lt__ + ) = ( + __le__ + ) = ( + __gt__ + ) = ( + __ge__ + ) = ( + __complex__ + ) = __pow__ = __rpow__ = lambda self, *args, **kwargs: self._return_undefined( + self._undefined_name + ) # Accessing an attribute of an Undefined variable # results in another Undefined variable. @@ -66,12 +117,12 @@ def __getattr__(self, k): try: return object.__getattr__(self, k) except AttributeError: - self._return_undefined(self._undefined_name + '.' + k) + self._return_undefined(self._undefined_name + "." + k) # Unlike the methods above, Python requires that these # few methods must always return the correct type - __str__ = __repr__ = lambda self: self._return_value('') - __unicode__ = lambda self: self._return_value('') + __str__ = __repr__ = lambda self: self._return_value("") + __unicode__ = lambda self: self._return_value("") __int__ = lambda self: self._return_value(0) __float__ = lambda self: self._return_value(0.0) __nonzero__ = lambda self: self._return_value(False) @@ -79,10 +130,12 @@ def __getattr__(self, k): def _return_undefined(self, result_name): # Record that this undefined variable was actually used. UndefinedNeverFail.all_undefined_names.append(self._undefined_name) - return UndefinedNeverFail(hint=self._undefined_hint, - obj=self._undefined_obj, - name=result_name, - exc=self._undefined_exception) + return UndefinedNeverFail( + hint=self._undefined_hint, + obj=self._undefined_obj, + name=result_name, + exc=self._undefined_exception, + ) def _return_value(self, value=None): # Record that this undefined variable was actually used. @@ -103,76 +156,110 @@ def __init__(self, unfiltered_loader, config): def get_source(self, environment, template): # we have circular imports here. 
Do a local import - from .metadata import select_lines, ns_cfg - contents, filename, uptodate = self._unfiltered_loader.get_source(environment, - template) - return (select_lines(contents, ns_cfg(self.config), - variants_in_place=bool(self.config.variant)), filename, uptodate) + from .metadata import ns_cfg, select_lines + + contents, filename, uptodate = self._unfiltered_loader.get_source( + environment, template + ) + return ( + select_lines( + contents, + ns_cfg(self.config), + variants_in_place=bool(self.config.variant), + ), + filename, + uptodate, + ) -def load_setup_py_data(m, setup_file='setup.py', from_recipe_dir=False, recipe_dir=None, - permit_undefined_jinja=True): +def load_setup_py_data( + m, + setup_file="setup.py", + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): _setuptools_data = None # we must copy the script into the work folder to avoid incompatible pyc files - origin_setup_script = os.path.join(os.path.dirname(__file__), '_load_setup_py_data.py') - dest_setup_script = os.path.join(m.config.work_dir, '_load_setup_py_data.py') + origin_setup_script = os.path.join( + os.path.dirname(__file__), "_load_setup_py_data.py" + ) + dest_setup_script = os.path.join(m.config.work_dir, "_load_setup_py_data.py") copy_into(origin_setup_script, dest_setup_script) env = get_environ(m) env["CONDA_BUILD_STATE"] = "RENDER" if os.path.isfile(m.config.build_python): args = [m.config.build_python, dest_setup_script, m.config.work_dir, setup_file] if from_recipe_dir: - assert recipe_dir, 'recipe_dir must be set if from_recipe_dir is True' - args.append('--from-recipe-dir') - args.extend(['--recipe-dir', recipe_dir]) + assert recipe_dir, "recipe_dir must be set if from_recipe_dir is True" + args.append("--from-recipe-dir") + args.extend(["--recipe-dir", recipe_dir]) if permit_undefined_jinja: - args.append('--permit-undefined-jinja') + args.append("--permit-undefined-jinja") check_call_env(args, env=env) # this is a file that the subprocess will have written - with open(os.path.join(m.config.work_dir, 'conda_build_loaded_setup_py.json')) as f: + with open( + os.path.join(m.config.work_dir, "conda_build_loaded_setup_py.json") + ) as f: _setuptools_data = json.load(f) else: try: - _setuptools_data = _load_setup_py_data.load_setup_py_data(setup_file, - from_recipe_dir=from_recipe_dir, - recipe_dir=recipe_dir, - work_dir=m.config.work_dir, - permit_undefined_jinja=permit_undefined_jinja) + _setuptools_data = _load_setup_py_data.load_setup_py_data( + setup_file, + from_recipe_dir=from_recipe_dir, + recipe_dir=recipe_dir, + work_dir=m.config.work_dir, + permit_undefined_jinja=permit_undefined_jinja, + ) except (TypeError, OSError): # setup.py file doesn't yet exist. Will get picked up in future parsings pass except ImportError as e: if permit_undefined_jinja: - log.debug("Reading setup.py failed due to missing modules. This is probably OK, " - "since it may succeed in later passes. Watch for incomplete recipe " - "info, though.") + log.debug( + "Reading setup.py failed due to missing modules. This is probably OK, " + "since it may succeed in later passes. Watch for incomplete recipe " + "info, though." + ) else: - raise CondaBuildException("Could not render recipe - need modules " - "installed in root env. Import error was \"{}\"".format(e)) + raise CondaBuildException( + "Could not render recipe - need modules " + 'installed in root env. 
Import error was "{}"'.format(e) + ) # cleanup: we must leave the source tree empty unless the source code is already present - rm_rf(os.path.join(m.config.work_dir, '_load_setup_py_data.py')) + rm_rf(os.path.join(m.config.work_dir, "_load_setup_py_data.py")) return _setuptools_data if _setuptools_data else {} -def load_setuptools(m, setup_file='setup.py', from_recipe_dir=False, recipe_dir=None, - permit_undefined_jinja=True): +def load_setuptools( + m, + setup_file="setup.py", + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): warn( "conda_build.jinja_context.load_setuptools is pending deprecation in a future release. " "Use conda_build.jinja_context.load_setup_py_data instead.", PendingDeprecationWarning, ) - return load_setup_py_data(m, setup_file=setup_file, from_recipe_dir=from_recipe_dir, - recipe_dir=recipe_dir, permit_undefined_jinja=permit_undefined_jinja) + return load_setup_py_data( + m, + setup_file=setup_file, + from_recipe_dir=from_recipe_dir, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ) def load_npm(): - mode_dict = {'mode': 'r', 'encoding': 'utf-8'} - with open('package.json', **mode_dict) as pkg: + mode_dict = {"mode": "r", "encoding": "utf-8"} + with open("package.json", **mode_dict) as pkg: return json.load(pkg) def _find_file(file_name: str, from_recipe_dir: bool, recipe_dir: str, config) -> str: - """ Get the path to the given file which may be in the work_dir + """Get the path to the given file which may be in the work_dir or in the recipe_dir. Note, the returned file name may not exist. @@ -189,8 +276,14 @@ def _find_file(file_name: str, from_recipe_dir: bool, recipe_dir: str, config) - return path -def load_file_regex(config, load_file, regex_pattern, from_recipe_dir=False, - recipe_dir=None, permit_undefined_jinja=True): +def load_file_regex( + config, + load_file, + regex_pattern, + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, +): try: load_file = _find_file(load_file, from_recipe_dir, recipe_dir, config) except FileNotFoundError as e: @@ -206,8 +299,17 @@ def load_file_regex(config, load_file, regex_pattern, from_recipe_dir=False, cached_env_dependencies = {} -def pin_compatible(m, package_name, lower_bound=None, upper_bound=None, min_pin='x.x.x.x.x.x', - max_pin='x', permit_undefined_jinja=False, exact=False, bypass_env_check=False): +def pin_compatible( + m, + package_name, + lower_bound=None, + upper_bound=None, + min_pin="x.x.x.x.x.x", + max_pin="x", + permit_undefined_jinja=False, + exact=False, + bypass_env_check=False, +): """dynamically pin based on currently installed version. only mandatory input is package_name. 
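# A small sketch (assumed behaviour, not from this diff) of the pin-expression
# helper that pin_compatible() builds on. apply_pin_expressions() turns an
# installed version plus the min_pin/max_pin templates into a version
# constraint; the version string and the expected result shown here are
# illustrative.
from conda_build.utils import apply_pin_expressions

spec = apply_pin_expressions("1.21.5")  # defaults: min_pin="x.x.x.x.x.x", max_pin="x"
# Keeps the full version as the lower bound and bumps the leading field for
# the upper bound, typically yielding something like ">=1.21.5,<2.0a0".
print(spec)
# In a recipe, an expression such as {{ pin_compatible('numpy') }} in the run
# requirements expands through this same helper.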
@@ -231,41 +333,55 @@ def pin_compatible(m, package_name, lower_bound=None, upper_bound=None, min_pin= pins = cached_env_dependencies[key] else: if m.is_cross and not m.build_is_host: - pins, _, _ = get_env_dependencies(m, 'host', m.config.variant) + pins, _, _ = get_env_dependencies(m, "host", m.config.variant) else: - pins, _, _ = get_env_dependencies(m, 'build', m.config.variant) + pins, _, _ = get_env_dependencies(m, "build", m.config.variant) if m.build_is_host: - host_pins, _, _ = get_env_dependencies(m, 'host', m.config.variant) + host_pins, _, _ = get_env_dependencies(m, "host", m.config.variant) pins.extend(host_pins) cached_env_dependencies[key] = pins - versions = {p.split(' ')[0]: p.split(' ')[1:] for p in pins} + versions = {p.split(" ")[0]: p.split(" ")[1:] for p in pins} if versions: if exact and versions.get(package_name): - compatibility = ' '.join(versions[package_name]) + compatibility = " ".join(versions[package_name]) else: version = lower_bound or versions.get(package_name) if version: - if hasattr(version, '__iter__') and not isinstance(version, str): + if hasattr(version, "__iter__") and not isinstance(version, str): version = version[0] else: version = str(version) if upper_bound: if min_pin or lower_bound: compatibility = ">=" + str(version) + "," - compatibility += f'<{upper_bound}' + compatibility += f"<{upper_bound}" else: compatibility = apply_pin_expressions(version, min_pin, max_pin) - if (not compatibility and not permit_undefined_jinja and not bypass_env_check): - check = re.compile(r'pin_compatible\s*\(\s*[''"]{}[''"]'.format(package_name)) + if not compatibility and not permit_undefined_jinja and not bypass_env_check: + check = re.compile(r"pin_compatible\s*\(\s*[" '"]{}[' '"]'.format(package_name)) if check.search(m.extract_requirements_text()): - raise RuntimeError("Could not get compatibility information for {} package. " - "Is it one of your host dependencies?".format(package_name)) - return " ".join((package_name, compatibility)) if compatibility is not None else package_name + raise RuntimeError( + "Could not get compatibility information for {} package. " + "Is it one of your host dependencies?".format(package_name) + ) + return ( + " ".join((package_name, compatibility)) + if compatibility is not None + else package_name + ) -def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min_pin, max_pin, - exact, permit_undefined_jinja, skip_build_id=False): +def pin_subpackage_against_outputs( + metadata, + matching_package_keys, + outputs, + min_pin, + max_pin, + exact, + permit_undefined_jinja, + skip_build_id=False, +): pin = None if matching_package_keys: # two ways to match: @@ -281,8 +397,9 @@ def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min # name, used vars+values). It used to be (package name, variant) - # but that was really big and hard to look at. 
shared_vars = set(variant.keys()) & set(metadata.config.variant.keys()) - if not shared_vars or all(variant[sv] == metadata.config.variant[sv] - for sv in shared_vars): + if not shared_vars or all( + variant[sv] == metadata.config.variant[sv] for sv in shared_vars + ): key = (pkg_name, variant) break @@ -292,20 +409,35 @@ def pin_subpackage_against_outputs(metadata, matching_package_keys, outputs, min pin = None else: if exact: - pin = " ".join([sp_m.name(), sp_m.version(), - sp_m.build_id() if not skip_build_id else str(sp_m.build_number())]) + pin = " ".join( + [ + sp_m.name(), + sp_m.version(), + sp_m.build_id() + if not skip_build_id + else str(sp_m.build_number()), + ] + ) else: - pin = "{} {}".format(sp_m.name(), - apply_pin_expressions(sp_m.version(), min_pin, - max_pin)) + pin = "{} {}".format( + sp_m.name(), + apply_pin_expressions(sp_m.version(), min_pin, max_pin), + ) else: pin = matching_package_keys[0][0] return pin -def pin_subpackage(metadata, subpackage_name, min_pin='x.x.x.x.x.x', max_pin='x', - exact=False, permit_undefined_jinja=False, allow_no_other_outputs=False, - skip_build_id=False): +def pin_subpackage( + metadata, + subpackage_name, + min_pin="x.x.x.x.x.x", + max_pin="x", + exact=False, + permit_undefined_jinja=False, + allow_no_other_outputs=False, + skip_build_id=False, +): """allow people to specify pinnings based on subpackages that are defined in the recipe. For example, given a compiler package, allow it to specify either a compatible or exact @@ -313,42 +445,52 @@ def pin_subpackage(metadata, subpackage_name, min_pin='x.x.x.x.x.x', max_pin='x' """ pin = None - if not hasattr(metadata, 'other_outputs'): + if not hasattr(metadata, "other_outputs"): if allow_no_other_outputs: pin = subpackage_name else: - raise ValueError("Bug in conda-build: we need to have info about other outputs in " - "order to allow pinning to them. It's not here.") + raise ValueError( + "Bug in conda-build: we need to have info about other outputs in " + "order to allow pinning to them. It's not here." + ) else: # two ways to match: # 1. only one other output named the same as the subpackage_name from the key # 2. whole key matches (both subpackage name and variant) keys = list(metadata.other_outputs.keys()) matching_package_keys = [k for k in keys if k[0] == subpackage_name] - pin = pin_subpackage_against_outputs(metadata, matching_package_keys, - metadata.other_outputs, min_pin, max_pin, - exact, permit_undefined_jinja, - skip_build_id=skip_build_id) + pin = pin_subpackage_against_outputs( + metadata, + matching_package_keys, + metadata.other_outputs, + min_pin, + max_pin, + exact, + permit_undefined_jinja, + skip_build_id=skip_build_id, + ) if not pin: pin = subpackage_name if not permit_undefined_jinja and not allow_no_other_outputs: - raise ValueError("Didn't find subpackage version info for '{}', which is used in a" - " pin_subpackage expression. Is it actually a subpackage? If not, " - "you want pin_compatible instead.".format(subpackage_name)) + raise ValueError( + "Didn't find subpackage version info for '{}', which is used in a" + " pin_subpackage expression. Is it actually a subpackage? 
If not, " + "you want pin_compatible instead.".format(subpackage_name) + ) return pin def native_compiler(language, config): compiler = language - for platform in [config.platform, config.platform.split('-')[0]]: + for platform in [config.platform, config.platform.split("-")[0]]: try: compiler = DEFAULT_COMPILERS[platform][language] break except KeyError: continue - if hasattr(compiler, 'keys'): - compiler = compiler.get(config.variant.get('python', 'nope'), 'vs2017') + if hasattr(compiler, "keys"): + compiler = compiler.get(config.variant.get("python", "nope"), "vs2017") return compiler @@ -364,27 +506,27 @@ def compiler(language, config, permit_undefined_jinja=False): compiler = native_compiler(language, config) version = None if config.variant: - target_platform = config.variant.get('target_platform', config.subdir) - language_compiler_key = f'{language}_compiler' + target_platform = config.variant.get("target_platform", config.subdir) + language_compiler_key = f"{language}_compiler" # fall back to native if language-compiler is not explicitly set in variant compiler = config.variant.get(language_compiler_key, compiler) - version = config.variant.get(language_compiler_key + '_version') + version = config.variant.get(language_compiler_key + "_version") else: target_platform = config.subdir # support cross compilers. A cross-compiler package will have a name such as # gcc_target # gcc_linux-cos6-64 - compiler = '_'.join([compiler, target_platform]) + compiler = "_".join([compiler, target_platform]) if version: - compiler = ' '.join((compiler, version)) + compiler = " ".join((compiler, version)) compiler = ensure_valid_spec(compiler, warn=False) return compiler def ccache(method, config, permit_undefined_jinja=False): config.ccache_method = method - return 'ccache' + return "ccache" def cdt(package_name, config, permit_undefined_jinja=False): @@ -434,27 +576,26 @@ def cdt(package_name, config, permit_undefined_jinja=False): } """ # NOQA - cdt_name = 'cos6' + cdt_name = "cos6" arch = config.host_arch or config.arch - if arch == 'ppc64le' or arch == 'aarch64' or arch == 'ppc64' or arch == 's390x': - cdt_name = 'cos7' + if arch == "ppc64le" or arch == "aarch64" or arch == "ppc64" or arch == "s390x": + cdt_name = "cos7" cdt_arch = arch else: - cdt_arch = 'x86_64' if arch == '64' else 'i686' + cdt_arch = "x86_64" if arch == "64" else "i686" if config.variant: - cdt_name = config.variant.get('cdt_name', cdt_name) - cdt_arch = config.variant.get('cdt_arch', cdt_arch) - if ' ' in package_name: - name = package_name.split(' ')[0] - ver_build = package_name.split(' ')[1:] - result = (name + '-' + cdt_name + '-' + cdt_arch + ' ' + ' '.join(ver_build)) + cdt_name = config.variant.get("cdt_name", cdt_name) + cdt_arch = config.variant.get("cdt_arch", cdt_arch) + if " " in package_name: + name = package_name.split(" ")[0] + ver_build = package_name.split(" ")[1:] + result = name + "-" + cdt_name + "-" + cdt_arch + " " + " ".join(ver_build) else: - result = (package_name + '-' + cdt_name + '-' + cdt_arch) + result = package_name + "-" + cdt_name + "-" + cdt_arch return result -def resolved_packages(m, env, permit_undefined_jinja=False, - bypass_env_check=False): +def resolved_packages(m, env, permit_undefined_jinja=False, bypass_env_check=False): """Returns the final list of packages that are listed in host or build. This include all packages (including the indirect dependencies) that will be installed in the host or build environment. 
An example usage of this @@ -486,8 +627,8 @@ def resolved_packages(m, env, permit_undefined_jinja=False, - openssl 1.0.2n hb7f436b_0 - zlib 1.2.11 ha838bed_2 """ - if env not in ('host', 'build'): - raise ValueError('Only host and build dependencies are supported.') + if env not in ("host", "build"): + raise ValueError("Only host and build dependencies are supported.") package_names = [] @@ -518,7 +659,7 @@ def _toml_load(stream): "json": json.load, "yaml": yaml.safe_load, "yml": yaml.safe_load, - "toml": _toml_load + "toml": _toml_load, } @@ -533,9 +674,16 @@ def _load_data(stream: IO, fmt: str, *args, **kwargs) -> Any: return load(stream, *args, **kwargs) -def load_file_data(filename: str, fmt: Optional[str] = None, *args, config=None, - from_recipe_dir=False, recipe_dir=None, permit_undefined_jinja=True, - **kwargs): +def load_file_data( + filename: str, + fmt: Optional[str] = None, + *args, + config=None, + from_recipe_dir=False, + recipe_dir=None, + permit_undefined_jinja=True, + **kwargs, +): """Loads a file and returns the parsed data. For example to load file data from a JSON file, you can use any of: @@ -552,7 +700,9 @@ def load_file_data(filename: str, fmt: Optional[str] = None, *args, config=None, raise else: with open(file_path) as f: - return _load_data(f, fmt or pathlib.Path(filename).suffix.lstrip("."), *args, **kwargs) + return _load_data( + f, fmt or pathlib.Path(filename).suffix.lstrip("."), *args, **kwargs + ) def load_str_data(string: str, fmt: str, *args, **kwargs): @@ -567,48 +717,91 @@ def load_str_data(string: str, fmt: str, *args, **kwargs): return _load_data(StringIO(string), fmt, *args, **kwargs) -def context_processor(initial_metadata, recipe_dir, config, permit_undefined_jinja, - allow_no_other_outputs=False, bypass_env_check=False, skip_build_id=False, - variant=None): +def context_processor( + initial_metadata, + recipe_dir, + config, + permit_undefined_jinja, + allow_no_other_outputs=False, + bypass_env_check=False, + skip_build_id=False, + variant=None, +): """ Return a dictionary to use as context for jinja templates. initial_metadata: Augment the context with values from this MetaData object. Used to bootstrap metadata contents via multiple parsing passes. 
""" - ctx = get_environ(m=initial_metadata, for_env=False, skip_build_id=skip_build_id, - escape_backslash=True, variant=variant) + ctx = get_environ( + m=initial_metadata, + for_env=False, + skip_build_id=skip_build_id, + escape_backslash=True, + variant=variant, + ) environ = dict(os.environ) environ.update(get_environ(m=initial_metadata, skip_build_id=skip_build_id)) ctx.update( - load_setup_py_data=partial(load_setup_py_data, m=initial_metadata, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_setup_py_data=partial( + load_setup_py_data, + m=initial_metadata, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), # maintain old alias for backwards compatibility: - load_setuptools=partial(load_setuptools, m=initial_metadata, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_setuptools=partial( + load_setuptools, + m=initial_metadata, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), load_npm=load_npm, - load_file_regex=partial(load_file_regex, config=config, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), - load_file_data=partial(load_file_data, config=config, recipe_dir=recipe_dir, - permit_undefined_jinja=permit_undefined_jinja), + load_file_regex=partial( + load_file_regex, + config=config, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), + load_file_data=partial( + load_file_data, + config=config, + recipe_dir=recipe_dir, + permit_undefined_jinja=permit_undefined_jinja, + ), load_str_data=load_str_data, - installed=get_installed_packages(os.path.join(config.host_prefix, 'conda-meta')), - pin_compatible=partial(pin_compatible, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - bypass_env_check=bypass_env_check), - pin_subpackage=partial(pin_subpackage, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - allow_no_other_outputs=allow_no_other_outputs, - skip_build_id=skip_build_id), - compiler=partial(compiler, config=config, permit_undefined_jinja=permit_undefined_jinja), + installed=get_installed_packages( + os.path.join(config.host_prefix, "conda-meta") + ), + pin_compatible=partial( + pin_compatible, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + bypass_env_check=bypass_env_check, + ), + pin_subpackage=partial( + pin_subpackage, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + allow_no_other_outputs=allow_no_other_outputs, + skip_build_id=skip_build_id, + ), + compiler=partial( + compiler, config=config, permit_undefined_jinja=permit_undefined_jinja + ), cdt=partial(cdt, config=config, permit_undefined_jinja=permit_undefined_jinja), - ccache=partial(ccache, config=config, permit_undefined_jinja=permit_undefined_jinja), - resolved_packages=partial(resolved_packages, initial_metadata, - permit_undefined_jinja=permit_undefined_jinja, - bypass_env_check=bypass_env_check), + ccache=partial( + ccache, config=config, permit_undefined_jinja=permit_undefined_jinja + ), + resolved_packages=partial( + resolved_packages, + initial_metadata, + permit_undefined_jinja=permit_undefined_jinja, + bypass_env_check=bypass_env_check, + ), time=time, datetime=datetime, - - environ=environ) + environ=environ, + ) return ctx diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 542166dd30..2833974066 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: BSD-3-Clause 
import re import string + from conda_build import exceptions from conda_build.utils import comma_join @@ -24,71 +25,68 @@ """.split() # regular expressions -gpl2_regex = re.compile('GPL[^3]*2') # match GPL2 -gpl3_regex = re.compile('GPL[^2]*3') # match GPL3 -gpl23_regex = re.compile('GPL[^2]*>= *2') # match GPL >= 2 -cc_regex = re.compile(r'CC\w+') # match CC -punk_regex = re.compile('[%s]' % re.escape(string.punctuation)) # removes punks +gpl2_regex = re.compile("GPL[^3]*2") # match GPL2 +gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 +gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 +cc_regex = re.compile(r"CC\w+") # match CC +punk_regex = re.compile("[%s]" % re.escape(string.punctuation)) # removes punks def match_gpl3(family): """True if family matches GPL3 or GPL >= 2, else False""" - return (gpl23_regex.search(family) or - gpl3_regex.search(family)) + return gpl23_regex.search(family) or gpl3_regex.search(family) def normalize(s): """Set to ALL CAPS, replace common GPL patterns, and strip""" s = s.upper() - s = re.sub('GENERAL PUBLIC LICENSE', 'GPL', s) - s = re.sub('LESSER *', 'L', s) - s = re.sub('AFFERO *', 'A', s) + s = re.sub("GENERAL PUBLIC LICENSE", "GPL", s) + s = re.sub("LESSER *", "L", s) + s = re.sub("AFFERO *", "A", s) return s.strip() def remove_special_characters(s): """Remove punctuation, spaces, tabs, and line feeds""" - s = punk_regex.sub(' ', s) - s = re.sub(r'\s+', '', s) + s = punk_regex.sub(" ", s) + s = re.sub(r"\s+", "", s) return s -def guess_license_family_from_index(index=None, - recognized=allowed_license_families): +def guess_license_family_from_index(index=None, recognized=allowed_license_families): """Return best guess of license_family from the conda package index. Note: Logic here is simple, and focuses on existing set of allowed families """ if isinstance(index, dict): - license_name = index.get('license_family', index.get('license')) + license_name = index.get("license_family", index.get("license")) else: # index argument is actually a string license_name = index return guess_license_family(license_name, recognized) -def guess_license_family(license_name=None, - recognized=allowed_license_families): +def guess_license_family(license_name=None, recognized=allowed_license_families): """Return best guess of license_family from the conda package index. 
Note: Logic here is simple, and focuses on existing set of allowed families """ if license_name is None: - return 'NONE' + return "NONE" license_name = normalize(license_name) # Handle GPL families as special cases # Remove AGPL and LGPL before looking for GPL2 and GPL3 - sans_lgpl = re.sub('[A,L]GPL', '', license_name) + sans_lgpl = re.sub("[A,L]GPL", "", license_name) if match_gpl3(sans_lgpl): - return 'GPL3' + return "GPL3" elif gpl2_regex.search(sans_lgpl): - return 'GPL2' + return "GPL2" elif cc_regex.search(license_name): - return 'CC' + return "CC" license_name = remove_special_characters(license_name) for family in recognized: @@ -97,17 +95,21 @@ def guess_license_family(license_name=None, for family in recognized: if license_name in remove_special_characters(family): return family - return 'OTHER' + return "OTHER" def ensure_valid_license_family(meta): try: - license_family = meta['about']['license_family'] + license_family = meta["about"]["license_family"] except KeyError: return - allowed_families = [remove_special_characters(normalize(fam)) - for fam in allowed_license_families] + allowed_families = [ + remove_special_characters(normalize(fam)) for fam in allowed_license_families + ] if remove_special_characters(normalize(license_family)) not in allowed_families: - raise RuntimeError(exceptions.indent( - "about/license_family '%s' not allowed. Allowed families are %s." % - (license_family, comma_join(sorted(allowed_license_families))))) + raise RuntimeError( + exceptions.indent( + "about/license_family '%s' not allowed. Allowed families are %s." + % (license_family, comma_join(sorted(allowed_license_families))) + ) + ) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 99f1b423d9..b9941df872 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1,36 +1,33 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict import contextlib import copy -from functools import lru_cache import hashlib import json import os -from os.path import isfile, join import re import sys import time +from collections import OrderedDict +from functools import lru_cache +from os.path import isfile, join from bs4 import UnicodeDammit -from .conda_interface import md5_file -from .conda_interface import non_x86_linux_machines -from .conda_interface import MatchSpec -from .conda_interface import envs_dirs - -from conda_build import exceptions, utils, variants, environ -from conda_build.features import feature_list +from conda_build import environ, exceptions, utils, variants from conda_build.config import Config, get_or_merge_config +from conda_build.features import feature_list +from conda_build.license_family import ensure_valid_license_family from conda_build.utils import ( + HashableDict, ensure_list, - find_recipe, expand_globs, + find_recipe, get_installed_packages, - HashableDict, insert_variant_versions, ) -from conda_build.license_family import ensure_valid_license_family + +from .conda_interface import MatchSpec, envs_dirs, md5_file, non_x86_linux_machines try: import yaml @@ -348,8 +345,8 @@ def _variants_equal(metadata, output_metadata): def ensure_matching_hashes(output_metadata): envs = "build", "host", "run" problemos = [] - for (_, m) in output_metadata.values(): - for (_, om) in output_metadata.values(): + for _, m in output_metadata.values(): + for _, om in output_metadata.values(): if m != om: run_exports = om.meta.get("build", {}).get("run_exports", []) if hasattr(run_exports, "keys"): @@ -669,7 
+666,6 @@ def build_string_from_metadata(metadata): ("mro", "mro-base", 3), ("mro", "mro-base_impl", 3), ): - for ms in metadata.ms_depends("run"): for name in ensure_list(names): if ms.name == name and name in build_pkg_names: @@ -1049,7 +1045,6 @@ class MetaData: __hash__ = None # declare as non-hashable to avoid its use with memoization def __init__(self, path, config=None, variant=None): - self.undefined_jinja_vars = [] self.config = get_or_merge_config(config, variant=variant) @@ -1801,9 +1796,9 @@ def _get_contents( return fd.read() from conda_build.jinja_context import ( - context_processor, - UndefinedNeverFail, FilteredLoader, + UndefinedNeverFail, + context_processor, ) path, filename = os.path.split(self.meta_path) @@ -2532,7 +2527,7 @@ def get_output_metadata_set( # Sanity check: if any exact pins of any subpackages, make sure that they match ensure_matching_hashes(conda_packages) final_conda_packages = [] - for (out_d, m) in conda_packages.values(): + for out_d, m in conda_packages.values(): # We arbitrarily mark all output metadata as final, regardless # of if it truly is or not. This is done to add sane hashes # to unfinalizable packages, so that they are differentiable diff --git a/conda_build/metapackage.py b/conda_build/metapackage.py index 0d06fa8a34..0566836030 100644 --- a/conda_build/metapackage.py +++ b/conda_build/metapackage.py @@ -1,12 +1,23 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from collections import defaultdict + from conda_build.config import Config from conda_build.metadata import MetaData -def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0, - dependencies=(), home=None, license_name=None, summary=None, config=None): +def create_metapackage( + name, + version, + entry_points=(), + build_string=None, + build_number=0, + dependencies=(), + home=None, + license_name=None, + summary=None, + config=None, +): # local import to avoid circular import, we provide create_metapackage in api from conda_build.api import build @@ -14,16 +25,16 @@ def create_metapackage(name, version, entry_points=(), build_string=None, build_ config = Config() d = defaultdict(dict) - d['package']['name'] = name - d['package']['version'] = version - d['build']['number'] = build_number - d['build']['entry_points'] = entry_points + d["package"]["name"] = name + d["package"]["version"] = version + d["build"]["number"] = build_number + d["build"]["entry_points"] = entry_points # MetaData does the auto stuff if the build string is None - d['build']['string'] = build_string - d['requirements']['run'] = dependencies - d['about']['home'] = home - d['about']['license'] = license_name - d['about']['summary'] = summary + d["build"]["string"] = build_string + d["requirements"]["run"] = dependencies + d["about"]["home"] = home + d["about"]["license"] = license_name + d["about"]["summary"] = summary d = dict(d) m = MetaData.fromdict(d, config=config) config.compute_build_id(m.name(), m.version()) diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index 6823faeea6..380367d43d 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -4,11 +4,11 @@ import locale import logging import os -from os.path import basename, dirname, isdir, join, isfile import shutil import sys +from os.path import basename, dirname, isdir, isfile, join -ISWIN = sys.platform.startswith('win') +ISWIN = sys.platform.startswith("win") def _force_dir(dirname): @@ -26,7 +26,7 @@ def rewrite_script(fn, prefix): 
noarch pacakges""" # Load and check the source file for not being a binary - src = join(prefix, 'Scripts' if ISWIN else 'bin', fn) + src = join(prefix, "Scripts" if ISWIN else "bin", fn) encoding = locale.getpreferredencoding() # if default locale is ascii, allow UTF-8 (a reasonably modern ASCII extension) if encoding == "ANSI_X3.4-1968": @@ -40,52 +40,53 @@ def rewrite_script(fn, prefix): os.unlink(src) # Get rid of '-script.py' suffix on Windows - if ISWIN and fn.endswith('-script.py'): + if ISWIN and fn.endswith("-script.py"): fn = fn[:-10] # Rewrite the file to the python-scripts directory - dst_dir = join(prefix, 'python-scripts') + dst_dir = join(prefix, "python-scripts") _force_dir(dst_dir) dst = join(dst_dir, fn) - with open(dst, 'w') as fo: + with open(dst, "w") as fo: fo.write(data) os.chmod(dst, src_mode) return fn def handle_file(f, d, prefix): - """Process a file for inclusion in a noarch python package. - """ + """Process a file for inclusion in a noarch python package.""" path = join(prefix, f) # Ignore egg-info and pyc files. - if f.endswith(('.egg-info', '.pyc', '.pyo')): + if f.endswith((".egg-info", ".pyc", ".pyo")): os.unlink(path) - elif f.endswith('.exe') and (isfile(os.path.join(prefix, f[:-4] + '-script.py')) or - basename(f[:-4]) in d['python-scripts']): + elif f.endswith(".exe") and ( + isfile(os.path.join(prefix, f[:-4] + "-script.py")) + or basename(f[:-4]) in d["python-scripts"] + ): os.unlink(path) # this is an entry point with a matching xx-script.py - elif 'site-packages' in f: - nsp = join(prefix, 'site-packages') + elif "site-packages" in f: + nsp = join(prefix, "site-packages") _force_dir(nsp) - g = f[f.find('site-packages'):] + g = f[f.find("site-packages") :] dst = join(prefix, g) dst_dir = dirname(dst) _force_dir(dst_dir) shutil.move(path, dst) - d['site-packages'].append(g[14:]) + d["site-packages"].append(g[14:]) # Treat scripts specially with the logic from above - elif f.startswith(('bin/', 'Scripts')): + elif f.startswith(("bin/", "Scripts")): fn = basename(path) fn = rewrite_script(fn, prefix) - d['python-scripts'].append(fn) + d["python-scripts"].append(fn) # Include examples in the metadata doc - elif f.startswith(('Examples/', 'Examples\\')): - d['Examples'].append(f[9:]) + elif f.startswith(("Examples/", "Examples\\")): + d["Examples"].append(f[9:]) # No special treatment for other files # leave them as-is else: @@ -95,10 +96,7 @@ def handle_file(f, d, prefix): def populate_files(m, files, prefix, entry_point_scripts=None): - d = {'dist': m.dist(), - 'site-packages': [], - 'python-scripts': [], - 'Examples': []} + d = {"dist": m.dist(), "site-packages": [], "python-scripts": [], "Examples": []} # Populate site-package, python-scripts, and Examples into above for f in files: @@ -106,9 +104,9 @@ def populate_files(m, files, prefix, entry_point_scripts=None): # Windows path conversion if ISWIN: - for fns in (d['site-packages'], d['Examples']): + for fns in (d["site-packages"], d["Examples"]): for i, fn in enumerate(fns): - fns[i] = fn.replace('\\', '/') + fns[i] = fn.replace("\\", "/") if entry_point_scripts: for entry_point in entry_point_scripts: @@ -120,28 +118,36 @@ def populate_files(m, files, prefix, entry_point_scripts=None): def transform(m, files, prefix): - bin_dir = join(prefix, 'bin') + bin_dir = join(prefix, "bin") _force_dir(bin_dir) - scripts_dir = join(prefix, 'Scripts') + scripts_dir = join(prefix, "Scripts") _force_dir(scripts_dir) name = m.name() # Create *nix prelink script # Note: it's important to use LF newlines or it wont 
work if we build on Win - with open(join(bin_dir, '.%s-pre-link.sh' % name), 'wb') as fo: - fo.write(b'''\ + with open(join(bin_dir, ".%s-pre-link.sh" % name), "wb") as fo: + fo.write( + b"""\ #!/bin/bash $PREFIX/bin/python $SOURCE_DIR/link.py - ''') + """ + ) # Create windows prelink script (be nice and use Windows newlines) - with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'wb') as fo: - fo.write('''\ + with open(join(scripts_dir, ".%s-pre-link.bat" % name), "wb") as fo: + fo.write( + """\ @echo off "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" - '''.replace('\n', '\r\n').encode('utf-8')) + """.replace( + "\n", "\r\n" + ).encode( + "utf-8" + ) + ) d = populate_files(m, files, prefix) @@ -149,17 +155,17 @@ def transform(m, files, prefix): this_dir = dirname(__file__) # copy in windows exe shims if there are any python-scripts - if d['python-scripts']: - for fn in 'cli-32.exe', 'cli-64.exe': + if d["python-scripts"]: + for fn in "cli-32.exe", "cli-64.exe": shutil.copyfile(join(this_dir, fn), join(prefix, fn)) # Read the local _link.py - with open(join(this_dir, '_link.py')) as fi: + with open(join(this_dir, "_link.py")) as fi: link_code = fi.read() # Write the package metadata, and bumper with code for linking - with open(join(prefix, 'link.py'), 'w') as fo: - fo.write('DATA = ') + with open(join(prefix, "link.py"), "w") as fo: + fo.write("DATA = ") json.dump(d, fo, indent=2, sort_keys=True) - fo.write('\n## END DATA\n\n') + fo.write("\n## END DATA\n\n") fo.write(link_code) diff --git a/conda_build/os_utils/elf.py b/conda_build/os_utils/elf.py index 7aa9d594d4..5fc37e772e 100644 --- a/conda_build/os_utils/elf.py +++ b/conda_build/os_utils/elf.py @@ -1,21 +1,30 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from os.path import islink, isfile - +from os.path import isfile, islink # extensions which are assumed to belong to non-ELF files NO_EXT = ( - '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', - '.xml', '.png', '.jpg', '.gif', - '.o' # ELF but not what we are looking for + ".py", + ".pyc", + ".pyo", + ".h", + ".a", + ".c", + ".txt", + ".html", + ".xml", + ".png", + ".jpg", + ".gif", + ".o", # ELF but not what we are looking for ) -MAGIC = b'\x7fELF' +MAGIC = b"\x7fELF" def is_elf(path): if path.endswith(NO_EXT) or islink(path) or not isfile(path): return False - with open(path, 'rb') as fi: + with open(path, "rb") as fi: head = fi.read(4) return bool(head == MAGIC) diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 3e5ea52994..18190aba5d 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -3,37 +3,44 @@ import os import stat import sys -from os.path import isfile, join, expanduser +from os.path import expanduser, isfile, join -from conda_build.conda_interface import root_dir from glob2 import glob +from conda_build.conda_interface import root_dir + def find_executable(executable, prefix=None, all_matches=False): # dir_paths is referenced as a module-level variable # in other code global dir_paths result = None - if sys.platform == 'win32': - dir_paths = [join(root_dir, 'Scripts'), - join(root_dir, 'Library\\mingw-w64\\bin'), - join(root_dir, 'Library\\usr\\bin'), - join(root_dir, 'Library\\bin'), ] + if sys.platform == "win32": + dir_paths = [ + join(root_dir, "Scripts"), + join(root_dir, "Library\\mingw-w64\\bin"), + join(root_dir, "Library\\usr\\bin"), + join(root_dir, "Library\\bin"), + ] if prefix: - dir_paths[0:0] = [join(prefix, 'Scripts'), - join(prefix, 
'Library\\mingw-w64\\bin'), - join(prefix, 'Library\\usr\\bin'), - join(prefix, 'Library\\bin'), ] + dir_paths[0:0] = [ + join(prefix, "Scripts"), + join(prefix, "Library\\mingw-w64\\bin"), + join(prefix, "Library\\usr\\bin"), + join(prefix, "Library\\bin"), + ] else: - dir_paths = [join(root_dir, 'bin'), ] + dir_paths = [ + join(root_dir, "bin"), + ] if prefix: - dir_paths.insert(0, join(prefix, 'bin')) + dir_paths.insert(0, join(prefix, "bin")) - dir_paths.extend(os.environ['PATH'].split(os.pathsep)) - if sys.platform == 'win32': - exts = ('.exe', '.bat', '') + dir_paths.extend(os.environ["PATH"].split(os.pathsep)) + if sys.platform == "win32": + exts = (".exe", ".bat", "") else: - exts = ('',) + exts = ("",) all_matches_found = [] for dir_path in dir_paths: @@ -41,13 +48,13 @@ def find_executable(executable, prefix=None, all_matches=False): path = expanduser(join(dir_path, executable + ext)) if isfile(path): st = os.stat(path) - if sys.platform == 'win32' or st.st_mode & stat.S_IEXEC: + if sys.platform == "win32" or st.st_mode & stat.S_IEXEC: if all_matches: all_matches_found.append(path) else: result = path break - if not result and any([f in executable for f in ('*', '?', '.')]): + if not result and any([f in executable for f in ("*", "?", ".")]): matches = glob(os.path.join(dir_path, executable)) if matches: if all_matches: @@ -60,8 +67,10 @@ def find_executable(executable, prefix=None, all_matches=False): return result or all_matches_found -def find_preferably_prefixed_executable(executable, build_prefix=None, all_matches=False): - found = find_executable('*' + executable, build_prefix, all_matches) +def find_preferably_prefixed_executable( + executable, build_prefix=None, all_matches=False +): + found = find_executable("*" + executable, build_prefix, all_matches) if not found: # It is possible to force non-prefixed exes by passing os.sep as the # first character in executable. basename makes this work. 
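# [Editorial sketch, not part of the patch] Typical use of the helpers reformatted above:
# find_executable() checks the prefix's Scripts/Library directories (Windows) or bin
# (elsewhere) before falling back to PATH, honoring .exe/.bat extensions on Windows, and
# find_preferably_prefixed_executable() prefers cross-prefixed tools over the plain name.
# The prefix path below is an assumption for illustration.
from conda_build.os_utils.external import (
    find_executable,
    find_preferably_prefixed_executable,
)

build_prefix = "/opt/conda/envs/_build_env"
patchelf = find_executable("patchelf", prefix=build_prefix)
strip = find_preferably_prefixed_executable("strip", build_prefix=build_prefix)
print(patchelf or "patchelf not found", strip or "strip not found")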
diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index e094301e54..77daf4ab10 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -1,39 +1,42 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from functools import lru_cache -import sys import re import subprocess -from os.path import join, basename - -from conda_build.conda_interface import untracked -from conda_build.conda_interface import linked_data +import sys +from functools import lru_cache +from os.path import basename, join +from conda_build.conda_interface import linked_data, untracked from conda_build.os_utils.macho import otool -from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile, is_codefile +from conda_build.os_utils.pyldd import ( + codefile_class, + inspect_linkages, + is_codefile, + machofile, +) -LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') -LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') +LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") +LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") def ldd(path): "thin wrapper around ldd" - lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() + lines = subprocess.check_output(["ldd", path]).decode("utf-8").splitlines() res = [] for line in lines: - if '=>' not in line: + if "=>" not in line: continue - assert line[0] == '\t', (path, line) + assert line[0] == "\t", (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: - res.append((m.group(1), 'not found')) + res.append((m.group(1), "not found")) continue - if 'ld-linux' in line: + if "ld-linux" in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) @@ -61,28 +64,37 @@ def _get_linkages(obj_files, prefix, sysroot): resolve_filenames = True recurse = True try: - if sys.platform.startswith('linux'): + if sys.platform.startswith("linux"): res[f] = ldd(path) - elif sys.platform.startswith('darwin'): + elif sys.platform.startswith("darwin"): links = otool(path) - res[f] = [(basename(line['name']), line['name']) for line in links] + res[f] = [(basename(line["name"]), line["name"]) for line in links] except: ldd_failed = True finally: - res_py = inspect_linkages(path, resolve_filenames=resolve_filenames, - sysroot=sysroot, recurse=recurse) + res_py = inspect_linkages( + path, + resolve_filenames=resolve_filenames, + sysroot=sysroot, + recurse=recurse, + ) res_py = [(basename(lp), lp) for lp in res_py] if ldd_failed: res[f] = res_py else: if set(res[f]) != set(res_py): - print("WARNING: pyldd disagrees with ldd/otool. This will not cause any") + print( + "WARNING: pyldd disagrees with ldd/otool. 
This will not cause any" + ) print("WARNING: problems for this build, but please file a bug at:") print("WARNING: https://github.com/conda/conda-build") print(f"WARNING: and (if possible) attach file {path}") - print("WARNING: \nldd/otool gives:\n{}\npyldd gives:\n{}\n" - .format("\n".join(str(e) for e in res[f]), "\n".join(str(e) - for e in res_py))) + print( + "WARNING: \nldd/otool gives:\n{}\npyldd gives:\n{}\n".format( + "\n".join(str(e) for e in res[f]), + "\n".join(str(e) for e in res_py), + ) + ) print(f"Diffs\n{set(res[f]) - set(res_py)}") print(f"Diffs\n{set(res_py) - set(res[f])}") return res @@ -91,12 +103,12 @@ def _get_linkages(obj_files, prefix, sysroot): @lru_cache(maxsize=None) def get_package_files(dist, prefix): files = [] - if hasattr(dist, 'get'): - files = dist.get('files') + if hasattr(dist, "get"): + files = dist.get("files") else: data = linked_data(prefix).get(dist) if data: - files = data.get('files', []) + files = data.get("files", []) return files diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index ba7df48b74..9b01e5d07d 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -4,27 +4,31 @@ from collections.abc import Hashable except ImportError: from collections.abc import Hashable -from functools import partial -import glob2 + import hashlib import json import os -from subprocess import Popen, PIPE import struct import sys import threading +from functools import partial +from subprocess import PIPE, Popen +import glob2 + +from .external import find_executable + +# lief cannot handle files it doesn't know about gracefully # TODO :: Remove all use of pyldd # Currently we verify the output of each against the other -from .pyldd import inspect_linkages as inspect_linkages_pyldd -# lief cannot handle files it doesn't know about gracefully from .pyldd import codefile_type as codefile_type_pyldd -from .external import find_executable +from .pyldd import inspect_linkages as inspect_linkages_pyldd codefile_type = codefile_type_pyldd have_lief = False try: import lief + have_lief = True except: pass @@ -51,12 +55,12 @@ def ensure_binary(file): return [] return lief.parse(file) except: - print(f'WARNING: liefldd: failed to ensure_binary({file})') + print(f"WARNING: liefldd: failed to ensure_binary({file})") return None def nm(filename): - """ Return symbols from *filename* binary """ + """Return symbols from *filename* binary""" done = False try: binary = lief.parse(filename) # Build an abstract binary @@ -80,13 +84,13 @@ def codefile_type_liefldd(file, skip_symlinks=True): if binary.format == lief.EXE_FORMATS.PE: if lief.PE.DLL_CHARACTERISTICS: if binary.header.characteristics & lief.PE.HEADER_CHARACTERISTICS.DLL: - result = 'DLLfile' + result = "DLLfile" else: - result = 'EXEfile' + result = "EXEfile" elif binary.format == lief.EXE_FORMATS.MACHO: - result = 'machofile' + result = "machofile" elif binary.format == lief.EXE_FORMATS.ELF: - result = 'elffile' + result = "elffile" return result @@ -95,7 +99,7 @@ def codefile_type_liefldd(file, skip_symlinks=True): def _trim_sysroot(sysroot): - while sysroot.endswith('/') or sysroot.endswith('\\'): + while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] return sysroot @@ -111,21 +115,26 @@ def get_libraries(file): # LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that. 
binary_name = None if binary.format == lief.EXE_FORMATS.MACHO: - binary_name = [command.name for command in binary.commands - if command.command == lief.MachO.LOAD_COMMAND_TYPES.ID_DYLIB] + binary_name = [ + command.name + for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.ID_DYLIB + ] binary_name = binary_name[0] if len(binary_name) else None - result = [from_os_varnames(binary.format, None, lib) for lib in result - if not binary_name or lib != binary_name] + result = [ + from_os_varnames(binary.format, None, lib) + for lib in result + if not binary_name or lib != binary_name + ] return result def _get_elf_rpathy_thing(binary, attribute, dyn_tag): dynamic_entries = binary.dynamic_entries - rpaths_colons = [getattr(e, attribute) - for e in dynamic_entries if e.tag == dyn_tag] + rpaths_colons = [getattr(e, attribute) for e in dynamic_entries if e.tag == dyn_tag] rpaths = [] for rpath in rpaths_colons: - rpaths.extend(rpath.split(':')) + rpaths.extend(rpath.split(":")) return rpaths @@ -133,27 +142,32 @@ def _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath, set_runpat dynamic_entries = binary.dynamic_entries changed = False for e in dynamic_entries: - if (set_runpath and - e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH and - glob2.fnmatch.fnmatch(e.runpath, old_matching) and - e.runpath != new_rpath): + if ( + set_runpath + and e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH + and glob2.fnmatch.fnmatch(e.runpath, old_matching) + and e.runpath != new_rpath + ): e.runpath = new_rpath changed = True - elif (set_rpath and - e.tag == lief.ELF.DYNAMIC_TAGS.RPATH and - glob2.fnmatch.fnmatch(e.rpath, old_matching) and - e.rpath != new_rpath): + elif ( + set_rpath + and e.tag == lief.ELF.DYNAMIC_TAGS.RPATH + and glob2.fnmatch.fnmatch(e.rpath, old_matching) + and e.rpath != new_rpath + ): e.rpath = new_rpath changed = True return changed if have_lief: + def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): - ''' + """ By raw we mean that no processing is done on them whatsoever. The values are taken directly from LIEF. For anything but Linux, this means an empty list. 
- ''' + """ binary_format = None binary_type = None @@ -163,17 +177,37 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): binary_format = binary.format if binary_format == lief.EXE_FORMATS.ELF: binary_type = binary.type - if binary_type == lief.ELF.ELF_CLASS.CLASS32 or binary_type == lief.ELF.ELF_CLASS.CLASS64: + if ( + binary_type == lief.ELF.ELF_CLASS.CLASS32 + or binary_type == lief.ELF.ELF_CLASS.CLASS64 + ): rpaths = _get_elf_rpathy_thing(binary, elf_attribute, elf_dyn_tag) - elif (binary_format == lief.EXE_FORMATS.MACHO and - binary.has_rpath and - elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH): - rpaths.extend([command.path for command in binary.commands - if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH]) + elif ( + binary_format == lief.EXE_FORMATS.MACHO + and binary.has_rpath + and elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH + ): + rpaths.extend( + [ + command.path + for command in binary.commands + if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH + ] + ) return rpaths, binary_format, binary_type - get_runpaths_raw = partial(get_rpathy_thing_raw_partial, elf_attribute='runpath', elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RUNPATH) - get_rpaths_raw = partial(get_rpathy_thing_raw_partial, elf_attribute='rpath', elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RPATH) + + get_runpaths_raw = partial( + get_rpathy_thing_raw_partial, + elf_attribute="runpath", + elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RUNPATH, + ) + get_rpaths_raw = partial( + get_rpathy_thing_raw_partial, + elf_attribute="rpath", + elf_dyn_tag=lief.ELF.DYNAMIC_TAGS.RPATH, + ) else: + def get_runpaths_raw(file): return [], None, None @@ -182,28 +216,32 @@ def get_rpaths_raw(file): def get_runpaths_or_rpaths_raw(file): - ''' + """ Can be called on all OSes. On linux, if runpaths are present they are returned. - ''' + """ rpaths, binary_format, binary_type = get_runpaths_raw(file) if not len(rpaths): rpaths, _, _ = get_rpaths_raw(file) - rpaths_type = 'rpaths' + rpaths_type = "rpaths" else: - rpaths_type = 'runpaths' + rpaths_type = "runpaths" return rpaths, rpaths_type, binary_format, binary_type def set_rpath(old_matching, new_rpath, file): binary = ensure_binary(file) - if (binary.format == lief.EXE_FORMATS.ELF and - (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): - if _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath=True, set_runpath=False): + if binary.format == lief.EXE_FORMATS.ELF and ( + binary.type == lief.ELF.ELF_CLASS.CLASS32 + or binary.type == lief.ELF.ELF_CLASS.CLASS64 + ): + if _set_elf_rpathy_thing( + binary, old_matching, new_rpath, set_rpath=True, set_runpath=False + ): binary.write(file) -def get_rpaths(file, exe_dirname, envroot, windows_root=''): +def get_rpaths(file, exe_dirname, envroot, windows_root=""): rpaths, rpaths_type, binary_format, binary_type = get_runpaths_or_rpaths_raw(file) if binary_format == lief.EXE_FORMATS.PE: # To allow the unix-y rpath code to work we consider @@ -214,29 +252,29 @@ def get_rpaths(file, exe_dirname, envroot, windows_root=''): # not to apply them transitively. 
# https://docs.microsoft.com/en-us/windows/desktop/dlls/dynamic-link-library-search-order if exe_dirname: - rpaths.append(exe_dirname.replace('\\', '/')) + rpaths.append(exe_dirname.replace("\\", "/")) if windows_root: - rpaths.append('/'.join((windows_root, "System32"))) - rpaths.append('/'.join((windows_root, "System32", "downlevel"))) + rpaths.append("/".join((windows_root, "System32"))) + rpaths.append("/".join((windows_root, "System32", "downlevel"))) rpaths.append(windows_root) if envroot: # and not lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list: rpaths.extend(list(_get_path_dirs(envroot))) elif binary_format == lief.EXE_FORMATS.MACHO: - rpaths = [rpath.rstrip('/') for rpath in rpaths] + rpaths = [rpath.rstrip("/") for rpath in rpaths] return [from_os_varnames(binary_format, binary_type, rpath) for rpath in rpaths] # TODO :: Consider memoizing instead of repeatedly scanning # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) -def _inspect_linkages_this(filename, sysroot='', arch='native'): - ''' +def _inspect_linkages_this(filename, sysroot="", arch="native"): + """ :param filename: :param sysroot: :param arch: :return: - ''' + """ if not os.path.exists(filename): return None, [], [] @@ -247,9 +285,13 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): # json_data = json.loads(lief.to_json_from_abstract(binary)) json_data = json.loads(lief.to_json(binary)) if json_data: - return filename, json_data['imported_libraries'], json_data['imported_libraries'] + return ( + filename, + json_data["imported_libraries"], + json_data["imported_libraries"], + ) except: - print(f'WARNING: liefldd: failed _inspect_linkages_this({filename})') + print(f"WARNING: liefldd: failed _inspect_linkages_this({filename})") return None, [], [] @@ -257,117 +299,129 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): def to_os_varnames(binary, input_): """Don't make these functions - they are methods to match the API for elffiles.""" if binary.format == lief.EXE_FORMATS.MACHO: - return input_.replace('$SELFDIR', '@loader_path') \ - .replace('$EXEDIR', '@executable_path') \ - .replace('$RPATH', '@rpath') + return ( + input_.replace("$SELFDIR", "@loader_path") + .replace("$EXEDIR", "@executable_path") + .replace("$RPATH", "@rpath") + ) elif binary.format == lief.EXE_FORMATS.ELF: if binary.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$SELFDIR', '$ORIGIN') \ - .replace(libdir, '$LIB') + libdir = "/lib" + return input.replace("$SELFDIR", "$ORIGIN").replace(libdir, "$LIB") def from_os_varnames(binary_format, binary_type, input_): """Don't make these functions - they are methods to match the API for elffiles.""" if binary_format == lief.EXE_FORMATS.MACHO: - return input_.replace('@loader_path', '$SELFDIR') \ - .replace('@executable_path', '$EXEDIR') \ - .replace('@rpath', '$RPATH') + return ( + input_.replace("@loader_path", "$SELFDIR") + .replace("@executable_path", "$EXEDIR") + .replace("@rpath", "$RPATH") + ) elif binary_format == lief.EXE_FORMATS.ELF: if binary_type == lief.ELF.ELF_CLASS.CLASS64: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input_.replace('$ORIGIN', '$SELFDIR') \ - .replace('$LIB', libdir) + libdir = "/lib" + return input_.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir) elif binary_format == lief.EXE_FORMATS.PE: return input_ # TODO :: Use conda's version of this (or move the constant strings into constants.py) def 
_get_path_dirs(prefix): - yield '/'.join((prefix,)) - yield '/'.join((prefix, 'Library', 'mingw-w64', 'bin')) - yield '/'.join((prefix, 'Library', 'usr', 'bin')) - yield '/'.join((prefix, 'Library', 'bin')) - yield '/'.join((prefix, 'Scripts')) - yield '/'.join((prefix, 'bin')) + yield "/".join((prefix,)) + yield "/".join((prefix, "Library", "mingw-w64", "bin")) + yield "/".join((prefix, "Library", "usr", "bin")) + yield "/".join((prefix, "Library", "bin")) + yield "/".join((prefix, "Scripts")) + yield "/".join((prefix, "bin")) def get_uniqueness_key(file): binary = ensure_binary(file) if binary.format == lief.EXE_FORMATS.MACHO: return binary.name - elif (binary.format == lief.EXE_FORMATS.ELF - and # noqa - (binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64)): + elif binary.format == lief.EXE_FORMATS.ELF and ( # noqa + binary.type == lief.ELF.ELF_CLASS.CLASS32 + or binary.type == lief.ELF.ELF_CLASS.CLASS64 + ): dynamic_entries = binary.dynamic_entries - result = [e.name for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.SONAME] + result = [ + e.name for e in dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.SONAME + ] if result: return result[0] return binary.name return binary.name -def _get_resolved_location(codefile, - unresolved, - exedir, - selfdir, - rpaths_transitive, - LD_LIBRARY_PATH='', - default_paths=[], - sysroot='', - resolved_rpath=None): - ''' - From `man ld.so` - - When resolving shared object dependencies, the dynamic linker first inspects each dependency - string to see if it contains a slash (this can occur if a shared object pathname containing - slashes was specified at link time). If a slash is found, then the dependency string is - interpreted as a (relative or absolute) pathname, and the shared object is loaded using that - pathname. - - If a shared object dependency does not contain a slash, then it is searched for in the - following order: - - o Using the directories specified in the DT_RPATH dynamic section attribute of the binary - if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. - - o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in - secure-execution mode; see below). in which case it is ignored. - - o Using the directories specified in the DT_RUNPATH dynamic section attribute of the - binary if present. Such directories are searched only to find those objects required - by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, - which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, - which is applied to searches for all children in the dependency tree. - - o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate - shared objects previously found in the augmented library path. If, however, the binary - was linked with the -z nodeflib linker option, shared objects in the default paths are - skipped. Shared objects installed in hardware capability directories (see below) are - preferred to other shared objects. - - o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default - paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was - linked with the -z nodeflib linker option, this step is skipped. 
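# [Editorial worked example, not part of the patch] For a binary /opt/app/bin/tool that
# needs "libfoo.so.1", carries DT_RUNPATH=$ORIGIN/../lib, and runs with
# LD_LIBRARY_PATH=/extra/lib, the search order quoted above yields these candidates:
#   1. DT_RPATH        -> skipped here, because DT_RUNPATH is present
#   2. LD_LIBRARY_PATH -> /extra/lib/libfoo.so.1
#   3. DT_RUNPATH      -> /opt/app/bin/../lib/libfoo.so.1   ($ORIGIN = the binary's dir)
#   4. /etc/ld.so.cache, then /lib and /usr/lib (or the lib64 variants)
# The first existing candidate wins; paths are made up for the example. A toy resolver
# under those assumptions (the real helper below folds everything into one list):
import os

def toy_resolve(dep, ld_library_path, runpaths, default_paths, selfdir):
    for directory in ld_library_path + runpaths + default_paths:
        candidate = os.path.join(directory.replace("$ORIGIN", selfdir), dep)
        if os.path.exists(candidate):
            return candidate
    return None  # caller then reports the bare soname as missing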
- - Returns a tuple of resolved location, rpath_used, in_sysroot - ''' +def _get_resolved_location( + codefile, + unresolved, + exedir, + selfdir, + rpaths_transitive, + LD_LIBRARY_PATH="", + default_paths=[], + sysroot="", + resolved_rpath=None, +): + """ + From `man ld.so` + + When resolving shared object dependencies, the dynamic linker first inspects each dependency + string to see if it contains a slash (this can occur if a shared object pathname containing + slashes was specified at link time). If a slash is found, then the dependency string is + interpreted as a (relative or absolute) pathname, and the shared object is loaded using that + pathname. + + If a shared object dependency does not contain a slash, then it is searched for in the + following order: + + o Using the directories specified in the DT_RPATH dynamic section attribute of the binary + if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. + + o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in + secure-execution mode; see below). in which case it is ignored. + + o Using the directories specified in the DT_RUNPATH dynamic section attribute of the + binary if present. Such directories are searched only to find those objects required + by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, + which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, + which is applied to searches for all children in the dependency tree. + + o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate + shared objects previously found in the augmented library path. If, however, the binary + was linked with the -z nodeflib linker option, shared objects in the default paths are + skipped. Shared objects installed in hardware capability directories (see below) are + preferred to other shared objects. + + o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default + paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was + linked with the -z nodeflib linker option, this step is skipped. + + Returns a tuple of resolved location, rpath_used, in_sysroot + """ rpath_result = None found = False - ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(':') - if unresolved.startswith('$RPATH'): - these_rpaths = [resolved_rpath] if resolved_rpath else \ - rpaths_transitive + \ - ld_library_paths + \ - [dp.replace('$SYSROOT', sysroot) for dp in default_paths] + ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(":") + if unresolved.startswith("$RPATH"): + these_rpaths = ( + [resolved_rpath] + if resolved_rpath + else rpaths_transitive + + ld_library_paths + + [dp.replace("$SYSROOT", sysroot) for dp in default_paths] + ) for rpath in these_rpaths: - resolved = unresolved.replace('$RPATH', rpath) \ - .replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + resolved = ( + unresolved.replace("$RPATH", rpath) + .replace("$SELFDIR", selfdir) + .replace("$EXEDIR", exedir) + ) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) if resolved_rpath or exists or exists_sysroot: @@ -377,13 +431,12 @@ def _get_resolved_location(codefile, if not found: # Return the so name so that it can be warned about as missing. 
return unresolved, None, False - elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): - resolved = unresolved.replace('$SELFDIR', selfdir) \ - .replace('$EXEDIR', exedir) + elif any(a in unresolved for a in ("$SELFDIR", "$EXEDIR")): + resolved = unresolved.replace("$SELFDIR", selfdir).replace("$EXEDIR", exedir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) else: - if unresolved.startswith('/'): + if unresolved.startswith("/"): return unresolved, None, False else: return os.path.join(selfdir, unresolved), None, False @@ -392,8 +445,14 @@ def _get_resolved_location(codefile, # TODO :: Consider returning a tree structure or a dict when recurse is True? -def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, - sysroot='', envroot='', arch='native'): +def inspect_linkages_lief( + filename, + resolve_filenames=True, + recurse=True, + sysroot="", + envroot="", + arch="native", +): # Already seen is partly about implementing single SONAME # rules and its appropriateness on macOS is TBD! already_seen = set() @@ -405,15 +464,23 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, default_paths = [] if binary.format == lief.EXE_FORMATS.ELF: if binary.type == lief.ELF.ELF_CLASS.CLASS64: - default_paths = ['$SYSROOT/lib64', '$SYSROOT/usr/lib64', '$SYSROOT/lib', '$SYSROOT/usr/lib'] + default_paths = [ + "$SYSROOT/lib64", + "$SYSROOT/usr/lib64", + "$SYSROOT/lib", + "$SYSROOT/usr/lib", + ] else: - default_paths = ['$SYSROOT/lib', '$SYSROOT/usr/lib'] + default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"] elif binary.format == lief.EXE_FORMATS.MACHO: - default_paths = ['$SYSROOT/usr/lib'] + default_paths = ["$SYSROOT/usr/lib"] elif binary.format == lief.EXE_FORMATS.PE: # We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in # get_rpaths() instead since we need to carefully control the order. - default_paths = ['$SYSROOT/System32/Wbem', '$SYSROOT/System32/WindowsPowerShell/v1.0'] + default_paths = [ + "$SYSROOT/System32/Wbem", + "$SYSROOT/System32/WindowsPowerShell/v1.0", + ] results = {} rpaths_by_binary = dict() parents_by_filename = dict({filename: None}) @@ -428,18 +495,20 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, if binary.format == lief.EXE_FORMATS.PE: tmp_filename = filename2 while tmp_filename: - if not parent_exe_dirname and codefile_type(tmp_filename) == 'EXEfile': + if ( + not parent_exe_dirname + and codefile_type(tmp_filename) == "EXEfile" + ): parent_exe_dirname = os.path.dirname(tmp_filename) tmp_filename = parents_by_filename[tmp_filename] else: parent_exe_dirname = exedir # This is a hack for Python on Windows. Sorry. - if '.pyd' in filename2 or (os.sep + 'DLLs' + os.sep) in filename2: - parent_exe_dirname = envroot.replace(os.sep, '/') + '/DLLs' - rpaths_by_binary[filename2] = get_rpaths(binary, - parent_exe_dirname, - envroot.replace(os.sep, '/'), - sysroot) + if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2: + parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs" + rpaths_by_binary[filename2] = get_rpaths( + binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot + ) tmp_filename = filename2 rpaths_transitive = [] if binary.format == lief.EXE_FORMATS.PE: @@ -452,24 +521,33 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, if filename2 in libraries: # Happens on macOS, leading to cycles. 
libraries.remove(filename2) # RPATH is implicit everywhere except macOS, make it explicit to simplify things. - these_orig = [('$RPATH/' + lib if not lib.startswith('/') and not lib.startswith('$') and # noqa - binary.format != lief.EXE_FORMATS.MACHO else lib) - for lib in libraries] + these_orig = [ + ( + "$RPATH/" + lib + if not lib.startswith("/") + and not lib.startswith("$") + and binary.format != lief.EXE_FORMATS.MACHO # noqa + else lib + ) + for lib in libraries + ] for lib, orig in zip(libraries, these_orig): - resolved = _get_resolved_location(binary, - orig, - exedir, - exedir, - rpaths_transitive=rpaths_transitive, - default_paths=default_paths, - sysroot=sysroot) + resolved = _get_resolved_location( + binary, + orig, + exedir, + exedir, + rpaths_transitive=rpaths_transitive, + default_paths=default_paths, + sysroot=sysroot, + ) path_fixed = os.path.normpath(resolved[0]) # Test, randomise case. We only allow for the filename part to be random, and we allow that # only for Windows DLLs. We may need a special case for Lib (from Python) vs lib (from R) # too, but in general we want to enforce case checking as much as we can since even Windows # can be run case-sensitively if the user wishes. # - ''' + """ if binary.format == lief.EXE_FORMATS.PE: import random path_fixed = os.path.dirname(path_fixed) + os.sep + \ @@ -478,11 +556,15 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, path_fixed = path_fixed.replace(os.sep + 'lib' + os.sep, os.sep + 'Lib' + os.sep) else: path_fixed = path_fixed.replace(os.sep + 'Lib' + os.sep, os.sep + 'lib' + os.sep) - ''' + """ if resolve_filenames: - rec = {'orig': orig, 'resolved': path_fixed, 'rpaths': rpaths_transitive} + rec = { + "orig": orig, + "resolved": path_fixed, + "rpaths": rpaths_transitive, + } else: - rec = {'orig': orig, 'rpaths': rpaths_transitive} + rec = {"orig": orig, "rpaths": rpaths_transitive} results[lib] = rec parents_by_filename[resolved[0]] = filename2 if recurse: @@ -492,43 +574,72 @@ def inspect_linkages_lief(filename, resolve_filenames=True, recurse=True, return results -def get_linkages(filename, resolve_filenames=True, recurse=True, - sysroot='', envroot='', arch='native'): +def get_linkages( + filename, + resolve_filenames=True, + recurse=True, + sysroot="", + envroot="", + arch="native", +): # When we switch to lief, want to ensure these results do not change. # We do not support Windows yet with pyldd. 
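# [Editorial sketch, not part of the patch] Typical call into the function defined above;
# the library path and environment prefix are assumptions for illustration. With LIEF
# available, the result maps each needed library to a record with "orig", "resolved",
# and "rpaths" entries.
from conda_build.os_utils.liefldd import get_linkages

deps = get_linkages(
    "/opt/conda/envs/_h_env/lib/libexample.so",
    sysroot="",
    envroot="/opt/conda/envs/_h_env",
)
for soname, rec in deps.items():
    print(soname, "->", rec.get("resolved", rec["orig"]))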
result_pyldd = [] debug = False if not have_lief or debug: - if codefile_type(filename) not in ('DLLfile', 'EXEfile'): - result_pyldd = inspect_linkages_pyldd(filename, resolve_filenames=resolve_filenames, recurse=recurse, - sysroot=sysroot, arch=arch) + if codefile_type(filename) not in ("DLLfile", "EXEfile"): + result_pyldd = inspect_linkages_pyldd( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + arch=arch, + ) if not have_lief: return result_pyldd else: - print(f"WARNING: failed to get_linkages, codefile_type('{filename}')={codefile_type(filename)}") + print( + f"WARNING: failed to get_linkages, codefile_type('{filename}')={codefile_type(filename)}" + ) return {} - result_lief = inspect_linkages_lief(filename, resolve_filenames=resolve_filenames, recurse=recurse, - sysroot=sysroot, envroot=envroot, arch=arch) + result_lief = inspect_linkages_lief( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + envroot=envroot, + arch=arch, + ) if debug and result_pyldd and set(result_lief) != set(result_pyldd): - print("WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)". - format(filename, resolve_filenames, recurse, sysroot, envroot, arch, result_lief, result_pyldd)) + print( + "WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)".format( + filename, + resolve_filenames, + recurse, + sysroot, + envroot, + arch, + result_lief, + result_pyldd, + ) + ) return result_lief -def get_imports(file, arch='native'): +def get_imports(file, arch="native"): binary = ensure_binary(file) return [str(i) for i in binary.imported_functions] def _get_archive_signature(file): try: - with open(file, 'rb') as f: + with open(file, "rb") as f: index = 0 content = f.read(8) - signature, = struct.unpack('<8s', content[index:8]) + (signature,) = struct.unpack("<8s", content[index:8]) return signature, 8 except: - return '', 0 + return "", 0 debug_static_archives = 0 @@ -536,7 +647,7 @@ def _get_archive_signature(file): def is_archive(file): signature, _ = _get_archive_signature(file) - return True if signature == b'!\n' else False + return True if signature == b"!\n" else False def get_static_lib_exports(file): @@ -546,7 +657,7 @@ def get_static_lib_exports(file): # https://en.wikipedia.org/wiki/Ar_(Unix) # https://web.archive.org/web/20100314154747/http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx def _parse_ar_hdr(content, index): - ''' + """ 0 16 File identifier ASCII 16 12 File modification timestamp Decimal 28 6 Owner ID Decimal @@ -554,44 +665,50 @@ def _parse_ar_hdr(content, index): 40 8 File mode Octal 48 10 File size in bytes Decimal 58 2 Ending characters 0x60 0x0A - ''' - header_fmt = '<16s 12s 6s 6s 8s 10s 2s' + """ + header_fmt = "<16s 12s 6s 6s 8s 10s 2s" header_sz = struct.calcsize(header_fmt) - name, modified, owner, group, mode, size, ending = \ - struct.unpack(header_fmt, content[index:index + header_sz]) + name, modified, owner, group, mode, size, ending = struct.unpack( + header_fmt, content[index : index + header_sz] + ) try: size = int(size) except: - print(f'ERROR: {name} has non-integral size of {size}') - return index, '', 0, 0, 'INVALID' - name_len = 0 # File data in BSD format archives begin with a name of this length. 
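# [Editorial sketch, not part of the patch] The fixed 60-byte ar member header laid out in
# the docstring above can be decoded with the same struct format the code uses; the member
# name, mode, and size here are made up for illustration.
import struct

fields = [b"hello.o/", b"0", b"0", b"0", b"100644", b"1234", b"\x60\x0a"]
widths = [16, 12, 6, 6, 8, 10, 2]  # identifier, mtime, owner, group, mode, size, ending
hdr = b"".join(f.ljust(w) for f, w in zip(fields, widths))
name, mtime, owner, group, mode, size, ending = struct.unpack("<16s 12s 6s 6s 8s 10s 2s", hdr)
assert int(size) == 1234 and ending == b"\x60\x0a"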
- if name.startswith(b'#1/'): - typ = 'BSD' + print(f"ERROR: {name} has non-integral size of {size}") + return index, "", 0, 0, "INVALID" + name_len = ( + 0 # File data in BSD format archives begin with a name of this length. + ) + if name.startswith(b"#1/"): + typ = "BSD" name_len = int(name[3:]) - name, = struct.unpack('<' + str(name_len) + 's', content[index + header_sz:index + header_sz + name_len]) - if b'\x00' in name: - name = name[:name.find(b'\x00')] - elif name.startswith(b'//'): - typ = 'GNU_TABLE' - elif name.strip() == b'/': - typ = 'GNU_SYMBOLS' - elif name.startswith(b'/'): - typ = 'GNU' + (name,) = struct.unpack( + "<" + str(name_len) + "s", + content[index + header_sz : index + header_sz + name_len], + ) + if b"\x00" in name: + name = name[: name.find(b"\x00")] + elif name.startswith(b"//"): + typ = "GNU_TABLE" + elif name.strip() == b"/": + typ = "GNU_SYMBOLS" + elif name.startswith(b"/"): + typ = "GNU" else: - typ = 'NORMAL' - if b'/' in name: - name = name[:name.find(b'/')] + typ = "NORMAL" + if b"/" in name: + name = name[: name.find(b"/")] # if debug_static_archives: print("index={}, name={}, ending={}, size={}, type={}".format(index, name, ending, size, typ)) index += header_sz + name_len return index, name, name_len, size, typ results = [] signature, len_signature = _get_archive_signature(file) - if signature != b'!\n': + if signature != b"!\n": print(f"ERROR: {file} is not an archive") return results - with open(file, 'rb') as f: + with open(file, "rb") as f: if debug_static_archives: print(f"Archive file {file}") index = 0 @@ -605,64 +722,102 @@ def _parse_ar_hdr(content, index): if debug_static_archives: print(f"ar_hdr index = {hex(index)}") index, name, name_len, size, typ = _parse_ar_hdr(content, index) - if typ == 'GNU_SYMBOLS': + if typ == "GNU_SYMBOLS": # Reference: # https://web.archive.org/web/20070924090618/http://www.microsoft.com/msj/0498/hood0498.aspx - nsymbols, = struct.unpack('>I', content[index:index + 4]) + (nsymbols,) = struct.unpack(">I", content[index : index + 4]) # Reference: # https://docs.microsoft.com/en-us/windows/desktop/api/winnt/ns-winnt-_image_file_header offsets = [] for i in range(nsymbols): - offset, = struct.unpack('>I', content[index + 4 + i * 4:index + 4 + (i + 1) * 4]) + (offset,) = struct.unpack( + ">I", content[index + 4 + i * 4 : index + 4 + (i + 1) * 4] + ) offsets.append(offset) - syms = [symname.decode('utf-8') - for symname in content[index + 4 + (nsymbols * 4):index + size].split(b'\x00')[:nsymbols]] + syms = [ + symname.decode("utf-8") + for symname in content[index + 4 + (nsymbols * 4) : index + size].split( + b"\x00" + )[:nsymbols] + ] for i in range(nsymbols): index2, name, name_len, size, typ = _parse_ar_hdr(content, offsets[i]) obj_starts.add(index2) obj_ends.add(offsets[i]) if debug_static_archives: - print(f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? {content[index2:index2 + 4]}") - elif name.startswith(b'__.SYMDEF'): + print( + f"symname {syms[i]}, offset {offsets[i]}, name {name}, elf? 
{content[index2:index2 + 4]}" + ) + elif name.startswith(b"__.SYMDEF"): # Reference: # http://www.manpagez.com/man/5/ranlib/ # https://opensource.apple.com/source/cctools/cctools-921/misc/libtool.c.auto.html # https://opensource.apple.com/source/cctools/cctools-921/misc/nm.c.auto.html # https://opensource.apple.com/source/cctools/cctools-921/libstuff/writeout.c # https://developer.apple.com/documentation/kernel/nlist_64/1583944-n_type?language=objc - if b'64' in name: + if b"64" in name: # 2 uint64_t, a string table index and an offset - ranlib_struct_field_fmt = 'Q' - toc_integers_fmt = 'Q' + ranlib_struct_field_fmt = "Q" + toc_integers_fmt = "Q" else: # 2 uint32_t, a string table index and an offset - ranlib_struct_field_fmt = 'I' - toc_integers_fmt = 'I' + ranlib_struct_field_fmt = "I" + toc_integers_fmt = "I" ranlib_struct_sz = struct.calcsize(ranlib_struct_field_fmt) * 2 toc_integers_sz = struct.calcsize(toc_integers_fmt) - size_ranlib_structs, = struct.unpack('<' + toc_integers_fmt, content[index:index + toc_integers_sz]) + (size_ranlib_structs,) = struct.unpack( + "<" + toc_integers_fmt, content[index : index + toc_integers_sz] + ) # Each of the ranlib structures consists of a zero based offset into the next # section (a string table of symbols) and an offset from the beginning of # the archive to the start of the archive file which defines the symbol nsymbols = size_ranlib_structs // 8 - size_string_table, = struct.unpack('<' + toc_integers_fmt, - content[index + toc_integers_sz + (nsymbols * ranlib_struct_sz):index + 4 + 4 + (nsymbols * ranlib_struct_sz)]) + (size_string_table,) = struct.unpack( + "<" + toc_integers_fmt, + content[ + index + + toc_integers_sz + + (nsymbols * ranlib_struct_sz) : index + + 4 + + 4 + + (nsymbols * ranlib_struct_sz) + ], + ) ranlib_structs = [] ranlib_index = index + (toc_integers_sz * 2) for i in range(nsymbols): - ran_off, ran_strx = struct.unpack('<' + ranlib_struct_field_fmt + ranlib_struct_field_fmt, - content[ranlib_index + (i * ranlib_struct_sz):ranlib_index + ((i + 1) * ranlib_struct_sz)]) + ran_off, ran_strx = struct.unpack( + "<" + ranlib_struct_field_fmt + ranlib_struct_field_fmt, + content[ + ranlib_index + + (i * ranlib_struct_sz) : ranlib_index + + ((i + 1) * ranlib_struct_sz) + ], + ) ranlib_structs.append((ran_strx, ran_off)) if debug_static_archives > 1: - print("string_table: start: {} end: {}".format(hex(ranlib_index + (nsymbols * ranlib_struct_sz)), - hex(ranlib_index + (nsymbols * ranlib_struct_sz) + size_string_table))) - string_table = content[ranlib_index + (nsymbols * ranlib_struct_sz):ranlib_index + (nsymbols * ranlib_struct_sz) + size_string_table] - string_table = string_table.decode('utf-8', errors='ignore') + print( + "string_table: start: {} end: {}".format( + hex(ranlib_index + (nsymbols * ranlib_struct_sz)), + hex( + ranlib_index + + (nsymbols * ranlib_struct_sz) + + size_string_table + ), + ) + ) + string_table = content[ + ranlib_index + + (nsymbols * ranlib_struct_sz) : ranlib_index + + (nsymbols * ranlib_struct_sz) + + size_string_table + ] + string_table = string_table.decode("utf-8", errors="ignore") syms = [] for i in range(nsymbols): ranlib_struct = ranlib_structs[i] strx, off = ranlib_struct - sym = string_table[strx:strx + string_table[strx:].find('\x00')] + sym = string_table[strx : strx + string_table[strx:].find("\x00")] syms.append(sym) if debug_static_archives > 1: print(f"{syms[i]} :: strx={hex(strx)}, off={hex(off)}") @@ -674,26 +829,28 @@ def _parse_ar_hdr(content, index): obj_starts = 
sorted(list(obj_starts)) obj_ends = sorted(list(obj_ends))[1:] if debug_static_archives > 1: - print('obj_starts: {}'.format(" ".join(f'0x{o:05x}' for o in obj_starts))) + print("obj_starts: {}".format(" ".join(f"0x{o:05x}" for o in obj_starts))) if debug_static_archives > 1: - print(' obj_ends: {}'.format(" ".join(f'0x{o:05x}' for o in obj_ends))) + print(" obj_ends: {}".format(" ".join(f"0x{o:05x}" for o in obj_ends))) for obj_start, obj_end in zip(obj_starts, obj_ends): - IMAGE_FILE_MACHINE_I386 = 0x014c + IMAGE_FILE_MACHINE_I386 = 0x014C IMAGE_FILE_MACHINE_AMD64 = 0x8664 - MACHINE_TYPE, = struct.unpack(' 0: print(hex(obj_start), hex(obj_end), obj_end - obj_start) if MACHINE_TYPE in (IMAGE_FILE_MACHINE_I386, IMAGE_FILE_MACHINE_AMD64): # 'This file is not a PE binary' (yeah, fair enough, it's a COFF file). # Reported at https://github.com/lief-project/LIEF/issues/233#issuecomment-452580391 try: - obj = lief.PE.parse(raw=content[obj_start:obj_end - 1]) + obj = lief.PE.parse(raw=content[obj_start : obj_end - 1]) except: if debug_static_archives > 0: - print("get_static_lib_exports failed, PECOFF not supported by LIEF nor pyldd.") + print( + "get_static_lib_exports failed, PECOFF not supported by LIEF nor pyldd." + ) pass obj = None - elif MACHINE_TYPE == 0xfacf: + elif MACHINE_TYPE == 0xFACF: obj = lief.parse(raw=content[obj_start:obj_end]) # filename = '/Users/rdonnelly/conda/conda-build/macOS-libpython2.7.a/getbuildinfo.o' @@ -716,7 +873,12 @@ def _parse_ar_hdr(content, index): # if sym.is_function and (sym.exported or sym.is_static): # functions.append(sym.name) functions.extend(get_symbols(obj, defined=True, undefined=False)) - return functions, [[0, 0] for sym in functions], functions, [[0, 0] for sym in functions] + return ( + functions, + [[0, 0] for sym in functions], + functions, + [[0, 0] for sym in functions], + ) def get_static_lib_exports_nope(file): @@ -724,79 +886,102 @@ def get_static_lib_exports_nope(file): def get_static_lib_exports_nm(filename): - nm_exe = find_executable('nm') - if sys.platform == 'win32' and not nm_exe: - nm_exe = 'C:\\msys64\\mingw64\\bin\\nm.exe' + nm_exe = find_executable("nm") + if sys.platform == "win32" and not nm_exe: + nm_exe = "C:\\msys64\\mingw64\\bin\\nm.exe" if not nm_exe or not os.path.exists(nm_exe): return None - flags = '-Pg' - if sys.platform == 'darwin': - flags = '-PgUj' + flags = "-Pg" + if sys.platform == "darwin": + flags = "-PgUj" try: - out, _ = Popen([nm_exe, flags, filename], shell=False, - stdout=PIPE).communicate() - results = out.decode('utf-8').replace('\r\n', '\n').splitlines() - results = [r.split(' ')[0] for r in results if ' T ' in r and not r.startswith('.text ')] + out, _ = Popen( + [nm_exe, flags, filename], shell=False, stdout=PIPE + ).communicate() + results = out.decode("utf-8").replace("\r\n", "\n").splitlines() + results = [ + r.split(" ")[0] + for r in results + if " T " in r and not r.startswith(".text ") + ] results.sort() except OSError: # nm may not be available or have the correct permissions, this # should not cause a failure, see gh-3287 - print(f'WARNING: nm: failed to get_exports({filename})') + print(f"WARNING: nm: failed to get_exports({filename})") results = None return results def get_static_lib_exports_dumpbin(filename): - r''' + r""" > dumpbin /SYMBOLS /NOLOGO C:\msys64\mingw64\lib\libasprintf.a > C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.20.27508\bin\Hostx64\x64\dumpbin.exe > 020 00000000 UNDEF notype () External | malloc > vs > 004 00000010 SECT1 notype () 
External | _ZN3gnu11autosprintfC1EPKcz - ''' - dumpbin_exe = find_executable('dumpbin') + """ + dumpbin_exe = find_executable("dumpbin") if not dumpbin_exe: - ''' + """ Oh the fun: https://stackoverflow.com/questions/41106407/programmatically-finding-the-vs2017-installation-directory Nice to see MS avoiding the Windows Registry though, took them a while! Still, let's ignore that, we just want a good dumpbin! - ''' - pfx86 = os.environ['PROGRAMFILES(X86)'] - programs = [p for p in os.listdir(pfx86) if p.startswith("Microsoft Visual Studio")] + """ + pfx86 = os.environ["PROGRAMFILES(X86)"] + programs = [ + p for p in os.listdir(pfx86) if p.startswith("Microsoft Visual Studio") + ] results = [] for p in programs: from conda_build.utils import rec_glob + dumpbin = rec_glob(os.path.join(pfx86, p), ("dumpbin.exe",)) for result in dumpbin: try: - out, _ = Popen([result, filename], shell=False, - stdout=PIPE).communicate() - lines = out.decode('utf-8').splitlines() - version = lines[0].split(' ')[-1] + out, _ = Popen( + [result, filename], shell=False, stdout=PIPE + ).communicate() + lines = out.decode("utf-8").splitlines() + version = lines[0].split(" ")[-1] results.append((result, version)) except: pass from conda_build.conda_interface import VersionOrder + results = sorted(results, key=lambda x: VersionOrder(x[1])) dumpbin_exe = results[-1][0] if not dumpbin_exe: return None - flags = ['/NOLOGO'] + flags = ["/NOLOGO"] exports = [] - for flag in ('/SYMBOLS', '/EXPORTS'): + for flag in ("/SYMBOLS", "/EXPORTS"): try: - out, _ = Popen([dumpbin_exe] + flags + [flag] + [filename], shell=False, - stdout=PIPE).communicate() - results = out.decode('utf-8').splitlines() - if flag == '/EXPORTS': - exports.extend([r.split(' ')[-1] for r in results if r.startswith(' ')]) + out, _ = Popen( + [dumpbin_exe] + flags + [flag] + [filename], shell=False, stdout=PIPE + ).communicate() + results = out.decode("utf-8").splitlines() + if flag == "/EXPORTS": + exports.extend( + [ + r.split(" ")[-1] + for r in results + if r.startswith(" ") + ] + ) else: - exports.extend([r.split(' ')[-1] for r in results if ('External ' in r and 'UNDEF ' not in r)]) + exports.extend( + [ + r.split(" ")[-1] + for r in results + if ("External " in r and "UNDEF " not in r) + ] + ) except OSError: # nm may not be available or have the correct permissions, this # should not cause a failure, see gh-3287 - print(f'WARNING: nm: failed to get_exports({filename})') + print(f"WARNING: nm: failed to get_exports({filename})") exports = None exports.sort() return exports @@ -814,12 +999,14 @@ def get_static_lib_exports_externally(filename): return res_nm -def get_exports(filename, arch='native', enable_static=False): +def get_exports(filename, arch="native", enable_static=False): result = [] if enable_static and isinstance(filename, str): - if (os.path.exists(filename) and - (filename.endswith('.a') or filename.endswith('.lib')) and - is_archive(filename)) and sys.platform != 'win32': + if ( + os.path.exists(filename) + and (filename.endswith(".a") or filename.endswith(".lib")) + and is_archive(filename) + ) and sys.platform != "win32": # syms = os.system('nm -g {}'.filename) # on macOS at least: # -PgUj is: @@ -827,16 +1014,18 @@ def get_exports(filename, arch='native', enable_static=False): # g: global (exported) only # U: not undefined # j: name only - if debug_static_archives or sys.platform == 'win32': + if debug_static_archives or sys.platform == "win32": exports = get_static_lib_exports_externally(filename) # Now, our own implementation which 
does not require nm and can # handle .lib files. - if sys.platform == 'win32': + if sys.platform == "win32": # Sorry, LIEF does not handle COFF (only PECOFF) and object files are COFF. exports2 = exports else: try: - exports2, flags2, exports2_all, flags2_all = get_static_lib_exports(filename) + exports2, flags2, exports2_all, flags2_all = get_static_lib_exports( + filename + ) except: print(f"WARNING :: Failed to get_static_lib_exports({filename})") exports2 = [] @@ -849,10 +1038,21 @@ def get_exports(filename, arch='native', enable_static=False): if debug_static_archives: print(f"errors: {error_count} (-{len(diff1)}, +{len(diff2)})") if debug_static_archives: - print("WARNING :: Disagreement regarding static lib exports in {} between nm (nsyms={}) and lielfldd (nsyms={}):" - .format(filename, len(exports), len(exports2))) - print("** nm.diff(liefldd) [MISSING SYMBOLS] **\n{}".format('\n'.join(diff1))) - print("** liefldd.diff(nm) [ EXTRA SYMBOLS] **\n{}".format('\n'.join(diff2))) + print( + "WARNING :: Disagreement regarding static lib exports in {} between nm (nsyms={}) and lielfldd (nsyms={}):".format( + filename, len(exports), len(exports2) + ) + ) + print( + "** nm.diff(liefldd) [MISSING SYMBOLS] **\n{}".format( + "\n".join(diff1) + ) + ) + print( + "** liefldd.diff(nm) [ EXTRA SYMBOLS] **\n{}".format( + "\n".join(diff2) + ) + ) if not result: binary = ensure_binary(filename) @@ -861,7 +1061,7 @@ def get_exports(filename, arch='native', enable_static=False): return result -def get_relocations(filename, arch='native'): +def get_relocations(filename, arch="native"): if not os.path.exists(filename): return [] try: @@ -874,12 +1074,12 @@ def get_relocations(filename, arch='native'): res.append(r.symbol.name) return res except: - print(f'WARNING: liefldd: failed get_relocations({filename})') + print(f"WARNING: liefldd: failed get_relocations({filename})") return [] -def get_symbols(file, defined=True, undefined=True, notexported=False, arch='native'): +def get_symbols(file, defined=True, undefined=True, notexported=False, arch="native"): binary = ensure_binary(file) first_undefined_symbol = 0 @@ -888,7 +1088,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat try: dyscmd = binary.dynamic_symbol_command first_undefined_symbol = dyscmd.idx_undefined_symbol - last_undefined_symbol = first_undefined_symbol + dyscmd.nb_undefined_symbols - 1 + last_undefined_symbol = ( + first_undefined_symbol + dyscmd.nb_undefined_symbols - 1 + ) except: pass res = [] @@ -903,15 +1105,17 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat for index, s in enumerate(syms): if debug_static_archives > 1: print(s) -# if s.type&16: -# continue + # if s.type&16: + # continue is_notexported = True - is_undefined = index >= first_undefined_symbol and index <= last_undefined_symbol + is_undefined = ( + index >= first_undefined_symbol and index <= last_undefined_symbol + ) if binary.__class__ != lief.MachO.Binary: if isinstance(s, str): - s_name = '%s' % s + s_name = "%s" % s else: - s_name = '%s' % s.name + s_name = "%s" % s.name if s.exported and s.imported: print(f"Weird, symbol {s.name} is both imported and exported") if s.exported: @@ -920,16 +1124,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch='nat elif s.imported: is_undefined = False else: - s_name = '%s' % s.name + s_name = "%s" % s.name is_notexported = False if s.type & 1 else True # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, 
s.value)) # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, s.type, s.name)) if notexported is True or is_notexported is False: if is_undefined and undefined: - res.append('%s' % s_name) + res.append("%s" % s_name) elif not is_undefined and defined: - res.append('%s' % s_name) + res.append("%s" % s_name) return res @@ -941,6 +1145,7 @@ class memoized_by_arg0_filehash: The first argument is required to be an existing filename and it is always converted to an inode number. """ + def __init__(self, func): self.func = func self.cache = {} @@ -951,7 +1156,7 @@ def __call__(self, *args, **kw): for arg in args: if arg is args[0]: sha1 = hashlib.sha1() - with open(arg, 'rb') as f: + with open(arg, "rb") as f: while True: data = f.read(65536) if not data: @@ -978,17 +1183,17 @@ def __call__(self, *args, **kw): @memoized_by_arg0_filehash -def get_exports_memoized(filename, arch='native', enable_static=False): +def get_exports_memoized(filename, arch="native", enable_static=False): return get_exports(filename, arch=arch, enable_static=enable_static) @memoized_by_arg0_filehash -def get_imports_memoized(filename, arch='native'): +def get_imports_memoized(filename, arch="native"): return get_imports(filename, arch=arch) @memoized_by_arg0_filehash -def get_relocations_memoized(filename, arch='native'): +def get_relocations_memoized(filename, arch="native"): return get_relocations(filename, arch=arch) @@ -998,7 +1203,14 @@ def get_symbols_memoized(filename, defined, undefined, arch): @memoized_by_arg0_filehash -def get_linkages_memoized(filename, resolve_filenames, recurse, - sysroot='', envroot='', arch='native'): - return get_linkages(filename, resolve_filenames=resolve_filenames, - recurse=recurse, sysroot=sysroot, envroot=envroot, arch=arch) +def get_linkages_memoized( + filename, resolve_filenames, recurse, sysroot="", envroot="", arch="native" +): + return get_linkages( + filename, + resolve_filenames=resolve_filenames, + recurse=recurse, + sysroot=sysroot, + envroot=envroot, + arch=arch, + ) diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 5e19303bf7..eb13669049 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -1,94 +1,113 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import os import re import stat import sys -from subprocess import Popen, check_output, PIPE, STDOUT, CalledProcessError -import os -from conda_build import utils from itertools import islice +from subprocess import PIPE, STDOUT, CalledProcessError, Popen, check_output + +from conda_build import utils from conda_build.os_utils.external import find_preferably_prefixed_executable NO_EXT = ( - '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', - '.xml', '.png', '.jpg', '.gif', '.class', '.in', '.sh', - '.yaml', '.md', '.ac', '.m4', '.cc', '.plist', + ".py", + ".pyc", + ".pyo", + ".h", + ".a", + ".c", + ".txt", + ".html", + ".xml", + ".png", + ".jpg", + ".gif", + ".class", + ".in", + ".sh", + ".yaml", + ".md", + ".ac", + ".m4", + ".cc", + ".plist", ) MAGIC = { - b'\xca\xfe\xba\xbe': 'MachO-universal', - b'\xce\xfa\xed\xfe': 'MachO-i386', - b'\xcf\xfa\xed\xfe': 'MachO-x86_64', - b'\xfe\xed\xfa\xce': 'MachO-ppc', - b'\xfe\xed\xfa\xcf': 'MachO-ppc64', + b"\xca\xfe\xba\xbe": "MachO-universal", + b"\xce\xfa\xed\xfe": "MachO-i386", + b"\xcf\xfa\xed\xfe": "MachO-x86_64", + b"\xfe\xed\xfa\xce": "MachO-ppc", + b"\xfe\xed\xfa\xcf": "MachO-ppc64", } FILETYPE = { - 1: 'MH_OBJECT', - 2: 'MH_EXECUTE', - 3: 'MH_FVMLIB', - 4: 
'MH_CORE', - 5: 'MH_PRELOAD', - 6: 'MH_DYLIB', - 7: 'MH_DYLINKER', - 8: 'MH_BUNDLE', - 9: 'MH_DYLIB_STUB', - 10: 'MH_DSYM', - 11: 'MH_KEXT_BUNDLE', + 1: "MH_OBJECT", + 2: "MH_EXECUTE", + 3: "MH_FVMLIB", + 4: "MH_CORE", + 5: "MH_PRELOAD", + 6: "MH_DYLIB", + 7: "MH_DYLINKER", + 8: "MH_BUNDLE", + 9: "MH_DYLIB_STUB", + 10: "MH_DSYM", + 11: "MH_KEXT_BUNDLE", } def is_macho(path): if path.endswith(NO_EXT) or os.path.islink(path) or not os.path.isfile(path): return False - with open(path, 'rb') as fi: + with open(path, "rb") as fi: head = fi.read(4) return bool(head in MAGIC) def is_dylib(path, build_prefix): - return human_filetype(path) == 'DYLIB' + return human_filetype(path) == "DYLIB" def human_filetype(path, build_prefix): - otool = find_apple_cctools_executable('otool', build_prefix) - output = check_output((otool, '-h', path)).decode('utf-8') + otool = find_apple_cctools_executable("otool", build_prefix) + output = check_output((otool, "-h", path)).decode("utf-8") lines = output.splitlines() - if not lines[0].startswith((path, 'Mach header')): + if not lines[0].startswith((path, "Mach header")): raise ValueError( - 'Expected `otool -h` output to start with' - ' Mach header or {}, got:\n{}'.format(path, output) + "Expected `otool -h` output to start with" + " Mach header or {}, got:\n{}".format(path, output) ) - assert lines[0].startswith((path, 'Mach header')), path + assert lines[0].startswith((path, "Mach header")), path for line in lines: - if line.strip().startswith('0x'): + if line.strip().startswith("0x"): header = line.split() filetype = int(header[4]) return FILETYPE[filetype][3:] def is_dylib_info(lines): - dylib_info = ('LC_ID_DYLIB', 'LC_LOAD_DYLIB') + dylib_info = ("LC_ID_DYLIB", "LC_LOAD_DYLIB") if len(lines) > 1 and lines[1].split()[1] in dylib_info: return True return False def is_id_dylib(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_ID_DYLIB': + if len(lines) > 1 and lines[1].split()[1] == "LC_ID_DYLIB": return True return False def is_load_dylib(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_LOAD_DYLIB': + if len(lines) > 1 and lines[1].split()[1] == "LC_LOAD_DYLIB": return True return False def is_rpath(lines): - if len(lines) > 1 and lines[1].split()[1] == 'LC_RPATH': + if len(lines) > 1 and lines[1].split()[1] == "LC_RPATH": return True return False @@ -122,20 +141,20 @@ def _get_matching_load_commands(lines, cb_filter): # is fairly simple so let's just hardcode it for speed. if len(listy) == 2: key, value = listy - elif listy[0] == 'name' or listy[0] == 'path': + elif listy[0] == "name" or listy[0] == "path": # Create an entry for 'name offset' if there is one # as that can be useful if we need to know if there # is space to patch it for relocation purposes. 
- if listy[2] == '(offset': - key = listy[0] + ' offset' + if listy[2] == "(offset": + key = listy[0] + " offset" value = int(listy[3][:-1]) lcdict[key] = value key, value = listy[0:2] - elif listy[0] == 'time': - key = ' '.join(listy[0:3]) - value = ' '.join(listy[3:]) - elif listy[0] in ('current', 'compatibility'): - key = ' '.join(listy[0:2]) + elif listy[0] == "time": + key = " ".join(listy[0:3]) + value = " ".join(listy[3:]) + elif listy[0] in ("current", "compatibility"): + key = " ".join(listy[0:2]) value = listy[2] try: value = int(value) @@ -150,25 +169,34 @@ def find_apple_cctools_executable(name, build_prefix, nofail=False): tools = find_preferably_prefixed_executable(name, build_prefix, all_matches=True) for tool in tools: try: - if '/usr/bin' in tool: - with open(tool, 'rb') as f: + if "/usr/bin" in tool: + with open(tool, "rb") as f: s = f.read() - if s.find(b'usr/lib/libxcselect.dylib') != -1: + if s.find(b"usr/lib/libxcselect.dylib") != -1: # We ask xcrun. try: - tool_xcr = check_output(['xcrun', '-find', name], stderr=STDOUT).decode('utf-8').splitlines()[0] + tool_xcr = ( + check_output(["xcrun", "-find", name], stderr=STDOUT) + .decode("utf-8") + .splitlines()[0] + ) except Exception as e: log = utils.get_logger(__name__) - log.error("ERROR :: Found `{}` but is is an Apple Xcode stub executable\n" - "and it returned an error:\n{}".format(tool, e.output)) + log.error( + "ERROR :: Found `{}` but is is an Apple Xcode stub executable\n" + "and it returned an error:\n{}".format(tool, e.output) + ) raise e tool = tool_xcr if os.path.exists(tool): return tool except Exception as _: # noqa - print("ERROR :: Failed to run `{}`. Please use `conda` to install `cctools` into your base environment.\n" - " An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`." - .format(tool)) + print( + "ERROR :: Failed to run `{}`. Please use `conda` to install `cctools` into your base environment.\n" + " An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`.".format( + tool + ) + ) sys.exit(1) return tool @@ -190,15 +218,17 @@ def otool(path, build_prefix=None, cb_filter=is_dylib_info): Any key values that can be converted to integers are converted to integers, the rest are strings. """ - otool = find_apple_cctools_executable('otool', build_prefix) - lines = check_output([otool, '-l', path], - stderr=STDOUT).decode('utf-8') + otool = find_apple_cctools_executable("otool", build_prefix) + lines = check_output([otool, "-l", path], stderr=STDOUT).decode("utf-8") # llvm-objdump returns 0 for some things that are anything but successful completion. lines_split = lines.splitlines() # 'invalid', 'expected' and 'unexpected' are too generic # here so also check that we do not get 'useful' output. 
- if len(lines_split) < 10 and (re.match('.*(is not a Mach-O|invalid|expected|unexpected).*', - lines, re.MULTILINE)): + if len(lines_split) < 10 and ( + re.match( + ".*(is not a Mach-O|invalid|expected|unexpected).*", lines, re.MULTILINE + ) + ): raise CalledProcessError(-1, otool) return _get_matching_load_commands(lines_split, cb_filter) @@ -206,22 +236,22 @@ def otool(path, build_prefix=None, cb_filter=is_dylib_info): def get_dylibs(path, build_prefix=None): """Return a list of the loaded dylib pathnames""" dylib_loads = otool(path, build_prefix, is_load_dylib) - return [dylib_load['name'] for dylib_load in dylib_loads] + return [dylib_load["name"] for dylib_load in dylib_loads] def get_id(path, build_prefix=None): """Returns the id name of the Mach-O file `path` or an empty string""" dylib_loads = otool(path, build_prefix, is_id_dylib) try: - return [dylib_load['name'] for dylib_load in dylib_loads][0] + return [dylib_load["name"] for dylib_load in dylib_loads][0] except: - return '' + return "" def get_rpaths(path, build_prefix=None): """Return a list of the dylib rpaths""" dylib_loads = otool(path, build_prefix, is_rpath) - return [dylib_load['path'] for dylib_load in dylib_loads] + return [dylib_load["path"] for dylib_load in dylib_loads] def _chmod(filename, mode): @@ -233,18 +263,18 @@ def _chmod(filename, mode): def install_name_tool(args, build_prefix=None, verbose=False): - args_full = [find_apple_cctools_executable('install_name_tool', build_prefix)] + args_full = [find_apple_cctools_executable("install_name_tool", build_prefix)] args_full.extend(args) if verbose: - print(' '.join(args_full)) + print(" ".join(args_full)) old_mode = stat.S_IMODE(os.stat(args[-1]).st_mode) new_mode = old_mode | stat.S_IWUSR if old_mode != new_mode: _chmod(args[-1], new_mode) subproc = Popen(args_full, stdout=PIPE, stderr=PIPE) out, err = subproc.communicate() - out = out.decode('utf-8') - err = err.decode('utf-8') + out = out.decode("utf-8") + err = err.decode("utf-8") if old_mode != new_mode: _chmod(args[-1], old_mode) return subproc.returncode, out, err @@ -254,7 +284,7 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): """Add an `rpath` to the Mach-O file at `path`""" if not is_macho(path): return - args = ['-add_rpath', rpath, path] + args = ["-add_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: print("Skipping Mach-O dynamic shared library stub file %s\n" % path) @@ -265,15 +295,14 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d" - % code) + raise RuntimeError("install_name_tool failed with exit status %d" % code) def delete_rpath(path, rpath, build_prefix=None, verbose=False): """Delete an `rpath` from the Mach-O file at `path`""" if not is_macho(path): return - args = ['-delete_rpath', rpath, path] + args = ["-delete_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: print("Skipping Mach-O dynamic shared library stub file %s\n" % path) @@ -284,8 +313,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False): else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d" - % code) + raise RuntimeError("install_name_tool failed with exit status %d" % code) def install_name_change(path, build_prefix, cb_func, dylibs, 
verbose=False): @@ -308,10 +336,10 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): ret = True for index, new_name in changes: args = [] - if dylibs[index]['cmd'] == 'LC_ID_DYLIB': - args.extend(('-id', new_name, path)) + if dylibs[index]["cmd"] == "LC_ID_DYLIB": + args.extend(("-id", new_name, path)) else: - args.extend(('-change', dylibs[index]['name'], new_name, path)) + args.extend(("-change", dylibs[index]["name"], new_name, path)) code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: print("Skipping Mach-O dynamic shared library stub file %s" % path) @@ -320,12 +348,14 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): else: print(stderr, file=sys.stderr) if code: - raise RuntimeError("install_name_tool failed with exit status %d, stderr of:\n%s" - % (code, stderr)) + raise RuntimeError( + "install_name_tool failed with exit status %d, stderr of:\n%s" + % (code, stderr) + ) return ret -if __name__ == '__main__': - if sys.platform == 'darwin': - for path in '/bin/ls', '/etc/locate.rc': +if __name__ == "__main__": + if sys.platform == "darwin": + for path in "/bin/ls", "/etc/locate.rc": print(path, is_macho(path)) diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index efce517ce2..1e1cd4e4cc 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause import argparse import glob +import logging import os import re import struct import sys -import logging from conda_build.utils import ensure_list, get_logger @@ -57,11 +57,11 @@ def __check_security_property(opt, filename, pattern): return results ''' -''' +""" Eventual goal is to become a full replacement for `ldd` `otool -L` and `ntldd' For now only works with ELF and Mach-O files and command-line execution is not supported. To get the list of shared libs use `inspect_linkages(filename)`. -''' +""" LDD_USAGE = """ Usage: ldd [OPTION]... FILE... @@ -74,7 +74,7 @@ def __check_security_property(opt, filename, pattern): For bug reporting instructions, please see: . -""" # noqa +""" # noqa OTOOL_USAGE = """ Usage: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool [-arch arch_type] [-fahlLDtdorSTMRIHGvVcXmqQjCP] [-mcpu=arg] [--version] ... 
@@ -110,35 +110,37 @@ def __check_security_property(opt, filename, pattern): -P print the info plist section as strings -C print linker optimization hints --version print the version of /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool -""" # noqa +""" # noqa ############################################## # Constants used in the Mach-O specification # ############################################## -MH_MAGIC = 0xfeedface -MH_CIGAM = 0xcefaedfe -MH_MAGIC_64 = 0xfeedfacf -MH_CIGAM_64 = 0xcffaedfe -FAT_MAGIC = 0xcafebabe -BIG_ENDIAN = '>' -LITTLE_ENDIAN = '<' -LC_ID_DYLIB = 0xd -LC_LOAD_DYLIB = 0xc +MH_MAGIC = 0xFEEDFACE +MH_CIGAM = 0xCEFAEDFE +MH_MAGIC_64 = 0xFEEDFACF +MH_CIGAM_64 = 0xCFFAEDFE +FAT_MAGIC = 0xCAFEBABE +BIG_ENDIAN = ">" +LITTLE_ENDIAN = "<" +LC_ID_DYLIB = 0xD +LC_LOAD_DYLIB = 0xC LC_LOAD_WEAK_DYLIB = 0x18 LC_LOAD_UPWARD_DYLIB = 0x23 -LC_REEXPORT_DYLIB = 0x1f +LC_REEXPORT_DYLIB = 0x1F LC_LAZY_LOAD_DYLIB = 0x20 -LC_LOAD_DYLIBS = (LC_LOAD_DYLIB, - LC_LOAD_WEAK_DYLIB, - LC_LOAD_UPWARD_DYLIB, - LC_LAZY_LOAD_DYLIB, - LC_REEXPORT_DYLIB) +LC_LOAD_DYLIBS = ( + LC_LOAD_DYLIB, + LC_LOAD_WEAK_DYLIB, + LC_LOAD_UPWARD_DYLIB, + LC_LAZY_LOAD_DYLIB, + LC_REEXPORT_DYLIB, +) LC_REQ_DYLD = 0x80000000 -LC_RPATH = 0x1c | LC_REQ_DYLD +LC_RPATH = 0x1C | LC_REQ_DYLD majver = sys.version_info[0] -maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint') +maxint = majver == 3 and getattr(sys, "maxsize") or getattr(sys, "maxint") class IncompleteRead(Exception): @@ -156,11 +158,11 @@ def __init__(self, file_obj): def read(self, size): buf = self._file_obj.read(size) if len(buf) != size: - raise IncompleteRead('requested number of bytes were not read.') + raise IncompleteRead("requested number of bytes were not read.") return buf def __getattr__(self, attr): - if attr == 'read': + if attr == "read": return self.read else: return getattr(self._file_obj, attr) @@ -182,16 +184,17 @@ def __init__(self, fileobj, start=0, size=maxint): self._pos = 0 def __repr__(self): - return '' % ( - self._start, self._end, self._fileobj) + return "" % (self._start, self._end, self._fileobj) def tell(self): return self._pos def _checkwindow(self, seekto, op): if not (self._start <= seekto <= self._end): - raise OSError("%s to offset %d is outside window [%d, %d]" % ( - op, seekto, self._start, self._end)) + raise OSError( + "%s to offset %d is outside window [%d, %d]" + % (op, seekto, self._start, self._end) + ) def seek(self, offset, whence=0): seekto = offset @@ -203,14 +206,14 @@ def seek(self, offset, whence=0): seekto += self._end else: raise OSError(f"Invalid whence argument to seek: {whence!r}") - self._checkwindow(seekto, 'seek') + self._checkwindow(seekto, "seek") self._fileobj.seek(seekto) self._pos = seekto - self._start def write(self, bytes): here = self._start + self._pos - self._checkwindow(here, 'write') - self._checkwindow(here + len(bytes), 'write') + self._checkwindow(here, "write") + self._checkwindow(here + len(bytes), "write") self._fileobj.seek(here, os.SEEK_SET) self._fileobj.write(bytes) self._pos += len(bytes) @@ -218,7 +221,7 @@ def write(self, bytes): def read(self, size=maxint): assert size >= 0 here = self._start + self._pos - self._checkwindow(here, 'read') + self._checkwindow(here, "read") size = min(size, self._end - here) self._fileobj.seek(here, os.SEEK_SET) bytes = self._fileobj.read(size) @@ -258,7 +261,7 @@ def read_data(file, endian, num=1): Read a given number of 32-bits unsigned integers from the given file with the given 
endianness. """ - res = struct.unpack(endian + 'L' * num, file.read(num * 4)) + res = struct.unpack(endian + "L" * num, file.read(num * 4)) if len(res) == 1: return res[0] return res @@ -273,11 +276,11 @@ def replace_lc_load_dylib(file, where, bits, endian, cmd, cmdsize, what, val): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] # If the string is what is being replaced, overwrite it. if load == what: file.seek(where + name_offset, os.SEEK_SET) - file.write(val.encode() + b'\0') + file.write(val.encode() + b"\0") return True return False @@ -291,7 +294,7 @@ def find_lc_load_dylib(file, where, bits, endian, cmd, cmdsize, what): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] # If the string is what is being replaced, overwrite it. if re.match(what, load): return load @@ -306,14 +309,15 @@ def find_lc_rpath(file, where, bits, endian, cmd, cmdsize): file.seek(where + name_offset, os.SEEK_SET) # Read the NUL terminated string load = file.read(cmdsize - name_offset).decode() - load = load[:load.index('\0')] + load = load[: load.index("\0")] return load def do_macho(file, bits, endian, lc_operation, *args): # Read Mach-O header (the magic number is assumed read by the caller) - _cputype, _cpusubtype, filetype, ncmds, _sizeofcmds, _flags \ - = read_data(file, endian, 6) + _cputype, _cpusubtype, filetype, ncmds, _sizeofcmds, _flags = read_data( + file, endian, 6 + ) # 64-bits header has one more field. if bits == 64: read_data(file, endian) @@ -323,8 +327,7 @@ def do_macho(file, bits, endian, lc_operation, *args): where = file.tell() # Read command header cmd, cmdsize = read_data(file, endian, 2) - results.append(lc_operation(file, where, bits, endian, cmd, cmdsize, - *args)) + results.append(lc_operation(file, where, bits, endian, cmd, cmdsize, *args)) # Seek to the next command file.seek(where + cmdsize, os.SEEK_SET) return filetype, results @@ -345,17 +348,15 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): nfat_arch = read_data(file, BIG_ENDIAN) for _n in range(nfat_arch): # Read arch header - _cputype, _cpusubtype, offset, size, _align = \ - read_data(file, BIG_ENDIAN, 5) - do_file(file, lc_operation, offset_size(offset, size), arch, - results, *args) - elif magic == MH_MAGIC and arch in ('any', 'ppc32', 'm68k'): + _cputype, _cpusubtype, offset, size, _align = read_data(file, BIG_ENDIAN, 5) + do_file(file, lc_operation, offset_size(offset, size), arch, results, *args) + elif magic == MH_MAGIC and arch in ("any", "ppc32", "m68k"): results.append(do_macho(file, 32, BIG_ENDIAN, lc_operation, *args)) - elif magic == MH_CIGAM and arch in ('any', 'i386'): + elif magic == MH_CIGAM and arch in ("any", "i386"): results.append(do_macho(file, 32, LITTLE_ENDIAN, lc_operation, *args)) - elif magic == MH_MAGIC_64 and arch in ('any', 'ppc64'): + elif magic == MH_MAGIC_64 and arch in ("any", "ppc64"): results.append(do_macho(file, 64, BIG_ENDIAN, lc_operation, *args)) - elif magic == MH_CIGAM_64 and arch in ('any', 'x86_64'): + elif magic == MH_CIGAM_64 and arch in ("any", "x86_64"): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) @@ -368,13 +369,12 @@ def mach_o_change(path, arch, what, value): assert len(what) >= len(value) results = [] - with open(path, 'r+b') as f: - do_file(f, 
replace_lc_load_dylib, offset_size(), arch, results, - what, value) + with open(path, "r+b") as f: + do_file(f, replace_lc_load_dylib, offset_size(), arch, results, what, value) return results -def mach_o_find_dylibs(ofile, arch, regex='.*'): +def mach_o_find_dylibs(ofile, arch, regex=".*"): """ Finds the executable's view of where any dylibs live without resolving any macros (@rpath, @loader_path, @executable_path) @@ -393,63 +393,70 @@ def mach_o_find_rpaths(ofile, arch): return results -def _get_resolved_location(codefile, - unresolved, - exe_dir, - self_dir, - LD_LIBRARY_PATH='', - default_paths=None, - sysroot='', - resolved_rpath=None): - ''' - From `man ld.so` - - When resolving shared object dependencies, the dynamic linker first inspects each dependency - string to see if it contains a slash (this can occur if a shared object pathname containing - slashes was specified at link time). If a slash is found, then the dependency string is - interpreted as a (relative or absolute) pathname, and the shared object is loaded using that - pathname. - - If a shared object dependency does not contain a slash, then it is searched for in the - following order: - - o Using the directories specified in the DT_RPATH dynamic section attribute of the binary - if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. - - o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in - secure-execution mode; see below). in which case it is ignored. - - o Using the directories specified in the DT_RUNPATH dynamic section attribute of the - binary if present. Such directories are searched only to find those objects required - by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, - which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, - which is applied to searches for all children in the dependency tree. - - o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate - shared objects previously found in the augmented library path. If, however, the binary - was linked with the -z nodeflib linker option, shared objects in the default paths are - skipped. Shared objects installed in hardware capability directories (see below) are - preferred to other shared objects. - - o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default - paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was - linked with the -z nodeflib linker option, this step is skipped. - - Returns a tuple of resolved location, rpath_used, in_sysroot - ''' +def _get_resolved_location( + codefile, + unresolved, + exe_dir, + self_dir, + LD_LIBRARY_PATH="", + default_paths=None, + sysroot="", + resolved_rpath=None, +): + """ + From `man ld.so` + + When resolving shared object dependencies, the dynamic linker first inspects each dependency + string to see if it contains a slash (this can occur if a shared object pathname containing + slashes was specified at link time). If a slash is found, then the dependency string is + interpreted as a (relative or absolute) pathname, and the shared object is loaded using that + pathname. + + If a shared object dependency does not contain a slash, then it is searched for in the + following order: + + o Using the directories specified in the DT_RPATH dynamic section attribute of the binary + if present and DT_RUNPATH attribute does not exist. Use of DT_RPATH is deprecated. 
+ + o Using the environment variable LD_LIBRARY_PATH (unless the executable is being run in + secure-execution mode; see below). in which case it is ignored. + + o Using the directories specified in the DT_RUNPATH dynamic section attribute of the + binary if present. Such directories are searched only to find those objects required + by DT_NEEDED (direct dependencies) entries and do not apply to those objects' children, + which must themselves have their own DT_RUNPATH entries. This is unlike DT_RPATH, + which is applied to searches for all children in the dependency tree. + + o From the cache file /etc/ld.so.cache, which contains a compiled list of candidate + shared objects previously found in the augmented library path. If, however, the binary + was linked with the -z nodeflib linker option, shared objects in the default paths are + skipped. Shared objects installed in hardware capability directories (see below) are + preferred to other shared objects. + + o In the default path /lib, and then /usr/lib. (On some 64-bit architectures, the default + paths for 64-bit shared objects are /lib64, and then /usr/lib64.) If the binary was + linked with the -z nodeflib linker option, this step is skipped. + + Returns a tuple of resolved location, rpath_used, in_sysroot + """ rpath_result = None found = False - ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(':') - if unresolved.startswith('$RPATH'): - these_rpaths = [resolved_rpath] if resolved_rpath else \ - codefile.get_rpaths_transitive() + \ - ld_library_paths + \ - codefile.get_rpaths_nontransitive() + \ - [dp.replace('$SYSROOT', sysroot) for dp in ensure_list(default_paths)] + ld_library_paths = [] if not LD_LIBRARY_PATH else LD_LIBRARY_PATH.split(":") + if unresolved.startswith("$RPATH"): + these_rpaths = ( + [resolved_rpath] + if resolved_rpath + else codefile.get_rpaths_transitive() + + ld_library_paths + + codefile.get_rpaths_nontransitive() + + [dp.replace("$SYSROOT", sysroot) for dp in ensure_list(default_paths)] + ) for rpath in these_rpaths: - resolved = unresolved.replace('$RPATH', rpath) \ - .replace('$SELFDIR', self_dir) \ - .replace('$EXEDIR', exe_dir) + resolved = ( + unresolved.replace("$RPATH", rpath) + .replace("$SELFDIR", self_dir) + .replace("$EXEDIR", exe_dir) + ) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) if resolved_rpath or exists or exists_sysroot: @@ -459,13 +466,12 @@ def _get_resolved_location(codefile, if not found: # Return the so name so that it can be warned about as missing. 
return unresolved, None, False - elif any(a in unresolved for a in ('$SELFDIR', '$EXEDIR')): - resolved = unresolved.replace('$SELFDIR', self_dir) \ - .replace('$EXEDIR', exe_dir) + elif any(a in unresolved for a in ("$SELFDIR", "$EXEDIR")): + resolved = unresolved.replace("$SELFDIR", self_dir).replace("$EXEDIR", exe_dir) exists = os.path.exists(resolved) exists_sysroot = exists and sysroot and resolved.startswith(sysroot) else: - if unresolved.startswith('/'): + if unresolved.startswith("/"): return unresolved, None, False else: return os.path.join(self_dir, unresolved), None, False @@ -473,13 +479,18 @@ def _get_resolved_location(codefile, return resolved, rpath_result, exists_sysroot -def _get_resolved_relocated_location(codefile, so, src_exedir, src_selfdir, - dst_exedir, dst_selfdir): - src_resolved, rpath, in_sysroot = _get_resolved_location(codefile, so, src_exedir, src_selfdir) +def _get_resolved_relocated_location( + codefile, so, src_exedir, src_selfdir, dst_exedir, dst_selfdir +): + src_resolved, rpath, in_sysroot = _get_resolved_location( + codefile, so, src_exedir, src_selfdir + ) if in_sysroot: dst_resolved = src_resolved else: - dst_resolved = _get_resolved_location(codefile, so, dst_exedir, dst_selfdir, rpath) + dst_resolved = _get_resolved_location( + codefile, so, dst_exedir, dst_selfdir, rpath + ) return src_resolved, dst_resolved, in_sysroot @@ -496,45 +507,49 @@ def __init__(self, file, arch, initial_rpaths_transitive=[]): file.seek(0) self.rpaths_transitive = initial_rpaths_transitive _filetypes, rpaths = zip(*mach_o_find_rpaths(file, arch)) - local_rpaths = [self.from_os_varnames(rpath.rstrip('/')) - for rpath in rpaths[0] if rpath] + local_rpaths = [ + self.from_os_varnames(rpath.rstrip("/")) for rpath in rpaths[0] if rpath + ] self.rpaths_transitive.extend(local_rpaths) self.rpaths_nontransitive = local_rpaths self.shared_libraries.extend( - [(so, self.from_os_varnames(so)) for so in sos[0] if so]) + [(so, self.from_os_varnames(so)) for so in sos[0] if so] + ) file.seek(0) def to_os_varnames(self, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - return input_.replace('$SELFDIR', '@loader_path') \ - .replace('$EXEDIR', '@executable_path') \ - .replace('$RPATH', '@rpath') + return ( + input_.replace("$SELFDIR", "@loader_path") + .replace("$EXEDIR", "@executable_path") + .replace("$RPATH", "@rpath") + ) def from_os_varnames(self, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - return input_.replace('@loader_path', '$SELFDIR') \ - .replace('@executable_path', '$EXEDIR') \ - .replace('@rpath', '$RPATH') + return ( + input_.replace("@loader_path", "$SELFDIR") + .replace("@executable_path", "$EXEDIR") + .replace("@rpath", "$RPATH") + ) - def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=''): + def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=""): result = [] for so_orig, so in self.shared_libraries: - resolved, rpath, in_sysroot = \ - _get_resolved_location(self, so, src_exedir, src_selfdir, sysroot) + resolved, rpath, in_sysroot = _get_resolved_location( + self, so, src_exedir, src_selfdir, sysroot + ) result.append((so_orig, resolved, rpath, in_sysroot)) return result - def get_relocated_shared_libraries(self, src_exedir, src_selfdir, - dst_exedir, dst_selfdir): + def get_relocated_shared_libraries( + self, src_exedir, src_selfdir, dst_exedir, dst_selfdir + ): result = [] for so in self.shared_libraries: - resolved, dst_resolved, 
in_sysroot = \ - _get_resolved_relocated_location(self, - so, - src_exedir, - src_selfdir, - dst_exedir, - dst_selfdir) + resolved, dst_resolved, in_sysroot = _get_resolved_relocated_location( + self, so, src_exedir, src_selfdir, dst_exedir, dst_selfdir + ) result.append((so, resolved, dst_resolved, in_sysroot)) return result @@ -546,7 +561,7 @@ def uniqueness_key(self): # Constants used in the ELF specification # ########################################### -ELF_HDR = 0x7f454c46 +ELF_HDR = 0x7F454C46 E_TYPE_RELOCATABLE = 1 E_TYPE_EXECUTABLE = 2 E_TYPE_SHARED = 3 @@ -557,11 +572,11 @@ def uniqueness_key(self): E_MACHINE_MIPS = 0x08 E_MACHINE_POWERPC = 0x14 E_MACHINE_ARM = 0x28 -E_MACHINE_SUPERH = 0x2a +E_MACHINE_SUPERH = 0x2A E_MACHINE_IA_64 = 0x32 -E_MACHINE_X86_64 = 0x3e -E_MACHINE_AARCH64 = 0xb7 -E_MACHINE_RISC_V = 0xf3 +E_MACHINE_X86_64 = 0x3E +E_MACHINE_AARCH64 = 0xB7 +E_MACHINE_RISC_V = 0xF3 # It'd be quicker to use struct.calcsize here and a single # struct.unpack but it would be ugly and harder to maintain. @@ -574,10 +589,10 @@ def uniqueness_key(self): PT_PHDR = 6 PT_LOOS = 0x60000000 PT_LOPROC = 0x70000000 -PT_HIPROC = 0x7fffffff -PT_GNU_EH_FRAME = (PT_LOOS + 0x474e550) -PT_GNU_STACK = (PT_LOOS + 0x474e551) -PT_GNU_RELRO = (PT_LOOS + 0x474e552) +PT_HIPROC = 0x7FFFFFFF +PT_GNU_EH_FRAME = PT_LOOS + 0x474E550 +PT_GNU_STACK = PT_LOOS + 0x474E551 +PT_GNU_RELRO = PT_LOOS + 0x474E552 SHT_PROGBITS = 0x1 SHT_SYMTAB = 0x2 @@ -608,8 +623,8 @@ def uniqueness_key(self): SHF_OS_NONCONFORMING = 0x100 SHF_GROUP = 0x200 SHF_TLS = 0x400 -SHF_MASKOS = 0x0ff00000 -SHF_MASKPROC = 0xf0000000 +SHF_MASKOS = 0x0FF00000 +SHF_MASKPROC = 0xF0000000 SHF_ORDERED = 0x4000000 SHF_EXCLUDE = 0x8000000 @@ -644,57 +659,58 @@ def uniqueness_key(self): DT_FINI_ARRAYSZ = 28 DT_RUNPATH = 29 DT_LOOS = 0x60000000 -DT_HIOS = 0x6fffffff +DT_HIOS = 0x6FFFFFFF DT_LOPROC = 0x70000000 -DT_HIPROC = 0x7fffffff +DT_HIPROC = 0x7FFFFFFF class elfheader: def __init__(self, file): - self.hdr, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) + (self.hdr,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) self.dt_needed = [] self.dt_rpath = [] if self.hdr != ELF_HDR: return - bitness, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1)) + (bitness,) = struct.unpack(LITTLE_ENDIAN + "B", file.read(1)) bitness = 32 if bitness == 1 else 64 sz_ptr = int(bitness / 8) - ptr_type = 'Q' if sz_ptr == 8 else 'L' + ptr_type = "Q" if sz_ptr == 8 else "L" self.bitness = bitness self.sz_ptr = sz_ptr self.ptr_type = ptr_type - endian, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1)) + (endian,) = struct.unpack(LITTLE_ENDIAN + "B", file.read(1)) endian = LITTLE_ENDIAN if endian == 1 else BIG_ENDIAN self.endian = endian - self.version, = struct.unpack(endian + 'B', file.read(1)) - self.osabi, = struct.unpack(endian + 'B', file.read(1)) - self.abiver, = struct.unpack(endian + 'B', file.read(1)) - struct.unpack(endian + 'B' * 7, file.read(7)) - self.type, = struct.unpack(endian + 'H', file.read(2)) - self.machine, = struct.unpack(endian + 'H', file.read(2)) - self.version, = struct.unpack(endian + 'L', file.read(4)) - self.entry, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.phoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.shoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.flags, = struct.unpack(endian + 'L', file.read(4)) - self.ehsize, = struct.unpack(endian + 'H', file.read(2)) - self.phentsize, = struct.unpack(endian + 'H', file.read(2)) - self.phnum, = struct.unpack(endian + 'H', file.read(2)) - 
self.shentsize, = struct.unpack(endian + 'H', file.read(2)) - self.shnum, = struct.unpack(endian + 'H', file.read(2)) - self.shstrndx, = struct.unpack(endian + 'H', file.read(2)) + (self.version,) = struct.unpack(endian + "B", file.read(1)) + (self.osabi,) = struct.unpack(endian + "B", file.read(1)) + (self.abiver,) = struct.unpack(endian + "B", file.read(1)) + struct.unpack(endian + "B" * 7, file.read(7)) + (self.type,) = struct.unpack(endian + "H", file.read(2)) + (self.machine,) = struct.unpack(endian + "H", file.read(2)) + (self.version,) = struct.unpack(endian + "L", file.read(4)) + (self.entry,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.phoff,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.shoff,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.flags,) = struct.unpack(endian + "L", file.read(4)) + (self.ehsize,) = struct.unpack(endian + "H", file.read(2)) + (self.phentsize,) = struct.unpack(endian + "H", file.read(2)) + (self.phnum,) = struct.unpack(endian + "H", file.read(2)) + (self.shentsize,) = struct.unpack(endian + "H", file.read(2)) + (self.shnum,) = struct.unpack(endian + "H", file.read(2)) + (self.shstrndx,) = struct.unpack(endian + "H", file.read(2)) loc = file.tell() if loc != self.ehsize: - get_logger(__name__).warning(f'file.tell()={loc} != ehsize={self.ehsize}') + get_logger(__name__).warning(f"file.tell()={loc} != ehsize={self.ehsize}") def __str__(self): - return 'bitness {}, endian {}, version {}, type {}, machine {}, entry {}'.format( # noqa + return "bitness {}, endian {}, version {}, type {}, machine {}, entry {}".format( # noqa self.bitness, self.endian, self.version, self.type, hex(self.machine), - hex(self.entry)) + hex(self.entry), + ) class elfsection: @@ -704,16 +720,16 @@ def __init__(self, eh, file): endian = eh.endian # It'd be quicker to use struct.calcsize here and a single # struct.unpack but it would be ugly and harder to maintain. - self.sh_name, = struct.unpack(endian + 'L', file.read(4)) - self.sh_type, = struct.unpack(endian + 'L', file.read(4)) - self.sh_flags, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_addr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_offset, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_size, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_link, = struct.unpack(endian + 'L', file.read(4)) - self.sh_info, = struct.unpack(endian + 'L', file.read(4)) - self.sh_addralign, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.sh_entsize, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_name,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_type,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_flags,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_addr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_offset,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_size,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_link,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_info,) = struct.unpack(endian + "L", file.read(4)) + (self.sh_addralign,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.sh_entsize,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) # Lower priority == post processed earlier so that those # with higher priority can assume already initialized. 
if self.sh_type == SHT_STRTAB: @@ -776,7 +792,7 @@ def postprocess(self, elffile, file): dt_needed = [] dt_rpath = [] dt_runpath = [] - dt_soname = '$EXECUTABLE' + dt_soname = "$EXECUTABLE" if self.sh_entsize == 0: # Some ELF files (e.g., Guile's .go files) include sections # without a table of entries in which case sh_entsize will be 0 @@ -785,8 +801,8 @@ def postprocess(self, elffile, file): num_entries = int(self.sh_size / self.sh_entsize) for m in range(num_entries): file.seek(self.sh_offset + (m * self.sh_entsize)) - d_tag, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - d_val_ptr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (d_tag,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (d_val_ptr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) if d_tag == DT_NEEDED: dt_needed.append(d_val_ptr) elif d_tag == DT_RPATH: @@ -801,20 +817,20 @@ def postprocess(self, elffile, file): strsec, _offset = elffile.find_section_and_offset(dt_strtab_ptr) if strsec and strsec.sh_type == SHT_STRTAB: for n in dt_needed: - end = n + strsec.table[n:].index('\0') + end = n + strsec.table[n:].index("\0") elffile.dt_needed.append(strsec.table[n:end]) for r in dt_rpath: - end = r + strsec.table[r:].index('\0') + end = r + strsec.table[r:].index("\0") path = strsec.table[r:end] - rpaths = [p for p in path.split(':') if path] - elffile.dt_rpath.extend([p.rstrip('/') for p in rpaths]) + rpaths = [p for p in path.split(":") if path] + elffile.dt_rpath.extend([p.rstrip("/") for p in rpaths]) for r in dt_runpath: - end = r + strsec.table[r:].index('\0') + end = r + strsec.table[r:].index("\0") path = strsec.table[r:end] - rpaths = [p for p in path.split(':') if path] - elffile.dt_runpath.extend([p.rstrip('/') for p in rpaths]) - if dt_soname != '$EXECUTABLE': - end = dt_soname + strsec.table[dt_soname:].index('\0') + rpaths = [p for p in path.split(":") if path] + elffile.dt_runpath.extend([p.rstrip("/") for p in rpaths]) + if dt_soname != "$EXECUTABLE": + end = dt_soname + strsec.table[dt_soname:].index("\0") elffile.dt_soname = strsec.table[dt_soname:end] # runpath always takes precedence. 
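For reference, the hunk above boils down to string-table lookups: each DT_NEEDED / DT_RPATH / DT_RUNPATH value is an offset into the dynamic string table, entries are NUL terminated, and DT_RUNPATH wins over the deprecated DT_RPATH. A small self-contained sketch with made-up offsets (not part of the patch):

    def strtab_entry(strtab, offset):
        # entries are NUL terminated; slice up to the next "\0"
        return strtab[offset:offset + strtab[offset:].index("\0")]

    strtab = "\0libz.so.1\0$ORIGIN/../lib\0/opt/lib64\0"
    dt_needed, dt_rpath, dt_runpath = [1], [11], [26]  # offsets into strtab

    needed = [strtab_entry(strtab, n) for n in dt_needed]
    # runpath always takes precedence over the deprecated rpath
    search_dirs = [strtab_entry(strtab, r) for r in (dt_runpath or dt_rpath)]
    print(needed, search_dirs)  # ['libz.so.1'] ['/opt/lib64']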
@@ -827,17 +843,17 @@ def __init__(self, eh, file): ptr_type = eh.ptr_type sz_ptr = eh.sz_ptr endian = eh.endian - self.p_type, = struct.unpack(endian + 'L', file.read(4)) + (self.p_type,) = struct.unpack(endian + "L", file.read(4)) if eh.bitness == 64: - self.p_flags, = struct.unpack(endian + 'L', file.read(4)) - self.p_offset, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_vaddr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_paddr, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_filesz, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) - self.p_memsz, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_flags,) = struct.unpack(endian + "L", file.read(4)) + (self.p_offset,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_vaddr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_paddr,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_filesz,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_memsz,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) if eh.bitness == 32: - self.p_flags, = struct.unpack(endian + 'L', file.read(4)) - self.p_align, = struct.unpack(endian + ptr_type, file.read(sz_ptr)) + (self.p_flags,) = struct.unpack(endian + "L", file.read(4)) + (self.p_align,) = struct.unpack(endian + ptr_type, file.read(sz_ptr)) def postprocess(self, elffile, file): if self.p_type == PT_INTERP: @@ -845,7 +861,7 @@ def postprocess(self, elffile, file): elffile.program_interpreter = file.read(self.p_filesz - 1).decode() elif self.p_type == PT_LOAD: file.seek(self.p_offset) - if hasattr(elffile, 'ptload_p_vaddr'): + if hasattr(elffile, "ptload_p_vaddr"): elffile.ptload_p_vaddr.append(self.p_vaddr) elffile.ptload_p_paddr.append(self.p_paddr) else: @@ -862,7 +878,7 @@ def __init__(self, file, initial_rpaths_transitive=[]): self.programheaders = [] self.elfsections = [] self.program_interpreter = None - self.dt_soname = '$EXECUTABLE' + self.dt_soname = "$EXECUTABLE" self._dir = os.path.dirname(file.name) for n in range(self.ehdr.phnum): @@ -888,32 +904,35 @@ def __init__(self, file, initial_rpaths_transitive=[]): dt_rpath = [p.rstrip("/") for p in self.dt_rpath] dt_runpath = [p.rstrip("/") for p in self.dt_runpath] - self.rpaths_transitive = [self.from_os_varnames(rpath) - for rpath in (initial_rpaths_transitive + dt_rpath)] - self.rpaths_nontransitive = [self.from_os_varnames(rpath) - for rpath in dt_runpath] + self.rpaths_transitive = [ + self.from_os_varnames(rpath) + for rpath in (initial_rpaths_transitive + dt_rpath) + ] + self.rpaths_nontransitive = [ + self.from_os_varnames(rpath) for rpath in dt_runpath + ] # Lookup must be avoided when DT_NEEDED contains any '/'s - self.shared_libraries = [(needed, needed if '/' in needed else '$RPATH/' + needed) - for needed in self.dt_needed] + self.shared_libraries = [ + (needed, needed if "/" in needed else "$RPATH/" + needed) + for needed in self.dt_needed + ] def to_os_varnames(self, input): if self.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$SELFDIR', '$ORIGIN') \ - .replace(libdir, '$LIB') + libdir = "/lib" + return input.replace("$SELFDIR", "$ORIGIN").replace(libdir, "$LIB") def from_os_varnames(self, input): if self.ehdr.sz_ptr == 8: - libdir = '/lib64' + libdir = "/lib64" else: - libdir = '/lib' - return input.replace('$ORIGIN', '$SELFDIR') \ - .replace('$LIB', libdir) + libdir = "/lib" + return input.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir) 
def find_section_and_offset(self, addr): - 'Can be called immediately after the elfsections have been constructed' + "Can be called immediately after the elfsections have been constructed" for es in self.elfsections: if addr >= es.sh_addr and addr < es.sh_addr + es.sh_size: # sections which do not appear in the memory image of the @@ -923,20 +942,21 @@ def find_section_and_offset(self, addr): return es, addr - es.sh_addr return None, None - def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=''): + def get_resolved_shared_libraries(self, src_exedir, src_selfdir, sysroot=""): result = [] - default_paths = ['$SYSROOT/lib', '$SYSROOT/usr/lib'] + default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"] if self.ehdr.sz_ptr == 8: - default_paths.extend(['$SYSROOT/lib64', '$SYSROOT/usr/lib64']) + default_paths.extend(["$SYSROOT/lib64", "$SYSROOT/usr/lib64"]) for so_orig, so in self.shared_libraries: - resolved, rpath, in_sysroot = \ - _get_resolved_location(self, - so, - src_exedir, - src_selfdir, - LD_LIBRARY_PATH='', - default_paths=default_paths, - sysroot=sysroot) + resolved, rpath, in_sysroot = _get_resolved_location( + self, + so, + src_exedir, + src_selfdir, + LD_LIBRARY_PATH="", + default_paths=default_paths, + sysroot=sysroot, + ) result.append((so_orig, resolved, rpath, in_sysroot)) return result @@ -967,11 +987,10 @@ def get_dir(self): return self._dir def uniqueness_key(self): - return 'unknown' + return "unknown" class DLLfile(UnixExecutable): - def __init__(self, file, initial_rpaths_transitive=[]): pass @@ -988,7 +1007,7 @@ def get_dir(self): return None def uniqueness_key(self): - return 'unknown' + return "unknown" class EXEfile: @@ -996,10 +1015,10 @@ def __init__(self, file, initial_rpaths_transitive=[]): self.super.__init__(self, file, initial_rpaths_transitive) -def codefile(file, arch='any', initial_rpaths_transitive=[]): - if file.name.endswith('.dll'): +def codefile(file, arch="any", initial_rpaths_transitive=[]): + if file.name.endswith(".dll"): return DLLfile(file, list(initial_rpaths_transitive)) - magic, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) + (magic,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) file.seek(0) if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): return machofile(file, arch, list(initial_rpaths_transitive)) @@ -1017,17 +1036,17 @@ def codefile_class(filename, skip_symlinks=False): filename = os.path.realpath(filename) if os.path.isdir(filename): return None - if filename.endswith(('.dll', '.pyd')): + if filename.endswith((".dll", ".pyd")): return DLLfile - if filename.endswith('.exe'): + if filename.endswith(".exe"): return EXEfile # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. 
- if filename.endswith('.class'): + if filename.endswith(".class"): return None if not os.path.exists(filename) or os.path.getsize(filename) < 4: return None - with open(filename, 'rb') as file: - magic, = struct.unpack(BIG_ENDIAN + 'L', file.read(4)) + with open(filename, "rb") as file: + (magic,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) file.seek(0) if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): return machofile @@ -1053,15 +1072,15 @@ def codefile_type(filename, skip_symlinks=True): def _trim_sysroot(sysroot): if sysroot: - while sysroot.endswith('/') or sysroot.endswith('\\'): + while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] return sysroot def _get_arch_if_native(arch): - if arch == 'native': - if sys.platform == 'win32': - arch = 'x86_64' if sys.maxsize > 2**32 else 'i686' + if arch == "native": + if sys.platform == "win32": + arch = "x86_64" if sys.maxsize > 2**32 else "i686" else: _, _, _, _, arch = os.uname() return arch @@ -1069,20 +1088,20 @@ def _get_arch_if_native(arch): # TODO :: Consider memoizing instead of repeatedly scanning # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) -def _inspect_linkages_this(filename, sysroot='', arch='native'): - ''' +def _inspect_linkages_this(filename, sysroot="", arch="native"): + """ :param filename: :param sysroot: :param arch: :return: - ''' + """ if not os.path.exists(filename): return None, [], [] sysroot = _trim_sysroot(sysroot) arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: + with open(filename, "rb") as f: # TODO :: Problems here: # TODO :: 1. macOS can modify RPATH for children in each .so # TODO :: 2. Linux can identify the program interpreter which can change the default_paths @@ -1091,7 +1110,7 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): except IncompleteRead: # the file was incomplete, can occur if a package ships a test file # which looks like an ELF file but is not. Orange3 does this. - get_logger(__name__).warning(f'problems inspecting linkages for {filename}') + get_logger(__name__).warning(f"problems inspecting linkages for {filename}") return None, [], [] dirname = os.path.dirname(filename) results = cf.get_resolved_shared_libraries(dirname, dirname, sysroot) @@ -1101,22 +1120,30 @@ def _inspect_linkages_this(filename, sysroot='', arch='native'): return cf.uniqueness_key(), orig_names, resolved_names -def inspect_rpaths(filename, resolve_dirnames=True, use_os_varnames=True, - sysroot='', arch='native'): +def inspect_rpaths( + filename, resolve_dirnames=True, use_os_varnames=True, sysroot="", arch="native" +): if not os.path.exists(filename): return [], [] sysroot = _trim_sysroot(sysroot) arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: + with open(filename, "rb") as f: # TODO :: Problems here: # TODO :: 1. macOS can modify RPATH for children in each .so # TODO :: 2. Linux can identify the program interpreter which can change the initial RPATHs # TODO :: Should '/lib', '/usr/lib' not include (or be?!) `sysroot`(s) instead? 
- cf = codefile(f, arch, ['/lib', '/usr/lib']) + cf = codefile(f, arch, ["/lib", "/usr/lib"]) if resolve_dirnames: - return [_get_resolved_location(cf, rpath, os.path.dirname(filename), - os.path.dirname(filename), sysroot)[0] - for rpath in cf.rpaths_nontransitive] + return [ + _get_resolved_location( + cf, + rpath, + os.path.dirname(filename), + os.path.dirname(filename), + sysroot, + )[0] + for rpath in cf.rpaths_nontransitive + ] else: if use_os_varnames: return [cf.to_os_varnames(rpath) for rpath in cf.rpaths_nontransitive] @@ -1124,18 +1151,19 @@ def inspect_rpaths(filename, resolve_dirnames=True, use_os_varnames=True, return cf.rpaths_nontransitive -def get_runpaths(filename, arch='native'): +def get_runpaths(filename, arch="native"): if not os.path.exists(filename): return [] arch = _get_arch_if_native(arch) - with open(filename, 'rb') as f: - cf = codefile(f, arch, ['/lib', '/usr/lib']) + with open(filename, "rb") as f: + cf = codefile(f, arch, ["/lib", "/usr/lib"]) return cf.get_runpaths() # TODO :: Consider returning a tree structure or a dict when recurse is True? -def inspect_linkages(filename, resolve_filenames=True, recurse=True, - sysroot='', arch='native'): +def inspect_linkages( + filename, resolve_filenames=True, recurse=True, sysroot="", arch="native" +): already_seen = set() todo = {filename} done = set() @@ -1143,13 +1171,14 @@ def inspect_linkages(filename, resolve_filenames=True, recurse=True, while todo != done: filename = next(iter(todo - done)) uniqueness_key, these_orig, these_resolved = _inspect_linkages_this( - filename, sysroot=sysroot, arch=arch) + filename, sysroot=sysroot, arch=arch + ) if uniqueness_key not in already_seen: for orig, resolved in zip(these_orig, these_resolved): if resolve_filenames: - rec = {'orig': orig, 'resolved': os.path.normpath(resolved)} + rec = {"orig": orig, "resolved": os.path.normpath(resolved)} else: - rec = {'orig': orig} + rec = {"orig": orig} results[orig] = rec if recurse: todo.update(these_resolved) @@ -1158,56 +1187,61 @@ def inspect_linkages(filename, resolve_filenames=True, recurse=True, return results -def inspect_linkages_otool(filename, arch='native'): +def inspect_linkages_otool(filename, arch="native"): from subprocess import check_output - args = ['/usr/bin/otool'] - if arch != 'native': - args.extend(['-arch', arch]) + + args = ["/usr/bin/otool"] + if arch != "native": + args.extend(["-arch", arch]) else: # 'x86_64' if sys.maxsize > 2**32 else 'i386' - args.extend(['-arch', os.uname()[4]]) - args.extend(['-L', filename]) - result = check_output(args).decode(encoding='ascii') - groups = re.findall(r'^\t(.*) \(compatibility', result, re.MULTILINE) + args.extend(["-arch", os.uname()[4]]) + args.extend(["-L", filename]) + result = check_output(args).decode(encoding="ascii") + groups = re.findall(r"^\t(.*) \(compatibility", result, re.MULTILINE) return groups # TODO :: Consider allowing QEMU/binfmt_misc to run foreign binaries + passing a sysroot here? 
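For context on inspect_linkages_otool above: `otool -L` prints one tab-indented line per linked library, each ending in "(compatibility version ...)", and the regex keeps only the library path. A standalone sketch with made-up otool output, illustrative only and not part of the patch:

    import re

    sample = (
        "/usr/lib/libfoo.dylib:\n"
        "\t/usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 1319.0.0)\n"
        "\t@rpath/libbar.dylib (compatibility version 1.0.0, current version 1.2.3)\n"
    )
    print(re.findall(r"^\t(.*) \(compatibility", sample, re.MULTILINE))
    # -> ['/usr/lib/libSystem.B.dylib', '@rpath/libbar.dylib']
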
def inspect_linkages_ldd(filename): from subprocess import PIPE, Popen - process = Popen(['/usr/bin/ldd', filename], stdout=PIPE, stderr=PIPE) + + process = Popen(["/usr/bin/ldd", filename], stdout=PIPE, stderr=PIPE) result, err = process.communicate() - result = result.decode(encoding='ascii') - err = err.decode(encoding='ascii') - groups = re.findall(r'^\t(?!linux-gate\.so\.1.*$)[^ ]+ => (.*) \([0-9a-fx]+\)', - result, re.MULTILINE) + result = result.decode(encoding="ascii") + err = err.decode(encoding="ascii") + groups = re.findall( + r"^\t(?!linux-gate\.so\.1.*$)[^ ]+ => (.*) \([0-9a-fx]+\)", result, re.MULTILINE + ) return groups def otool(*args): - parser = argparse.ArgumentParser(prog='otool', add_help=False) - parser.add_argument("-h", "--help", action='store_true') - parser.add_argument("-arch", dest='arch_type', help="arch_type", - default='native') - parser.add_argument("-L", dest='filename', - help="print shared libraries used") + parser = argparse.ArgumentParser(prog="otool", add_help=False) + parser.add_argument("-h", "--help", action="store_true") + parser.add_argument("-arch", dest="arch_type", help="arch_type", default="native") + parser.add_argument("-L", dest="filename", help="print shared libraries used") args = parser.parse_args(args) if args.help: print(OTOOL_USAGE) return 0 if args.filename: - shared_libs = inspect_linkages(args.filename, resolve_filenames=False, - recurse=False, arch=args.arch_type) - print("Shared libs used (non-recursively) by {} are:\n{}".format(args.filename, - shared_libs)) + shared_libs = inspect_linkages( + args.filename, resolve_filenames=False, recurse=False, arch=args.arch_type + ) + print( + "Shared libs used (non-recursively) by {} are:\n{}".format( + args.filename, shared_libs + ) + ) return 0 return 1 def otool_sys(*args): import subprocess - result = subprocess.check_output('/usr/bin/otool', args).\ - decode(encoding='ascii') + + result = subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") return result @@ -1217,91 +1251,109 @@ def ldd_sys(*args): def ldd(*args): - parser = argparse.ArgumentParser(prog='ldd', add_help=False) - parser.add_argument("-h", "--help", action='store_true') + parser = argparse.ArgumentParser(prog="ldd", add_help=False) + parser.add_argument("-h", "--help", action="store_true") parser.add_argument("filename") args = parser.parse_args(args) if args.help: print(LDD_USAGE) return 0 if args.filename: - shared_libs = inspect_linkages(args.filename, resolve_filenames=False, - recurse=True) - print("Shared libs used (recursively) by {} are:\n{}".format(args.filename, - shared_libs)) + shared_libs = inspect_linkages( + args.filename, resolve_filenames=False, recurse=True + ) + print( + "Shared libs used (recursively) by {} are:\n{}".format( + args.filename, shared_libs + ) + ) return 0 return 1 def main(argv): for idx, progname in enumerate(argv[0:2][::-1]): - if re.match(r'.*ldd(?:$|\.exe|\.py)', progname): - return ldd(*argv[2 - idx:]) - elif re.match(r'.*otool(?:$|\.exe|\.py)', progname): - return otool(*argv[2 - idx:]) + if re.match(r".*ldd(?:$|\.exe|\.py)", progname): + return ldd(*argv[2 - idx :]) + elif re.match(r".*otool(?:$|\.exe|\.py)", progname): + return otool(*argv[2 - idx :]) elif os.path.isfile(progname): klass = codefile_class(progname) if not klass: return 1 elif klass == elffile: - return ldd(*argv[1 - idx:]) + return ldd(*argv[1 - idx :]) elif klass == machofile: - return otool('-L', *argv[1 - idx:]) + return otool("-L", *argv[1 - idx :]) return 1 def main_maybe_test(): - if 
sys.argv[1] == 'test': + if sys.argv[1] == "test": import functools + tool = sys.argv[2] - if tool != 'otool' and tool != 'ldd': - if sys.platform == 'darwin': - tool = 'otool' + if tool != "otool" and tool != "ldd": + if sys.platform == "darwin": + tool = "otool" else: - tool = 'ldd' + tool = "ldd" test_that = None - sysroot_args = [re.match('--sysroot=([^ ]+)', arg) for arg in sys.argv - if re.match('--sysroot=([^ ]+)', arg)] + sysroot_args = [ + re.match("--sysroot=([^ ]+)", arg) + for arg in sys.argv + if re.match("--sysroot=([^ ]+)", arg) + ] if len(sysroot_args): - sysroot, = sysroot_args[-1].groups(1) + (sysroot,) = sysroot_args[-1].groups(1) sysroot = os.path.expanduser(sysroot) else: - sysroot = '' - if tool == 'otool': - test_this = functools.partial(inspect_linkages, sysroot=sysroot, - resolve_filenames=False, recurse=False) - if sys.platform == 'darwin': + sysroot = "" + if tool == "otool": + test_this = functools.partial( + inspect_linkages, + sysroot=sysroot, + resolve_filenames=False, + recurse=False, + ) + if sys.platform == "darwin": test_that = functools.partial(inspect_linkages_otool) - SOEXT = 'dylib' - elif tool == 'ldd': - test_this = functools.partial(inspect_linkages, sysroot=sysroot, - resolve_filenames=True, recurse=True) - if sys.platform.startswith('linux'): + SOEXT = "dylib" + elif tool == "ldd": + test_this = functools.partial( + inspect_linkages, sysroot=sysroot, resolve_filenames=True, recurse=True + ) + if sys.platform.startswith("linux"): test_that = functools.partial(inspect_linkages_ldd) - SOEXT = 'so' + SOEXT = "so" # Find a load of dylibs or elfs and compare # the output against 'otool -L' or 'ldd' # codefiles = glob.glob('/usr/lib/*.'+SOEXT) - codefiles = glob.glob(sysroot + '/usr/lib/*.' + SOEXT) + codefiles = glob.glob(sysroot + "/usr/lib/*." 
+ SOEXT) # codefiles = ['/usr/bin/file'] # Sometimes files do not exist: # (/usr/lib/libgutenprint.2.dylib -> libgutenprint.2.0.3.dylib) - codefiles = [codefile for codefile in codefiles - if not os.path.islink(codefile) or os.path.exists(os.readlink(codefile))] + codefiles = [ + codefile + for codefile in codefiles + if not os.path.islink(codefile) or os.path.exists(os.readlink(codefile)) + ] for codefile in codefiles: - print(f'\nchecking {codefile}') + print(f"\nchecking {codefile}") this = test_this(codefile) if test_that: that = test_that(codefile) else: that = this - print('\n'.join(this)) - assert set(this) == set(that),\ - "py-ldd result incorrect for {}, this:\n{}\nvs that:\n{}".\ - format(codefile, set(this), set(that)) + print("\n".join(this)) + assert set(this) == set( + that + ), "py-ldd result incorrect for {}, this:\n{}\nvs that:\n{}".format( + codefile, set(this), set(that) + ) else: return main(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main_maybe_test()) diff --git a/conda_build/post.py b/conda_build/post.py index 6259b97ac6..290779385d 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1,45 +1,64 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from copy import copy -from collections import defaultdict, OrderedDict -from functools import partial -from fnmatch import fnmatch, filter as fnmatch_filter, translate as fnmatch_translate -from os.path import (basename, dirname, exists, isabs, isdir, isfile, - islink, join, normpath, realpath, relpath, sep, splitext) import locale -import re import os +import re import shutil import stat -from subprocess import call, check_output, CalledProcessError import sys +from collections import OrderedDict, defaultdict +from copy import copy +from fnmatch import filter as fnmatch_filter +from fnmatch import fnmatch +from fnmatch import translate as fnmatch_translate +from functools import partial +from os.path import ( + basename, + dirname, + exists, + isabs, + isdir, + isfile, + islink, + join, + normpath, + realpath, + relpath, + sep, + splitext, +) +from subprocess import CalledProcessError, call, check_output + try: from os import readlink except ImportError: readlink = False -from conda_build.os_utils import external -from conda_build.conda_interface import lchmod -from conda_build.conda_interface import walk_prefix -from conda_build.conda_interface import TemporaryDirectory -from conda_build.conda_interface import md5_file - from conda_build import utils -from conda_build.os_utils.liefldd import (have_lief, get_exports_memoized, - get_linkages_memoized, get_rpaths_raw, - get_runpaths_raw, set_rpath) -from conda_build.os_utils.pyldd import codefile_type -from conda_build.os_utils.ldd import get_package_files, get_package_obj_files +from conda_build.conda_interface import ( + TemporaryDirectory, + lchmod, + md5_file, + walk_prefix, +) +from conda_build.exceptions import OverDependingError, OverLinkingError, RunPathError from conda_build.inspect_pkg import which_package -from conda_build.exceptions import (OverLinkingError, OverDependingError, RunPathError) - -from conda_build.os_utils import macho - +from conda_build.os_utils import external, macho +from conda_build.os_utils.ldd import get_package_files, get_package_obj_files +from conda_build.os_utils.liefldd import ( + get_exports_memoized, + get_linkages_memoized, + get_rpaths_raw, + get_runpaths_raw, + have_lief, + set_rpath, +) +from conda_build.os_utils.pyldd import codefile_type filetypes_for_platform = 
{ - "win": ('DLLfile', 'EXEfile'), - "osx": ['machofile'], - "linux": ['elffile'], + "win": ("DLLfile", "EXEfile"), + "osx": ["machofile"], + "linux": ["elffile"], } @@ -58,31 +77,36 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): bytes_ = False os.chmod(path, 0o775) - with open(path, mode='r+', encoding=locale.getpreferredencoding()) as fi: + with open(path, mode="r+", encoding=locale.getpreferredencoding()) as fi: try: data = fi.read(100) fi.seek(0) except UnicodeDecodeError: # file is binary return - SHEBANG_PAT = re.compile(r'^#!.+$', re.M) + SHEBANG_PAT = re.compile(r"^#!.+$", re.M) # regexp on the memory mapped file so we only read it into # memory if the regexp matches. try: - mm = utils.mmap_mmap(fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE) + mm = utils.mmap_mmap( + fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE + ) except OSError: mm = fi.read() try: m = SHEBANG_PAT.match(mm) except TypeError: - SHEBANG_PAT = re.compile(br'^#!.+$', re.M) + SHEBANG_PAT = re.compile(rb"^#!.+$", re.M) bytes_ = True m = SHEBANG_PAT.match(mm) if m: - python_pattern = (re.compile(br'\/python[w]?(?:$|\s|\Z)', re.M) if bytes_ else - re.compile(r'\/python[w]?(:$|\s|\Z)', re.M)) + python_pattern = ( + re.compile(rb"\/python[w]?(?:$|\s|\Z)", re.M) + if bytes_ + else re.compile(r"\/python[w]?(:$|\s|\Z)", re.M) + ) if not re.search(python_pattern, m.group()): return else: @@ -90,16 +114,18 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): data = mm[:] - py_exec = '#!' + ('/bin/bash ' + prefix + '/bin/pythonw' - if sys.platform == 'darwin' and osx_is_app else - prefix + '/bin/' + basename(build_python)) - if bytes_ and hasattr(py_exec, 'encode'): + py_exec = "#!" + ( + "/bin/bash " + prefix + "/bin/pythonw" + if sys.platform == "darwin" and osx_is_app + else prefix + "/bin/" + basename(build_python) + ) + if bytes_ and hasattr(py_exec, "encode"): py_exec = py_exec.encode() new_data = SHEBANG_PAT.sub(py_exec, data, count=1) if new_data == data: return print("updating shebang:", f) - with open(path, 'w', encoding=locale.getpreferredencoding()) as fo: + with open(path, "w", encoding=locale.getpreferredencoding()) as fo: try: fo.write(new_data) except TypeError: @@ -108,10 +134,15 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): def write_pth(egg_path, config): fn = basename(egg_path) - py_ver = '.'.join(config.variant['python'].split('.')[:2]) - with open(join(utils.get_site_packages(config.host_prefix, py_ver), - '%s.pth' % (fn.split('-')[0])), 'w') as fo: - fo.write('./%s\n' % fn) + py_ver = ".".join(config.variant["python"].split(".")[:2]) + with open( + join( + utils.get_site_packages(config.host_prefix, py_ver), + "%s.pth" % (fn.split("-")[0]), + ), + "w", + ) as fo: + fo.write("./%s\n" % fn) def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): @@ -120,24 +151,25 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): itself """ absfiles = [join(prefix, f) for f in files] - py_ver = '.'.join(config.variant['python'].split('.')[:2]) + py_ver = ".".join(config.variant["python"].split(".")[:2]) sp_dir = utils.get_site_packages(prefix, py_ver) - for egg_path in utils.glob(join(sp_dir, '*-py*.egg')): + for egg_path in utils.glob(join(sp_dir, "*-py*.egg")): if isdir(egg_path): - if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i - in walk_prefix(egg_path, False, windows_forward_slashes=False)): + if preserve_egg_dir or not any( + join(egg_path, i) in absfiles + for i in 
walk_prefix(egg_path, False, windows_forward_slashes=False) + ): write_pth(egg_path, config=config) continue - print('found egg dir:', egg_path) + print("found egg dir:", egg_path) try: - shutil.move(join(egg_path, 'EGG-INFO'), - egg_path + '-info') + shutil.move(join(egg_path, "EGG-INFO"), egg_path + "-info") except OSError: pass - utils.rm_rf(join(egg_path, 'EGG-INFO')) + utils.rm_rf(join(egg_path, "EGG-INFO")) for fn in os.listdir(egg_path): - if fn == '__pycache__': + if fn == "__pycache__": utils.rm_rf(join(egg_path, fn)) else: # this might be a name-space package @@ -145,34 +177,39 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): # from another installed dependency if exists(join(sp_dir, fn)): try: - utils.copy_into(join(egg_path, fn), - join(sp_dir, fn), config.timeout, - locking=config.locking) + utils.copy_into( + join(egg_path, fn), + join(sp_dir, fn), + config.timeout, + locking=config.locking, + ) utils.rm_rf(join(egg_path, fn)) except OSError as e: fn = basename(str(e).split()[-1]) - raise OSError("Tried to merge folder {egg_path} into {sp_dir}, but {fn}" - " exists in both locations. Please either add " - "build/preserve_egg_dir: True to meta.yaml, or manually " - "remove the file during your install process to avoid " - "this conflict." - .format(egg_path=egg_path, sp_dir=sp_dir, fn=fn)) + raise OSError( + "Tried to merge folder {egg_path} into {sp_dir}, but {fn}" + " exists in both locations. Please either add " + "build/preserve_egg_dir: True to meta.yaml, or manually " + "remove the file during your install process to avoid " + "this conflict.".format( + egg_path=egg_path, sp_dir=sp_dir, fn=fn + ) + ) else: shutil.move(join(egg_path, fn), join(sp_dir, fn)) elif isfile(egg_path): if egg_path not in absfiles: continue - print('found egg:', egg_path) + print("found egg:", egg_path) write_pth(egg_path, config=config) - installer_files = [f for f in absfiles - if f.endswith(f".dist-info{sep}INSTALLER")] + installer_files = [f for f in absfiles if f.endswith(f".dist-info{sep}INSTALLER")] for file in installer_files: - with open(file, 'w') as f: - f.write('conda') + with open(file, "w") as f: + f.write("conda") - utils.rm_rf(join(sp_dir, 'easy-install.pth')) + utils.rm_rf(join(sp_dir, "easy-install.pth")) def rm_py_along_so(prefix): @@ -180,8 +217,8 @@ def rm_py_along_so(prefix): files = list(os.scandir(prefix)) for fn in files: - if fn.is_file() and fn.name.endswith(('.so', '.pyd')): - for ext in '.py', '.pyc', '.pyo': + if fn.is_file() and fn.name.endswith((".so", ".pyd")): + for ext in ".py", ".pyc", ".pyo": name, _ = splitext(fn.path) name = normpath(name + ext) if any(name == normpath(f) for f in files): @@ -196,23 +233,23 @@ def rm_pyo(files, prefix): optimize = 1 .. in setup.cfg in which case we can end up with some stdlib __pycache__ files ending in .opt-N.pyc on Python 3, as well as .pyo files for the - package's own python. 
""" - re_pyo = re.compile(r'.*(?:\.pyo$|\.opt-[0-9]\.pyc)') + package's own python.""" + re_pyo = re.compile(r".*(?:\.pyo$|\.opt-[0-9]\.pyc)") for fn in files: if re_pyo.match(fn): os.unlink(join(prefix, fn)) def rm_pyc(files, prefix): - re_pyc = re.compile(r'.*(?:\.pyc$)') + re_pyc = re.compile(r".*(?:\.pyc$)") for fn in files: if re_pyc.match(fn): os.unlink(join(prefix, fn)) def rm_share_info_dir(files, prefix): - if 'share/info/dir' in files: - fn = join(prefix, 'share', 'info', 'dir') + if "share/info/dir" in files: + fn = join(prefix, "share", "info", "dir") if isfile(fn): os.unlink(fn) @@ -228,25 +265,31 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): unskipped_files = set(files) - skipped_files for fn in unskipped_files: # omit files in Library/bin, Scripts, and the root prefix - they are not generally imported - if sys.platform == 'win32': - if any([fn.lower().startswith(start) for start in ['library/bin', 'library\\bin', - 'scripts']]): + if sys.platform == "win32": + if any( + [ + fn.lower().startswith(start) + for start in ["library/bin", "library\\bin", "scripts"] + ] + ): continue else: - if fn.startswith('bin'): + if fn.startswith("bin"): continue - cache_prefix = ("__pycache__" + os.sep) - if (fn.endswith(".py") and - dirname(fn) + cache_prefix + basename(fn) + 'c' not in files): + cache_prefix = "__pycache__" + os.sep + if ( + fn.endswith(".py") + and dirname(fn) + cache_prefix + basename(fn) + "c" not in files + ): compile_files.append(fn) if compile_files: if not isfile(python_exe): - print('compiling .pyc files... failed as no python interpreter was found') + print("compiling .pyc files... failed as no python interpreter was found") else: - print('compiling .pyc files...') + print("compiling .pyc files...") # We avoid command lines longer than 8190 - if sys.platform == 'win32': + if sys.platform == "win32": limit = 8190 else: limit = 32760 @@ -255,8 +298,8 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): if limit < lower_limit: limit = lower_limit groups = [[]] - args = [python_exe, '-Wi', '-m', 'py_compile'] - args_len = length = len(' '.join(args)) + 1 + args = [python_exe, "-Wi", "-m", "py_compile"] + args_len = length = len(" ".join(args)) + 1 for f in compile_files: length_this = len(f) + 1 if length_this + length > limit: @@ -271,27 +314,40 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): def check_dist_info_version(name, version, files): for f in files: - if f.endswith('.dist-info' + os.sep + 'METADATA'): + if f.endswith(".dist-info" + os.sep + "METADATA"): f_lower = basename(dirname(f).lower()) - if f_lower.startswith(name + '-'): - f_lower, _, _ = f_lower.rpartition('.dist-info') - _, distname, f_lower = f_lower.rpartition(name + '-') + if f_lower.startswith(name + "-"): + f_lower, _, _ = f_lower.rpartition(".dist-info") + _, distname, f_lower = f_lower.rpartition(name + "-") if distname == name and version != f_lower: - print(f"ERROR: Top level dist-info version incorrect (is {f_lower}, should be {version})") + print( + f"ERROR: Top level dist-info version incorrect (is {f_lower}, should be {version})" + ) sys.exit(1) else: return -def post_process(name, version, files, prefix, config, preserve_egg_dir=False, noarch=False, skip_compile_pyc=()): +def post_process( + name, + version, + files, + prefix, + config, + preserve_egg_dir=False, + noarch=False, + skip_compile_pyc=(), +): rm_pyo(files, prefix) if noarch: rm_pyc(files, prefix) else: - python_exe = (config.build_python if 
isfile(config.build_python) else - config.host_python) - compile_missing_pyc(files, cwd=prefix, python_exe=python_exe, - skip_compile_pyc=skip_compile_pyc) + python_exe = ( + config.build_python if isfile(config.build_python) else config.host_python + ) + compile_missing_pyc( + files, cwd=prefix, python_exe=python_exe, skip_compile_pyc=skip_compile_pyc + ) remove_easy_install_pth(files, prefix, config, preserve_egg_dir=preserve_egg_dir) rm_py_along_so(prefix) rm_share_info_dir(files, prefix) @@ -300,17 +356,17 @@ def post_process(name, version, files, prefix, config, preserve_egg_dir=False, n def find_lib(link, prefix, files, path=None): if link.startswith(prefix): - link = normpath(link[len(prefix) + 1:]) + link = normpath(link[len(prefix) + 1 :]) if not any(link == normpath(w) for w in files): sys.exit("Error: Could not find %s" % link) return link - if link.startswith('/'): # but doesn't start with the build prefix + if link.startswith("/"): # but doesn't start with the build prefix return - if link.startswith('@rpath/'): + if link.startswith("@rpath/"): # Assume the rpath already points to lib, so there is no need to # change it. return - if '/' not in link or link.startswith('@executable_path/'): + if "/" not in link or link.startswith("@executable_path/"): link = basename(link) file_names = defaultdict(list) for f in files: @@ -327,25 +383,33 @@ def find_lib(link, prefix, files, path=None): for f in file_names[link]: md5s.add(md5_file(join(prefix, f))) if len(md5s) > 1: - sys.exit(f"Error: Found multiple instances of {link}: {file_names[link]}") + sys.exit( + f"Error: Found multiple instances of {link}: {file_names[link]}" + ) else: file_names[link].sort() - print("Found multiple instances of %s (%s). " - "Choosing the first one." % (link, file_names[link])) + print( + "Found multiple instances of %s (%s). " + "Choosing the first one." % (link, file_names[link]) + ) return file_names[link][0] print("Don't know how to find %s, skipping" % link) def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): - link = link_dict['name'] + link = link_dict["name"] if build_prefix != host_prefix and link.startswith(build_prefix): link = link.replace(build_prefix, host_prefix) print(f"Fixing linking of {link} in {path}") - print(".. seems to be linking to a compiler runtime, replacing build prefix with " - "host prefix and") + print( + ".. seems to be linking to a compiler runtime, replacing build prefix with " + "host prefix and" + ) if not codefile_type(link): - sys.exit("Error: Compiler runtime library in build prefix not found in host prefix %s" - % link) + sys.exit( + "Error: Compiler runtime library in build prefix not found in host prefix %s" + % link + ) else: print(f".. fixing linking of {link} in {path} instead") @@ -357,7 +421,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): print(f"Fixing linking of {link} in {path}") print("New link location is %s" % (link_loc)) - lib_to_link = relpath(dirname(link_loc), 'lib') + lib_to_link = relpath(dirname(link_loc), "lib") # path_to_lib = utils.relative(path[len(prefix) + 1:]) # e.g., if @@ -377,30 +441,32 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): # @loader_path/path_to_lib/lib_to_link/basename(link), like # @loader_path/../../things/libthings.dylib. 
- ret = f'@rpath/{lib_to_link}/{basename(link)}' + ret = f"@rpath/{lib_to_link}/{basename(link)}" # XXX: IF the above fails for whatever reason, the below can be used # TODO: This might contain redundant ..'s if link and path are both in # some subdirectory of lib. # ret = '@loader_path/%s/%s/%s' % (path_to_lib, lib_to_link, basename(link)) - ret = ret.replace('/./', '/') + ret = ret.replace("/./", "/") return ret -def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): +def mk_relative_osx(path, host_prefix, m, files, rpaths=("lib",)): base_prefix = m.config.build_folder assert base_prefix == dirname(host_prefix) build_prefix = m.config.build_prefix prefix = build_prefix if exists(build_prefix) else host_prefix names = macho.otool(path, prefix) - s = macho.install_name_change(path, prefix, - partial(osx_ch_link, - host_prefix=host_prefix, - build_prefix=build_prefix, - files=files), - dylibs=names) + s = macho.install_name_change( + path, + prefix, + partial( + osx_ch_link, host_prefix=host_prefix, build_prefix=build_prefix, files=files + ), + dylibs=names, + ) if names: existing_rpaths = macho.get_rpaths(path, build_prefix=prefix) @@ -408,16 +474,18 @@ def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): # being found. for rpath in rpaths: # Escape hatch for when you really don't want any rpaths added. - if rpath == '': + if rpath == "": continue - rpath_new = join('@loader_path', - relpath(join(host_prefix, rpath), dirname(path)), - '').replace('/./', '/') + rpath_new = join( + "@loader_path", relpath(join(host_prefix, rpath), dirname(path)), "" + ).replace("/./", "/") macho.add_rpath(path, rpath_new, build_prefix=prefix, verbose=True) full_rpath = join(host_prefix, rpath) for existing_rpath in existing_rpaths: if normpath(existing_rpath) == normpath(full_rpath): - macho.delete_rpath(path, existing_rpath, build_prefix=prefix, verbose=True) + macho.delete_rpath( + path, existing_rpath, build_prefix=prefix, verbose=True + ) for rpath in existing_rpaths: if rpath.startswith(base_prefix) and not rpath.startswith(host_prefix): @@ -428,7 +496,7 @@ def mk_relative_osx(path, host_prefix, m, files, rpaths=('lib',)): assert_relative_osx(path, host_prefix, build_prefix) -''' +""" # Both patchelf and LIEF have bugs in them. Neither can be used on all binaries we have seen. # This code tries each and tries to keep count of which worked between the original binary and # patchelf-patched, LIEF-patched versions. 
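For context on the mk_relative_linux hunk that follows: absolute RPATH entries pointing inside the install prefix are rewritten relative to $ORIGIN (the directory containing the binary itself), essentially '$ORIGIN/' + relpath(old, origin), so the result stays relocatable. A minimal standalone sketch of that conversion, illustrative only and not part of the patch (the prefix and file paths are made up):

    import os

    def origin_relative(abs_rpath: str, binary: str) -> str:
        # Rewrite an absolute rpath as relative to $ORIGIN (the binary's own directory).
        return "$ORIGIN/" + os.path.relpath(abs_rpath, os.path.dirname(binary))

    print(origin_relative("/opt/prefix/lib",
                          "/opt/prefix/lib/python3.10/site-packages/pkg/foo.so"))
    # -> $ORIGIN/../../..
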
@@ -482,77 +550,95 @@ def check_binary_patchers(elf, prefix, rpath): if original == pelf and works: worksd['patchelf'] += 1 print('\n' + str(worksd) + '\n') -''' +""" -def mk_relative_linux(f, prefix, rpaths=('lib',), method=None): - 'Respects the original values and converts abs to $ORIGIN-relative' +def mk_relative_linux(f, prefix, rpaths=("lib",), method=None): + "Respects the original values and converts abs to $ORIGIN-relative" elf = join(prefix, f) origin = dirname(elf) existing_pe = None - patchelf = external.find_executable('patchelf', prefix) + patchelf = external.find_executable("patchelf", prefix) if not patchelf: - print(f"ERROR :: You should install patchelf, will proceed with LIEF for {elf} (was {method})") - method = 'LIEF' + print( + f"ERROR :: You should install patchelf, will proceed with LIEF for {elf} (was {method})" + ) + method = "LIEF" else: try: - existing_pe = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0] + existing_pe = ( + check_output([patchelf, "--print-rpath", elf]) + .decode("utf-8") + .splitlines()[0] + ) except CalledProcessError: - if method == 'patchelf': - print("ERROR :: `patchelf --print-rpath` failed for {}, but patchelf was specified".format( - elf)) - elif method != 'LIEF': - print("WARNING :: `patchelf --print-rpath` failed for {}, will proceed with LIEF (was {})".format( - elf, method)) - method = 'LIEF' + if method == "patchelf": + print( + "ERROR :: `patchelf --print-rpath` failed for {}, but patchelf was specified".format( + elf + ) + ) + elif method != "LIEF": + print( + "WARNING :: `patchelf --print-rpath` failed for {}, will proceed with LIEF (was {})".format( + elf, method + ) + ) + method = "LIEF" else: existing_pe = existing_pe.split(os.pathsep) existing = existing_pe if have_lief: existing2, _, _ = get_rpaths_raw(elf) if existing_pe and existing_pe != existing2: - print('WARNING :: get_rpaths_raw()={} and patchelf={} disagree for {} :: '.format( - existing2, existing_pe, elf)) + print( + "WARNING :: get_rpaths_raw()={} and patchelf={} disagree for {} :: ".format( + existing2, existing_pe, elf + ) + ) # Use LIEF if method is LIEF to get the initial value? - if method == 'LIEF': + if method == "LIEF": existing = existing2 new = [] for old in existing: - if old.startswith('$ORIGIN'): + if old.startswith("$ORIGIN"): new.append(old) - elif old.startswith('/'): + elif old.startswith("/"): # Test if this absolute path is outside of prefix. That is fatal. rp = relpath(old, prefix) - if rp.startswith('..' + os.sep): - print(f'Warning: rpath {old} is outside prefix {prefix} (removing it)') + if rp.startswith(".." + os.sep): + print(f"Warning: rpath {old} is outside prefix {prefix} (removing it)") else: - rp = '$ORIGIN/' + relpath(old, origin) + rp = "$ORIGIN/" + relpath(old, origin) if rp not in new: new.append(rp) # Ensure that the asked-for paths are also in new. for rpath in rpaths: - if rpath != '': - if not rpath.startswith('/'): + if rpath != "": + if not rpath.startswith("/"): # IMHO utils.relative shouldn't exist, but I am too paranoid to remove # it, so instead, make sure that what I think it should be replaced by # gives the same result and assert if not. Yeah, I am a chicken. 
rel_ours = normpath(utils.relative(f, rpath)) rel_stdlib = normpath(relpath(rpath, dirname(f))) if not rel_ours == rel_stdlib: - raise ValueError('utils.relative {} and relpath {} disagree for {}, {}'.format( - rel_ours, rel_stdlib, f, rpath)) - rpath = '$ORIGIN/' + rel_stdlib + raise ValueError( + "utils.relative {} and relpath {} disagree for {}, {}".format( + rel_ours, rel_stdlib, f, rpath + ) + ) + rpath = "$ORIGIN/" + rel_stdlib if rpath not in new: new.append(rpath) - rpath = ':'.join(new) + rpath = ":".join(new) # check_binary_patchers(elf, prefix, rpath) - if not patchelf or (method and method.upper() == 'LIEF'): - set_rpath(old_matching='*', new_rpath=rpath, file=elf) + if not patchelf or (method and method.upper() == "LIEF"): + set_rpath(old_matching="*", new_rpath=rpath, file=elf) else: - call([patchelf, '--force-rpath', '--set-rpath', rpath, elf]) + call([patchelf, "--force-rpath", "--set-rpath", rpath, elf]) def assert_relative_osx(path, host_prefix, build_prefix): @@ -560,32 +646,39 @@ def assert_relative_osx(path, host_prefix, build_prefix): for name in macho.get_dylibs(path, tools_prefix): for prefix in (host_prefix, build_prefix): if prefix and name.startswith(prefix): - raise RuntimeError("library at %s appears to have an absolute path embedded" % path) + raise RuntimeError( + "library at %s appears to have an absolute path embedded" % path + ) -def determine_package_nature(pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls): +def determine_package_nature( + pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls +): run_exports = None - lib_prefix = pkg.name.startswith('lib') + lib_prefix = pkg.name.startswith("lib") codefiles = get_package_obj_files(pkg, prefix) # get_package_obj_files already filters by extension and I'm not sure we need two. - dsos = [f for f in codefiles for ext in ('.dylib', '.so', '.dll', '.pyd') if ext in f] + dsos = [ + f for f in codefiles for ext in (".dylib", ".so", ".dll", ".pyd") if ext in f + ] # TODO :: Is this package not in a channel somewhere at this point? It would be good not to be special # casing like this. Clearly we aren't able to get run_exports for starters and that's not good if not isinstance(pkg, FakeDist): # we don't care about the actual run_exports value, just whether or not run_exports are present. - json_file = os.path.join(prefix, 'conda-meta', pkg.dist_name + '.json') + json_file = os.path.join(prefix, "conda-meta", pkg.dist_name + ".json") import json + assert os.path.isfile(json_file), f"conda-meta :: Not a file: {json_file}" json_info = json.loads(open(json_file).read()) - epd = json_info['extracted_package_dir'] - run_exports_json = os.path.join(epd, 'info', 'run_exports.json') + epd = json_info["extracted_package_dir"] + run_exports_json = os.path.join(epd, "info", "run_exports.json") if os.path.isfile(run_exports_json): run_exports = json.loads(open(run_exports_json).read()) return (dsos, run_exports, lib_prefix) def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls): - ''' + """ Result :: "non-library", "interpreted library (Python|R|Python,R)", "plugin library (Python|R|Python,R)", @@ -594,18 +687,20 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url "interpreter (R)" "interpreter (Python)" .. in that order, i.e. if have both dsos and run_exports, it's a run_exports_library. 
- ''' - dsos, run_exports, _ = determine_package_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) - if pkg.name == 'python': + """ + dsos, run_exports, _ = determine_package_nature( + pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls + ) + if pkg.name == "python": return "interpreter (Python)" - elif pkg.name == 'r-base': + elif pkg.name == "r-base": return "interpreter (R)" if run_exports: return "run-exports library" elif len(dsos): # If all DSOs are under site-packages or R/lib/ - python_dsos = [dso for dso in dsos if 'site-packages' in dso] - r_dsos = [dso for dso in dsos if 'lib/R/library' in dso] + python_dsos = [dso for dso in dsos if "site-packages" in dso] + r_dsos = [dso for dso in dsos if "lib/R/library" in dso] dsos_without_plugins = [dso for dso in dsos if dso not in r_dsos + python_dsos] if len(dsos_without_plugins): return "dso library" @@ -618,8 +713,8 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url return "plugin library (R)" else: files = get_package_files(pkg, prefix) - python_files = [f for f in files if 'site-packages' in f] - r_files = [f for f in files if 'lib/R/library' in f] + python_files = [f for f in files if "site-packages" in f] + r_files = [f for f in files if "lib/R/library" in f] if python_files and r_files: return "interpreted library (Python,R)" elif python_files: @@ -633,6 +728,7 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url def dists_from_names(names, prefix): results = [] from conda_build.utils import linked_data_no_multichannels + pkgs = linked_data_no_multichannels(prefix) for name in names: for pkg in pkgs: @@ -652,16 +748,17 @@ def __init__(self, name, version, build_number, build_str, channel, files): self.files = files def get(self, name): - if name == 'files': + if name == "files": return self.files # This is really just a small, fixed sysroot and it is rooted at ''. `libcrypto.0.9.8.dylib` should not be in it IMHO. -DEFAULT_MAC_WHITELIST = ['/opt/X11/', - '/usr/lib/libSystem.B.dylib', - '/usr/lib/libcrypto.0.9.8.dylib', - '/usr/lib/libobjc.A.dylib', -""" +DEFAULT_MAC_WHITELIST = [ + "/opt/X11/", + "/usr/lib/libSystem.B.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libobjc.A.dylib", + """ '/System/Library/Frameworks/Accelerate.framework/*', '/System/Library/Frameworks/AGL.framework/*', '/System/Library/Frameworks/AppKit.framework/*', @@ -699,50 +796,64 @@ def get(self, name): '/System/Library/Frameworks/StoreKit.framework/*', '/System/Library/Frameworks/SystemConfiguration.framework/*', '/System/Library/Frameworks/WebKit.framework/*' -""" - ] +""", +] # Should contain the System32/SysWOW64 DLLs present on a clean installation of the # oldest version of Windows that we support (or are currently) building packages for. 
-DEFAULT_WIN_WHITELIST = ['**/ADVAPI32.dll', - '**/bcrypt.dll', - '**/COMCTL32.dll', - '**/COMDLG32.dll', - '**/CRYPT32.dll', - '**/dbghelp.dll', - '**/GDI32.dll', - '**/IMM32.dll', - '**/KERNEL32.dll', - '**/NETAPI32.dll', - '**/ole32.dll', - '**/OLEAUT32.dll', - '**/PSAPI.DLL', - '**/RPCRT4.dll', - '**/SHELL32.dll', - '**/USER32.dll', - '**/USERENV.dll', - '**/WINHTTP.dll', - '**/WS2_32.dll', - '**/ntdll.dll', - '**/msvcrt.dll'] - - -def _collect_needed_dsos(sysroots_files, files, run_prefix, sysroot_substitution, build_prefix, build_prefix_substitution): +DEFAULT_WIN_WHITELIST = [ + "**/ADVAPI32.dll", + "**/bcrypt.dll", + "**/COMCTL32.dll", + "**/COMDLG32.dll", + "**/CRYPT32.dll", + "**/dbghelp.dll", + "**/GDI32.dll", + "**/IMM32.dll", + "**/KERNEL32.dll", + "**/NETAPI32.dll", + "**/ole32.dll", + "**/OLEAUT32.dll", + "**/PSAPI.DLL", + "**/RPCRT4.dll", + "**/SHELL32.dll", + "**/USER32.dll", + "**/USERENV.dll", + "**/WINHTTP.dll", + "**/WS2_32.dll", + "**/ntdll.dll", + "**/msvcrt.dll", +] + + +def _collect_needed_dsos( + sysroots_files, + files, + run_prefix, + sysroot_substitution, + build_prefix, + build_prefix_substitution, +): all_needed_dsos = set() needed_dsos_for_file = dict() - sysroots = '' + sysroots = "" if sysroots_files: sysroots = list(sysroots_files.keys())[0] for f in files: path = join(run_prefix, f) if not codefile_type(path): continue - build_prefix = build_prefix.replace(os.sep, '/') - run_prefix = run_prefix.replace(os.sep, '/') - needed = get_linkages_memoized(path, resolve_filenames=True, recurse=False, - sysroot=sysroots, envroot=run_prefix) + build_prefix = build_prefix.replace(os.sep, "/") + run_prefix = run_prefix.replace(os.sep, "/") + needed = get_linkages_memoized( + path, + resolve_filenames=True, + recurse=False, + sysroot=sysroots, + envroot=run_prefix, + ) for lib, res in needed.items(): - resolved = res['resolved'].replace(os.sep, '/') + resolved = res["resolved"].replace(os.sep, "/") for sysroot, sysroot_files in sysroots_files.items(): if resolved.startswith(sysroot): resolved = resolved.replace(sysroot, sysroot_substitution) @@ -753,17 +864,27 @@ def _collect_needed_dsos(sysroots_files, files, run_prefix, sysroot_substitution if build_prefix != run_prefix and resolved.startswith(build_prefix): resolved = resolved.replace(build_prefix, build_prefix_substitution) if resolved.startswith(run_prefix): - resolved = relpath(resolved, run_prefix).replace(os.sep, '/') + resolved = relpath(resolved, run_prefix).replace(os.sep, "/") # If resolved still starts with '$RPATH' then that means we will either find it in # the whitelist or it will present as an error later. 
- res['resolved'] = resolved + res["resolved"] = resolved needed_dsos_for_file[f] = needed - all_needed_dsos = all_needed_dsos.union({info['resolved'] for f, info in needed.items()}) + all_needed_dsos = all_needed_dsos.union( + {info["resolved"] for f, info in needed.items()} + ) return all_needed_dsos, needed_dsos_for_file -def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, - sysroot_substitution, enable_static): +def _map_file_to_package( + files, + run_prefix, + build_prefix, + all_needed_dsos, + pkg_vendored_dist, + ignore_list_syms, + sysroot_substitution, + enable_static, +): # Form a mapping of file => package prefix_owners = {} @@ -780,24 +901,34 @@ def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_v for subdir2, _, filez in os.walk(prefix): for file in filez: fp = join(subdir2, file) - dynamic_lib = any(fnmatch(fp, ext) for ext in ('*.so*', '*.dylib*', '*.dll')) and \ - codefile_type(fp, skip_symlinks=False) is not None - static_lib = any(fnmatch(fp, ext) for ext in ('*.a', '*.lib')) + dynamic_lib = ( + any(fnmatch(fp, ext) for ext in ("*.so*", "*.dylib*", "*.dll")) + and codefile_type(fp, skip_symlinks=False) is not None + ) + static_lib = any(fnmatch(fp, ext) for ext in ("*.a", "*.lib")) # Looking at all the files is very slow. if not dynamic_lib and not static_lib: continue - rp = normpath(relpath(fp, prefix)).replace('\\', '/') - if dynamic_lib and not any(rp.lower() == w for w in all_needed_dsos_lower): + rp = normpath(relpath(fp, prefix)).replace("\\", "/") + if dynamic_lib and not any( + rp.lower() == w for w in all_needed_dsos_lower + ): continue if any(rp == normpath(w) for w in all_lib_exports[prefix]): continue - rp_po = rp.replace('\\', '/') - owners = prefix_owners[prefix][rp_po] if rp_po in prefix_owners[prefix] else [] + rp_po = rp.replace("\\", "/") + owners = ( + prefix_owners[prefix][rp_po] + if rp_po in prefix_owners[prefix] + else [] + ) # Self-vendoring, not such a big deal but may as well report it? if not len(owners): if any(rp == normpath(w) for w in files): owners.append(pkg_vendored_dist) - new_pkgs = list(which_package(rp, prefix, avoid_canonical_channel_name=True)) + new_pkgs = list( + which_package(rp, prefix, avoid_canonical_channel_name=True) + ) # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners # and not any([fnmatch(new_pkg.name, i) for i in ignore_for_statics])] @@ -806,18 +937,31 @@ def _map_file_to_package(files, run_prefix, build_prefix, all_needed_dsos, pkg_v owners.append(new_pkg) prefix_owners[prefix][rp_po] = owners if len(prefix_owners[prefix][rp_po]): - exports = {e for e in get_exports_memoized(fp, enable_static=enable_static) if not - any(fnmatch(e, pattern) for pattern in ignore_list_syms)} + exports = { + e + for e in get_exports_memoized( + fp, enable_static=enable_static + ) + if not any( + fnmatch(e, pattern) for pattern in ignore_list_syms + ) + } all_lib_exports[prefix][rp_po] = exports # Check codefile_type to filter out linker scripts. 
if dynamic_lib: contains_dsos[prefix_owners[prefix][rp_po][0]] = True elif static_lib: if sysroot_substitution in fp: - if (prefix_owners[prefix][rp_po][0].name.startswith('gcc_impl_linux') or - prefix_owners[prefix][rp_po][0].name == 'llvm'): + if ( + prefix_owners[prefix][rp_po][0].name.startswith( + "gcc_impl_linux" + ) + or prefix_owners[prefix][rp_po][0].name == "llvm" + ): continue - print(f"sysroot in {fp}, owner is {prefix_owners[prefix][rp_po][0]}") + print( + f"sysroot in {fp}, owner is {prefix_owners[prefix][rp_po][0]}" + ) # Hmm, not right, muddies the prefixes again. contains_static_libs[prefix_owners[prefix][rp_po][0]] = True @@ -829,16 +973,21 @@ def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, pkg_vendoring_version = str(pkg_version) pkg_vendoring_build_str = build_str pkg_vendoring_build_number = build_number - pkg_vendoring_key = '-'.join([pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_str]) - - return FakeDist(pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_number, - pkg_vendoring_build_str, - channel, - files), pkg_vendoring_key + pkg_vendoring_key = "-".join( + [pkg_vendoring_name, pkg_vendoring_version, pkg_vendoring_build_str] + ) + + return ( + FakeDist( + pkg_vendoring_name, + pkg_vendoring_version, + pkg_vendoring_build_number, + pkg_vendoring_build_str, + channel, + files, + ), + pkg_vendoring_key, + ) def _print_msg(errors, text, verbose): @@ -849,19 +998,35 @@ def _print_msg(errors, text, verbose): def caseless_sepless_fnmatch(paths, pat): - pat = pat.replace('\\', '/') + pat = pat.replace("\\", "/") match = re.compile("(?i)" + fnmatch_translate(pat)).match - matches = [path for path in paths if (path.replace('\\', '/') == pat) or match(path.replace('\\', '/'))] + matches = [ + path + for path in paths + if (path.replace("\\", "/") == pat) or match(path.replace("\\", "/")) + ] return matches -def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_files, msg_prelude, info_prelude, - sysroot_prefix, sysroot_substitution, subdir, verbose): +def _lookup_in_sysroots_and_whitelist( + errors, + whitelist, + needed_dso, + sysroots_files, + msg_prelude, + info_prelude, + sysroot_prefix, + sysroot_substitution, + subdir, + verbose, +): # A system or ignored dependency. We should be able to find it in one of the CDT or # compiler packages on linux or in a sysroot folder on other OSes. These usually # start with '$RPATH/' which indicates pyldd did not find them, so remove that now. if needed_dso.startswith(sysroot_substitution): - replacements = [sysroot_substitution] + [sysroot for sysroot, _ in sysroots_files.items()] + replacements = [sysroot_substitution] + [ + sysroot for sysroot, _ in sysroots_files.items() + ] else: replacements = [needed_dso] in_whitelist = False @@ -870,20 +1035,25 @@ def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_fi # Check if we have a CDT package or a file in a sysroot. sysroot_files = [] for sysroot, files in sysroots_files.items(): - sysroot_os = sysroot.replace('\\', os.sep) + sysroot_os = sysroot.replace("\\", os.sep) if needed_dso.startswith(sysroot_substitution): # Do we want to do this replace? 
- sysroot_files.append(needed_dso.replace(sysroot_substitution, sysroot_os)) + sysroot_files.append( + needed_dso.replace(sysroot_substitution, sysroot_os) + ) else: found = caseless_sepless_fnmatch(files, needed_dso[1:]) sysroot_files.extend(found) if len(sysroot_files): in_sysroots = True - if subdir.startswith('osx-') or 'win' in subdir: + if subdir.startswith("osx-") or "win" in subdir: in_prefix_dso = sysroot_files[0] n_dso_p = f"Needed DSO {in_prefix_dso}" - _print_msg(errors, '{}: {} found in $SYSROOT'. - format(info_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in $SYSROOT", + verbose=verbose, + ) else: # Removing sysroot_prefix is only for Linux, though we could # use CONDA_BUILD_SYSROOT for macOS. We should figure out what to do about @@ -892,42 +1062,72 @@ def _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots_fi for idx in range(len(sysroot_files)): # in_prefix_dso = normpath(sysroot_files[idx].replace( # sysroot_prefix + os.sep, '')) - in_prefix_dso = sysroot_files[idx][len(sysroot_prefix) + 1:] + in_prefix_dso = sysroot_files[idx][len(sysroot_prefix) + 1 :] n_dso_p = f"Needed DSO {in_prefix_dso}" _pkgs = list(which_package(in_prefix_dso, sysroot_prefix)) if len(_pkgs) > 0: pkgs.extend(_pkgs) break if len(pkgs): - _print_msg(errors, '{}: {} found in CDT/compiler package {}'. - format(info_prelude, n_dso_p, pkgs[0]), verbose=verbose) + _print_msg( + errors, + "{}: {} found in CDT/compiler package {}".format( + info_prelude, n_dso_p, pkgs[0] + ), + verbose=verbose, + ) else: - _print_msg(errors, '{}: {} not found in any CDT/compiler package,' - ' nor the whitelist?!'. - format(msg_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + "{}: {} not found in any CDT/compiler package," + " nor the whitelist?!".format(msg_prelude, n_dso_p), + verbose=verbose, + ) if not in_sysroots: # It takes a very long time to glob in C:/Windows so we do not do that. for replacement in replacements: - needed_dso_w = needed_dso.replace(sysroot_substitution, replacement + '/') + needed_dso_w = needed_dso.replace(sysroot_substitution, replacement + "/") # We should pass in multiple paths at once to this, but the code isn't structured for that. - in_whitelist = any([caseless_sepless_fnmatch([needed_dso_w], w) for w in whitelist]) + in_whitelist = any( + [caseless_sepless_fnmatch([needed_dso_w], w) for w in whitelist] + ) if in_whitelist: n_dso_p = f"Needed DSO {needed_dso_w}" - _print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in the whitelist", + verbose=verbose, + ) break if not in_whitelist and not in_sysroots: - _print_msg(errors, "{}: {} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" - ".. is this binary repackaging?". - format(msg_prelude, needed_dso), verbose=verbose) - - -def _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, - warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used): + _print_msg( + errors, + "{}: {} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" + ".. 
is this binary repackaging?".format(msg_prelude, needed_dso), + verbose=verbose, + ) + + +def _lookup_in_prefix_packages( + errors, + needed_dso, + files, + run_prefix, + whitelist, + info_prelude, + msg_prelude, + warn_prelude, + verbose, + requirements_run, + lib_packages, + lib_packages_used, +): in_prefix_dso = normpath(needed_dso) - n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace('\\', '/')) + n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace("\\", "/")) and_also = " (and also in this package)" if in_prefix_dso in files else "" - pkgs = list(which_package(in_prefix_dso, run_prefix, avoid_canonical_channel_name=True)) + pkgs = list( + which_package(in_prefix_dso, run_prefix, avoid_canonical_channel_name=True) + ) in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in requirements_run] # TODO :: metadata build/inherit_child_run_exports (for vc, mro-base-impl). for pkg in in_pkgs_in_run_reqs: @@ -935,94 +1135,182 @@ def _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, lib_packages_used.add(pkg) in_whitelist = any([fnmatch(in_prefix_dso, w) for w in whitelist]) if len(in_pkgs_in_run_reqs) == 1: - _print_msg(errors, '{}: {} found in {}{}'.format(info_prelude, - n_dso_p, - in_pkgs_in_run_reqs[0], - and_also), verbose=verbose) + _print_msg( + errors, + "{}: {} found in {}{}".format( + info_prelude, n_dso_p, in_pkgs_in_run_reqs[0], and_also + ), + verbose=verbose, + ) elif in_whitelist: - _print_msg(errors, '{}: {} found in the whitelist'. - format(info_prelude, n_dso_p), verbose=verbose) + _print_msg( + errors, + f"{info_prelude}: {n_dso_p} found in the whitelist", + verbose=verbose, + ) elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: - _print_msg(errors, '{}: {} found in {}{}'.format(msg_prelude, - n_dso_p, - [p.quad[0] for p in pkgs], - and_also), verbose=verbose) - _print_msg(errors, '{}: .. but {} not in reqs/run, (i.e. it is overlinking)' - ' (likely) or a missing dependency (less likely)'. - format(msg_prelude, [p.quad[0] for p in pkgs]), verbose=verbose) + _print_msg( + errors, + "{}: {} found in {}{}".format( + msg_prelude, n_dso_p, [p.quad[0] for p in pkgs], and_also + ), + verbose=verbose, + ) + _print_msg( + errors, + "{}: .. but {} not in reqs/run, (i.e. 
it is overlinking)" + " (likely) or a missing dependency (less likely)".format( + msg_prelude, [p.quad[0] for p in pkgs] + ), + verbose=verbose, + ) elif len(in_pkgs_in_run_reqs) > 1: - _print_msg(errors, '{}: {} found in multiple packages in run/reqs: {}{}' - .format(warn_prelude, - in_prefix_dso, - in_pkgs_in_run_reqs, - and_also), verbose=verbose) + _print_msg( + errors, + "{}: {} found in multiple packages in run/reqs: {}{}".format( + warn_prelude, in_prefix_dso, in_pkgs_in_run_reqs, and_also + ), + verbose=verbose, + ) else: if not any(in_prefix_dso == normpath(w) for w in files): - _print_msg(errors, '{}: {} not found in any packages'.format(msg_prelude, - in_prefix_dso), verbose=verbose) + _print_msg( + errors, + f"{msg_prelude}: {in_prefix_dso} not found in any packages", + verbose=verbose, + ) elif verbose: - _print_msg(errors, '{}: {} found in this package'.format(info_prelude, - in_prefix_dso), verbose=verbose) - - -def _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, - error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, - lib_packages_used, whitelist, sysroots, sysroot_prefix, sysroot_substitution, subdir): + _print_msg( + errors, + f"{info_prelude}: {in_prefix_dso} found in this package", + verbose=verbose, + ) + + +def _show_linking_messages( + files, + errors, + needed_dsos_for_file, + build_prefix, + run_prefix, + pkg_name, + error_overlinking, + runpath_whitelist, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + whitelist, + sysroots, + sysroot_prefix, + sysroot_substitution, + subdir, +): if len(sysroots): for sysroot, sr_files in sysroots.items(): - _print_msg(errors, " INFO: sysroot: '{}' files: '{}'".format(sysroot, - sorted(list(sr_files), reverse=True)[1:5]), - verbose=verbose) + _print_msg( + errors, + " INFO: sysroot: '{}' files: '{}'".format( + sysroot, sorted(list(sr_files), reverse=True)[1:5] + ), + verbose=verbose, + ) for f in files: path = join(run_prefix, f) filetype = codefile_type(path) - if not filetype or filetype not in filetypes_for_platform[subdir.split('-')[0]]: + if not filetype or filetype not in filetypes_for_platform[subdir.split("-")[0]]: continue - warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, '/')) - err_prelude = " ERROR ({},{})".format(pkg_name, f.replace(os.sep, '/')) - info_prelude = " INFO ({},{})".format(pkg_name, f.replace(os.sep, '/')) + warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, "/")) + err_prelude = " ERROR ({},{})".format(pkg_name, f.replace(os.sep, "/")) + info_prelude = " INFO ({},{})".format(pkg_name, f.replace(os.sep, "/")) msg_prelude = err_prelude if error_overlinking else warn_prelude # TODO :: Determine this much earlier, storing in needed_dsos_for_file in _collect_needed_dsos() try: runpaths, _, _ = get_runpaths_raw(path) except: - _print_msg(errors, f'{warn_prelude}: pyldd.py failed to process', - verbose=verbose) + _print_msg( + errors, f"{warn_prelude}: pyldd.py failed to process", verbose=verbose + ) continue - if runpaths and not (runpath_whitelist or - any(fnmatch(f, w) for w in runpath_whitelist)): - _print_msg(errors, '{}: runpaths {} found in {}'.format(msg_prelude, - runpaths, - path), verbose=verbose) + if runpaths and not ( + runpath_whitelist or any(fnmatch(f, w) for w in runpath_whitelist) + ): + _print_msg( + errors, + f"{msg_prelude}: runpaths {runpaths} found in {path}", + verbose=verbose, + ) needed = needed_dsos_for_file[f] for needed_dso, needed_dso_info in 
needed.items(): - needed_dso = needed_dso.replace('/', os.sep) + needed_dso = needed_dso.replace("/", os.sep) # Should always be the case, even when we fail to resolve the original value is stored here # as it is still a best attempt and informative. - if 'resolved' in needed_dso_info: - needed_dso = needed_dso_info['resolved'] - if not needed_dso.startswith(os.sep) and not needed_dso.startswith('$'): - _lookup_in_prefix_packages(errors, needed_dso, files, run_prefix, whitelist, info_prelude, msg_prelude, - warn_prelude, verbose, requirements_run, lib_packages, lib_packages_used) - elif needed_dso.startswith('$PATH'): - _print_msg(errors, "{}: {} found in build prefix; should never happen".format( - err_prelude, needed_dso), verbose=verbose) + if "resolved" in needed_dso_info: + needed_dso = needed_dso_info["resolved"] + if not needed_dso.startswith(os.sep) and not needed_dso.startswith("$"): + _lookup_in_prefix_packages( + errors, + needed_dso, + files, + run_prefix, + whitelist, + info_prelude, + msg_prelude, + warn_prelude, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + ) + elif needed_dso.startswith("$PATH"): + _print_msg( + errors, + "{}: {} found in build prefix; should never happen".format( + err_prelude, needed_dso + ), + verbose=verbose, + ) else: - _lookup_in_sysroots_and_whitelist(errors, whitelist, needed_dso, sysroots, msg_prelude, - info_prelude, sysroot_prefix, sysroot_substitution, - subdir, verbose) - - -def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdir, - ignore_run_exports, - requirements_run, requirements_build, requirements_host, - run_prefix, build_prefix, - missing_dso_whitelist, runpath_whitelist, - error_overlinking, error_overdepending, verbose, - exception_on_error, files, bldpkgs_dirs, output_folder, channel_urls, - enable_static=False, - variants={}): + _lookup_in_sysroots_and_whitelist( + errors, + whitelist, + needed_dso, + sysroots, + msg_prelude, + info_prelude, + sysroot_prefix, + sysroot_substitution, + subdir, + verbose, + ) + + +def check_overlinking_impl( + pkg_name, + pkg_version, + build_str, + build_number, + subdir, + ignore_run_exports, + requirements_run, + requirements_build, + requirements_host, + run_prefix, + build_prefix, + missing_dso_whitelist, + runpath_whitelist, + error_overlinking, + error_overdepending, + verbose, + exception_on_error, + files, + bldpkgs_dirs, + output_folder, + channel_urls, + enable_static=False, + variants={}, +): verbose = True errors = [] @@ -1031,46 +1319,66 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi for f in files: path = join(run_prefix, f) filetype = codefile_type(path) - if filetype and filetype in filetypes_for_platform[subdir.split('-')[0]]: + if filetype and filetype in filetypes_for_platform[subdir.split("-")[0]]: files_to_inspect.append(f) - filesu.append(f.replace('\\', '/')) + filesu.append(f.replace("\\", "/")) if not files_to_inspect: return dict() - sysroot_substitution = '$SYSROOT' - build_prefix_substitution = '$PATH' + sysroot_substitution = "$SYSROOT" + build_prefix_substitution = "$PATH" # Used to detect overlinking (finally) - requirements_run = [req.split(' ')[0] for req in requirements_run] + requirements_run = [req.split(" ")[0] for req in requirements_run] packages = dists_from_names(requirements_run, run_prefix) - local_channel = dirname(bldpkgs_dirs).replace('\\', '/') if utils.on_win else dirname(bldpkgs_dirs)[1:] - pkg_vendored_dist, pkg_vendoring_key = _get_fake_pkg_dist(pkg_name, 
pkg_version, build_str, build_number, - local_channel, files) + local_channel = ( + dirname(bldpkgs_dirs).replace("\\", "/") + if utils.on_win + else dirname(bldpkgs_dirs)[1:] + ) + pkg_vendored_dist, pkg_vendoring_key = _get_fake_pkg_dist( + pkg_name, pkg_version, build_str, build_number, local_channel, files + ) packages.append(pkg_vendored_dist) ignore_list = utils.ensure_list(ignore_run_exports) - if subdir.startswith('linux'): - ignore_list.append('libgcc-ng') - - package_nature = {package: library_nature(package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls) - for package in packages} - lib_packages = {package for package in packages - if package.quad[0] not in ignore_list and - [package] != 'non-library'} + if subdir.startswith("linux"): + ignore_list.append("libgcc-ng") + + package_nature = { + package: library_nature( + package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls + ) + for package in packages + } + lib_packages = { + package + for package in packages + if package.quad[0] not in ignore_list and [package] != "non-library" + } lib_packages_used = {pkg_vendored_dist} - ignore_list_syms = ['main', '_main', '*get_pc_thunk*', '___clang_call_terminate', '_timeout'] + ignore_list_syms = [ + "main", + "_main", + "*get_pc_thunk*", + "___clang_call_terminate", + "_timeout", + ] # ignore_for_statics = ['gcc_impl_linux*', 'compiler-rt*', 'llvm-openmp*', 'gfortran_osx*'] # sysroots and whitelists are similar, but the subtle distinctions are important. - CONDA_BUILD_SYSROOT = variants.get('CONDA_BUILD_SYSROOT', None) + CONDA_BUILD_SYSROOT = variants.get("CONDA_BUILD_SYSROOT", None) if CONDA_BUILD_SYSROOT and os.path.exists(CONDA_BUILD_SYSROOT): # When on macOS and CBS not set, sysroots should probably be '/' # is everything in the sysroot allowed? I suppose so! - sysroot_prefix = '' + sysroot_prefix = "" sysroots = [CONDA_BUILD_SYSROOT] else: # The linux case. sysroot_prefix = build_prefix - sysroots = [sysroot + os.sep for sysroot in utils.glob(join(sysroot_prefix, '**', 'sysroot'))] + sysroots = [ + sysroot + os.sep + for sysroot in utils.glob(join(sysroot_prefix, "**", "sysroot")) + ] whitelist = [] vendoring_record = dict() # When build_is_host is True we perform file existence checks for files in the sysroot (e.g. C:\Windows) @@ -1078,20 +1386,20 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi # create some packages for the Windows System DLLs as an alternative?) build_is_host = False if not len(sysroots): - if subdir.startswith('osx-'): + if subdir.startswith("osx-"): # This is a bit confused! A sysroot shouldn't contain /usr/lib (it's the bit before that) # what we are really specifying here are subtrees of sysroots to search in and it may be # better to store each element of this as a tuple with a string and a nested tuple, e.g. # [('/', ('/usr/lib', '/opt/X11', '/System/Library/Frameworks'))] # Here we mean that we have a sysroot at '/' (could be a tokenized value like '$SYSROOT'?) # .. and in that sysroot there are 3 suddirs in which we may search for DSOs. 
- sysroots = ['/usr/lib', '/opt/X11', '/System/Library/Frameworks'] + sysroots = ["/usr/lib", "/opt/X11", "/System/Library/Frameworks"] whitelist = DEFAULT_MAC_WHITELIST - build_is_host = True if sys.platform == 'darwin' else False - elif subdir.startswith('win'): - sysroots = ['C:/Windows'] + build_is_host = True if sys.platform == "darwin" else False + elif subdir.startswith("win"): + sysroots = ["C:/Windows"] whitelist = DEFAULT_WIN_WHITELIST - build_is_host = True if sys.platform == 'win-32' else False + build_is_host = True if sys.platform == "win-32" else False whitelist += missing_dso_whitelist or [] @@ -1100,23 +1408,32 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi sysroots_files = dict() for sysroot in sysroots: from conda_build.utils import prefix_files - srs = sysroot if sysroot.endswith('/') else sysroot + '/' + + srs = sysroot if sysroot.endswith("/") else sysroot + "/" sysroot_files = prefix_files(sysroot) - sysroot_files = [p.replace('\\', '/') for p in sysroot_files] + sysroot_files = [p.replace("\\", "/") for p in sysroot_files] sysroots_files[srs] = sysroot_files - if subdir.startswith('osx-'): + if subdir.startswith("osx-"): orig_sysroot_files = copy(sysroot_files) sysroot_files = [] for f in orig_sysroot_files: replaced = f - if f.endswith('.tbd'): + if f.endswith(".tbd"): # For now, look up the line containing: # install-name: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation - with open(os.path.join(sysroot, f), 'rb') as tbd_fh: - lines = [line for line in tbd_fh.read().decode('utf-8').splitlines() if line.startswith('install-name:')] + with open(os.path.join(sysroot, f), "rb") as tbd_fh: + lines = [ + line + for line in tbd_fh.read().decode("utf-8").splitlines() + if line.startswith("install-name:") + ] if lines: - install_names = [re.match(r'^install-name:\s+(.*)$', line) for line in lines] - install_names = [insname.groups(1)[0] for insname in install_names] + install_names = [ + re.match(r"^install-name:\s+(.*)$", line) for line in lines + ] + install_names = [ + insname.groups(1)[0] for insname in install_names + ] replaced = install_names[0][1:] if replaced.endswith("'"): # Some SDKs have install name surrounded by single qoutes @@ -1125,38 +1442,58 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi diffs = set(orig_sysroot_files) - set(sysroot_files) if diffs: log = utils.get_logger(__name__) - log.warning("Partially parsed some '.tbd' files in sysroot {}, pretending .tbds are their install-names\n" - "Adding support to 'conda-build' for parsing these in 'liefldd.py' would be easy and useful:\n" - "{} ..." 
- .format(sysroot, list(diffs)[1:3])) + log.warning( + "Partially parsed some '.tbd' files in sysroot {}, pretending .tbds are their install-names\n" + "Adding support to 'conda-build' for parsing these in 'liefldd.py' would be easy and useful:\n" + "{} ...".format(sysroot, list(diffs)[1:3]) + ) sysroots_files[srs] = sysroot_files - sysroots_files = OrderedDict(sorted(sysroots_files.items(), key=lambda x: -len(x[1]))) - - all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos(sysroots_files, files, run_prefix, - sysroot_substitution, - build_prefix, build_prefix_substitution) + sysroots_files = OrderedDict( + sorted(sysroots_files.items(), key=lambda x: -len(x[1])) + ) + + all_needed_dsos, needed_dsos_for_file = _collect_needed_dsos( + sysroots_files, + files, + run_prefix, + sysroot_substitution, + build_prefix, + build_prefix_substitution, + ) prefix_owners, _, _, all_lib_exports = _map_file_to_package( - files, run_prefix, build_prefix, all_needed_dsos, pkg_vendored_dist, ignore_list_syms, - sysroot_substitution, enable_static) + files, + run_prefix, + build_prefix, + all_needed_dsos, + pkg_vendored_dist, + ignore_list_syms, + sysroot_substitution, + enable_static, + ) for f in files_to_inspect: needed = needed_dsos_for_file[f] for needed_dso, needed_dso_info in needed.items(): orig = needed_dso - resolved = needed_dso_info['resolved'] - if (not resolved.startswith('/') and - not resolved.startswith(sysroot_substitution) and - not resolved.startswith(build_prefix_substitution) and - resolved.lower() not in [o.lower() for o in prefix_owners[run_prefix]] and - resolved not in filesu): + resolved = needed_dso_info["resolved"] + if ( + not resolved.startswith("/") + and not resolved.startswith(sysroot_substitution) + and not resolved.startswith(build_prefix_substitution) + and resolved.lower() + not in [o.lower() for o in prefix_owners[run_prefix]] + and resolved not in filesu + ): in_whitelist = False if not build_is_host: - in_whitelist = any([caseless_sepless_fnmatch([orig], w) for w in whitelist]) + in_whitelist = any( + [caseless_sepless_fnmatch([orig], w) for w in whitelist] + ) if not in_whitelist: if resolved in prefix_owners[build_prefix]: print(f" ERROR :: {needed_dso} in prefix_owners[build_prefix]") - elif not needed_dso.startswith('$PATH'): + elif not needed_dso.startswith("$PATH"): # DSOs with '$RPATH' in them at this stage are 'unresolved'. 
Though instead of # letting them through through like this, I should detect that they were not # resolved and change them back to how they were stored in the consumer DSO/elf @@ -1168,41 +1505,78 @@ def check_overlinking_impl(pkg_name, pkg_version, build_str, build_number, subdi # f, rpaths, needed_dso)) pass - _show_linking_messages(files, errors, needed_dsos_for_file, build_prefix, run_prefix, pkg_name, - error_overlinking, runpath_whitelist, verbose, requirements_run, lib_packages, - lib_packages_used, whitelist, sysroots_files, sysroot_prefix, sysroot_substitution, subdir) + _show_linking_messages( + files, + errors, + needed_dsos_for_file, + build_prefix, + run_prefix, + pkg_name, + error_overlinking, + runpath_whitelist, + verbose, + requirements_run, + lib_packages, + lib_packages_used, + whitelist, + sysroots_files, + sysroot_prefix, + sysroot_substitution, + subdir, + ) if lib_packages_used != lib_packages: info_prelude = f" INFO ({pkg_name})" warn_prelude = f"WARNING ({pkg_name})" err_prelude = f" ERROR ({pkg_name})" for lib in lib_packages - lib_packages_used: - if package_nature[lib] in ('run-exports library', 'dso library'): + if package_nature[lib] in ("run-exports library", "dso library"): msg_prelude = err_prelude if error_overdepending else warn_prelude - elif package_nature[lib] == 'plugin library': + elif package_nature[lib] == "plugin library": msg_prelude = info_prelude else: msg_prelude = warn_prelude found_interpreted_and_interpreter = False - if 'interpreter' in package_nature[lib] and 'interpreted' in package_nature[pkg_vendored_dist]: + if ( + "interpreter" in package_nature[lib] + and "interpreted" in package_nature[pkg_vendored_dist] + ): found_interpreted_and_interpreter = True if found_interpreted_and_interpreter: - _print_msg(errors, "{}: Interpreted package '{}' is interpreted by '{}'".format( - info_prelude, pkg_vendored_dist.name, lib.name), verbose=verbose) - elif package_nature[lib] != 'non-library': - _print_msg(errors, "{}: {} package {} in requirements/run but it is not used " - "(i.e. it is overdepending or perhaps statically linked? " - "If that is what you want then add it to `build/ignore_run_exports`)" - .format(msg_prelude, package_nature[lib], lib), verbose=verbose) + _print_msg( + errors, + "{}: Interpreted package '{}' is interpreted by '{}'".format( + info_prelude, pkg_vendored_dist.name, lib.name + ), + verbose=verbose, + ) + elif package_nature[lib] != "non-library": + _print_msg( + errors, + "{}: {} package {} in requirements/run but it is not used " + "(i.e. it is overdepending or perhaps statically linked? 
" + "If that is what you want then add it to `build/ignore_run_exports`)".format( + msg_prelude, package_nature[lib], lib + ), + verbose=verbose, + ) if len(errors): if exception_on_error: - runpaths_errors = [error for error in errors if re.match(r".*runpaths.*found in.*", error)] + runpaths_errors = [ + error for error in errors if re.match(r".*runpaths.*found in.*", error) + ] if len(runpaths_errors): raise RunPathError(runpaths_errors) - overlinking_errors = [error for error in errors if re.match(r".*(overlinking|not found in|did not find).*", error)] + overlinking_errors = [ + error + for error in errors + if re.match(r".*(overlinking|not found in|did not find).*", error) + ] if len(overlinking_errors): raise OverLinkingError(overlinking_errors) - overdepending_errors = [error for error in errors if "overdepending" in error] + overdepending_errors = [ + error for error in errors if "overdepending" in error + ] if len(overdepending_errors): raise OverDependingError(overdepending_errors) else: @@ -1219,32 +1593,40 @@ def check_overlinking(m, files, host_prefix=None): if not host_prefix: host_prefix = m.config.host_prefix - overlinking_ignore_patterns = m.meta.get("build", {}).get("overlinking_ignore_patterns") + overlinking_ignore_patterns = m.meta.get("build", {}).get( + "overlinking_ignore_patterns" + ) if overlinking_ignore_patterns: - files = [f for f in files if not any([fnmatch(f, p) for p in overlinking_ignore_patterns])] - return check_overlinking_impl(m.get_value('package/name'), - m.get_value('package/version'), - m.get_value('build/string'), - m.get_value('build/number'), - m.config.target_subdir, - m.get_value('build/ignore_run_exports'), - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('run', [])], - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('build', [])], - [req.split(' ')[0] for req in m.meta.get('requirements', {}).get('host', [])], - host_prefix, - m.config.build_prefix, - m.meta.get('build', {}).get('missing_dso_whitelist', []), - m.meta.get('build', {}).get('runpath_whitelist', []), - m.config.error_overlinking, - m.config.error_overdepending, - m.config.verbose, - True, - files, - m.config.bldpkgs_dir, - m.config.output_folder, - list(m.config.channel_urls) + ['local'], - m.config.enable_static, - m.config.variant) + files = [ + f + for f in files + if not any([fnmatch(f, p) for p in overlinking_ignore_patterns]) + ] + return check_overlinking_impl( + m.get_value("package/name"), + m.get_value("package/version"), + m.get_value("build/string"), + m.get_value("build/number"), + m.config.target_subdir, + m.get_value("build/ignore_run_exports"), + [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("run", [])], + [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("build", [])], + [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("host", [])], + host_prefix, + m.config.build_prefix, + m.meta.get("build", {}).get("missing_dso_whitelist", []), + m.meta.get("build", {}).get("runpath_whitelist", []), + m.config.error_overlinking, + m.config.error_overdepending, + m.config.verbose, + True, + files, + m.config.bldpkgs_dir, + m.config.output_folder, + list(m.config.channel_urls) + ["local"], + m.config.enable_static, + m.config.variant, + ) def post_process_shared_lib(m, f, files, host_prefix=None): @@ -1252,16 +1634,23 @@ def post_process_shared_lib(m, f, files, host_prefix=None): host_prefix = m.config.host_prefix path = join(host_prefix, f) codefile_t = codefile_type(path) - if not codefile_t 
or path.endswith('.debug'): + if not codefile_t or path.endswith(".debug"): return - rpaths = m.get_value('build/rpaths', ['lib']) - if codefile_t == 'elffile': - mk_relative_linux(f, host_prefix, rpaths=rpaths, - method=m.get_value('build/rpaths_patcher', None)) - elif codefile_t == 'machofile': - if m.config.host_platform != 'osx': + rpaths = m.get_value("build/rpaths", ["lib"]) + if codefile_t == "elffile": + mk_relative_linux( + f, + host_prefix, + rpaths=rpaths, + method=m.get_value("build/rpaths_patcher", None), + ) + elif codefile_t == "machofile": + if m.config.host_platform != "osx": log = utils.get_logger(__name__) - log.warn("Found Mach-O file but patching is only supported on macOS, skipping: %s", path) + log.warn( + "Found Mach-O file but patching is only supported on macOS, skipping: %s", + path, + ) return mk_relative_osx(path, host_prefix, m, files=files, rpaths=rpaths) @@ -1281,7 +1670,14 @@ def fix_permissions(files, prefix): if old_mode & stat.S_IXUSR: new_mode = new_mode | stat.S_IXGRP | stat.S_IXOTH # ensure user and group can write and all can read - new_mode = new_mode | stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # noqa + new_mode = ( + new_mode + | stat.S_IWUSR + | stat.S_IWGRP + | stat.S_IRUSR + | stat.S_IRGRP + | stat.S_IROTH + ) # noqa if old_mode != new_mode: try: lchmod(path, new_mode) @@ -1291,7 +1687,7 @@ def fix_permissions(files, prefix): def post_build(m, files, build_python, host_prefix=None, is_already_linked=False): - print('number of files:', len(files)) + print("number of files:", len(files)) if not host_prefix: host_prefix = m.config.host_prefix @@ -1300,21 +1696,27 @@ def post_build(m, files, build_python, host_prefix=None, is_already_linked=False for f in files: make_hardlink_copy(f, host_prefix) - if not m.config.target_subdir.startswith('win'): + if not m.config.target_subdir.startswith("win"): binary_relocation = m.binary_relocation() if not binary_relocation: print("Skipping binary relocation logic") - osx_is_app = (m.config.target_subdir.startswith('osx-') and - bool(m.get_value('build/osx_is_app', False))) + osx_is_app = m.config.target_subdir.startswith("osx-") and bool( + m.get_value("build/osx_is_app", False) + ) check_symlinks(files, host_prefix, m.config.croot) prefix_files = utils.prefix_files(host_prefix) for f in files: - if f.startswith('bin/'): - fix_shebang(f, prefix=host_prefix, build_python=build_python, - osx_is_app=osx_is_app) - if binary_relocation is True or (isinstance(binary_relocation, list) and - f in binary_relocation): + if f.startswith("bin/"): + fix_shebang( + f, + prefix=host_prefix, + build_python=build_python, + osx_is_app=osx_is_app, + ) + if binary_relocation is True or ( + isinstance(binary_relocation, list) and f in binary_relocation + ): post_process_shared_lib(m, f, prefix_files, host_prefix) check_overlinking(m, files, host_prefix) @@ -1332,27 +1734,30 @@ def check_symlinks(files, prefix, croot): # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS # If condition exists, then copy the file rather than symlink it. 
- if (not dirname(link_path) == dirname(real_link_path) and - codefile_type(f)): + if not dirname(link_path) == dirname(real_link_path) and codefile_type(f): os.remove(path) utils.copy_into(real_link_path, path) elif real_link_path.startswith(real_build_prefix): # If the path is in the build prefix, this is fine, but # the link needs to be relative relative_path = relpath(real_link_path, dirname(path)) - if not link_path.startswith('.') and link_path != relative_path: + if not link_path.startswith(".") and link_path != relative_path: # Don't change the link structure if it is already a # relative link. It's possible that ..'s later in the path # can result in a broken link still, but we'll assume that # such crazy things don't happen. - print(f"Making absolute symlink relative ({f} -> {link_path} :-> {relative_path})") + print( + f"Making absolute symlink relative ({f} -> {link_path} :-> {relative_path})" + ) os.unlink(path) os.symlink(relative_path, path) else: # Symlinks to absolute paths on the system (like /usr) are fine. if real_link_path.startswith(croot): - msgs.append("%s is a symlink to a path that may not " - "exist after the build is completed (%s)" % (f, link_path)) + msgs.append( + "%s is a symlink to a path that may not " + "exist after the build is completed (%s)" % (f, link_path) + ) if msgs: for msg in msgs: @@ -1380,15 +1785,21 @@ def make_hardlink_copy(path, prefix): def get_build_metadata(m): src_dir = m.config.work_dir - if exists(join(src_dir, '__conda_version__.txt')): - raise ValueError("support for __conda_version__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") - if exists(join(src_dir, '__conda_buildnum__.txt')): - raise ValueError("support for __conda_buildnum__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") - if exists(join(src_dir, '__conda_buildstr__.txt')): - raise ValueError("support for __conda_buildstr__ has been removed as of Conda-build 3.0." - "Try Jinja templates instead: " - "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja") + if exists(join(src_dir, "__conda_version__.txt")): + raise ValueError( + "support for __conda_version__ has been removed as of Conda-build 3.0." + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) + if exists(join(src_dir, "__conda_buildnum__.txt")): + raise ValueError( + "support for __conda_buildnum__ has been removed as of Conda-build 3.0." + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) + if exists(join(src_dir, "__conda_buildstr__.txt")): + raise ValueError( + "support for __conda_buildstr__ has been removed as of Conda-build 3.0." 
+ "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/meta-yaml.html#templating-with-jinja" + ) diff --git a/conda_build/render.py b/conda_build/render.py index 499f99a2a9..1acc0aaf8c 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -2,12 +2,8 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from collections import OrderedDict, defaultdict -from functools import lru_cache import json import os -from os.path import isdir, isfile, abspath -from pathlib import Path import random import re import string @@ -15,23 +11,34 @@ import sys import tarfile import tempfile +from collections import OrderedDict, defaultdict +from functools import lru_cache +from os.path import abspath, isdir, isfile +from pathlib import Path import yaml -from .conda_interface import (UnsatisfiableError, ProgressiveFetchExtract, - TemporaryDirectory) -from .conda_interface import execute_actions -from .conda_interface import pkgs_dirs -from .conda_interface import specs_from_url -from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 - -from conda_build import exceptions, utils, environ -from conda_build.metadata import MetaData, combine_top_level_metadata_with_output import conda_build.source as source -from conda_build.variants import (get_package_variants, list_of_dicts_to_dict_of_lists, - filter_by_key_value) +from conda_build import environ, exceptions, utils from conda_build.exceptions import DependencyNeedsBuildingError from conda_build.index import get_build_index +from conda_build.metadata import MetaData, combine_top_level_metadata_with_output +from conda_build.variants import ( + filter_by_key_value, + get_package_variants, + list_of_dicts_to_dict_of_lists, +) + +from .conda_interface import ( + ProgressiveFetchExtract, + TemporaryDirectory, + UnsatisfiableError, + execute_actions, + pkgs_dirs, + specs_from_url, +) +from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 + # from conda_build.jinja_context import pin_subpackage_against_outputs @@ -45,12 +52,12 @@ def odict_representer(dumper, data): def bldpkg_path(m): - ''' + """ Returns path to built package's tarball given its ``Metadata``. 
- ''' - subdir = 'noarch' if m.noarch or m.noarch_python else m.config.host_subdir + """ + subdir = "noarch" if m.noarch or m.noarch_python else m.config.host_subdir - if not hasattr(m, 'type'): + if not hasattr(m, "type"): if m.config.conda_pkg_format == "2": pkg_type = "conda_v2" else: @@ -60,11 +67,15 @@ def bldpkg_path(m): # the default case will switch over to conda_v2 at some point if pkg_type == "conda": - path = os.path.join(m.config.output_folder, subdir, f'{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}') + path = os.path.join( + m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}" + ) elif pkg_type == "conda_v2": - path = os.path.join(m.config.output_folder, subdir, f'{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}') + path = os.path.join( + m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}" + ) else: - path = f'{m.type} file for {m.name()} in: {os.path.join(m.config.output_folder, subdir)}' + path = f"{m.type} file for {m.name()} in: {os.path.join(m.config.output_folder, subdir)}" return path @@ -87,62 +98,75 @@ def _categorize_deps(m, specs, exclude_pattern, variant): if not exclude_pattern or not exclude_pattern.match(spec): is_subpackage = False spec_name = spec.split()[0] - for entry in m.get_section('outputs'): - name = entry.get('name') + for entry in m.get_section("outputs"): + name = entry.get("name") if name == spec_name: - subpackages.append(' '.join((name, m.version()))) + subpackages.append(" ".join((name, m.version()))) is_subpackage = True if not is_subpackage: dependencies.append(spec) # fill in variant version iff no version at all is provided for key, value in variant.items(): # for sake of comparison, ignore dashes and underscores - if (dash_or_under.sub("", key) == dash_or_under.sub("", spec_name) and - not re.search(r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name, spec)): + if dash_or_under.sub("", key) == dash_or_under.sub( + "", spec_name + ) and not re.search(r"%s\s+[0-9a-zA-Z\_\.\<\>\=\*]" % spec_name, spec): dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) return subpackages, dependencies, pass_through_deps -def get_env_dependencies(m, env, variant, exclude_pattern=None, - permit_unsatisfiable_variants=False, - merge_build_host_on_same_platform=True): +def get_env_dependencies( + m, + env, + variant, + exclude_pattern=None, + permit_unsatisfiable_variants=False, + merge_build_host_on_same_platform=True, +): specs = m.get_depends_top_and_out(env) # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x - if env in ('build', 'host'): + if env in ("build", "host"): no_xx_specs = [] for spec in specs: - if ' x.x' in spec: + if " x.x" in spec: pkg_name = spec.split()[0] - no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, "")))) + no_xx_specs.append(" ".join((pkg_name, variant.get(pkg_name, "")))) else: no_xx_specs.append(spec) specs = no_xx_specs - subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant) + subpackages, dependencies, pass_through_deps = _categorize_deps( + m, specs, exclude_pattern, variant + ) dependencies = set(dependencies) unsat = None - random_string = ''.join(random.choice(string.ascii_uppercase + string.digits) - for _ in range(10)) + random_string = "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(10) + ) with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir: try: - actions = environ.get_install_actions(tmpdir, 
tuple(dependencies), env, - subdir=getattr(m.config, f'{env}_subdir'), - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) + actions = environ.get_install_actions( + tmpdir, + tuple(dependencies), + env, + subdir=getattr(m.config, f"{env}_subdir"), + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) except (UnsatisfiableError, DependencyNeedsBuildingError) as e: # we'll get here if the environment is unsatisfiable - if hasattr(e, 'packages'): - unsat = ', '.join(e.packages) + if hasattr(e, "packages"): + unsat = ", ".join(e.packages) else: unsat = e.message if permit_unsatisfiable_variants: @@ -151,15 +175,20 @@ def get_env_dependencies(m, env, variant, exclude_pattern=None, raise specs = actions_to_pins(actions) - return (utils.ensure_list((specs + subpackages + pass_through_deps) or - m.meta.get('requirements', {}).get(env, [])), - actions, unsat) + return ( + utils.ensure_list( + (specs + subpackages + pass_through_deps) + or m.meta.get("requirements", {}).get(env, []) + ), + actions, + unsat, + ) def strip_channel(spec_str): - if hasattr(spec_str, 'decode'): + if hasattr(spec_str, "decode"): spec_str = spec_str.decode() - if ':' in spec_str: + if ":" in spec_str: spec_str = spec_str.split("::")[-1] return spec_str @@ -167,23 +196,30 @@ def strip_channel(spec_str): def get_pin_from_build(m, dep, build_dep_versions): dep_split = dep.split() dep_name = dep_split[0] - build = '' + build = "" if len(dep_split) >= 3: build = dep_split[2] pin = None version = build_dep_versions.get(dep_name) or m.config.variant.get(dep_name) - if (version and dep_name in m.config.variant.get('pin_run_as_build', {}) and - not (dep_name == 'python' and (m.noarch or m.noarch_python)) and - dep_name in build_dep_versions): - pin_cfg = m.config.variant['pin_run_as_build'][dep_name] + if ( + version + and dep_name in m.config.variant.get("pin_run_as_build", {}) + and not (dep_name == "python" and (m.noarch or m.noarch_python)) + and dep_name in build_dep_versions + ): + pin_cfg = m.config.variant["pin_run_as_build"][dep_name] if isinstance(pin_cfg, str): # if pin arg is a single 'x.x', use the same value for min and max pin_cfg = dict(min_pin=pin_cfg, max_pin=pin_cfg) pin = utils.apply_pin_expressions(version.split()[0], **pin_cfg) - elif dep.startswith('numpy') and 'x.x' in dep: + elif dep.startswith("numpy") and "x.x" in dep: if not build_dep_versions.get(dep_name): - raise ValueError("numpy x.x specified, but numpy not in build requirements.") - pin = utils.apply_pin_expressions(version.split()[0], min_pin='x.x', max_pin='x.x') + raise ValueError( + "numpy x.x specified, but numpy not in build requirements." 
+ ) + pin = utils.apply_pin_expressions( + version.split()[0], min_pin="x.x", max_pin="x.x" + ) if pin: dep = " ".join((dep_name, pin, build)).strip() return dep @@ -193,10 +229,16 @@ def _filter_run_exports(specs, ignore_list): filtered_specs = {} for agent, specs_list in specs.items(): for spec in specs_list: - if hasattr(spec, 'decode'): + if hasattr(spec, "decode"): spec = spec.decode() - if not any((ignore_spec == '*' or spec == ignore_spec or - spec.startswith(ignore_spec + ' ')) for ignore_spec in ignore_list): + if not any( + ( + ignore_spec == "*" + or spec == ignore_spec + or spec.startswith(ignore_spec + " ") + ) + for ignore_spec in ignore_list + ): filtered_specs[agent] = filtered_specs.get(agent, []) + [spec] return filtered_specs @@ -237,21 +279,21 @@ def find_pkg_dir_or_file_in_pkgs_dirs( def _read_specs_from_package(pkg_loc, pkg_dist): specs = {} if pkg_loc and os.path.isdir(pkg_loc): - downstream_file = os.path.join(pkg_loc, 'info/run_exports') + downstream_file = os.path.join(pkg_loc, "info/run_exports") if os.path.isfile(downstream_file): with open(downstream_file) as f: - specs = {'weak': [spec.rstrip() for spec in f.readlines()]} + specs = {"weak": [spec.rstrip() for spec in f.readlines()]} # a later attempt: record more info in the yaml file, to support "strong" run exports - elif os.path.isfile(downstream_file + '.yaml'): - with open(downstream_file + '.yaml') as f: + elif os.path.isfile(downstream_file + ".yaml"): + with open(downstream_file + ".yaml") as f: specs = yaml.safe_load(f) - elif os.path.isfile(downstream_file + '.json'): - with open(downstream_file + '.json') as f: + elif os.path.isfile(downstream_file + ".json"): + with open(downstream_file + ".json") as f: specs = json.load(f) if not specs and pkg_loc and os.path.isfile(pkg_loc): # switching to json for consistency in conda-build 4 - specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml') - specs_json = utils.package_has_file(pkg_loc, 'info/run_exports.json') + specs_yaml = utils.package_has_file(pkg_loc, "info/run_exports.yaml") + specs_json = utils.package_has_file(pkg_loc, "info/run_exports.json") if hasattr(specs_json, "decode"): specs_json = specs_json.decode("utf-8") @@ -260,7 +302,7 @@ def _read_specs_from_package(pkg_loc, pkg_dist): elif specs_yaml: specs = yaml.safe_load(specs_yaml) else: - legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports') + legacy_specs = utils.package_has_file(pkg_loc, "info/run_exports") # exclude packages pinning themselves (makes no sense) if legacy_specs: weak_specs = set() @@ -269,33 +311,41 @@ def _read_specs_from_package(pkg_loc, pkg_dist): for spec in legacy_specs.splitlines(): if hasattr(spec, "decode"): spec = spec.decode("utf-8") - if not spec.startswith(pkg_dist.rsplit('-', 2)[0]): + if not spec.startswith(pkg_dist.rsplit("-", 2)[0]): weak_specs.add(spec.rstrip()) - specs = {'weak': sorted(list(weak_specs))} + specs = {"weak": sorted(list(weak_specs))} return specs def execute_download_actions(m, actions, env, package_subset=None, require_files=False): - index, _, _ = get_build_index(getattr(m.config, f'{env}_subdir'), bldpkgs_dir=m.config.bldpkgs_dir, - output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, - debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking, - timeout=m.config.timeout) + index, _, _ = get_build_index( + getattr(m.config, f"{env}_subdir"), + bldpkgs_dir=m.config.bldpkgs_dir, + output_folder=m.config.output_folder, + channel_urls=m.config.channel_urls, + 
debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + timeout=m.config.timeout, + ) # this should be just downloading packages. We don't need to extract them - - download_actions = {k: v for k, v in actions.items() if k in ('FETCH', 'EXTRACT', 'PREFIX')} - if 'FETCH' in actions or 'EXTRACT' in actions: + download_actions = { + k: v for k, v in actions.items() if k in ("FETCH", "EXTRACT", "PREFIX") + } + if "FETCH" in actions or "EXTRACT" in actions: # this is to force the download execute_actions(download_actions, index, verbose=m.config.debug) pkg_files = {} - packages = actions.get('LINK', []) + packages = actions.get("LINK", []) package_subset = utils.ensure_list(package_subset) selected_packages = set() if package_subset: for pkg in package_subset: - if hasattr(pkg, 'name'): + if hasattr(pkg, "name"): if pkg in packages: selected_packages.add(pkg) else: @@ -307,12 +357,14 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files packages = selected_packages for pkg in packages: - if hasattr(pkg, 'dist_name'): + if hasattr(pkg, "dist_name"): pkg_dist = pkg.dist_name else: pkg = strip_channel(pkg) - pkg_dist = pkg.split(' ')[0] - pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files) + pkg_dist = pkg.split(" ")[0] + pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs( + pkg_dist, m, files_only=require_files + ) # ran through all pkgs_dirs, and did not find package or folder. Download it. # TODO: this is a vile hack reaching into conda's internals. Replace with @@ -342,27 +394,29 @@ def get_upstream_pins(m, actions, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. Return these additional specs.""" - env_specs = m.meta.get('requirements', {}).get(env, []) - explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else [] - linked_packages = actions.get('LINK', []) + env_specs = m.meta.get("requirements", {}).get(env, []) + explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] + linked_packages = actions.get("LINK", []) linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs] - ignore_pkgs_list = utils.ensure_list(m.get_value('build/ignore_run_exports_from')) - ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports')) + ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) + ignore_list = utils.ensure_list(m.get_value("build/ignore_run_exports")) additional_specs = {} for pkg in linked_packages: - if any(pkg.name in req.split(' ')[0] for req in ignore_pkgs_list): + if any(pkg.name in req.split(" ")[0] for req in ignore_pkgs_list): continue run_exports = None if m.config.use_channeldata: channeldata = utils.download_channeldata(pkg.channel) # only use channeldata if requested, channeldata exists and contains # a packages key, otherwise use run_exports from the packages themselves - if 'packages' in channeldata: - pkg_data = channeldata['packages'].get(pkg.name, {}) - run_exports = pkg_data.get('run_exports', {}).get(pkg.version, {}) + if "packages" in channeldata: + pkg_data = channeldata["packages"].get(pkg.name, {}) + run_exports = pkg_data.get("run_exports", {}).get(pkg.version, {}) if run_exports is None: - loc, dist = execute_download_actions(m, actions, env=env, package_subset=pkg)[pkg] + loc, dist = execute_download_actions( + m, actions, env=env, package_subset=pkg + )[pkg] run_exports = _read_specs_from_package(loc, dist) specs = 
_filter_run_exports(run_exports, ignore_list) if specs: @@ -371,87 +425,115 @@ def get_upstream_pins(m, actions, env): def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern): - deps, actions, unsat = get_env_dependencies(m, env, m.config.variant, - exclude_pattern, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) + deps, actions, unsat = get_env_dependencies( + m, + env, + m.config.variant, + exclude_pattern, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) # extend host deps with strong build run exports. This is important for things like # vc feature activation to work correctly in the host env. extra_run_specs = get_upstream_pins(m, actions, env) - return list(set(deps)) or m.meta.get('requirements', {}).get(env, []), unsat, extra_run_specs + return ( + list(set(deps)) or m.meta.get("requirements", {}).get(env, []), + unsat, + extra_run_specs, + ) def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern): """Applies run_exports from any build deps to host and run sections""" # if we have host deps, they're more important than the build deps. - requirements = m.meta.get('requirements', {}) - build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build', - permit_unsatisfiable_variants, exclude_pattern) + requirements = m.meta.get("requirements", {}) + build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files( + m, "build", permit_unsatisfiable_variants, exclude_pattern + ) # is there a 'host' section? if m.is_cross: # this must come before we read upstream pins, because it will enforce things # like vc version from the compiler. - host_reqs = utils.ensure_list(m.get_value('requirements/host')) + host_reqs = utils.ensure_list(m.get_value("requirements/host")) # ensure host_reqs is present, so in-place modification below is actually in-place - requirements = m.meta.setdefault('requirements', {}) - requirements['host'] = host_reqs + requirements = m.meta.setdefault("requirements", {}) + requirements["host"] = host_reqs if not host_reqs: - matching_output = [out for out in m.meta.get('outputs', []) if - out.get('name') == m.name()] + matching_output = [ + out for out in m.meta.get("outputs", []) if out.get("name") == m.name() + ] if matching_output: - requirements = utils.expand_reqs(matching_output[0].get('requirements', {})) - matching_output[0]['requirements'] = requirements - host_reqs = requirements.setdefault('host', []) + requirements = utils.expand_reqs( + matching_output[0].get("requirements", {}) + ) + matching_output[0]["requirements"] = requirements + host_reqs = requirements.setdefault("host", []) # in-place modification of above thingie - host_reqs.extend(extra_run_specs_from_build.get('strong', [])) + host_reqs.extend(extra_run_specs_from_build.get("strong", [])) - host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host', - permit_unsatisfiable_variants, exclude_pattern) + host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files( + m, "host", permit_unsatisfiable_variants, exclude_pattern + ) if m.noarch or m.noarch_python: - extra_run_specs = set(extra_run_specs_from_host.get('noarch', [])) + extra_run_specs = set(extra_run_specs_from_host.get("noarch", [])) extra_run_constrained_specs = set() else: - extra_run_specs = set(extra_run_specs_from_host.get('strong', []) + - extra_run_specs_from_host.get('weak', []) + - extra_run_specs_from_build.get('strong', [])) + extra_run_specs = set( + 
extra_run_specs_from_host.get("strong", []) + + extra_run_specs_from_host.get("weak", []) + + extra_run_specs_from_build.get("strong", []) + ) extra_run_constrained_specs = set( - extra_run_specs_from_host.get('strong_constrains', []) + - extra_run_specs_from_host.get('weak_constrains', []) + - extra_run_specs_from_build.get('strong_constrains', []) + extra_run_specs_from_host.get("strong_constrains", []) + + extra_run_specs_from_host.get("weak_constrains", []) + + extra_run_specs_from_build.get("strong_constrains", []) ) else: host_deps = [] host_unsat = [] if m.noarch or m.noarch_python: if m.build_is_host: - extra_run_specs = set(extra_run_specs_from_build.get('noarch', [])) + extra_run_specs = set(extra_run_specs_from_build.get("noarch", [])) extra_run_constrained_specs = set() - build_deps = set(build_deps or []).update(extra_run_specs_from_build.get('noarch', [])) + build_deps = set(build_deps or []).update( + extra_run_specs_from_build.get("noarch", []) + ) else: extra_run_specs = set() extra_run_constrained_specs = set() build_deps = set(build_deps or []) else: - extra_run_specs = set(extra_run_specs_from_build.get('strong', [])) - extra_run_constrained_specs = set(extra_run_specs_from_build.get('strong_constrains', [])) + extra_run_specs = set(extra_run_specs_from_build.get("strong", [])) + extra_run_constrained_specs = set( + extra_run_specs_from_build.get("strong_constrains", []) + ) if m.build_is_host: - extra_run_specs.update(extra_run_specs_from_build.get('weak', [])) - extra_run_constrained_specs.update(extra_run_specs_from_build.get('weak_constrains', [])) - build_deps = set(build_deps or []).update(extra_run_specs_from_build.get('weak', [])) + extra_run_specs.update(extra_run_specs_from_build.get("weak", [])) + extra_run_constrained_specs.update( + extra_run_specs_from_build.get("weak_constrains", []) + ) + build_deps = set(build_deps or []).update( + extra_run_specs_from_build.get("weak", []) + ) else: - host_deps = set(extra_run_specs_from_build.get('strong', [])) + host_deps = set(extra_run_specs_from_build.get("strong", [])) - run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run'))) - run_constrained_deps = extra_run_constrained_specs | set(utils.ensure_list(requirements.get('run_constrained'))) + run_deps = extra_run_specs | set(utils.ensure_list(requirements.get("run"))) + run_constrained_deps = extra_run_constrained_specs | set( + utils.ensure_list(requirements.get("run_constrained")) + ) for section, deps in ( - ('build', build_deps), ('host', host_deps), ('run', run_deps), ('run_constrained', run_constrained_deps), + ("build", build_deps), + ("host", host_deps), + ("run", run_deps), + ("run_constrained", run_constrained_deps), ): if deps: requirements[section] = list(deps) - m.meta['requirements'] = requirements + m.meta["requirements"] = requirements return build_unsat, host_unsat @@ -460,9 +542,9 @@ def _simplify_to_exact_constraints(metadata): For metapackages that are pinned exactly, we want to bypass all dependencies that may be less exact. 
""" - requirements = metadata.meta.get('requirements', {}) + requirements = metadata.meta.get("requirements", {}) # collect deps on a per-section basis - for section in 'build', 'host', 'run': + for section in "build", "host", "run": deps = utils.ensure_list(requirements.get(section, [])) deps_dict = defaultdict(list) for dep in deps: @@ -479,7 +561,7 @@ def _simplify_to_exact_constraints(metadata): for dep in values: if len(dep) > 1: version, build = dep[:2] - if not (any(c in version for c in ('>', '<', '*')) or '*' in build): + if not (any(c in version for c in (">", "<", "*")) or "*" in build): exact_pins.append(dep) if len(values) == 1 and not any(values): deps_list.append(name) @@ -487,12 +569,12 @@ def _simplify_to_exact_constraints(metadata): if not all(pin == exact_pins[0] for pin in exact_pins): raise ValueError(f"Conflicting exact pins: {exact_pins}") else: - deps_list.append(' '.join([name] + exact_pins[0])) + deps_list.append(" ".join([name] + exact_pins[0])) else: - deps_list.extend(' '.join([name] + dep) for dep in values if dep) + deps_list.extend(" ".join([name] + dep) for dep in values if dep) if section in requirements and deps_list: requirements[section] = deps_list - metadata.meta['requirements'] = requirements + metadata.meta["requirements"] = requirements def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False): @@ -503,21 +585,24 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal m.final = True else: exclude_pattern = None - excludes = set(m.config.variant.get('ignore_version', [])) + excludes = set(m.config.variant.get("ignore_version", [])) - for key in m.config.variant.get('pin_run_as_build', {}).keys(): + for key in m.config.variant.get("pin_run_as_build", {}).keys(): if key in excludes: excludes.remove(key) output_excludes = set() - if hasattr(m, 'other_outputs'): + if hasattr(m, "other_outputs"): output_excludes = {name for (name, variant) in m.other_outputs.keys()} if excludes or output_excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in excludes | output_excludes)) + exclude_pattern = re.compile( + r"|".join( + rf"(?:^{exc}(?:\s|$|\Z))" for exc in excludes | output_excludes + ) + ) - parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {}) + parent_recipe = m.meta.get("extra", {}).get("parent_recipe", {}) # extract the topmost section where variables are defined, and put it on top of the # requirements for a particular output @@ -526,111 +611,130 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal is_top_level = True if output: - if 'package' in output or 'name' not in output: + if "package" in output or "name" not in output: # it's just a top-level recipe - output = {'name': m.name()} + output = {"name": m.name()} else: is_top_level = False - if not parent_recipe or parent_recipe['name'] == m.name(): + if not parent_recipe or parent_recipe["name"] == m.name(): combine_top_level_metadata_with_output(m, output) - requirements = utils.expand_reqs(output.get('requirements', {})) - m.meta['requirements'] = requirements + requirements = utils.expand_reqs(output.get("requirements", {})) + m.meta["requirements"] = requirements - if m.meta.get('requirements'): - utils.insert_variant_versions(m.meta['requirements'], - m.config.variant, 'build') - utils.insert_variant_versions(m.meta['requirements'], - m.config.variant, 'host') + if m.meta.get("requirements"): + utils.insert_variant_versions( + m.meta["requirements"], 
m.config.variant, "build" + ) + utils.insert_variant_versions( + m.meta["requirements"], m.config.variant, "host" + ) m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name())) - build_unsat, host_unsat = add_upstream_pins(m, - permit_unsatisfiable_variants, - exclude_pattern) + build_unsat, host_unsat = add_upstream_pins( + m, permit_unsatisfiable_variants, exclude_pattern + ) # getting this AFTER add_upstream_pins is important, because that function adds deps # to the metadata. - requirements = m.meta.get('requirements', {}) + requirements = m.meta.get("requirements", {}) # here's where we pin run dependencies to their build time versions. This happens based # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package # names to have this behavior. if output_excludes: - exclude_pattern = re.compile(r'|'.join(fr'(?:^{exc}(?:\s|$|\Z))' - for exc in output_excludes)) - pinning_env = 'host' if m.is_cross else 'build' + exclude_pattern = re.compile( + r"|".join(rf"(?:^{exc}(?:\s|$|\Z))" for exc in output_excludes) + ) + pinning_env = "host" if m.is_cross else "build" build_reqs = requirements.get(pinning_env, []) # if python is in the build specs, but doesn't have a specific associated # version, make sure to add one - if build_reqs and 'python' in build_reqs: - build_reqs.append('python {}'.format(m.config.variant['python'])) - m.meta['requirements'][pinning_env] = build_reqs - - full_build_deps, _, _ = get_env_dependencies(m, pinning_env, - m.config.variant, - exclude_pattern=exclude_pattern, - permit_unsatisfiable_variants=permit_unsatisfiable_variants) - full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:]) - for dep in full_build_deps} - - if isfile(m.requirements_path) and not requirements.get('run'): - requirements['run'] = specs_from_url(m.requirements_path) - run_deps = requirements.get('run', []) - - versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions) - for dep in run_deps] - versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True) - for spec in versioned_run_deps] + if build_reqs and "python" in build_reqs: + build_reqs.append("python {}".format(m.config.variant["python"])) + m.meta["requirements"][pinning_env] = build_reqs + + full_build_deps, _, _ = get_env_dependencies( + m, + pinning_env, + m.config.variant, + exclude_pattern=exclude_pattern, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + ) + full_build_dep_versions = { + dep.split()[0]: " ".join(dep.split()[1:]) for dep in full_build_deps + } + + if isfile(m.requirements_path) and not requirements.get("run"): + requirements["run"] = specs_from_url(m.requirements_path) + run_deps = requirements.get("run", []) + + versioned_run_deps = [ + get_pin_from_build(m, dep, full_build_dep_versions) for dep in run_deps + ] + versioned_run_deps = [ + utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps + ] requirements[pinning_env] = full_build_deps - requirements['run'] = versioned_run_deps + requirements["run"] = versioned_run_deps - m.meta['requirements'] = requirements + m.meta["requirements"] = requirements # append other requirements, such as python.app, appropriately m.append_requirements() - if m.pin_depends == 'strict': - m.meta['requirements']['run'] = environ.get_pinned_deps( - m, 'run') - test_deps = m.get_value('test/requires') + if m.pin_depends == "strict": + m.meta["requirements"]["run"] = environ.get_pinned_deps(m, "run") + test_deps = m.get_value("test/requires") if test_deps: - versioned_test_deps = 
list({get_pin_from_build(m, dep, full_build_dep_versions) - for dep in test_deps}) - versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True) - for spec in versioned_test_deps] - m.meta['test']['requires'] = versioned_test_deps - extra = m.meta.get('extra', {}) - extra['copy_test_source_files'] = m.config.copy_test_source_files - m.meta['extra'] = extra + versioned_test_deps = list( + { + get_pin_from_build(m, dep, full_build_dep_versions) + for dep in test_deps + } + ) + versioned_test_deps = [ + utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps + ] + m.meta["test"]["requires"] = versioned_test_deps + extra = m.meta.get("extra", {}) + extra["copy_test_source_files"] = m.config.copy_test_source_files + m.meta["extra"] = extra # if source/path is relative, then the output package makes no sense at all. The next # best thing is to hard-code the absolute path. This probably won't exist on any # system other than the original build machine, but at least it will work there. - if m.meta.get('source'): - if 'path' in m.meta['source']: - source_path = m.meta['source']['path'] + if m.meta.get("source"): + if "path" in m.meta["source"]: + source_path = m.meta["source"]["path"] os.path.expanduser(source_path) if not os.path.isabs(source_path): - m.meta['source']['path'] = os.path.normpath( - os.path.join(m.path, source_path)) - elif ('git_url' in m.meta['source'] and not ( - # absolute paths are not relative paths - os.path.isabs(m.meta['source']['git_url']) or - # real urls are not relative paths - ":" in m.meta['source']['git_url'])): - m.meta['source']['git_url'] = os.path.normpath( - os.path.join(m.path, m.meta['source']['git_url'])) - - if not m.meta.get('build'): - m.meta['build'] = {} + m.meta["source"]["path"] = os.path.normpath( + os.path.join(m.path, source_path) + ) + elif "git_url" in m.meta["source"] and not ( + # absolute paths are not relative paths + os.path.isabs(m.meta["source"]["git_url"]) + or + # real urls are not relative paths + ":" in m.meta["source"]["git_url"] + ): + m.meta["source"]["git_url"] = os.path.normpath( + os.path.join(m.path, m.meta["source"]["git_url"]) + ) + + if not m.meta.get("build"): + m.meta["build"] = {} _simplify_to_exact_constraints(m) if build_unsat or host_unsat: m.final = False log = utils.get_logger(__name__) - log.warn("Returning non-final recipe for {}; one or more dependencies " - "was unsatisfiable:".format(m.dist())) + log.warn( + "Returning non-final recipe for {}; one or more dependencies " + "was unsatisfiable:".format(m.dist()) + ) if build_unsat: log.warn(f"Build: {build_unsat}") if host_unsat: @@ -649,18 +753,24 @@ def try_download(metadata, no_download_source, raise_error=False): try: source.provide(metadata) except subprocess.CalledProcessError as error: - print("Warning: failed to download source. If building, will try " - "again after downloading recipe dependencies.") + print( + "Warning: failed to download source. If building, will try " + "again after downloading recipe dependencies." + ) print("Error was: ") print(error) if not metadata.source_provided: if no_download_source: - raise ValueError("no_download_source specified, but can't fully render recipe without" - " downloading source. Please fix the recipe, or don't use " - "no_download_source.") + raise ValueError( + "no_download_source specified, but can't fully render recipe without" + " downloading source. Please fix the recipe, or don't use " + "no_download_source." + ) elif raise_error: - raise RuntimeError("Failed to download or patch source. 
Please see build log for info.") + raise RuntimeError( + "Failed to download or patch source. Please see build log for info." + ) def reparse(metadata): @@ -669,15 +779,20 @@ def reparse(metadata): metadata.final = False sys.path.insert(0, metadata.config.build_prefix) sys.path.insert(0, metadata.config.host_prefix) - py_ver = '.'.join(metadata.config.variant['python'].split('.')[:2]) + py_ver = ".".join(metadata.config.variant["python"].split(".")[:2]) sys.path.insert(0, utils.get_site_packages(metadata.config.host_prefix, py_ver)) metadata.parse_until_resolved() metadata = finalize_metadata(metadata) return metadata -def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, - allow_no_other_outputs=False, bypass_env_check=False): +def distribute_variants( + metadata, + variants, + permit_unsatisfiable_variants=False, + allow_no_other_outputs=False, + bypass_env_check=False, +): rendered_metadata = {} need_source_download = True @@ -686,21 +801,29 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, # python gets used here. if metadata.noarch or metadata.noarch_python: from .conda_interface import VersionOrder - age = int(metadata.get_value('build/noarch_python_build_age', metadata.config.noarch_python_build_age)) + + age = int( + metadata.get_value( + "build/noarch_python_build_age", metadata.config.noarch_python_build_age + ) + ) versions = [] for variant in variants: - if 'python' in variant: - vo = variant['python'] + if "python" in variant: + vo = variant["python"] if vo not in versions: versions.append(vo) - version_indices = sorted(range(len(versions)), key=lambda k: VersionOrder(versions[k].split(' ')[0])) + version_indices = sorted( + range(len(versions)), key=lambda k: VersionOrder(versions[k].split(" ")[0]) + ) if age < 0: age = 0 elif age > len(versions) - 1: age = len(versions) - 1 build_ver = versions[version_indices[len(versions) - 1 - age]] - variants = filter_by_key_value(variants, 'python', build_ver, - 'noarch_python_reduction') + variants = filter_by_key_value( + variants, "python", build_ver, "noarch_python_reduction" + ) # store these for reference later metadata.config.variants = variants @@ -711,7 +834,7 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, recipe_requirements = metadata.extract_requirements_text() recipe_package_and_build_text = metadata.extract_package_and_build_text() recipe_text = recipe_package_and_build_text + recipe_requirements - if hasattr(recipe_text, 'decode'): + if hasattr(recipe_text, "decode"): recipe_text = recipe_text.decode() metadata.config.variant = variants[0] @@ -720,13 +843,14 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, for variant in top_loop: from conda_build.build import get_all_replacements + get_all_replacements(variant) mv = metadata.copy() mv.config.variant = variant - pin_run_as_build = variant.get('pin_run_as_build', {}) - if mv.numpy_xx and 'numpy' not in pin_run_as_build: - pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'} + pin_run_as_build = variant.get("pin_run_as_build", {}) + if mv.numpy_xx and "numpy" not in pin_run_as_build: + pin_run_as_build["numpy"] = {"min_pin": "x.x", "max_pin": "x.x"} conform_dict = {} for key in used_variables: @@ -736,17 +860,20 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, conform_dict[key] = variant[key] for key, values in conform_dict.items(): - mv.config.variants = (filter_by_key_value(mv.config.variants, 
key, values, - 'distribute_variants_reduction') or - mv.config.variants) + mv.config.variants = ( + filter_by_key_value( + mv.config.variants, key, values, "distribute_variants_reduction" + ) + or mv.config.variants + ) get_all_replacements(mv.config.variants) - pin_run_as_build = variant.get('pin_run_as_build', {}) - if mv.numpy_xx and 'numpy' not in pin_run_as_build: - pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'} + pin_run_as_build = variant.get("pin_run_as_build", {}) + if mv.numpy_xx and "numpy" not in pin_run_as_build: + pin_run_as_build["numpy"] = {"min_pin": "x.x", "max_pin": "x.x"} numpy_pinned_variants = [] for _variant in mv.config.variants: - _variant['pin_run_as_build'] = pin_run_as_build + _variant["pin_run_as_build"] = pin_run_as_build numpy_pinned_variants.append(_variant) mv.config.variants = numpy_pinned_variants @@ -759,17 +886,21 @@ def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False, mv.parse_again() try: - mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs, - bypass_env_check=bypass_env_check) + mv.parse_until_resolved( + allow_no_other_outputs=allow_no_other_outputs, + bypass_env_check=bypass_env_check, + ) except SystemExit: pass - need_source_download = (not mv.needs_source_for_render or not mv.source_provided) + need_source_download = not mv.needs_source_for_render or not mv.source_provided - rendered_metadata[(mv.dist(), - mv.config.variant.get('target_platform', mv.config.subdir), - tuple((var, mv.config.variant.get(var)) - for var in mv.get_used_vars()))] = \ - (mv, need_source_download, None) + rendered_metadata[ + ( + mv.dist(), + mv.config.variant.get("target_platform", mv.config.subdir), + tuple((var, mv.config.variant.get(var)) for var in mv.get_used_vars()), + ) + ] = (mv, need_source_download, None) # list of tuples. # each tuple item is a tuple of 3 items: # metadata, need_download, need_reparse_in_env @@ -780,18 +911,29 @@ def expand_outputs(metadata_tuples): """Obtain all metadata objects for all outputs from recipe. 
Useful for outputting paths.""" expanded_outputs = OrderedDict() - for (_m, download, reparse) in metadata_tuples: + for _m, download, reparse in metadata_tuples: from conda_build.build import get_all_replacements + get_all_replacements(_m.config) from copy import deepcopy - for (output_dict, m) in deepcopy(_m).get_output_metadata_set(permit_unsatisfiable_variants=False): + + for output_dict, m in deepcopy(_m).get_output_metadata_set( + permit_unsatisfiable_variants=False + ): get_all_replacements(m.config) expanded_outputs[m.dist()] = (output_dict, m) return list(expanded_outputs.values()) -def render_recipe(recipe_path, config, no_download_source=False, variants=None, - permit_unsatisfiable_variants=True, reset_build_id=True, bypass_env_check=False): +def render_recipe( + recipe_path, + config, + no_download_source=False, + variants=None, + permit_unsatisfiable_variants=True, + reset_build_id=True, + bypass_env_check=False, +): """Returns a list of tuples, each consisting of (metadata-object, needs_download, needs_render_in_env) @@ -801,13 +943,13 @@ def render_recipe(recipe_path, config, no_download_source=False, variants=None, """ arg = recipe_path if isfile(arg): - if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): + if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")): recipe_dir = tempfile.mkdtemp() - t = tarfile.open(arg, 'r:*') + t = tarfile.open(arg, "r:*") t.extractall(path=recipe_dir) t.close() need_cleanup = True - elif arg.endswith('.yaml'): + elif arg.endswith(".yaml"): recipe_dir = os.path.dirname(arg) need_cleanup = False else: @@ -840,29 +982,47 @@ def render_recipe(recipe_path, config, no_download_source=False, variants=None, if m.needs_source_for_render and not m.source_provided: try_download(m, no_download_source=no_download_source) if m.final: - if not hasattr(m.config, 'variants') or not m.config.variant: + if not hasattr(m.config, "variants") or not m.config.variant: m.config.ignore_system_variants = True - if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')): - m.config.variant_config_files = [os.path.join(m.path, 'conda_build_config.yaml')] + if os.path.isfile(os.path.join(m.path, "conda_build_config.yaml")): + m.config.variant_config_files = [ + os.path.join(m.path, "conda_build_config.yaml") + ] m.config.variants = get_package_variants(m, variants=variants) m.config.variant = m.config.variants[0] - rendered_metadata = [(m, False, False), ] + rendered_metadata = [ + (m, False, False), + ] else: # merge any passed-in variants with any files found variants = get_package_variants(m, variants=variants) # when building, we don't want to fully expand all outputs into metadata, only expand # whatever variants we have (i.e. expand top-level variants, not output-only variants) - rendered_metadata = distribute_variants(m, variants, - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - allow_no_other_outputs=True, bypass_env_check=bypass_env_check) + rendered_metadata = distribute_variants( + m, + variants, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + allow_no_other_outputs=True, + bypass_env_check=bypass_env_check, + ) if need_cleanup: utils.rm_rf(recipe_dir) return rendered_metadata # Keep this out of the function below so it can be imported by other modules. 
-FIELDS = ["package", "source", "build", "requirements", "test", "app", "outputs", "about", "extra"] +FIELDS = [ + "package", + "source", + "build", + "requirements", + "test", + "app", + "outputs", + "about", + "extra", +] # Next bit of stuff is to support YAML output in the order we expect. @@ -875,11 +1035,11 @@ def to_omap(self): def _represent_omap(dumper, data): - return dumper.represent_mapping('tag:yaml.org,2002:map', data.to_omap()) + return dumper.represent_mapping("tag:yaml.org,2002:map", data.to_omap()) def _unicode_representer(dumper, uni): - node = yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=uni) + node = yaml.ScalarNode(tag="tag:yaml.org,2002:str", value=uni) return node @@ -898,12 +1058,20 @@ def ignore_aliases(self, data): def output_yaml(metadata, filename=None, suppress_outputs=False): local_metadata = metadata.copy() - if suppress_outputs and local_metadata.is_output and 'outputs' in local_metadata.meta: - del local_metadata.meta['outputs'] - output = yaml.dump(_MetaYaml(local_metadata.meta), Dumper=_IndentDumper, - default_flow_style=False, indent=2) + if ( + suppress_outputs + and local_metadata.is_output + and "outputs" in local_metadata.meta + ): + del local_metadata.meta["outputs"] + output = yaml.dump( + _MetaYaml(local_metadata.meta), + Dumper=_IndentDumper, + default_flow_style=False, + indent=2, + ) if filename: - if any(sep in filename for sep in ('\\', '/')): + if any(sep in filename for sep in ("\\", "/")): try: os.makedirs(os.path.dirname(filename)) except OSError: diff --git a/conda_build/skeletons/_example_skeleton.py b/conda_build/skeletons/_example_skeleton.py index 25ee72e23e..fc61c2d0b2 100644 --- a/conda_build/skeletons/_example_skeleton.py +++ b/conda_build/skeletons/_example_skeleton.py @@ -31,11 +31,13 @@ def add_parser(repos): "my_repo", help=""" Create recipe skeleton for packages hosted on my-repo.org - """,) + """, + ) my_repo.add_argument( "packages", - nargs='+', - help="my-repo packages to create recipe skeletons for.",) + nargs="+", + help="my-repo packages to create recipe skeletons for.", + ) # Add any additional parser arguments here diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index a1c53735dc..7cd48e08ef 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -6,32 +6,36 @@ import codecs -import hashlib -from glob import glob import gzip +import hashlib import json import os -from os import makedirs -from os.path import basename, dirname, join, exists +import pickle import subprocess import sys import tempfile -import pickle from functools import lru_cache, partial +from glob import glob +from os import makedirs +from os.path import basename, dirname, exists, join -from conda_build.conda_interface import get_index -from conda_build.conda_interface import TmpDownload, download -from conda_build.conda_interface import MatchSpec, Resolve -from conda_build.conda_interface import CondaHTTPError, CondaError +import requests +from conda_build import environ +from conda_build.conda_interface import ( + CondaError, + CondaHTTPError, + MatchSpec, + Resolve, + TmpDownload, + download, + get_index, +) from conda_build.config import get_or_merge_config -from conda_build.utils import on_win, check_call_env +from conda_build.utils import check_call_env, on_win from conda_build.variants import get_default_variant from conda_build.version import _parse as parse_version -import requests -from conda_build import environ - CPAN_META = """\ {{% set name = "{packagename}" %}} {{% set version = 
"{version}" %}} @@ -147,45 +151,49 @@ class InvalidReleaseError(RuntimeError): - ''' + """ An exception that is raised when a release is not available on MetaCPAN. - ''' + """ + pass class PerlTmpDownload(TmpDownload): - ''' + """ Subclass Conda's TmpDownload to replace : in download filenames. Critical on win. - ''' + """ def __enter__(self): - if '://' not in self.url: + if "://" not in self.url: # if we provide the file itself, no tmp dir is created self.tmp_dir = None return self.url else: - if 'CHECKSUMS' in self.url: - turl = self.url.split('id/') + if "CHECKSUMS" in self.url: + turl = self.url.split("id/") filename = turl[1] else: filename = basename(self.url) - filename = filename.replace('::', '-') + filename = filename.replace("::", "-") self.tmp_dir = tempfile.mkdtemp() - home = os.path.expanduser('~') - base_dir = join(home, '.conda-build', 'cpan', - basename(self.url).replace('::', '-')) + home = os.path.expanduser("~") + base_dir = join( + home, ".conda-build", "cpan", basename(self.url).replace("::", "-") + ) dst = join(base_dir, filename) - dst = dst.replace('::', '-') + dst = dst.replace("::", "-") base_dir = dirname(dst) if not exists(base_dir): makedirs(base_dir) - dst = get_pickle_file_path(cache_dir=base_dir, filename_prefix=filename, other_hashed=(self.url,)) + dst = get_pickle_file_path( + cache_dir=base_dir, filename_prefix=filename, other_hashed=(self.url,) + ) if not exists(os.path.dirname(dst)): makedirs(os.path.dirname(dst)) if not exists(dst): @@ -196,22 +204,35 @@ def __enter__(self): def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): import tarfile + from conda_build import source - cached_path, _ = source.download_to_cache(src_cache, '', - {'url': package_url, - 'sha256': sha256}) + + cached_path, _ = source.download_to_cache( + src_cache, "", {"url": package_url, "sha256": sha256} + ) result = [] with tarfile.open(cached_path) as tf: - need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77', '.f95', '.f03')) for f in tf]) + need_f = any( + [ + f.name.lower().endswith((".f", ".f90", ".f77", ".f95", ".f03")) + for f in tf + ] + ) # Fortran builds use CC to perform the link (they do not call the linker directly). 
- need_c = True if need_f else \ - any([f.name.lower().endswith(('.c', '.xs')) for f in tf]) - need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) - for f in tf]) - need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) - need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ - any([f.name.lower().endswith(('/makefile', '/makevars')) - for f in tf]) + need_c = ( + True + if need_f + else any([f.name.lower().endswith((".c", ".xs")) for f in tf]) + ) + need_cxx = any( + [f.name.lower().endswith((".cxx", ".cpp", ".cc", ".c++")) for f in tf] + ) + need_autotools = any([f.name.lower().endswith("/configure") for f in tf]) + need_make = ( + True + if any((need_autotools, need_f, need_cxx, need_c)) + else any([f.name.lower().endswith(("/makefile", "/makevars")) for f in tf]) + ) if need_c or need_cxx or need_f: result.append("{{ compiler('c') }}") if need_cxx: @@ -226,7 +247,9 @@ def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): if need_make: result.append("make # [not win]") result.append("m2-make # [win]") - print(f"INFO :: For {os.path.basename(package_url)}, we need the following build tools:\n{result}") + print( + f"INFO :: For {os.path.basename(package_url)}, we need the following build tools:\n{result}" + ) return result @@ -242,11 +265,10 @@ def get_cpan_api_url(url, colons): with gzip.open(json_path) as dist_json_file: output = dist_json_file.read() if hasattr(output, "decode"): - output = output.decode('utf-8-sig') + output = output.decode("utf-8-sig") rel_dict = json.loads(output) except OSError: - rel_dict = json.loads(codecs.open( - json_path, encoding='utf-8').read()) + rel_dict = json.loads(codecs.open(json_path, encoding="utf-8").read()) except CondaHTTPError: rel_dict = None return rel_dict @@ -255,11 +277,11 @@ def get_cpan_api_url(url, colons): # Probably uses a system cpan? TODO :: Fix this. def package_exists(package_name): try: - cmd = ['cpan', '-D', package_name] + cmd = ["cpan", "-D", package_name] if on_win: - cmd.insert(0, '/c') - cmd.insert(0, '/d') - cmd.insert(0, 'cmd.exe') + cmd.insert(0, "/c") + cmd.insert(0, "/d") + cmd.insert(0, "cmd.exe") check_call_env(cmd) in_repo = True except subprocess.CalledProcessError: @@ -269,31 +291,33 @@ def package_exists(package_name): def md5d_file_and_other(filename, other_hashed): sha1 = hashlib.md5() - with open(filename, 'rb') as f: + with open(filename, "rb") as f: while True: data = f.read(65536) if not data: break sha1.update(data) for other in other_hashed: - sha1.update(other.encode('utf-8') if hasattr(other, 'encode') else other) + sha1.update(other.encode("utf-8") if hasattr(other, "encode") else other) return sha1.hexdigest() def get_pickle_file_path(cache_dir, filename_prefix, other_hashed=()): - h = 'h' + md5d_file_and_other(__file__, other_hashed)[2:10] - return os.path.join(cache_dir, filename_prefix.replace('::', '-') + '.' + h + '.p') + h = "h" + md5d_file_and_other(__file__, other_hashed)[2:10] + return os.path.join(cache_dir, filename_prefix.replace("::", "-") + "." + h + ".p") def load_or_pickle(filename_prefix, base_folder, data_partial, key): # It might be nice to hash the entire code tree of data_partial # along with all the args to it via hashlib instead but that's # difficult. 
- pickled = get_pickle_file_path(cache_dir=base_folder, filename_prefix=filename_prefix + key) + pickled = get_pickle_file_path( + cache_dir=base_folder, filename_prefix=filename_prefix + key + ) # if exists(pickled): # os.unlink(pickled) if exists(pickled): - with open(pickled, 'rb') as f: + with open(pickled, "rb") as f: key_stored = pickle.load(f) if key and key_stored and key == key_stored: return pickle.load(f) @@ -302,7 +326,7 @@ def load_or_pickle(filename_prefix, base_folder, data_partial, key): os.makedirs(os.path.dirname(pickled)) except: pass - with open(pickled, 'wb') as f: + with open(pickled, "wb") as f: pickle.dump(key, f) pickle.dump(result, f) return result @@ -310,61 +334,89 @@ def load_or_pickle(filename_prefix, base_folder, data_partial, key): def install_perl_get_core_modules(version): try: - from conda_build.config import Config from conda_build.conda_interface import TemporaryDirectory + from conda_build.config import Config config = Config() - if sys.platform.startswith('win'): - subdirs = ('win-64', 'Library', 'bin', 'perl.exe') - elif sys.platform.startswith('linux'): - subdirs = ('linux-64', 'bin', 'perl') + if sys.platform.startswith("win"): + subdirs = ("win-64", "Library", "bin", "perl.exe") + elif sys.platform.startswith("linux"): + subdirs = ("linux-64", "bin", "perl") else: - subdirs = ('osx-64', 'bin', 'perl') + subdirs = ("osx-64", "bin", "perl") # Return one of the dist things instead? with TemporaryDirectory() as tmpdir: - environ.create_env(tmpdir, [f'perl={version}'], env='host', config=config, subdir=subdirs[0]) - args = [f'{join(tmpdir, *subdirs[1:])}', '-e', - 'use Module::CoreList; ' - 'my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); ' - 'print join "\n", @modules;'] + environ.create_env( + tmpdir, + [f"perl={version}"], + env="host", + config=config, + subdir=subdirs[0], + ) + args = [ + f"{join(tmpdir, *subdirs[1:])}", + "-e", + "use Module::CoreList; " + "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); " + 'print join "\n", @modules;', + ] from subprocess import check_output - all_core_modules = check_output(args, shell=False).decode('utf-8').replace('\r\n', '\n').split('\n') + + all_core_modules = ( + check_output(args, shell=False) + .decode("utf-8") + .replace("\r\n", "\n") + .split("\n") + ) return all_core_modules except Exception as e: - print("Failed to query perl={} for core modules list, attempted command was:\n{}".format(version, - ' '.join(args))) + print( + "Failed to query perl={} for core modules list, attempted command was:\n{}".format( + version, " ".join(args) + ) + ) print(e.message) return [] def get_core_modules_for_this_perl_version(version, cache_dir): - return load_or_pickle('perl-core-modules', - base_folder=cache_dir, - data_partial=partial(install_perl_get_core_modules, version), key=version) + return load_or_pickle( + "perl-core-modules", + base_folder=cache_dir, + data_partial=partial(install_perl_get_core_modules, version), + key=version, + ) # meta_cpan_url="http://api.metacpan.org", -def skeletonize(packages, output_dir=".", version=None, - meta_cpan_url="https://fastapi.metacpan.org/v1", - recursive=False, force=False, config=None, write_core=False): - ''' +def skeletonize( + packages, + output_dir=".", + version=None, + meta_cpan_url="https://fastapi.metacpan.org/v1", + recursive=False, + force=False, + config=None, + write_core=False, +): + """ Loops over packages, outputting conda recipes converted from CPAN metata. 
- ''' + """ config = get_or_merge_config(config) - cache_dir = os.path.join(config.src_cache_root, '.conda-build', 'pickled.cb') + cache_dir = os.path.join(config.src_cache_root, ".conda-build", "pickled.cb") # TODO :: Make a temp env. with perl (which we need anyway) and use whatever version # got installed instead of this. Also allow the version to be specified. - perl_version = config.variant.get('perl', get_default_variant(config)['perl']) + perl_version = config.variant.get("perl", get_default_variant(config)["perl"]) core_modules = get_core_modules_for_this_perl_version(perl_version, cache_dir) # wildcards are not valid for perl perl_version = perl_version.replace(".*", "") package_dicts = {} - indent = '\n - ' - indent_core = '\n #- ' + indent = "\n - " + indent_core = "\n #- " processed_packages = set() orig_version = version new_packages = [] @@ -383,8 +435,8 @@ def skeletonize(packages, output_dir=".", version=None, package, module = packages.pop() # If we're passed version in the same format as `PACKAGE=VERSION` # update version - if '=' in package: - package, _, version = package.partition('=') + if "=" in package: + package, _, version = package.partition("=") else: version = orig_version @@ -399,106 +451,135 @@ def skeletonize(packages, output_dir=".", version=None, # distribution name, urls. The lot. Instead we mess about with other API end-points # getting a load of nonsense. orig_package = package - package = dist_for_module(meta_cpan_url, cache_dir, core_modules, module if module else package) - if package == 'perl': - print(("WARNING: {0} is a Perl core module that is not developed " + - "outside of Perl, so we are skipping creating a recipe " + - "for it.").format(orig_package)) + package = dist_for_module( + meta_cpan_url, cache_dir, core_modules, module if module else package + ) + if package == "perl": + print( + ( + "WARNING: {0} is a Perl core module that is not developed " + + "outside of Perl, so we are skipping creating a recipe " + + "for it." + ).format(orig_package) + ) continue - elif package not in {orig_package, orig_package.replace('::', '-')}: + elif package not in {orig_package, orig_package.replace("::", "-")}: print( - ("WARNING: {0} was part of the {1} distribution, so we are " + - "making a recipe for {1} instead.").format(orig_package, - package) + ( + "WARNING: {0} was part of the {1} distribution, so we are " + + "making a recipe for {1} instead." 
+ ).format(orig_package, package) ) - latest_release_data = get_release_info(meta_cpan_url, cache_dir, core_modules, - module if module else orig_package, version) + latest_release_data = get_release_info( + meta_cpan_url, + cache_dir, + core_modules, + module if module else orig_package, + version, + ) packagename = perl_to_conda(package) # Skip duplicates - if ((version is not None and ((packagename + '-' + version) in - processed_packages)) or - ((packagename + '-' + latest_release_data['version']) in - processed_packages)): + if ( + version is not None + and ((packagename + "-" + version) in processed_packages) + ) or ( + (packagename + "-" + latest_release_data["version"]) in processed_packages + ): continue - d = package_dicts.setdefault(package, {'packagename': packagename, - 'build_depends': '', - 'host_depends': '', - 'run_depends': '', - 'build_comment': '# ', - 'test_commands': '', - 'usesha256': '', - 'useurl': '', - 'source_comment': '', - 'summary': "''", - 'import_tests': ''}) + d = package_dicts.setdefault( + package, + { + "packagename": packagename, + "build_depends": "", + "host_depends": "", + "run_depends": "", + "build_comment": "# ", + "test_commands": "", + "usesha256": "", + "useurl": "", + "source_comment": "", + "summary": "''", + "import_tests": "", + }, + ) # Fetch all metadata from CPAN if version is None: release_data = latest_release_data else: - release_data = get_release_info(meta_cpan_url, cache_dir, core_modules, package, - parse_version(version)) + release_data = get_release_info( + meta_cpan_url, cache_dir, core_modules, package, parse_version(version) + ) # Check if recipe directory already exists - dir_path = join(output_dir, packagename, release_data['version']) + dir_path = join(output_dir, packagename, release_data["version"]) # Add Perl version to core module requirements, since these are empty # packages, unless we're newer than what's in core if metacpan_api_is_core_version(meta_cpan_url, package): - if not write_core: - print('We found core module %s. Skipping recipe creation.' % - packagename) + print( + "We found core module %s. Skipping recipe creation." 
% packagename + ) continue - d['useurl'] = '#' - d['usesha256'] = '#' - d['source_comment'] = '#' + d["useurl"] = "#" + d["usesha256"] = "#" + d["source_comment"] = "#" empty_recipe = True # Add dependencies to d if not in core, or newer than what's in core else: - deps, packages_to_append = \ - deps_for_package(package, release_data=release_data, - output_dir=output_dir, cache_dir=cache_dir, - meta_cpan_url=meta_cpan_url, recursive=recursive, core_modules=core_modules) + deps, packages_to_append = deps_for_package( + package, + release_data=release_data, + output_dir=output_dir, + cache_dir=cache_dir, + meta_cpan_url=meta_cpan_url, + recursive=recursive, + core_modules=core_modules, + ) # If this is something we're downloading, get MD5 - d['cpanurl'] = '' - d['sha256'] = '' - if release_data.get('download_url'): - d['cpanurl'] = release_data['download_url'] - d['sha256'], size = get_checksum_and_size(release_data['download_url']) - print("Using url {} ({}) for {}.".format(d['cpanurl'], size, package)) - src_build_depends = get_build_dependencies_from_src_archive(release_data['download_url'], - d['sha256'], config.src_cache) + d["cpanurl"] = "" + d["sha256"] = "" + if release_data.get("download_url"): + d["cpanurl"] = release_data["download_url"] + d["sha256"], size = get_checksum_and_size(release_data["download_url"]) + print("Using url {} ({}) for {}.".format(d["cpanurl"], size, package)) + src_build_depends = get_build_dependencies_from_src_archive( + release_data["download_url"], d["sha256"], config.src_cache + ) else: src_build_depends = [] - d['useurl'] = '#' - d['usesha256'] = '#' - d['source_comment'] = '#' + d["useurl"] = "#" + d["usesha256"] = "#" + d["source_comment"] = "#" - d['build_depends'] += indent.join([''] + src_build_depends) + d["build_depends"] += indent.join([""] + src_build_depends) -# d['build_depends'] += indent_core.join([''] + list(deps['build']['core'] | -# deps['run']['core'])) + # d['build_depends'] += indent_core.join([''] + list(deps['build']['core'] | + # deps['run']['core'])) - d['host_depends'] += indent.join([''] + list(deps['build']['noncore'] | - deps['run']['noncore'])) + d["host_depends"] += indent.join( + [""] + list(deps["build"]["noncore"] | deps["run"]["noncore"]) + ) # run_exports will set these, but: # TODO :: Add ignore_run_exports for things in deps['build'] that are not also # in deps['run'] - d['run_depends'] += indent_core.join([''] + list(deps['run']['noncore'])) + d["run_depends"] += indent_core.join([""] + list(deps["run"]["noncore"])) # Make sure we append any packages before continuing for pkg in packages_to_append: if pkg not in packages: packages.append(pkg) else: - print("INFO :: Already building package {} (module {})".format(*pkg)) + print( + "INFO :: Already building package {} (module {})".format(*pkg) + ) empty_recipe = False # If we are recursively getting packages for a particular version @@ -506,57 +587,60 @@ def skeletonize(packages, output_dir=".", version=None, version = None if exists(dir_path) and not force: print( - 'Directory %s already exists and you have not specified --force ' % dir_path) + "Directory %s already exists and you have not specified --force " + % dir_path + ) continue elif exists(dir_path) and force: - print('Directory %s already exists, but forcing recipe creation' % dir_path) + print("Directory %s already exists, but forcing recipe creation" % dir_path) try: - d['homeurl'] = release_data['resources']['homepage'] + d["homeurl"] = release_data["resources"]["homepage"] except KeyError: - d['homeurl'] 
= 'http://metacpan.org/pod/' + package - if 'abstract' in release_data: + d["homeurl"] = "http://metacpan.org/pod/" + package + if "abstract" in release_data: # TODO this does not escape quotes in a YAML friendly manner - summary = repr(release_data['abstract']).lstrip('u') - d['summary'] = summary + summary = repr(release_data["abstract"]).lstrip("u") + d["summary"] = summary # d['summary'] = repr(release_data['abstract']).lstrip('u') try: - d['license'] = (release_data['license'][0] if - isinstance(release_data['license'], list) else - release_data['license']) + d["license"] = ( + release_data["license"][0] + if isinstance(release_data["license"], list) + else release_data["license"] + ) except KeyError: - d['license'] = 'perl_5' - d['version'] = release_data['version'] + d["license"] = "perl_5" + d["version"] = release_data["version"] - processed_packages.add(packagename + '-' + d['version']) + processed_packages.add(packagename + "-" + d["version"]) # Create import tests - module_prefix = package.replace('::', '-').split('-')[0] - if 'provides' in release_data: - for provided_mod in sorted(set(release_data['provides'])): + module_prefix = package.replace("::", "-").split("-")[0] + if "provides" in release_data: + for provided_mod in sorted(set(release_data["provides"])): # Filter out weird modules that don't belong - if (provided_mod.startswith(module_prefix) and - '::_' not in provided_mod): - d['import_tests'] += indent + provided_mod - if d['import_tests']: - d['import_comment'] = '' + if provided_mod.startswith(module_prefix) and "::_" not in provided_mod: + d["import_tests"] += indent + provided_mod + if d["import_tests"]: + d["import_comment"] = "" else: - d['import_comment'] = '# ' + d["import_comment"] = "# " if not exists(dir_path): makedirs(dir_path) # Write recipe files to a directory # TODO def write_recipe - print("Writing recipe for {}-{}".format(packagename, d['version'])) - with open(join(dir_path, 'meta.yaml'), 'wb') as f: - f.write(CPAN_META.format(**d).encode('utf-8')) - with open(join(dir_path, 'build.sh'), 'wb') as f: + print("Writing recipe for {}-{}".format(packagename, d["version"])) + with open(join(dir_path, "meta.yaml"), "wb") as f: + f.write(CPAN_META.format(**d).encode("utf-8")) + with open(join(dir_path, "build.sh"), "wb") as f: if empty_recipe: f.write(b'#!/bin/bash\necho "Nothing to do."\n') else: - f.write(CPAN_BUILD_SH.format(**d).encode('utf-8')) - with open(join(dir_path, 'bld.bat'), 'w') as f: + f.write(CPAN_BUILD_SH.format(**d).encode("utf-8")) + with open(join(dir_path, "bld.bat"), "w") as f: if empty_recipe: f.write('echo "Nothing to do."\n') else: @@ -567,8 +651,9 @@ def skeletonize(packages, output_dir=".", version=None, def is_core_version(core_version, version): if core_version is None: return False - elif core_version is not None and ((version in [None, '']) or - (core_version >= parse_version(version))): + elif core_version is not None and ( + (version in [None, ""]) or (core_version >= parse_version(version)) + ): return True else: return False @@ -580,41 +665,49 @@ def add_parser(repos): help=""" Create recipe skeleton for packages hosted on the Comprehensive Perl Archive Network (CPAN) (cpan.org). 
- """,) + """, + ) cpan.add_argument( "packages", - nargs='+', - help="CPAN packages to create recipe skeletons for.",) + nargs="+", + help="CPAN packages to create recipe skeletons for.", + ) cpan.add_argument( "--output-dir", help="Directory to write recipes to (default: %(default)s).", - default=".",) + default=".", + ) cpan.add_argument( "--version", - help="Version to use. Applies to all packages.",) + help="Version to use. Applies to all packages.", + ) cpan.add_argument( "--meta-cpan-url", - default='https://fastapi.metacpan.org/v1', - help="URL to use for MetaCPAN API. It must include a version, such as v1",) + default="https://fastapi.metacpan.org/v1", + help="URL to use for MetaCPAN API. It must include a version, such as v1", + ) cpan.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist (default: %(default)s).') + action="store_true", + help="Create recipes for dependencies if they do not already exist (default: %(default)s).", + ) cpan.add_argument( "--force", - action='store_true', - help='Force overwrite of existing recipes (default: %(default)s).') + action="store_true", + help="Force overwrite of existing recipes (default: %(default)s).", + ) cpan.add_argument( "--write_core", - action='store_true', - help='Write recipes for perl core modules (default: %(default)s). ') + action="store_true", + help="Write recipes for perl core modules (default: %(default)s). ", + ) @lru_cache(maxsize=None) def latest_pkg_version(pkg): - ''' + """ :returns: the latest version of the specified conda package available - ''' + """ r = Resolve(get_index()) try: pkg_list = sorted(r.get_pkgs(MatchSpec(pkg))) @@ -627,9 +720,10 @@ def latest_pkg_version(pkg): return pkg_version -def deps_for_package(package, release_data, output_dir, cache_dir, - meta_cpan_url, recursive, core_modules): - ''' +def deps_for_package( + package, release_data, output_dir, cache_dir, meta_cpan_url, recursive, core_modules +): + """ Build the sets of dependencies and packages we need recipes for. This should only be called for non-core modules/distributions, as dependencies are ignored for core modules. @@ -649,83 +743,99 @@ def deps_for_package(package, release_data, output_dir, cache_dir, :returns: Build dependencies, runtime dependencies, and set of packages to add to list of recipes to create. :rtype: 3-tuple of sets - ''' + """ # Create lists of dependencies - deps = {'build': {'core': set(), 'noncore': set()}, - 'test': {'core': set(), 'noncore': set()}, - 'run': {'core': set(), 'noncore': set()}} - phase_to_dep_type = {'build': 'build', - 'configure': 'build', - 'test': 'test', - 'runtime': 'run', - # TODO :: Check this, I am unsure about it .. - # These (sometimes?) reference sub-components of modules - # e.g. inc::MMPackageStash instead of inc which does not - # get found on metacpan fastapi. We may need to chop the - # suffix off an try again (and repeat until we find it). - 'x_Dist_Zilla': None, - 'develop': None} + deps = { + "build": {"core": set(), "noncore": set()}, + "test": {"core": set(), "noncore": set()}, + "run": {"core": set(), "noncore": set()}, + } + phase_to_dep_type = { + "build": "build", + "configure": "build", + "test": "test", + "runtime": "run", + # TODO :: Check this, I am unsure about it .. + # These (sometimes?) reference sub-components of modules + # e.g. inc::MMPackageStash instead of inc which does not + # get found on metacpan fastapi. We may need to chop the + # suffix off an try again (and repeat until we find it). 
+ "x_Dist_Zilla": None, + "develop": None, + } packages_to_append = set() - print('Processing dependencies for %s...' % package, end='') + print("Processing dependencies for %s..." % package, end="") sys.stdout.flush() - if not release_data.get('dependency'): + if not release_data.get("dependency"): return deps, packages_to_append # release_data['dependency'] = ['FindBin-libs' if r == 'FindBin' else r for r in release_data['dependency']] new_deps = [] - for dep in release_data['dependency']: - if 'phase' in dep and dep['phase'] == 'develop': - print("Skipping develop dependency {}".format(dep['module'])) + for dep in release_data["dependency"]: + if "phase" in dep and dep["phase"] == "develop": + print("Skipping develop dependency {}".format(dep["module"])) continue - elif 'module' in dep and dep['module'] == 'FindBin': - dep['module'] = 'FindBin::Bin' - elif 'module' in dep and dep['module'] == 'Exporter': - dep['module'] = 'Exporter' + elif "module" in dep and dep["module"] == "FindBin": + dep["module"] = "FindBin::Bin" + elif "module" in dep and dep["module"] == "Exporter": + dep["module"] = "Exporter" new_deps.append(dep) - release_data['dependency'] = new_deps + release_data["dependency"] = new_deps - for dep_dict in release_data['dependency']: + for dep_dict in release_data["dependency"]: # Only care about requirements try: - if dep_dict['relationship'] == 'requires': - if not phase_to_dep_type[dep_dict['phase']]: + if dep_dict["relationship"] == "requires": + if not phase_to_dep_type[dep_dict["phase"]]: continue - if 'module' in dep_dict and dep_dict['module'] == 'common::sense': - print('debug common::sense version mismatch') - print('.', end='') + if "module" in dep_dict and dep_dict["module"] == "common::sense": + print("debug common::sense version mismatch") + print(".", end="") sys.stdout.flush() # Format dependency string (with Perl trailing dist comment) - orig_dist = dist_for_module(meta_cpan_url, cache_dir, core_modules, dep_dict['module']) + orig_dist = dist_for_module( + meta_cpan_url, cache_dir, core_modules, dep_dict["module"] + ) dep_entry = perl_to_conda(orig_dist) # Skip perl as a dependency, since it's already in list - if orig_dist.lower() == 'perl': + if orig_dist.lower() == "perl": continue # See if version is specified # There is a dep version and a pkg_version ... why? - if dep_dict['version'] in {'', 'undef'}: - dep_dict['version'] = '0' - dep_version = parse_version(dep_dict['version']) + if dep_dict["version"] in {"", "undef"}: + dep_dict["version"] = "0" + dep_version = parse_version(dep_dict["version"]) # Make sure specified version is valid # TODO def valid_release_info try: - get_release_info(meta_cpan_url, cache_dir, core_modules, dep_dict['module'], dep_version) + get_release_info( + meta_cpan_url, + cache_dir, + core_modules, + dep_dict["module"], + dep_version, + ) except InvalidReleaseError: - print(('WARNING: The version of %s listed as a ' + - 'dependency for %s, %s, is not available on MetaCPAN, ' + - 'so we are just assuming the latest version is ' + - 'okay.') % (orig_dist, package, str(dep_version))) - dep_version = parse_version('0') + print( + ( + "WARNING: The version of %s listed as a " + + "dependency for %s, %s, is not available on MetaCPAN, " + + "so we are just assuming the latest version is " + + "okay." + ) + % (orig_dist, package, str(dep_version)) + ) + dep_version = parse_version("0") # Add version number to dependency, if it's newer than latest # we have package for. 
- if loose_version(dep_version) > loose_version('0'): - + if loose_version(dep_version) > loose_version("0"): pkg_version = latest_pkg_version(dep_entry) # If we don't have a package, use core version as version if pkg_version is None: @@ -733,7 +843,9 @@ def deps_for_package(package, release_data, output_dir, cache_dir, # perl_version, # config=config) # print('dep entry is {}'.format(dep_entry)) - pkg_version = metacpan_api_get_core_version(core_modules, dep_dict['module']) + pkg_version = metacpan_api_get_core_version( + core_modules, dep_dict["module"] + ) # If no package is available at all, it's in the core, or # the latest is already good enough, don't specify version. # This is because conda doesn't support > in version @@ -741,39 +853,40 @@ def deps_for_package(package, release_data, output_dir, cache_dir, # J = Conda does support >= ? try: if pkg_version is not None and ( - loose_version(dep_version) > loose_version(pkg_version)): - dep_entry += ' ' + dep_dict['version'] + loose_version(dep_version) > loose_version(pkg_version) + ): + dep_entry += " " + dep_dict["version"] except Exception: - print( - 'We have got an expected error with dependency versions') - print('Module {}'.format(dep_dict['module'])) - print(f'Pkg_version {pkg_version}') - print(f'Dep Version {dep_version}') + print("We have got an expected error with dependency versions") + print("Module {}".format(dep_dict["module"])) + print(f"Pkg_version {pkg_version}") + print(f"Dep Version {dep_version}") # If recursive, check if we have a recipe for this dependency if recursive: # If dependency entry is versioned, make sure this is too - if ' ' in dep_entry: - if not exists(join(output_dir, dep_entry.replace('::', - '-'))): - packages_to_append.add(('='.join((orig_dist, - dep_dict['version']))), - dep_dict['module']) - elif not glob(join(output_dir, (dep_entry + '-[v1-9][0-9.]*'))): - packages_to_append.add((orig_dist, dep_dict['module'])) + if " " in dep_entry: + if not exists(join(output_dir, dep_entry.replace("::", "-"))): + packages_to_append.add( + ("=".join((orig_dist, dep_dict["version"]))), + dep_dict["module"], + ) + elif not glob(join(output_dir, (dep_entry + "-[v1-9][0-9.]*"))): + packages_to_append.add((orig_dist, dep_dict["module"])) # Add to appropriate dependency list - core = metacpan_api_is_core_version( - meta_cpan_url, dep_dict['module']) + core = metacpan_api_is_core_version(meta_cpan_url, dep_dict["module"]) - cb_phase = phase_to_dep_type[dep_dict['phase']] + cb_phase = phase_to_dep_type[dep_dict["phase"]] if cb_phase: if core: - deps[cb_phase]['core'].add(dep_entry) + deps[cb_phase]["core"].add(dep_entry) else: - deps[cb_phase]['noncore'].add(dep_entry) + deps[cb_phase]["noncore"].add(dep_entry) else: - print("Skipping {} dependency {}".format(dep_dict['phase'], dep_entry)) + print( + "Skipping {} dependency {}".format(dep_dict["phase"], dep_entry) + ) # seemingly new in conda 4.3: HTTPErrors arise when we ask for # something that is a # perl module, but not a package. @@ -781,50 +894,53 @@ def deps_for_package(package, release_data, output_dir, cache_dir, except (CondaError, CondaHTTPError): continue - print(f'module {package} adds {packages_to_append}') + print(f"module {package} adds {packages_to_append}") return deps, packages_to_append def dist_for_module(cpan_url, cache_dir, core_modules, module): - ''' + """ Given a name that could be a module or a distribution, return the distribution. 
- ''' - if 'Git::Check' in module: - print('debug this') + """ + if "Git::Check" in module: + print("debug this") # First check if it is a core module, those mask distributions here, or at least they # do in the case of `import Exporter` distribution = None try: mod_dict = core_module_dict(core_modules, module) - distribution = mod_dict['distribution'] + distribution = mod_dict["distribution"] except: # Next check if its already a distribution rel_dict = release_module_dict(cpan_url, cache_dir, module) if rel_dict is not None: - if rel_dict['distribution'] != module.replace('::', '-'): - print("WARNING :: module {} found in distribution {}".format(module, rel_dict['distribution'])) - distribution = rel_dict['distribution'] + if rel_dict["distribution"] != module.replace("::", "-"): + print( + "WARNING :: module {} found in distribution {}".format( + module, rel_dict["distribution"] + ) + ) + distribution = rel_dict["distribution"] if not distribution: - print('debug') + print("debug") assert distribution, "dist_for_module must succeed" return distribution def release_module_dict_direct(cpan_url, cache_dir, module): - - if 'Dist-Zilla-Plugin-Git' in module: + if "Dist-Zilla-Plugin-Git" in module: print(f"debug {module}") - elif 'Dist::Zilla::Plugin::Git' in module: + elif "Dist::Zilla::Plugin::Git" in module: print(f"debug {module}") - elif 'Time::Zone' in module: + elif "Time::Zone" in module: print(f"debug {module}") try: - url_module = f'{cpan_url}/module/{module}' - print(f'INFO :: url_module {url_module}') + url_module = f"{cpan_url}/module/{module}" + print(f"INFO :: url_module {url_module}") rel_dict = get_cpan_api_url(url_module, colons=True) except RuntimeError: rel_dict = None @@ -832,14 +948,16 @@ def release_module_dict_direct(cpan_url, cache_dir, module): rel_dict = None if not rel_dict: print(f"WARNING :: Did not find rel_dict for module {module}") - distribution = module.replace('::', '-') - if not rel_dict or 'dependency' not in rel_dict: - if rel_dict and 'distribution' in rel_dict: - distribution = rel_dict['distribution'] + distribution = module.replace("::", "-") + if not rel_dict or "dependency" not in rel_dict: + if rel_dict and "distribution" in rel_dict: + distribution = rel_dict["distribution"] else: - print(f"WARNING :: 'distribution' was not in {module}'s module info, making it up") + print( + f"WARNING :: 'distribution' was not in {module}'s module info, making it up" + ) try: - url_release = f'{cpan_url}/release/{distribution}' + url_release = f"{cpan_url}/release/{distribution}" rel_dict2 = get_cpan_api_url(url_release, colons=False) rel_dict = rel_dict2 except RuntimeError: @@ -848,35 +966,39 @@ def release_module_dict_direct(cpan_url, cache_dir, module): rel_dict = None else: print(f"INFO :: OK, found 'dependency' in module {module}") - if not rel_dict or 'dependency' not in rel_dict: - print("WARNING :: No dependencies found for module {} in distribution {}\n" - "WARNING :: Please check {} and {}".format(module, distribution, url_module, url_release)) + if not rel_dict or "dependency" not in rel_dict: + print( + "WARNING :: No dependencies found for module {} in distribution {}\n" + "WARNING :: Please check {} and {}".format( + module, distribution, url_module, url_release + ) + ) return rel_dict def release_module_dict(cpan_url, cache_dir, module): - if 'Regexp-Common' in module: + if "Regexp-Common" in module: print("debug") rel_dict = release_module_dict_direct(cpan_url, cache_dir, module) if not rel_dict: # In this case, the module may be a submodule of 
another dist, let's try something else. # An example of this is Dist::Zilla::Plugin::Git::Check. - pickled = get_pickle_file_path(cache_dir, module + '.dl_url') - url = f'{cpan_url}/download_url/{module}' + pickled = get_pickle_file_path(cache_dir, module + ".dl_url") + url = f"{cpan_url}/download_url/{module}" try: os.makedirs(os.path.dirname(pickled)) except: pass download(url, pickled) - with open(pickled, 'rb') as dl_url_json: + with open(pickled, "rb") as dl_url_json: output = dl_url_json.read() if hasattr(output, "decode"): - output = output.decode('utf-8-sig') + output = output.decode("utf-8-sig") dl_url_dict = json.loads(output) - if dl_url_dict['release'].endswith(dl_url_dict['version']): + if dl_url_dict["release"].endswith(dl_url_dict["version"]): # Easy case. print(f"Up to date: {module}") - dist = dl_url_dict['release'].replace('-' + dl_url_dict['version'], '') + dist = dl_url_dict["release"].replace("-" + dl_url_dict["version"], "") else: # Difficult case. print(f"Not up to date: {module}") @@ -893,8 +1015,8 @@ def release_module_dict(cpan_url, cache_dir, module): # # .. there is no field that lists a version of '2.33' in the data. We need # to inspect the tarball. - dst = os.path.join(cache_dir, basename(dl_url_dict['download_url'])) - download(dl_url_dict['download_url'], dst) + dst = os.path.join(cache_dir, basename(dl_url_dict["download_url"])) + download(dl_url_dict["download_url"], dst) with gzip.open(dst) as dist_json_file: output = dist_json_file.read() # (base) Rays-Mac-Pro:Volumes rdonnelly$ cpan -D Time::Zone @@ -904,55 +1026,62 @@ def release_module_dict(cpan_url, cache_dir, module): def core_module_dict_old(cpan_url, module): - if 'FindBin' in module: - print('debug') - if 'Exporter' in module: - print('debug') + if "FindBin" in module: + print("debug") + if "Exporter" in module: + print("debug") try: - mod_dict = get_cpan_api_url( - f'{cpan_url}/module/{module}', colons=True) + mod_dict = get_cpan_api_url(f"{cpan_url}/module/{module}", colons=True) # If there was an error, report it except CondaHTTPError as e: - sys.exit(('Error: Could not find module or distribution named' - ' %s on MetaCPAN. Error was: %s') % (module, e.message)) + sys.exit( + ( + "Error: Could not find module or distribution named" + " %s on MetaCPAN. Error was: %s" + ) + % (module, e.message) + ) else: - mod_dict = {'distribution': 'perl'} + mod_dict = {"distribution": "perl"} return mod_dict def core_module_dict(core_modules, module): if module in core_modules: - return {'distribution': 'perl'} + return {"distribution": "perl"} return None @lru_cache(maxsize=None) def metacpan_api_is_core_version(cpan_url, module): - if 'FindBin' in module: - print('debug') - url = f'{cpan_url}/release/{module}' + if "FindBin" in module: + print("debug") + url = f"{cpan_url}/release/{module}" url = url.replace("::", "-") req = requests.get(url) if req.status_code == 200: return False else: - url = f'{cpan_url}/module/{module}' + url = f"{cpan_url}/module/{module}" req = requests.get(url) if req.status_code == 200: return True else: - sys.exit(('Error: Could not find module or distribution named' - ' %s on MetaCPAN.') - % (module)) + sys.exit( + ( + "Error: Could not find module or distribution named" + " %s on MetaCPAN." 
+ ) + % (module) + ) def metacpan_api_get_core_version(core_modules, module): - module_dict = core_module_dict(core_modules, module) try: - version = module_dict['module'][-1]['version'] + version = module_dict["module"][-1]["version"] except Exception: version = None @@ -960,10 +1089,10 @@ def metacpan_api_get_core_version(core_modules, module): def get_release_info(cpan_url, cache_dir, core_modules, package, version): - ''' + """ Return a dictionary of the JSON information stored at cpan.metacpan.org corresponding to the given package/dist/module. - ''' + """ # Transform module name to dist name if necessary orig_package = package package = dist_for_module(cpan_url, cache_dir, core_modules, package) @@ -971,60 +1100,69 @@ def get_release_info(cpan_url, cache_dir, core_modules, package, version): # Get latest info to find author, which is necessary for retrieving a # specific version try: - rel_dict = get_cpan_api_url( - f'{cpan_url}/release/{package}', colons=False) - rel_dict['version'] = str(rel_dict['version']).lstrip('v') + rel_dict = get_cpan_api_url(f"{cpan_url}/release/{package}", colons=False) + rel_dict["version"] = str(rel_dict["version"]).lstrip("v") except CondaHTTPError: core_version = metacpan_api_is_core_version(cpan_url, package) - if core_version is not None and (version is None or - (version == core_version)): - print(("WARNING: {0} is not available on MetaCPAN, but it's a " + - "core module, so we do not actually need the source file, " + - "and are omitting the URL and MD5 from the recipe " + - "entirely.").format(orig_package)) - rel_dict = {'version': str(core_version), 'download_url': '', - 'license': ['perl_5'], 'dependency': {}} + if core_version is not None and (version is None or (version == core_version)): + print( + ( + "WARNING: {0} is not available on MetaCPAN, but it's a " + + "core module, so we do not actually need the source file, " + + "and are omitting the URL and MD5 from the recipe " + + "entirely." + ).format(orig_package) + ) + rel_dict = { + "version": str(core_version), + "download_url": "", + "license": ["perl_5"], + "dependency": {}, + } else: - sys.exit(("Error: Could not find any versions of package %s on " + - "MetaCPAN.") % (orig_package)) + sys.exit( + ("Error: Could not find any versions of package %s on " + "MetaCPAN.") + % (orig_package) + ) version_mismatch = False if version is not None: version_str = str(version) - rel_version = str(rel_dict['version']) + rel_version = str(rel_dict["version"]) loose_str = str(parse_version(version_str)) try: version_mismatch = (version is not None) and ( - loose_version('0') != loose_version(version_str) and - parse_version(rel_version) != loose_version(version_str)) + loose_version("0") != loose_version(version_str) + and parse_version(rel_version) != loose_version(version_str) + ) # print(version_mismatch) except Exception as e: - print('We have some strange version mismatches. Please investigate.') + print("We have some strange version mismatches. 
Please investigate.") print(e) - print(f'Package {package}') - print(f'Version {version}') - print('Pkg Version {}'.format(rel_dict['version'])) - print(f'Loose Version {loose_str}') + print(f"Package {package}") + print(f"Version {version}") + print("Pkg Version {}".format(rel_dict["version"])) + print(f"Loose Version {loose_str}") # TODO - check for major/minor version mismatches # Allow for minor if version_mismatch: - print(f'WARNING :: Version mismatch in {package}') - print(f'WARNING :: Version: {version_str}, RelVersion: {rel_version}') + print(f"WARNING :: Version mismatch in {package}") + print(f"WARNING :: Version: {version_str}, RelVersion: {rel_version}") return rel_dict def get_checksum_and_size(download_url): - ''' + """ Looks in the CHECKSUMS file in the same directory as the file specified at download_url and returns the sha256 hash and file size. - ''' + """ base_url = dirname(download_url) filename = basename(download_url) - with PerlTmpDownload(base_url + '/CHECKSUMS') as checksum_path: + with PerlTmpDownload(base_url + "/CHECKSUMS") as checksum_path: with open(checksum_path) as checksum_file: found_file = False sha256 = None @@ -1040,11 +1178,11 @@ def get_checksum_and_size(download_url): size = line.split("=>")[1].strip("', ") break # This should never happen, but just in case - elif line.startswith('}'): + elif line.startswith("}"): break return sha256, size def perl_to_conda(name): - ''' Sanitizes a Perl package name for use as a conda package name. ''' - return 'perl-' + name.replace('::', '-').lower() + """Sanitizes a Perl package name for use as a conda package name.""" + return "perl-" + name.replace("::", "-").lower() diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 1ff51c2635..0dac66631a 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -7,19 +7,29 @@ import argparse import copy -from itertools import chain -from os import makedirs, listdir, sep, environ -from os.path import (basename, commonprefix, exists, isabs, isdir, - isfile, join, normpath, realpath, relpath) +import hashlib import re import subprocess import sys -import hashlib - -import requests import tarfile -import zipfile import unicodedata +import zipfile +from itertools import chain +from os import environ, listdir, makedirs, sep +from os.path import ( + basename, + commonprefix, + exists, + isabs, + isdir, + isfile, + join, + normpath, + realpath, + relpath, +) + +import requests import yaml # try to import C dumper @@ -29,12 +39,13 @@ from yaml import SafeDumper from conda.common.io import dashlist -from conda_build import source, metadata -from conda_build.config import get_or_merge_config + +from conda_build import metadata, source from conda_build.conda_interface import TemporaryDirectory, cc_conda_build +from conda_build.config import get_or_merge_config from conda_build.license_family import allowed_license_families, guess_license_family -from conda_build.utils import rm_rf, ensure_list -from conda_build.variants import get_package_variants, DEFAULT_VARIANTS +from conda_build.utils import ensure_list, rm_rf +from conda_build.variants import DEFAULT_VARIANTS, get_package_variants SOURCE_META = """\ {archive_keys} @@ -216,84 +227,85 @@ exit 0 """ -INDENT = '\n - ' +INDENT = "\n - " CRAN_KEYS = [ - 'Site', - 'Archs', - 'Depends', - 'Enhances', - 'Imports', - 'License', - 'License_is_FOSS', - 'License_restricts_use', - 'LinkingTo', - 'MD5sum', - 'NeedsCompilation', - 'OS_type', - 'Package', - 'Path', - 'Priority', - 'Suggests', - 
'Version', - - 'Title', - 'Author', - 'Maintainer', + "Site", + "Archs", + "Depends", + "Enhances", + "Imports", + "License", + "License_is_FOSS", + "License_restricts_use", + "LinkingTo", + "MD5sum", + "NeedsCompilation", + "OS_type", + "Package", + "Path", + "Priority", + "Suggests", + "Version", + "Title", + "Author", + "Maintainer", ] # The following base/recommended package names are derived from R's source # tree (R-3.0.2/share/make/vars.mk). Hopefully they don't change too much # between versions. R_BASE_PACKAGE_NAMES = ( - 'base', - 'compiler', - 'datasets', - 'graphics', - 'grDevices', - 'grid', - 'methods', - 'parallel', - 'splines', - 'stats', - 'stats4', - 'tcltk', - 'tools', - 'utils', + "base", + "compiler", + "datasets", + "graphics", + "grDevices", + "grid", + "methods", + "parallel", + "splines", + "stats", + "stats4", + "tcltk", + "tools", + "utils", ) R_RECOMMENDED_PACKAGE_NAMES = ( - 'MASS', - 'lattice', - 'Matrix', - 'nlme', - 'survival', - 'boot', - 'cluster', - 'codetools', - 'foreign', - 'KernSmooth', - 'rpart', - 'class', - 'nnet', - 'spatial', - 'mgcv', + "MASS", + "lattice", + "Matrix", + "nlme", + "survival", + "boot", + "cluster", + "codetools", + "foreign", + "KernSmooth", + "rpart", + "class", + "nnet", + "spatial", + "mgcv", ) # Stolen then tweaked from debian.deb822.PkgRelation.__dep_RE. VERSION_DEPENDENCY_REGEX = re.compile( - r'^\s*(?P[a-zA-Z0-9.+\-]{1,})' - r'(\s*\(\s*(?P[>=<]+)\s*' - r'(?P[0-9a-zA-Z:\-+~.]+)\s*\))' - r'?(\s*\[(?P[\s!\w\-]+)\])?\s*$' + r"^\s*(?P[a-zA-Z0-9.+\-]{1,})" + r"(\s*\(\s*(?P[>=<]+)\s*" + r"(?P[0-9a-zA-Z:\-+~.]+)\s*\))" + r"?(\s*\[(?P[\s!\w\-]+)\])?\s*$" ) -target_platform_bash_test_by_sel = {'linux': '=~ linux.*', - 'linux32': '== linux-32', - 'linux64': '== linux-64', - 'win32': '== win-32', - 'win64': '== win-64', - 'osx': '== osx-64'} +target_platform_bash_test_by_sel = { + "linux": "=~ linux.*", + "linux32": "== linux-32", + "linux64": "== linux-64", + "win32": "== win-32", + "win64": "== win-64", + "osx": "== osx-64", +} def package_exists(package_name): @@ -317,7 +329,7 @@ def add_parser(repos): ) cran.add_argument( "packages", - nargs='+', + nargs="+", help="""CRAN packages to create recipe skeletons for.""", ) cran.add_argument( @@ -354,101 +366,102 @@ def add_parser(repos): ) cran.add_argument( "--r-interp", - default='r-base', + default="r-base", help="Declare R interpreter package", ) cran.add_argument( "--use-binaries-ver", - help=("Repackage binaries from version provided by argument instead of building " - "from source."), + help=( + "Repackage binaries from version provided by argument instead of building " + "from source." + ), ) cran.add_argument( "--use-when-no-binary", - choices=('src', - 'old', - 'src-old', - 'old-src', - 'error'), - default='src', + choices=("src", "old", "src-old", "old-src", "error"), + default="src", help="""Sometimes binaries are not available at the correct version for a given platform (macOS). 
You can use this flag to specify what fallback to take, either compiling from source or using an older - binary or trying one then the other.""" + binary or trying one then the other.""", ) cran.add_argument( "--use-noarch-generic", - action='store_true', - dest='use_noarch_generic', + action="store_true", + dest="use_noarch_generic", help=("Mark packages that do not need compilation as `noarch: generic`"), ) cran.add_argument( "--use-rtools-win", - action='store_true', + action="store_true", help="Use Rtools when building from source on Windows", ) cran.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.', + action="store_true", + help="Create recipes for dependencies if they do not already exist.", ) cran.add_argument( "--no-recursive", - action='store_false', - dest='recursive', + action="store_false", + dest="recursive", help="Don't create recipes for dependencies if they do not already exist.", ) cran.add_argument( - '--no-archive', - action='store_false', - dest='archive', + "--no-archive", + action="store_false", + dest="archive", help="Don't include an Archive download url.", ) cran.add_argument( - '--allow-archived', - action='store_true', - dest='allow_archived', + "--allow-archived", + action="store_true", + dest="allow_archived", help="If the package has been archived, download the latest version.", ) cran.add_argument( "--version-compare", - action='store_true', + action="store_true", help="""Compare the package version of the recipe with the one available - on CRAN. Exits 1 if a newer version is available and 0 otherwise.""" + on CRAN. Exits 1 if a newer version is available and 0 otherwise.""", ) cran.add_argument( "--update-policy", - action='store', - choices=('error', - 'skip-up-to-date', - 'skip-existing', - 'overwrite', - 'merge-keep-build-num', - 'merge-incr-build-num'), - default='error', + action="store", + choices=( + "error", + "skip-up-to-date", + "skip-existing", + "overwrite", + "merge-keep-build-num", + "merge-incr-build-num", + ), + default="error", help="""Dictates what to do when existing packages are encountered in the output directory (set by --output-dir). In the present implementation, the merge options avoid overwriting bld.bat and build.sh and only manage copying across patches, and the `build/{number,script_env}` fields. When the version changes, both merge options reset `build/number` to 0. When the version does - not change they either keep the old `build/number` or else increase it by one.""" + not change they either keep the old `build/number` or else increase it by one.""", ) cran.add_argument( - '-m', '--variant-config-files', - default=cc_conda_build.get('skeleton_config_yaml', None), + "-m", + "--variant-config-files", + default=cc_conda_build.get("skeleton_config_yaml", None), help="""Variant config file to add. These yaml files can contain - keys such as `cran_mirror`. Only one can be provided here.""" + keys such as `cran_mirror`. 
Only one can be provided here.""", ) cran.add_argument( "--add-cross-r-base", - action='store_true', + action="store_true", default=False, - help="""Add cross-r-base to build requirements for cross compiling""" + help="""Add cross-r-base to build requirements for cross compiling""", ) cran.add_argument( "--no-comments", - action='store_true', + action="store_true", default=False, - help="""Do not include instructional comments in recipe files""" + help="""Do not include instructional comments in recipe files""", ) @@ -458,19 +471,19 @@ def dict_from_cran_lines(lines): if not line: continue try: - if ': ' in line: - (k, v) = line.split(': ', 1) + if ": " in line: + (k, v) = line.split(": ", 1) else: # Sometimes fields are included but left blank, e.g.: # - Enhances in data.tree # - Suggests in corpcor - (k, v) = line.split(':', 1) + (k, v) = line.split(":", 1) except ValueError: sys.exit("Error: Could not parse metadata (%s)" % line) d[k] = v # if k not in CRAN_KEYS: # print("Warning: Unknown key %s" % k) - d['orig_lines'] = lines + d["orig_lines"] = lines return d @@ -495,17 +508,17 @@ def remove_package_line_continuations(chunk): 'License: GPL (>= 2)', 'NeedsCompilation: no'] """ # NOQA - continuation = (' ', '\t') + continuation = (" ", "\t") continued_ix = None continued_line = None had_continuation = False accumulating_continuations = False - chunk.append('') + chunk.append("") - for (i, line) in enumerate(chunk): + for i, line in enumerate(chunk): if line.startswith(continuation): - line = ' ' + line.lstrip() + line = " " + line.lstrip() if accumulating_continuations: assert had_continuation continued_line += line @@ -528,7 +541,7 @@ def remove_package_line_continuations(chunk): # Remove the None(s). chunk = [c for c in chunk if c] - chunk.append('') + chunk.append("") return chunk @@ -543,25 +556,30 @@ def yaml_quote_string(string): Note that this function is NOT general. """ - return yaml.dump(string, Dumper=SafeDumper).replace('\n...\n', '').replace('\n', '\n ').rstrip('\n ') + return ( + yaml.dump(string, Dumper=SafeDumper) + .replace("\n...\n", "") + .replace("\n", "\n ") + .rstrip("\n ") + ) # Due to how we render the metadata there can be significant areas of repeated newlines. # This collapses them and also strips any trailing spaces. 
def clear_whitespace(string): lines = [] - last_line = '' + last_line = "" for line in string.splitlines(): line = line.rstrip() - if not (line == '' and last_line == ''): + if not (line == "" and last_line == ""): lines.append(line) last_line = line - return '\n'.join(lines) + return "\n".join(lines) def read_description_contents(fp): bytes = fp.read() - text = bytes.decode('utf-8', errors='replace') + text = bytes.decode("utf-8", errors="replace") text = clear_whitespace(text) lines = remove_package_line_continuations(text.splitlines()) return dict_from_cran_lines(lines) @@ -569,39 +587,46 @@ def read_description_contents(fp): def get_archive_metadata(path, verbose=True): if verbose: - print('Reading package metadata from %s' % path) - if basename(path) == 'DESCRIPTION': - with open(path, 'rb') as fp: + print("Reading package metadata from %s" % path) + if basename(path) == "DESCRIPTION": + with open(path, "rb") as fp: return read_description_contents(fp) elif tarfile.is_tarfile(path): - with tarfile.open(path, 'r') as tf: + with tarfile.open(path, "r") as tf: for member in tf: - if re.match(r'^[^/]+/DESCRIPTION$', member.name): + if re.match(r"^[^/]+/DESCRIPTION$", member.name): fp = tf.extractfile(member) return read_description_contents(fp) - elif path.endswith('.zip'): - with zipfile.ZipFile(path, 'r') as zf: + elif path.endswith(".zip"): + with zipfile.ZipFile(path, "r") as zf: for member in zf.infolist(): - if re.match(r'^[^/]+/DESCRIPTION$', member.filename): - fp = zf.open(member, 'r') + if re.match(r"^[^/]+/DESCRIPTION$", member.filename): + fp = zf.open(member, "r") return read_description_contents(fp) else: - sys.exit('Cannot extract a DESCRIPTION from file %s' % path) - sys.exit('%s does not seem to be a CRAN package (no DESCRIPTION) file' % path) + sys.exit("Cannot extract a DESCRIPTION from file %s" % path) + sys.exit("%s does not seem to be a CRAN package (no DESCRIPTION) file" % path) def get_latest_git_tag(config): # SO says to use taggerdate instead of committerdate, but that is invalid for lightweight tags. - p = subprocess.Popen(['git', 'for-each-ref', - 'refs/tags', - '--sort=-committerdate', - '--format=%(refname:short)', - '--count=1'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, - cwd=config.work_dir) + p = subprocess.Popen( + [ + "git", + "for-each-ref", + "refs/tags", + "--sort=-committerdate", + "--format=%(refname:short)", + "--count=1", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=config.work_dir, + ) stdout, stderr = p.communicate() - stdout = stdout.decode('utf-8') - stderr = stderr.decode('utf-8') + stdout = stdout.decode("utf-8") + stderr = stderr.decode("utf-8") if stderr or p.returncode: sys.exit("Error: git tag failed (%s)" % stderr) tags = stdout.strip().splitlines() @@ -618,7 +643,7 @@ def _ssl_no_verify(): This provides a workaround for users in some corporate environments where MITM style proxies make it difficult to fetch data over HTTPS. 
""" - return environ.get('SSL_NO_VERIFY', '').strip().lower() in ('1', 'true') + return environ.get("SSL_NO_VERIFY", "").strip().lower() in ("1", "true") def get_session(output_dir, verbose=True): @@ -629,12 +654,14 @@ def get_session(output_dir, verbose=True): import cachecontrol.caches except ImportError: if verbose: - print("Tip: install CacheControl and lockfile (conda packages) to cache the " - "CRAN metadata") + print( + "Tip: install CacheControl and lockfile (conda packages) to cache the " + "CRAN metadata" + ) else: - session = cachecontrol.CacheControl(session, - cache=cachecontrol.caches.FileCache(join(output_dir, - '.web_cache'))) + session = cachecontrol.CacheControl( + session, cache=cachecontrol.caches.FileCache(join(output_dir, ".web_cache")) + ) return session @@ -650,9 +677,11 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): return [] raise versions = [] - for p, dt in re.findall(r'\1\s*]*>([^<]*)', r.text): - if p.endswith('.tar.gz') and '_' in p: - name, version = p.rsplit('.', 2)[0].split('_', 1) + for p, dt in re.findall( + r'\1\s*]*>([^<]*)', r.text + ): + if p.endswith(".tar.gz") and "_" in p: + name, version = p.rsplit(".", 2)[0].split("_", 1) versions.append((dt.strip(), version)) return [v for dt, v in sorted(versions, reverse=True)] @@ -664,13 +693,13 @@ def get_cran_index(cran_url, session, verbose=True): r.raise_for_status() records = {} for p in re.findall(r'\1', r.text): - if p.endswith('.tar.gz') and '_' in p: - name, version = p.rsplit('.', 2)[0].split('_', 1) + if p.endswith(".tar.gz") and "_" in p: + name, version = p.rsplit(".", 2)[0].split("_", 1) records[name.lower()] = (name, version) r = session.get(cran_url + "/src/contrib/Archive/") r.raise_for_status() for p in re.findall(r'\1/', r.text): - if re.match(r'^[A-Za-z]', p): + if re.match(r"^[A-Za-z]", p): records.setdefault(p.lower(), (p, None)) return records @@ -682,7 +711,7 @@ def make_array(m, key, allow_empty=False): except: old_vals = [] if old_vals or allow_empty: - result.append(key.split('/')[-1] + ":") + result.append(key.split("/")[-1] + ":") for old_val in old_vals: result.append(f"{INDENT}{old_val}") return result @@ -691,19 +720,19 @@ def make_array(m, key, allow_empty=False): def existing_recipe_dir(output_dir, output_suffix, package, version): result = None if version: - package = package + '-' + version.replace('-', '_') + package = package + "-" + version.replace("-", "_") if exists(join(output_dir, package)): result = normpath(join(output_dir, package)) elif exists(join(output_dir, package + output_suffix)): result = normpath(join(output_dir, package + output_suffix)) - elif exists(join(output_dir, 'r-' + package + output_suffix)): - result = normpath(join(output_dir, 'r-' + package + output_suffix)) + elif exists(join(output_dir, "r-" + package + output_suffix)): + result = normpath(join(output_dir, "r-" + package + output_suffix)) return result def strip_end(string, end): if string.endswith(end): - return string[:-len(end)] + return string[: -len(end)] return string @@ -735,33 +764,44 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version= if isfile(package): return None print("Parsing input package %s:" % package) - package = strip_end(package, '/') + package = strip_end(package, "/") package = strip_end(package, sep) - if 'github.com' in package: - package = strip_end(package, '.git') + if "github.com" in package: + package = strip_end(package, ".git") pkg_name = basename(package).lower() - pkg_name = strip_end(pkg_name, 
'-feedstock') + pkg_name = strip_end(pkg_name, "-feedstock") if output_suffix: pkg_name = strip_end(pkg_name, output_suffix) - if pkg_name.startswith('r-'): + if pkg_name.startswith("r-"): pkg_name = pkg_name[2:] - if package.startswith('file://'): - location = package.replace('file://', '') + if package.startswith("file://"): + location = package.replace("file://", "") pkg_filename = basename(location) - pkg_name = re.match(r'(.*)_(.*)', pkg_filename).group(1).lower() - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) + pkg_name = re.match(r"(.*)_(.*)", pkg_filename).group(1).lower() + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) elif isabs(package): commp = commonprefix((package, output_dir)) if commp != output_dir: - raise RuntimeError("package {} specified with abs path outside of output-dir {}".format( - package, output_dir)) + raise RuntimeError( + "package {} specified with abs path outside of output-dir {}".format( + package, output_dir + ) + ) location = package - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) - elif 'github.com' in package: + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) + elif "github.com" in package: location = package - existing_location = existing_recipe_dir(output_dir, output_suffix, 'r-' + pkg_name, version) + existing_location = existing_recipe_dir( + output_dir, output_suffix, "r-" + pkg_name, version + ) else: - location = existing_location = existing_recipe_dir(output_dir, output_suffix, package, version) + location = existing_location = existing_recipe_dir( + output_dir, output_suffix, package, version + ) if existing_location: try: m = metadata.MetaData(existing_location) @@ -774,54 +814,74 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version= # It can still be the case that a package without 'github.com' in the location does really # come from there, for that we need to inspect the existing metadata's source/git_url. old_git_rev = git_tag - if location and m and 'github.com' not in location: - git_url = m.get_value('source/git_url', '') - if 'github.com' in git_url: + if location and m and "github.com" not in location: + git_url = m.get_value("source/git_url", "") + if "github.com" in git_url: location = git_url - old_git_rev = m.get_value('source/git_rev', None) + old_git_rev = m.get_value("source/git_rev", None) - vstr = '-' + version.replace('-', '_') if version else '' - new_location = join(output_dir, 'r-' + pkg_name + vstr + output_suffix) + vstr = "-" + version.replace("-", "_") if version else "" + new_location = join(output_dir, "r-" + pkg_name + vstr + output_suffix) print(f".. 
name: {pkg_name} location: {location} new_location: {new_location}") - return {'pkg-name': pkg_name, - 'location': location, - 'old-git-rev': old_git_rev, - 'old-metadata': m, - 'new-location': new_location, - 'version': version} + return { + "pkg-name": pkg_name, + "location": location, + "old-git-rev": old_git_rev, + "old-metadata": m, + "new-location": new_location, + "version": version, + } def get_available_binaries(cran_url, details): - url = cran_url + '/' + details['dir'] + url = cran_url + "/" + details["dir"] response = requests.get(url) response.raise_for_status() - ext = details['ext'] + ext = details["ext"] for filename in re.findall(r'\1', response.text): if filename.endswith(ext): - pkg, _, ver = filename.rpartition('_') + pkg, _, ver = filename.rpartition("_") ver, _, _ = ver.rpartition(ext) - details['binaries'].setdefault(pkg, []).append((ver, url + filename)) + details["binaries"].setdefault(pkg, []).append((ver, url + filename)) def remove_comments(template): - re_comment = re.compile(r'^\s*#\s') - lines = template.split('\n') + re_comment = re.compile(r"^\s*#\s") + lines = template.split("\n") lines_no_comments = [line for line in lines if not re_comment.match(line)] - return '\n'.join(lines_no_comments) - - -def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None, version=None, - git_tag=None, cran_url=None, recursive=False, archive=True, - version_compare=False, update_policy='', r_interp='r-base', use_binaries_ver=None, - use_noarch_generic=False, use_when_no_binary='src', use_rtools_win=False, config=None, - variant_config_files=None, allow_archived=False, add_cross_r_base=False, - no_comments=False): - - if use_when_no_binary != 'error' and \ - use_when_no_binary != 'src' and \ - use_when_no_binary != 'old' and \ - use_when_no_binary != 'old-src': + return "\n".join(lines_no_comments) + + +def skeletonize( + in_packages, + output_dir=".", + output_suffix="", + add_maintainer=None, + version=None, + git_tag=None, + cran_url=None, + recursive=False, + archive=True, + version_compare=False, + update_policy="", + r_interp="r-base", + use_binaries_ver=None, + use_noarch_generic=False, + use_when_no_binary="src", + use_rtools_win=False, + config=None, + variant_config_files=None, + allow_archived=False, + add_cross_r_base=False, + no_comments=False, +): + if ( + use_when_no_binary != "error" + and use_when_no_binary != "src" + and use_when_no_binary != "old" + and use_when_no_binary != "old-src" + ): print(f"ERROR: --use_when_no_binary={use_when_no_binary} not yet implemented") sys.exit(1) output_dir = realpath(output_dir) @@ -834,64 +894,74 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No if not cran_url: with TemporaryDirectory() as t: _variant = get_package_variants(t, config)[0] - cran_url = ensure_list(_variant.get('cran_mirror', DEFAULT_VARIANTS['cran_mirror']))[0] + cran_url = ensure_list( + _variant.get("cran_mirror", DEFAULT_VARIANTS["cran_mirror"]) + )[0] if len(in_packages) > 1 and version_compare: raise ValueError("--version-compare only works with one package at a time") - if update_policy == 'error' and not in_packages: + if update_policy == "error" and not in_packages: raise ValueError("At least one package must be supplied") package_dicts = {} package_list = [] - cran_url = cran_url.rstrip('/') + cran_url = cran_url.rstrip("/") # Get cran index lazily so we don't have to go to CRAN # for a github repo or a local tarball cran_index = None - cran_layout_template = \ - {'source': {'selector': 
'{others}', - 'dir': 'src/contrib/', - 'ext': '.tar.gz', - # If we had platform filters we would change this to: - # build_for_linux or is_github_url or is_tarfile - 'use_this': True}, - 'win-64': {'selector': 'win64', - 'dir': f'bin/windows/contrib/{use_binaries_ver}/', - 'ext': '.zip', - 'use_this': True if use_binaries_ver else False}, - 'osx-64': {'selector': 'osx', - 'dir': 'bin/macosx/el-capitan/contrib/{}/'.format( - use_binaries_ver), - 'ext': '.tgz', - 'use_this': True if use_binaries_ver else False}} + cran_layout_template = { + "source": { + "selector": "{others}", + "dir": "src/contrib/", + "ext": ".tar.gz", + # If we had platform filters we would change this to: + # build_for_linux or is_github_url or is_tarfile + "use_this": True, + }, + "win-64": { + "selector": "win64", + "dir": f"bin/windows/contrib/{use_binaries_ver}/", + "ext": ".zip", + "use_this": True if use_binaries_ver else False, + }, + "osx-64": { + "selector": "osx", + "dir": f"bin/macosx/el-capitan/contrib/{use_binaries_ver}/", + "ext": ".tgz", + "use_this": True if use_binaries_ver else False, + }, + } # Figure out what binaries are available once: for archive_type, archive_details in cran_layout_template.items(): - archive_details['binaries'] = dict() - if archive_type != 'source' and archive_details['use_this']: + archive_details["binaries"] = dict() + if archive_type != "source" and archive_details["use_this"]: get_available_binaries(cran_url, archive_details) for package in in_packages: - inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version) + inputs_dict = package_to_inputs_dict( + output_dir, output_suffix, git_tag, package, version + ) if inputs_dict: - package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}}) + package_dicts.update({inputs_dict["pkg-name"]: {"inputs": inputs_dict}}) for package_name, package_dict in package_dicts.items(): package_list.append(package_name) while package_list: - inputs = package_dicts[package_list.pop()]['inputs'] - location = inputs['location'] - pkg_name = inputs['pkg-name'] - version = inputs['version'] - is_github_url = location and 'github.com' in location + inputs = package_dicts[package_list.pop()]["inputs"] + location = inputs["location"] + pkg_name = inputs["pkg-name"] + version = inputs["version"] + is_github_url = location and "github.com" in location is_tarfile = location and isfile(location) and tarfile.is_tarfile(location) is_archive = False - url = inputs['location'] + url = inputs["location"] - dir_path = inputs['new-location'] + dir_path = inputs["new-location"] print(f"Making/refreshing recipe for {pkg_name}") # Bodges GitHub packages into cran_metadata @@ -900,33 +970,47 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No elif is_github_url or is_tarfile: rm_rf(config.work_dir) - m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config) - source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir) + m = metadata.MetaData.fromdict( + {"source": {"git_url": location}}, config=config + ) + source.git_source( + m.get_section("source"), m.config.git_cache, m.config.work_dir + ) new_git_tag = git_tag if git_tag else get_latest_git_tag(config) - p = subprocess.Popen(['git', 'checkout', new_git_tag], stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=config.work_dir) + p = subprocess.Popen( + ["git", "checkout", new_git_tag], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=config.work_dir, + ) stdout, stderr = 
p.communicate() - stdout = stdout.decode('utf-8') - stderr = stderr.decode('utf-8') + stdout = stdout.decode("utf-8") + stderr = stderr.decode("utf-8") if p.returncode: - sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" % - (new_git_tag, stderr.strip())) + sys.exit( + "Error: 'git checkout %s' failed (%s).\nInvalid tag?" + % (new_git_tag, stderr.strip()) + ) if stdout: print(stdout, file=sys.stdout) if stderr: print(stderr, file=sys.stderr) DESCRIPTION = join(config.work_dir, "DESCRIPTION") if not isfile(DESCRIPTION): - sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION") - sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION") + sub_description_pkg = join(config.work_dir, "pkg", "DESCRIPTION") + sub_description_name = join( + config.work_dir, location.split("/")[-1], "DESCRIPTION" + ) if isfile(sub_description_pkg): DESCRIPTION = sub_description_pkg elif isfile(sub_description_name): DESCRIPTION = sub_description_name else: - sys.exit("%s does not appear to be a valid R package " - "(no DESCRIPTION file in %s, %s)" - % (location, sub_description_pkg, sub_description_name)) + sys.exit( + "%s does not appear to be a valid R package " + "(no DESCRIPTION file in %s, %s)" + % (location, sub_description_pkg, sub_description_name) + ) cran_package = get_archive_metadata(DESCRIPTION) else: @@ -939,10 +1023,15 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No if cran_version and (not version or version == cran_version): version = cran_version elif version and not archive: - print(f'ERROR: Version {version} of package {package} is archived, but --no-archive was selected') + print( + f"ERROR: Version {version} of package {package} is archived, but --no-archive was selected" + ) sys.exit(1) elif not version and not cran_version and not allow_archived: - print("ERROR: Package %s is archived; to build, use --allow-archived or a --version value" % pkg_name) + print( + "ERROR: Package %s is archived; to build, use --allow-archived or a --version value" + % pkg_name + ) sys.exit(1) else: is_archive = True @@ -952,66 +1041,73 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No if not version: version = all_versions[0] elif version not in all_versions: - msg = f'ERROR: Version {version} of package {package} not found.\n Available versions: ' - print(msg + ', '.join(all_versions)) + msg = f"ERROR: Version {version} of package {package} not found.\n Available versions: " + print(msg + ", ".join(all_versions)) sys.exit(1) cran_package = None if cran_package is not None: - package = cran_package['Package'] - version = cran_package['Version'] + package = cran_package["Package"] + version = cran_package["Version"] plower = package.lower() d = package_dicts[pkg_name] - d.update({ - 'cran_packagename': package, - 'cran_version': version, - 'packagename': 'r-' + plower, + d.update( + { + "cran_packagename": package, + "cran_version": version, + "packagename": "r-" + plower, # Conda versions cannot have -. Conda (verlib) will treat _ as a . 
- 'conda_version': version.replace('-', '_'), - 'patches': '', - 'build_number': 0, - 'build_depends': '', - 'host_depends': '', - 'run_depends': '', + "conda_version": version.replace("-", "_"), + "patches": "", + "build_number": 0, + "build_depends": "", + "host_depends": "", + "run_depends": "", # CRAN doesn't seem to have this metadata :( - 'home_comment': '#', - 'homeurl': '', - 'summary_comment': '#', - 'summary': '', - 'binary1': '', - 'binary2': '' - }) + "home_comment": "#", + "homeurl": "", + "summary_comment": "#", + "summary": "", + "binary1": "", + "binary2": "", + } + ) if version_compare: - sys.exit(not version_compare(dir_path, d['conda_version'])) + sys.exit(not version_compare(dir_path, d["conda_version"])) patches = [] script_env = [] extra_recipe_maintainers = [] build_number = 0 - if update_policy.startswith('merge') and inputs['old-metadata']: - m = inputs['old-metadata'] - patches = make_array(m, 'source/patches') - script_env = make_array(m, 'build/script_env') - extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer) - if m.version() == d['conda_version']: - build_number = int(m.get_value('build/number', 0)) - build_number += 1 if update_policy == 'merge-incr-build-num' else 0 + if update_policy.startswith("merge") and inputs["old-metadata"]: + m = inputs["old-metadata"] + patches = make_array(m, "source/patches") + script_env = make_array(m, "build/script_env") + extra_recipe_maintainers = make_array( + m, "extra/recipe-maintainers", add_maintainer + ) + if m.version() == d["conda_version"]: + build_number = int(m.get_value("build/number", 0)) + build_number += 1 if update_policy == "merge-incr-build-num" else 0 if add_maintainer: - new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT, - add_maintainer=add_maintainer) + new_maintainer = "{indent}{add_maintainer}".format( + indent=INDENT, add_maintainer=add_maintainer + ) if new_maintainer not in extra_recipe_maintainers: if not len(extra_recipe_maintainers): # We hit this case when there is no existing recipe. 
- extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True) + extra_recipe_maintainers = make_array( + {}, "extra/recipe-maintainers", True + ) extra_recipe_maintainers.append(new_maintainer) if len(extra_recipe_maintainers): extra_recipe_maintainers[1:].sort() extra_recipe_maintainers.insert(0, "extra:\n ") - d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers) - d['patches'] = ''.join(patches) - d['script_env'] = ''.join(script_env) - d['build_number'] = build_number + d["extra_recipe_maintainers"] = "".join(extra_recipe_maintainers) + d["patches"] = "".join(patches) + d["script_env"] = "".join(script_env) + d["build_number"] = build_number cached_path = None cran_layout = copy.deepcopy(cran_layout_template) @@ -1019,209 +1115,266 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No description_path = None for archive_type, archive_details in cran_layout.items(): - contrib_url = '' - archive_details['cran_version'] = d['cran_version'] - archive_details['conda_version'] = d['conda_version'] - if is_archive and archive_type == 'source': - archive_details['dir'] += 'Archive/' + package + '/' - available_artefact = True if archive_type == 'source' else \ - package in archive_details['binaries'] and \ - any(d['cran_version'] == v for v, _ in archive_details['binaries'][package]) + contrib_url = "" + archive_details["cran_version"] = d["cran_version"] + archive_details["conda_version"] = d["conda_version"] + if is_archive and archive_type == "source": + archive_details["dir"] += "Archive/" + package + "/" + available_artefact = ( + True + if archive_type == "source" + else package in archive_details["binaries"] + and any( + d["cran_version"] == v + for v, _ in archive_details["binaries"][package] + ) + ) if not available_artefact: - if use_when_no_binary == 'error': - print("ERROR: --use-when-no-binary is error (and there is no binary)") + if use_when_no_binary == "error": + print( + "ERROR: --use-when-no-binary is error (and there is no binary)" + ) sys.exit(1) - elif use_when_no_binary.startswith('old'): - if package not in archive_details['binaries']: - if use_when_no_binary.endswith('src'): + elif use_when_no_binary.startswith("old"): + if package not in archive_details["binaries"]: + if use_when_no_binary.endswith("src"): available_artefact = False - archive_details['use_this'] = False + archive_details["use_this"] = False continue else: - print("ERROR: No binary nor old binary found " - "(maybe pass --use-when-no-binary=old-src to fallback to source?)") + print( + "ERROR: No binary nor old binary found " + "(maybe pass --use-when-no-binary=old-src to fallback to source?)" + ) sys.exit(1) # Version needs to be stored in archive_details. - archive_details['cranurl'] = archive_details['binaries'][package][-1][1] - archive_details['conda_version'] = archive_details['binaries'][package][-1][0] - archive_details['cran_version'] = archive_details['conda_version'].replace('_', '-') + archive_details["cranurl"] = archive_details["binaries"][package][ + -1 + ][1] + archive_details["conda_version"] = archive_details["binaries"][ + package + ][-1][0] + archive_details["cran_version"] = archive_details[ + "conda_version" + ].replace("_", "-") available_artefact = True # We may need to inspect the file later to determine which compilers are needed. 
cached_path = None sha256 = hashlib.sha256() - if archive_details['use_this'] and available_artefact: + if archive_details["use_this"] and available_artefact: if is_tarfile: filename = basename(location) contrib_url = relpath(location, dir_path) contrib_url_rendered = package_url = contrib_url cached_path = location - elif not is_github_url or archive_type != 'source': - filename_rendered = '{}_{}{}'.format( - package, archive_details['cran_version'], archive_details['ext']) - filename = f'{package}_{{{{ version }}}}' + archive_details['ext'] - contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir']) - contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir']) + elif not is_github_url or archive_type != "source": + filename_rendered = "{}_{}{}".format( + package, archive_details["cran_version"], archive_details["ext"] + ) + filename = f"{package}_{{{{ version }}}}" + archive_details["ext"] + contrib_url = "{{{{ cran_mirror }}}}/{}".format( + archive_details["dir"] + ) + contrib_url_rendered = cran_url + "/{}".format( + archive_details["dir"] + ) package_url = contrib_url_rendered + filename_rendered print(f"Downloading {archive_type} from {package_url}") try: cached_path, _ = source.download_to_cache( - config.src_cache, '', - {'url': package_url, 'fn': archive_type + '-' + filename_rendered}) + config.src_cache, + "", + { + "url": package_url, + "fn": archive_type + "-" + filename_rendered, + }, + ) except: - print("logic error, file {} should exist, we found it in a dir listing earlier." - .format(package_url)) + print( + "logic error, file {} should exist, we found it in a dir listing earlier.".format( + package_url + ) + ) sys.exit(1) - if description_path is None or archive_type == 'source': + if description_path is None or archive_type == "source": description_path = cached_path available_details = {} - available_details['selector'] = archive_details['selector'] - available_details['cran_version'] = archive_details['cran_version'] - available_details['conda_version'] = archive_details['conda_version'] + available_details["selector"] = archive_details["selector"] + available_details["cran_version"] = archive_details["cran_version"] + available_details["conda_version"] = archive_details["conda_version"] if cached_path: - sha256.update(open(cached_path, 'rb').read()) - archive_details['cranurl'] = package_url - available_details['filename'] = filename - available_details['contrib_url'] = contrib_url - available_details['contrib_url_rendered'] = contrib_url_rendered - available_details['hash_entry'] = f'sha256: {sha256.hexdigest()}' - available_details['cached_path'] = cached_path + sha256.update(open(cached_path, "rb").read()) + archive_details["cranurl"] = package_url + available_details["filename"] = filename + available_details["contrib_url"] = contrib_url + available_details["contrib_url_rendered"] = contrib_url_rendered + available_details["hash_entry"] = f"sha256: {sha256.hexdigest()}" + available_details["cached_path"] = cached_path # This is rubbish; d[] should be renamed global[] and should be # merged into source and binaryN. 
- if archive_type == 'source': + if archive_type == "source": if is_github_url: - available_details['url_key'] = '' - available_details['git_url_key'] = 'git_url:' - available_details['git_tag_key'] = 'git_tag:' - hash_msg = '# You can add a hash for the file here, (md5, sha1 or sha256)' - available_details['hash_entry'] = hash_msg - available_details['filename'] = '' - available_details['cranurl'] = '' - available_details['git_url'] = url - available_details['git_tag'] = new_git_tag - available_details['archive_keys'] = '' + available_details["url_key"] = "" + available_details["git_url_key"] = "git_url:" + available_details["git_tag_key"] = "git_tag:" + hash_msg = "# You can add a hash for the file here, (md5, sha1 or sha256)" + available_details["hash_entry"] = hash_msg + available_details["filename"] = "" + available_details["cranurl"] = "" + available_details["git_url"] = url + available_details["git_tag"] = new_git_tag + available_details["archive_keys"] = "" else: - available_details['url_key'] = 'url:' - available_details['git_url_key'] = '' - available_details['git_tag_key'] = '' - available_details['cranurl'] = ' ' + contrib_url + filename - available_details['git_url'] = '' - available_details['git_tag'] = '' + available_details["url_key"] = "url:" + available_details["git_url_key"] = "" + available_details["git_tag_key"] = "" + available_details["cranurl"] = " " + contrib_url + filename + available_details["git_url"] = "" + available_details["git_tag"] = "" else: - available_details['cranurl'] = archive_details['cranurl'] + available_details["cranurl"] = archive_details["cranurl"] - available_details['patches'] = d['patches'] + available_details["patches"] = d["patches"] available[archive_type] = available_details # Figure out the selectors according to what is available. - _all = ['linux', 'win32', 'win64', 'osx'] + _all = ["linux", "win32", "win64", "osx"] from_source = _all[:] binary_id = 1 for archive_type, archive_details in available.items(): - if archive_type == 'source': + if archive_type == "source": for k, v in archive_details.items(): d[k] = v else: - sel = archive_details['selector'] + sel = archive_details["selector"] # Does the file exist? If not we need to build from source. 
from_source.remove(sel) binary_id += 1 if from_source == _all: sel_src = "" - sel_src_and_win = ' # [win]' - sel_src_not_win = ' # [not win]' + sel_src_and_win = " # [win]" + sel_src_not_win = " # [not win]" else: - sel_src = ' # [' + ' or '.join(from_source) + ']' - sel_src_and_win = ' # [' + ' or '.join(fs for fs in from_source if - fs.startswith('win')) + ']' - sel_src_not_win = ' # [' + ' or '.join(fs for fs in from_source if not - fs.startswith('win')) + ']' + sel_src = " # [" + " or ".join(from_source) + "]" + sel_src_and_win = ( + " # [" + + " or ".join(fs for fs in from_source if fs.startswith("win")) + + "]" + ) + sel_src_not_win = ( + " # [" + + " or ".join(fs for fs in from_source if not fs.startswith("win")) + + "]" + ) sel_cross = " # [build_platform != target_platform]" - d['sel_src'] = sel_src - d['sel_src_and_win'] = sel_src_and_win - d['sel_src_not_win'] = sel_src_not_win - d['from_source'] = from_source - - if 'source' in available: - available_details = available['source'] - available_details['sel'] = sel_src - filename = available_details['filename'] - if 'contrib_url' in available_details: - contrib_url = available_details['contrib_url'] + d["sel_src"] = sel_src + d["sel_src_and_win"] = sel_src_and_win + d["sel_src_not_win"] = sel_src_not_win + d["from_source"] = from_source + + if "source" in available: + available_details = available["source"] + available_details["sel"] = sel_src + filename = available_details["filename"] + if "contrib_url" in available_details: + contrib_url = available_details["contrib_url"] if archive: if is_tarfile: - available_details['cranurl'] = (INDENT + contrib_url) + available_details["cranurl"] = INDENT + contrib_url elif not is_archive: - available_details['cranurl'] = (INDENT + contrib_url + - filename + sel_src + INDENT + contrib_url + - f'Archive/{package}/' + filename + sel_src) + available_details["cranurl"] = ( + INDENT + + contrib_url + + filename + + sel_src + + INDENT + + contrib_url + + f"Archive/{package}/" + + filename + + sel_src + ) else: - available_details['cranurl'] = ' ' + contrib_url + filename + sel_src + available_details["cranurl"] = ( + " " + contrib_url + filename + sel_src + ) if not is_github_url: - available_details['archive_keys'] = '{url_key}{sel}' \ - ' {cranurl}\n' \ - ' {hash_entry}{sel}'.format( - **available_details) + available_details["archive_keys"] = ( + "{url_key}{sel}" + " {cranurl}\n" + " {hash_entry}{sel}".format(**available_details) + ) # Extract the DESCRIPTION data from the source if cran_package is None: cran_package = get_archive_metadata(description_path) - d['cran_metadata'] = '\n'.join(['# %s' % line for line in - cran_package['orig_lines'] if line]) + d["cran_metadata"] = "\n".join( + ["# %s" % line for line in cran_package["orig_lines"] if line] + ) # Render the source and binaryN keys binary_id = 1 - d['version_binary1'] = d['version_binary2'] = "" + d["version_binary1"] = d["version_binary2"] = "" for archive_type, archive_details in available.items(): - if archive_type == 'source': - d['source'] = SOURCE_META.format(**archive_details) - d['version_source'] = VERSION_META.format(**archive_details) + if archive_type == "source": + d["source"] = SOURCE_META.format(**archive_details) + d["version_source"] = VERSION_META.format(**archive_details) else: - archive_details['sel'] = ' # [' + archive_details['selector'] + ']' - d['binary' + str(binary_id)] = BINARY_META.format(**archive_details) - d['version_binary' + str(binary_id)] = VERSION_META.format(**archive_details) + 
archive_details["sel"] = " # [" + archive_details["selector"] + "]" + d["binary" + str(binary_id)] = BINARY_META.format(**archive_details) + d["version_binary" + str(binary_id)] = VERSION_META.format( + **archive_details + ) binary_id += 1 - license_info = get_license_info(cran_package.get("License", "None"), allowed_license_families) - d['license'], d['license_file'], d['license_family'] = license_info + license_info = get_license_info( + cran_package.get("License", "None"), allowed_license_families + ) + d["license"], d["license_file"], d["license_family"] = license_info - if 'License_is_FOSS' in cran_package: - d['license'] += ' (FOSS)' - if cran_package.get('License_restricts_use') == 'yes': - d['license'] += ' (Restricts use)' + if "License_is_FOSS" in cran_package: + d["license"] += " (FOSS)" + if cran_package.get("License_restricts_use") == "yes": + d["license"] += " (Restricts use)" if "URL" in cran_package: - d['home_comment'] = '' - d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL']) + d["home_comment"] = "" + d["homeurl"] = " " + yaml_quote_string(cran_package["URL"]) else: # use CRAN page as homepage if nothing has been specified - d['home_comment'] = '' + d["home_comment"] = "" if is_github_url: - d['homeurl'] = f' {location}' + d["homeurl"] = f" {location}" else: - d['homeurl'] = f' https://CRAN.R-project.org/package={package}' + d["homeurl"] = f" https://CRAN.R-project.org/package={package}" - if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes': - d['noarch_generic'] = '' + if ( + not use_noarch_generic + or cran_package.get("NeedsCompilation", "no") == "yes" + ): + d["noarch_generic"] = "" else: - d['noarch_generic'] = 'noarch: generic' + d["noarch_generic"] = "noarch: generic" - if 'Description' in cran_package: - d['summary_comment'] = '' - d['summary'] = ' ' + yaml_quote_string(cran_package['Description']) + if "Description" in cran_package: + d["summary_comment"] = "" + d["summary"] = " " + yaml_quote_string(cran_package["Description"]) if "Suggests" in cran_package and not no_comments: - d['suggests'] = "# Suggests: %s" % cran_package['Suggests'] + d["suggests"] = "# Suggests: %s" % cran_package["Suggests"] else: - d['suggests'] = '' + d["suggests"] = "" # Every package depends on at least R. # I'm not sure what the difference between depends and imports is. 
- depends = [s.strip() for s in cran_package.get('Depends', - '').split(',') if s.strip()] - imports = [s.strip() for s in cran_package.get('Imports', - '').split(',') if s.strip()] - links = [s.strip() for s in cran_package.get("LinkingTo", - '').split(',') if s.strip()] + depends = [ + s.strip() for s in cran_package.get("Depends", "").split(",") if s.strip() + ] + imports = [ + s.strip() for s in cran_package.get("Imports", "").split(",") if s.strip() + ] + links = [ + s.strip() for s in cran_package.get("LinkingTo", "").split(",") if s.strip() + ] dep_dict = {} @@ -1229,133 +1382,210 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No for s in list(chain(imports, depends, links)): match = VERSION_DEPENDENCY_REGEX.match(s) if not match: - sys.exit("Could not parse version from dependency of %s: %s" % - (package, s)) - name = match.group('name') + sys.exit( + "Could not parse version from dependency of {}: {}".format( + package, s + ) + ) + name = match.group("name") if name in seen: continue seen.add(name) - archs = match.group('archs') - relop = match.group('relop') or '' - ver = match.group('version') or '' - ver = ver.replace('-', '_') + archs = match.group("archs") + relop = match.group("relop") or "" + ver = match.group("version") or "" + ver = ver.replace("-", "_") # If there is a relop there should be a version assert not relop or ver if archs: - sys.exit("Don't know how to handle archs from dependency of " - "package %s: %s" % (package, s)) + sys.exit( + "Don't know how to handle archs from dependency of " + "package %s: %s" % (package, s) + ) - dep_dict[name] = f'{relop}{ver}' + dep_dict[name] = f"{relop}{ver}" - if 'R' not in dep_dict: - dep_dict['R'] = '' + if "R" not in dep_dict: + dep_dict["R"] = "" - os_type = cran_package.get("OS_type", '') - if os_type != 'unix' and os_type != 'windows' and os_type != '': + os_type = cran_package.get("OS_type", "") + if os_type != "unix" and os_type != "windows" and os_type != "": print(f"Unknown OS_type: {os_type} in CRAN package") - os_type = '' - if os_type == 'unix': - d['skip_os'] = 'skip: True # [not unix]' + os_type = "" + if os_type == "unix": + d["skip_os"] = "skip: True # [not unix]" d["noarch_generic"] = "" - if os_type == 'windows': - d['skip_os'] = 'skip: True # [not win]' + if os_type == "windows": + d["skip_os"] = "skip: True # [not win]" d["noarch_generic"] = "" - if os_type == '' and no_comments: - d['skip_os'] = '' - elif os_type == '': - d['skip_os'] = '# no skip' + if os_type == "" and no_comments: + d["skip_os"] = "" + elif os_type == "": + d["skip_os"] = "# no skip" need_git = is_github_url - if cran_package.get("NeedsCompilation", 'no') == 'yes': - with tarfile.open(available['source']['cached_path']) as tf: - need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77', '.f95', '.f03')) for f in tf]) + if cran_package.get("NeedsCompilation", "no") == "yes": + with tarfile.open(available["source"]["cached_path"]) as tf: + need_f = any( + [ + f.name.lower().endswith((".f", ".f90", ".f77", ".f95", ".f03")) + for f in tf + ] + ) # Fortran builds use CC to perform the link (they do not call the linker directly). 
- need_c = True if need_f else \ - any([f.name.lower().endswith('.c') for f in tf]) - need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++')) - for f in tf]) - need_autotools = any([f.name.lower().endswith('/configure') for f in tf]) - need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \ - any([f.name.lower().endswith(('/makefile', '/makevars')) - for f in tf]) + need_c = ( + True if need_f else any([f.name.lower().endswith(".c") for f in tf]) + ) + need_cxx = any( + [ + f.name.lower().endswith((".cxx", ".cpp", ".cc", ".c++")) + for f in tf + ] + ) + need_autotools = any( + [f.name.lower().endswith("/configure") for f in tf] + ) + need_make = ( + True + if any((need_autotools, need_f, need_cxx, need_c)) + else any( + [ + f.name.lower().endswith(("/makefile", "/makevars")) + for f in tf + ] + ) + ) else: need_c = need_cxx = need_f = need_autotools = need_make = False - if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict: + if "Rcpp" in dep_dict or "RcppArmadillo" in dep_dict: need_cxx = True if need_cxx: need_c = True - for dep_type in ['build', 'host', 'run']: - + for dep_type in ["build", "host", "run"]: deps = [] # Put non-R dependencies first. - if dep_type == 'build': + if dep_type == "build": if need_c: - deps.append("{indent}{{{{ compiler('c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ compiler('c') }}}} {sel}".format( + indent=INDENT, sel=sel_src_not_win + ) + ) + deps.append( + "{indent}{{{{ compiler('m2w64_c') }}}} {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) if need_cxx: - deps.append("{indent}{{{{ compiler('cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ compiler('cxx') }}}} {sel}".format( + indent=INDENT, sel=sel_src_not_win + ) + ) + deps.append( + "{indent}{{{{ compiler('m2w64_cxx') }}}} {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) if need_f: - deps.append("{indent}{{{{ compiler('fortran') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ compiler('m2w64_fortran') }}}}{sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ compiler('fortran') }}}} {sel}".format( + indent=INDENT, sel=sel_src_not_win + ) + ) + deps.append( + "{indent}{{{{ compiler('m2w64_fortran') }}}}{sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) if use_rtools_win: need_c = need_cxx = need_f = need_autotools = need_make = False - deps.append("{indent}rtools {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}rtools {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) # extsoft is legacy. R packages will download rwinlib subprojects # as necessary according to Jeroen Ooms. (may need to disable that # for non-MRO builds or maybe switch to Jeroen's toolchain?) 
# deps.append("{indent}{{{{native}}}}extsoft {sel}".format( # indent=INDENT, sel=sel_src_and_win)) if need_autotools or need_make or need_git: - deps.append("{indent}{{{{ posix }}}}filesystem {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ posix }}}}filesystem {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) if need_git: deps.append(f"{INDENT}{{{{ posix }}}}git") if need_autotools: - deps.append("{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}grep {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}autoconf {sel}".format( - indent=INDENT, sel=sel_src)) - deps.append("{indent}{{{{ posix }}}}automake {sel}".format( - indent=INDENT, sel=sel_src_not_win)) - deps.append("{indent}{{{{ posix }}}}automake-wrapper{sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ posix }}}}sed {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) + deps.append( + "{indent}{{{{ posix }}}}grep {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) + deps.append( + "{indent}{{{{ posix }}}}autoconf {sel}".format( + indent=INDENT, sel=sel_src + ) + ) + deps.append( + "{indent}{{{{ posix }}}}automake {sel}".format( + indent=INDENT, sel=sel_src_not_win + ) + ) + deps.append( + "{indent}{{{{ posix }}}}automake-wrapper{sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) deps.append(f"{INDENT}{{{{ posix }}}}pkg-config") if need_make: - deps.append("{indent}{{{{ posix }}}}make {sel}".format( - indent=INDENT, sel=sel_src)) + deps.append( + "{indent}{{{{ posix }}}}make {sel}".format( + indent=INDENT, sel=sel_src + ) + ) if not need_autotools: - deps.append("{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}coreutils {sel}".format( - indent=INDENT, sel=sel_src_and_win)) - deps.append("{indent}{{{{ posix }}}}zip {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{ posix }}}}sed {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) + deps.append( + "{indent}{{{{ posix }}}}coreutils {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) + deps.append( + "{indent}{{{{ posix }}}}zip {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) if add_cross_r_base: deps.append(f"{INDENT}cross-r-base {{{{ r_base }}}} {sel_cross}") - elif dep_type == 'run': + elif dep_type == "run": if need_c or need_cxx or need_f: - deps.append("{indent}{{{{native}}}}gcc-libs {sel}".format( - indent=INDENT, sel=sel_src_and_win)) + deps.append( + "{indent}{{{{native}}}}gcc-libs {sel}".format( + indent=INDENT, sel=sel_src_and_win + ) + ) - if dep_type == 'host' or dep_type == 'run': + if dep_type == "host" or dep_type == "run": for name in sorted(dep_dict): if name in R_BASE_PACKAGE_NAMES: continue - if name == 'R': + if name == "R": # Put R first # Regarless of build or run, and whether this is a # recommended package or not, it can only depend on @@ -1365,29 +1595,38 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No # that are in the recommended group. # We don't include any R version restrictions because # conda-build always pins r-base and mro-base version. 
- deps.insert(0, f'{INDENT}{r_interp}') + deps.insert(0, f"{INDENT}{r_interp}") else: - conda_name = 'r-' + name.lower() + conda_name = "r-" + name.lower() if dep_dict[name]: - deps.append('{indent}{name} {version}'.format(name=conda_name, - version=dep_dict[name], indent=INDENT)) + deps.append( + "{indent}{name} {version}".format( + name=conda_name, + version=dep_dict[name], + indent=INDENT, + ) + ) else: - deps.append('{indent}{name}'.format(name=conda_name, - indent=INDENT)) + deps.append(f"{INDENT}{conda_name}") if recursive: lower_name = name.lower() if lower_name not in package_dicts: - inputs_dict = package_to_inputs_dict(output_dir, output_suffix, - git_tag, lower_name, None) - assert lower_name == inputs_dict['pkg-name'], \ - "name {} != inputs_dict['pkg-name'] {}".format( - name, inputs_dict['pkg-name']) + inputs_dict = package_to_inputs_dict( + output_dir, output_suffix, git_tag, lower_name, None + ) + assert ( + lower_name == inputs_dict["pkg-name"] + ), "name {} != inputs_dict['pkg-name'] {}".format( + name, inputs_dict["pkg-name"] + ) assert lower_name not in package_list - package_dicts.update({lower_name: {'inputs': inputs_dict}}) + package_dicts.update( + {lower_name: {"inputs": inputs_dict}} + ) package_list.append(lower_name) - d['%s_depends' % dep_type] = ''.join(deps) + d["%s_depends" % dep_type] = "".join(deps) if no_comments: global CRAN_BUILD_SH_SOURCE, CRAN_META @@ -1396,51 +1635,64 @@ def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=No for package in package_dicts: d = package_dicts[package] - dir_path = d['inputs']['new-location'] + dir_path = d["inputs"]["new-location"] if exists(dir_path) and not version_compare: - if update_policy == 'error': - raise RuntimeError("directory already exists " - "(and --update-policy is 'error'): %s" % dir_path) - elif update_policy == 'overwrite': + if update_policy == "error": + raise RuntimeError( + "directory already exists " + "(and --update-policy is 'error'): %s" % dir_path + ) + elif update_policy == "overwrite": rm_rf(dir_path) - elif update_policy == 'skip-up-to-date': + elif update_policy == "skip-up-to-date": if cran_index is None: session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) - if up_to_date(cran_index, d['inputs']['old-metadata']): + if up_to_date(cran_index, d["inputs"]["old-metadata"]): continue - elif update_policy == 'skip-existing' and d['inputs']['old-metadata']: + elif update_policy == "skip-existing" and d["inputs"]["old-metadata"]: continue - from_sources = d['from_source'] + from_sources = d["from_source"] # Normalize the metadata values - d = {k: unicodedata.normalize("NFKD", str(v)).encode('ascii', 'ignore') - .decode() for k, v in d.items()} + d = { + k: unicodedata.normalize("NFKD", str(v)).encode("ascii", "ignore").decode() + for k, v in d.items() + } try: makedirs(join(dir_path)) except: pass print("Writing recipe for %s" % package.lower()) - with open(join(dir_path, 'meta.yaml'), 'w') as f: + with open(join(dir_path, "meta.yaml"), "w") as f: f.write(clear_whitespace(CRAN_META.format(**d))) - if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite': - with open(join(dir_path, 'build.sh'), 'wb') as f: + if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite": + with open(join(dir_path, "build.sh"), "wb") as f: if from_sources == _all: - f.write(CRAN_BUILD_SH_SOURCE.format(**d).encode('utf-8')) + f.write(CRAN_BUILD_SH_SOURCE.format(**d).encode("utf-8")) elif from_sources == []: - 
f.write(CRAN_BUILD_SH_BINARY.format(**d).encode('utf-8')) + f.write(CRAN_BUILD_SH_BINARY.format(**d).encode("utf-8")) else: tpbt = [target_platform_bash_test_by_sel[t] for t in from_sources] - d['source_pf_bash'] = ' || '.join(['[[ ${target_platform} ' + s + ' ]]' - for s in tpbt]) - f.write(CRAN_BUILD_SH_MIXED.format(**d).encode('utf-8')) - - if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite': - with open(join(dir_path, 'bld.bat'), 'wb') as f: - if len([fs for fs in from_sources if fs.startswith('win')]) == 2: - f.write(CRAN_BLD_BAT_SOURCE.format(**d).replace('\n', '\r\n').encode('utf-8')) + d["source_pf_bash"] = " || ".join( + ["[[ ${target_platform} " + s + " ]]" for s in tpbt] + ) + f.write(CRAN_BUILD_SH_MIXED.format(**d).encode("utf-8")) + + if not exists(join(dir_path, "bld.bat")) or update_policy == "overwrite": + with open(join(dir_path, "bld.bat"), "wb") as f: + if len([fs for fs in from_sources if fs.startswith("win")]) == 2: + f.write( + CRAN_BLD_BAT_SOURCE.format(**d) + .replace("\n", "\r\n") + .encode("utf-8") + ) else: - f.write(CRAN_BLD_BAT_MIXED.format(**d).replace('\n', '\r\n').encode('utf-8')) + f.write( + CRAN_BLD_BAT_MIXED.format(**d) + .replace("\n", "\r\n") + .encode("utf-8") + ) def version_compare(recipe_dir, newest_conda_version): @@ -1459,7 +1711,7 @@ def get_outdated(output_dir, cran_index, packages=()): to_update = [] recipes = listdir(output_dir) for recipe in recipes: - if not recipe.startswith('r-') or not isdir(recipe): + if not recipe.startswith("r-") or not isdir(recipe): continue recipe_name = recipe[2:] @@ -1471,8 +1723,9 @@ def get_outdated(output_dir, cran_index, packages=()): print("Skipping %s, not found on CRAN" % recipe) continue - version_compare(join(output_dir, recipe), - cran_index[recipe_name][1].replace('-', '_')) + version_compare( + join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_") + ) print("Updating %s" % recipe) to_update.append(recipe_name) @@ -1481,11 +1734,10 @@ def get_outdated(output_dir, cran_index, packages=()): def get_existing(output_dir, cran_index, packages=()): - existing = [] recipes = listdir(output_dir) for recipe in recipes: - if not recipe.startswith('r-') or not isdir(recipe): + if not recipe.startswith("r-") or not isdir(recipe): continue recipe_name = recipe[2:] @@ -1507,7 +1759,7 @@ def up_to_date(cran_index, package): return False # For now. We can do better; need to collect *all* information upfront. - if 'github.com' in location: + if "github.com" in location: return False else: if cran_pkg_name not in cran_index: @@ -1541,28 +1793,34 @@ def get_license_info(license_text, allowed_license_families): # The list order matters. The first element should be the name of the # license file shipped with r-base. 
- d_license = {'agpl3': ['AGPL-3', 'AGPL (>= 3)', 'AGPL', - 'GNU Affero General Public License'], - 'artistic2': ['Artistic-2.0', 'Artistic License 2.0'], - 'gpl2': ['GPL-2', 'GPL (>= 2)', 'GNU General Public License (>= 2)'], - 'gpl3': ['GPL-3', 'GPL (>= 3)', 'GNU General Public License (>= 3)', - 'GPL', 'GNU General Public License'], - 'lgpl2': ['LGPL-2', 'LGPL (>= 2)'], - 'lgpl21': ['LGPL-2.1', 'LGPL (>= 2.1)'], - 'lgpl3': ['LGPL-3', 'LGPL (>= 3)', 'LGPL', - 'GNU Lesser General Public License'], - 'bsd2': ['BSD_2_clause', 'BSD_2_Clause', 'BSD 2-clause License'], - 'bsd3': ['BSD_3_clause', 'BSD_3_Clause', 'BSD 3-clause License'], - 'mit': ['MIT'], - } - - license_file_template = '\'{{{{ environ["PREFIX"] }}}}/lib/R/share/licenses/{license_id}\'' + d_license = { + "agpl3": ["AGPL-3", "AGPL (>= 3)", "AGPL", "GNU Affero General Public License"], + "artistic2": ["Artistic-2.0", "Artistic License 2.0"], + "gpl2": ["GPL-2", "GPL (>= 2)", "GNU General Public License (>= 2)"], + "gpl3": [ + "GPL-3", + "GPL (>= 3)", + "GNU General Public License (>= 3)", + "GPL", + "GNU General Public License", + ], + "lgpl2": ["LGPL-2", "LGPL (>= 2)"], + "lgpl21": ["LGPL-2.1", "LGPL (>= 2.1)"], + "lgpl3": ["LGPL-3", "LGPL (>= 3)", "LGPL", "GNU Lesser General Public License"], + "bsd2": ["BSD_2_clause", "BSD_2_Clause", "BSD 2-clause License"], + "bsd3": ["BSD_3_clause", "BSD_3_Clause", "BSD 3-clause License"], + "mit": ["MIT"], + } + + license_file_template = ( + "'{{{{ environ[\"PREFIX\"] }}}}/lib/R/share/licenses/{license_id}'" + ) license_texts = [] license_files = [] # split license_text by "|" and "+" into parts for further matching - license_text_parts = [l_opt.strip() for l_opt in re.split(r'\||\+', license_text)] + license_text_parts = [l_opt.strip() for l_opt in re.split(r"\||\+", license_text)] for l_opt in license_text_parts: # the file case if l_opt.startswith("file "): @@ -1575,7 +1833,9 @@ def get_license_info(license_text, allowed_license_families): l_opt_text = d_license[license_id][0] license_texts.append(l_opt_text) - license_files.append(license_file_template.format(license_id=l_opt_text)) + license_files.append( + license_file_template.format(license_id=l_opt_text) + ) break # Join or fallback to original license_text if matched license_texts is empty diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index e83e76fd65..14d9c44f77 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -8,14 +8,14 @@ # - mingw32 support (really any windows support, completely untested) # - replace manual "luajit -e require 'blah'" with built-in entry-point testing +import json import os import subprocess import tempfile from glob import glob -import json from sys import platform as _platform -INDENT = '\n - ' +INDENT = "\n - " rockspec_parser = """ local ok,cjson = pcall(require, "cjson") @@ -145,7 +145,7 @@ def add_parser(repos): ) luarocks.add_argument( "packages", - nargs='+', + nargs="+", help="luarocks packages to create recipe skeletons for.", ) luarocks.add_argument( @@ -159,8 +159,9 @@ def add_parser(repos): ) luarocks.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.') + action="store_true", + help="Create recipes for dependencies if they do not already exist.", + ) def package_exists(package_name): @@ -224,7 +225,6 @@ def ensure_base_deps(deps): def skeletonize(packages, output_dir=".", version=None, recursive=False): - # Check that we have Lua installed (any 
version) # Check that we have luarocks installed @@ -245,33 +245,37 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): while packages: package = packages.pop() - packagename = "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() - d = package_dicts.setdefault(package, + packagename = ( + "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() + ) + d = package_dicts.setdefault( + package, { - 'packagename': packagename, - 'version': "0.0", - 'filename': "", - 'url': "", - 'md5': "", - 'usemd5': "# ", - 'usefile': "# ", - 'usegit': "# ", - 'usegittag': "# ", - 'usegitrev': "# ", - 'gittag': "", - 'gitrev': "", - 'noarch_python_comment': "# ", - 'build_depends': "", - 'run_depends': "", - 'test_comment': "", - 'entry_comment': "", - 'test_commands': "", - 'home_comment': "# ", - 'homeurl': "", - 'license': "Unknown", - 'summary_comment': "# ", - 'summary': "", - }) + "packagename": packagename, + "version": "0.0", + "filename": "", + "url": "", + "md5": "", + "usemd5": "# ", + "usefile": "# ", + "usegit": "# ", + "usegittag": "# ", + "usegitrev": "# ", + "gittag": "", + "gitrev": "", + "noarch_python_comment": "# ", + "build_depends": "", + "run_depends": "", + "test_comment": "", + "entry_comment": "", + "test_commands": "", + "home_comment": "# ", + "homeurl": "", + "license": "Unknown", + "summary_comment": "# ", + "summary": "", + }, + ) # Download rockspec o = subprocess.call(["luarocks", "download", package, "--rockspec"]) @@ -282,67 +286,68 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): fs = glob(package + "*.rockspec") if len(fs) != 1: raise Exception("Failed to download rockspec") - d['rockspec_file'] = fs[0] + d["rockspec_file"] = fs[0] # Parse the rockspec into a dictionary - p = subprocess.Popen(["lua", "-e", rockspec_parser % d['rockspec_file']], - stdout=subprocess.PIPE) + p = subprocess.Popen( + ["lua", "-e", rockspec_parser % d["rockspec_file"]], stdout=subprocess.PIPE + ) out, err = p.communicate() if "ERROR" in out: raise Exception(out.replace("ERROR: ", "")) spec = json.loads(out) # Gather the basic details - d['rockname'] = getval(spec, "package") - d['version'] = getval(spec, "version") - d['version'] = "".join([c for c in d['version'] if c.isalnum()]) + d["rockname"] = getval(spec, "package") + d["version"] = getval(spec, "version") + d["version"] = "".join([c for c in d["version"] if c.isalnum()]) source = getval(spec, "source") # Figure out how to download the package, and from where - d['url'] = getval(source, "url") - ext = os.path.splitext(d['url'])[-1] + d["url"] = getval(source, "url") + ext = os.path.splitext(d["url"])[-1] if ext in [".zip", ".tar", ".tar.bz2", ".tar.xz", ".tar.gz"]: - d['usefile'] = "" - d['filename'] = os.path.split(d['url'])[-1] + d["usefile"] = "" + d["filename"] = os.path.split(d["url"])[-1] if "md5" in source: md5 = getval(source, "md5") if len(md5): - d['md5'] = md5 - d['usemd5'] = "" - elif ext in [".git"] or d['url'][:4] == "git:": - d['usegit'] = "" + d["md5"] = md5 + d["usemd5"] = "" + elif ext in [".git"] or d["url"][:4] == "git:": + d["usegit"] = "" # Check if we're using a tag or a commit if "tag" in source: - d['usegittag'] = "" - d['gittag'] = getval(source, "tag") + d["usegittag"] = "" + d["gittag"] = getval(source, "tag") elif "branch" in source: - d['usegittag'] = "" - d['gittag'] = getval(source, "branch") - warn_against_branches(d['gittag']) + d["usegittag"] = "" + d["gittag"] = getval(source, "branch") + 
warn_against_branches(d["gittag"]) else: - d['usegittag'] = "" - d['gittag'] = "master" - warn_against_branches(d['gittag']) + d["usegittag"] = "" + d["gittag"] = "master" + warn_against_branches(d["gittag"]) # Gather the description if "description" in spec: desc = getval(spec, "description") if "homepage" in desc: - d['homeurl'] = desc['homepage'] - d['home_comment'] = "" + d["homeurl"] = desc["homepage"] + d["home_comment"] = "" if "summary" in desc: - d['summary'] = desc['summary'] - d['summary_comment'] = "" + d["summary"] = desc["summary"] + d["summary_comment"] = "" if "license" in desc: - d['license'] = desc['license'] + d["license"] = desc["license"] # Gather the dependencies if "dependencies" in spec: deps = getval(spec, "dependencies") if len(deps): deps = ensure_base_deps([format_dep(dep) for dep in deps]) - d['build_depends'] = INDENT.join([''] + deps) - d['run_depends'] = d['build_depends'] + d["build_depends"] = INDENT.join([""] + deps) + d["run_depends"] = d["build_depends"] # Build some entry-point tests. if "build" in spec: @@ -352,33 +357,37 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False): our_plat = "unix" modules = None - if "modules" in spec['build']: - modules = spec['build']["modules"] - elif "platforms" in spec['build']: - if our_plat in spec['build']['platforms']: - if "modules" in spec['build']['platforms'][our_plat]: - modules = spec['build']['platforms'][our_plat]["modules"] + if "modules" in spec["build"]: + modules = spec["build"]["modules"] + elif "platforms" in spec["build"]: + if our_plat in spec["build"]["platforms"]: + if "modules" in spec["build"]["platforms"][our_plat]: + modules = spec["build"]["platforms"][our_plat]["modules"] if modules: - d['test_commands'] = INDENT.join([''] + - ["""lua -e "require '%s'\"""" % r - for r in modules.keys()]) + d["test_commands"] = INDENT.join( + [""] + ["""lua -e "require '%s'\"""" % r for r in modules.keys()] + ) # If we didn't find any modules to import, import the base name - if d['test_commands'] == "": - d['test_commands'] = INDENT.join([''] + ["""lua -e "require '%s'" """ % d['rockname']]) + if d["test_commands"] == "": + d["test_commands"] = INDENT.join( + [""] + ["""lua -e "require '%s'" """ % d["rockname"]] + ) # Build the luarocks skeleton os.chdir(cwd) for package in package_dicts: d = package_dicts[package] - name = d['packagename'] + name = d["packagename"] os.makedirs(os.path.join(output_dir, name)) - print(f"Writing recipe for {package.lower()} to {os.path.join(output_dir, name)}") - with open(os.path.join(output_dir, name, 'meta.yaml'), 'w') as f: + print( + f"Writing recipe for {package.lower()} to {os.path.join(output_dir, name)}" + ) + with open(os.path.join(output_dir, name, "meta.yaml"), "w") as f: f.write(LUAROCKS_META.format(**d)) - with open(os.path.join(output_dir, name, 'build.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "build.sh"), "w") as f: f.write(LUAROCKS_BUILD_SH.format(**d)) - with open(os.path.join(output_dir, name, 'post-link.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "post-link.sh"), "w") as f: f.write(LUAROCKS_POSTLINK_SH) - with open(os.path.join(output_dir, name, 'pre-unlink.sh'), 'w') as f: + with open(os.path.join(output_dir, name, "pre-unlink.sh"), "w") as f: f.write(LUAROCKS_PREUNLINK_SH.format(**d)) diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 152be0b164..f376ad49a0 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -5,40 +5,49 @@ """ -from 
collections import defaultdict, OrderedDict import keyword import logging import os -from os import makedirs, listdir, getcwd, chdir -from os.path import join, isdir, exists, isfile, abspath - -from conda_build.version import _parse as parse_version import re -from shutil import copy2 import subprocess import sys +from collections import OrderedDict, defaultdict +from os import chdir, getcwd, listdir, makedirs +from os.path import abspath, exists, isdir, isfile, join +from shutil import copy2 from tempfile import mkdtemp +from urllib.parse import urljoin, urlsplit import pkginfo import requests -from requests.packages.urllib3.util.url import parse_url -from urllib.parse import urljoin, urlsplit import yaml +from requests.packages.urllib3.util.url import parse_url -from conda_build.conda_interface import spec_from_line -from conda_build.conda_interface import input, configparser, StringIO -from conda_build.conda_interface import download -from conda_build.conda_interface import normalized_version -from conda_build.conda_interface import human_bytes, hashsum_file -from conda_build.conda_interface import default_python - -from conda_build.utils import decompressible_exts, tar_xf, rm_rf, check_call_env, ensure_list -from conda_build.source import apply_patch -from conda_build.environ import create_env +from conda_build.conda_interface import ( + StringIO, + configparser, + default_python, + download, + hashsum_file, + human_bytes, + input, + normalized_version, + spec_from_line, +) from conda_build.config import Config -from conda_build.metadata import MetaData +from conda_build.environ import create_env from conda_build.license_family import allowed_license_families, guess_license_family +from conda_build.metadata import MetaData from conda_build.render import FIELDS as EXPECTED_SECTION_ORDER +from conda_build.source import apply_patch +from conda_build.utils import ( + check_call_env, + decompressible_exts, + ensure_list, + rm_rf, + tar_xf, +) +from conda_build.version import _parse as parse_version pypi_example = """ Examples: @@ -58,11 +67,18 @@ # Definition of REQUIREMENTS_ORDER below are from # https://github.com/conda-forge/conda-smithy/blob/master/conda_smithy/lint_recipe.py#L16 -REQUIREMENTS_ORDER = ['host', 'run'] +REQUIREMENTS_ORDER = ["host", "run"] # Definition of ABOUT_ORDER reflects current practice -ABOUT_ORDER = ['home', 'license', 'license_family', 'license_file', 'summary', - 'doc_url', 'dev_url'] +ABOUT_ORDER = [ + "home", + "license", + "license_family", + "license_file", + "summary", + "doc_url", + "dev_url", +] PYPI_META_HEADER = """{{% set name = "{packagename}" %}} {{% set version = "{version}" %}} @@ -74,19 +90,26 @@ # The top-level ordering is irrelevant because the write order of 'package', # etc. is determined by EXPECTED_SECTION_ORDER. 
PYPI_META_STATIC = { - 'package': OrderedDict([ - ('name', '{{ name|lower }}'), - ('version', '{{ version }}'), - ]), - 'source': OrderedDict([ - ('url', '/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz'), # NOQA - ]), - 'build': OrderedDict([ - ('number', 0), - ]), - 'extra': OrderedDict([ - ('recipe-maintainers', '') - ]), + "package": OrderedDict( + [ + ("name", "{{ name|lower }}"), + ("version", "{{ version }}"), + ] + ), + "source": OrderedDict( + [ + ( + "url", + "/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz", + ), # NOQA + ] + ), + "build": OrderedDict( + [ + ("number", 0), + ] + ), + "extra": OrderedDict([("recipe-maintainers", "")]), } # Note the {} formatting bits here @@ -136,7 +159,7 @@ def run_setup (script_name, script_args=None, stop_after="run"): """Run a setup script in a somewhat controlled environment, and ''' -INDENT = '\n - ' +INDENT = "\n - " def _ssl_no_verify(): @@ -145,22 +168,22 @@ def _ssl_no_verify(): This provides a workaround for users in some corporate environments where MITM style proxies make it difficult to fetch data over HTTPS. """ - return os.environ.get('SSL_NO_VERIFY', '').strip().lower() in ('1', 'true') + return os.environ.get("SSL_NO_VERIFY", "").strip().lower() in ("1", "true") def package_exists(package_name, pypi_url=None): if not pypi_url: - pypi_url = 'https://pypi.io/pypi' + pypi_url = "https://pypi.io/pypi" # request code will be 404 if the package does not exist. Requires exact match. - r = requests.get(pypi_url + '/' + package_name, verify=not _ssl_no_verify()) + r = requests.get(pypi_url + "/" + package_name, verify=not _ssl_no_verify()) return r.status_code != 404 -def __print_with_indent(line, prefix='', suffix='', level=0, newline=True): - output = '' +def __print_with_indent(line, prefix="", suffix="", level=0, newline=True): + output = "" if level: - output = ' ' * level - return output + prefix + line + suffix + ('\n' if newline else '') + output = " " * level + return output + prefix + line + suffix + ("\n" if newline else "") def _print_dict(recipe_metadata, order=None, level=0, indent=2): @@ -173,30 +196,40 @@ def _print_dict(recipe_metadata, order=None, level=0, indent=2): :param int indent: Indentation - Number of empty spaces for each level :return string: Recipe rendered with the metadata """ - rendered_recipe = '' + rendered_recipe = "" if not order: order = sorted(list(recipe_metadata.keys())) for section_name in order: if section_name in recipe_metadata and recipe_metadata[section_name]: - rendered_recipe += __print_with_indent(section_name, suffix=':') - for attribute_name, attribute_value in recipe_metadata[section_name].items(): + rendered_recipe += __print_with_indent(section_name, suffix=":") + for attribute_name, attribute_value in recipe_metadata[ + section_name + ].items(): if attribute_value is None: continue - if isinstance(attribute_value, str) or not hasattr(attribute_value, "__iter__"): - rendered_recipe += __print_with_indent(attribute_name, suffix=':', level=level + indent, - newline=False) + if isinstance(attribute_value, str) or not hasattr( + attribute_value, "__iter__" + ): + rendered_recipe += __print_with_indent( + attribute_name, suffix=":", level=level + indent, newline=False + ) rendered_recipe += _formating_value(attribute_name, attribute_value) - elif hasattr(attribute_value, 'keys'): - rendered_recipe += _print_dict(attribute_value, sorted(list(attribute_value.keys()))) + elif hasattr(attribute_value, "keys"): + rendered_recipe += 
_print_dict( + attribute_value, sorted(list(attribute_value.keys())) + ) # assume that it's a list if it exists at all elif attribute_value: - rendered_recipe += __print_with_indent(attribute_name, suffix=':', level=level + indent) + rendered_recipe += __print_with_indent( + attribute_name, suffix=":", level=level + indent + ) for item in attribute_value: - rendered_recipe += __print_with_indent(item, prefix='- ', - level=level + indent) + rendered_recipe += __print_with_indent( + item, prefix="- ", level=level + indent + ) # add a newline in between sections if level == 0: - rendered_recipe += '\n' + rendered_recipe += "\n" return rendered_recipe @@ -209,20 +242,34 @@ def _formating_value(attribute_name, attribute_value): :param string attribute_value: Attribute value :return string: Value quoted if need """ - pattern_search = re.compile(r'[@_!#$%^&*()<>?/\|}{~:]') - if isinstance(attribute_value, str) \ - and pattern_search.search(attribute_value) \ - or attribute_name in ["summary", "description", "version", "script"]: + pattern_search = re.compile(r"[@_!#$%^&*()<>?/\|}{~:]") + if ( + isinstance(attribute_value, str) + and pattern_search.search(attribute_value) + or attribute_name in ["summary", "description", "version", "script"] + ): return ' "' + str(attribute_value) + '"\n' - return ' ' + str(attribute_value) + '\n' - - -def skeletonize(packages, output_dir=".", version=None, recursive=False, - all_urls=False, pypi_url='https://pypi.io/pypi/', noprompt=True, - version_compare=False, python_version=None, manual_url=False, - all_extras=False, noarch_python=False, config=None, setup_options=None, - extra_specs=[], - pin_numpy=False): + return " " + str(attribute_value) + "\n" + + +def skeletonize( + packages, + output_dir=".", + version=None, + recursive=False, + all_urls=False, + pypi_url="https://pypi.io/pypi/", + noprompt=True, + version_compare=False, + python_version=None, + manual_url=False, + all_extras=False, + noarch_python=False, + config=None, + setup_options=None, + extra_specs=[], + pin_numpy=False, +): package_dicts = {} if not setup_options: @@ -234,46 +281,50 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, if not config: config = Config() - python_version = python_version or config.variant.get('python', default_python) + python_version = python_version or config.variant.get("python", default_python) created_recipes = [] while packages: package = packages.pop() created_recipes.append(package) - is_url = ':' in package + is_url = ":" in package if is_url: - package_pypi_url = '' + package_pypi_url = "" else: - package_pypi_url = urljoin(pypi_url, '/'.join((package, 'json'))) + package_pypi_url = urljoin(pypi_url, "/".join((package, "json"))) if not is_url: dir_path = join(output_dir, package.lower()) if exists(dir_path) and not version_compare: raise RuntimeError("directory already exists: %s" % dir_path) - d = package_dicts.setdefault(package, + d = package_dicts.setdefault( + package, { - 'packagename': package, - 'run_depends': '', - 'build_depends': '', - 'entry_points': '', - 'test_commands': '', - 'tests_require': '', - }) + "packagename": package, + "run_depends": "", + "build_depends": "", + "entry_points": "", + "test_commands": "", + "tests_require": "", + }, + ) if is_url: - del d['packagename'] + del d["packagename"] if is_url: - d['version'] = 'UNKNOWN' + d["version"] = "UNKNOWN" # Make sure there is always something to pass in for this pypi_data = {} else: pypi_resp = requests.get(package_pypi_url, verify=not _ssl_no_verify()) if 
pypi_resp.status_code != 200: - sys.exit("Request to fetch %s failed with status: %d" - % (package_pypi_url, pypi_resp.status_code)) + sys.exit( + "Request to fetch %s failed with status: %d" + % (package_pypi_url, pypi_resp.status_code) + ) pypi_data = pypi_resp.json() @@ -283,65 +334,77 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, version_compare(versions) if version: if version not in versions: - sys.exit("Error: Version %s of %s is not available on PyPI." - % (version, package)) - d['version'] = version + sys.exit( + "Error: Version %s of %s is not available on PyPI." + % (version, package) + ) + d["version"] = version else: # select the most visible version from PyPI. if not versions: - sys.exit("Error: Could not find any versions of package %s" % package) + sys.exit( + "Error: Could not find any versions of package %s" % package + ) if len(versions) > 1: - print("Warning, the following versions were found for %s" % - package) + print("Warning, the following versions were found for %s" % package) for ver in versions: print(ver) print("Using %s" % versions[-1]) print("Use --version to specify a different version.") - d['version'] = versions[-1] + d["version"] = versions[-1] - data, d['pypiurl'], d['filename'], d['digest'] = get_download_data(pypi_data, - package, - d['version'], - is_url, all_urls, - noprompt, manual_url) + data, d["pypiurl"], d["filename"], d["digest"] = get_download_data( + pypi_data, package, d["version"], is_url, all_urls, noprompt, manual_url + ) - d['import_tests'] = '' + d["import_tests"] = "" # Get summary directly from the metadata returned # from PyPI. summary will be pulled from package information in # get_package_metadata or a default value set if it turns out that # data['summary'] is empty. Ignore description as it is too long. - d['summary'] = data.get('summary', '') - get_package_metadata(package, d, data, output_dir, python_version, - all_extras, recursive, created_recipes, noarch_python, - noprompt, packages, extra_specs, config=config, - setup_options=setup_options) + d["summary"] = data.get("summary", "") + get_package_metadata( + package, + d, + data, + output_dir, + python_version, + all_extras, + recursive, + created_recipes, + noarch_python, + noprompt, + packages, + extra_specs, + config=config, + setup_options=setup_options, + ) # Set these *after* get_package_metadata so that the preferred hash # can be calculated from the downloaded file, if necessary. - d['hash_type'] = d['digest'][0] - d['hash_value'] = d['digest'][1] + d["hash_type"] = d["digest"][0] + d["hash_value"] = d["digest"][1] # Change requirements to use format that guarantees the numpy # version will be pinned when the recipe is built and that # the version is included in the build string. if pin_numpy: - for depends in ['build_depends', 'run_depends']: + for depends in ["build_depends", "run_depends"]: deps = d[depends] - numpy_dep = [idx for idx, dep in enumerate(deps) - if 'numpy' in dep] + numpy_dep = [idx for idx, dep in enumerate(deps) if "numpy" in dep] if numpy_dep: # Turns out this needs to be inserted before the rest # of the numpy spec. 
- deps.insert(numpy_dep[0], 'numpy x.x') + deps.insert(numpy_dep[0], "numpy x.x") d[depends] = deps for package in package_dicts: d = package_dicts[package] - name = d['packagename'].lower() + name = d["packagename"].lower() makedirs(join(output_dir, name)) print("Writing recipe for %s" % package.lower()) - with open(join(output_dir, name, 'meta.yaml'), 'w') as f: + with open(join(output_dir, name, "meta.yaml"), "w") as f: rendered_recipe = PYPI_META_HEADER.format(**d) ordered_recipe = OrderedDict() @@ -352,47 +415,53 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, except KeyError: ordered_recipe[key] = OrderedDict() - if '://' not in pypi_url: + if "://" not in pypi_url: raise ValueError("pypi_url must have protocol (e.g. http://) included") base_url = urlsplit(pypi_url) base_url = "://".join((base_url.scheme, base_url.netloc)) - ordered_recipe['source']['url'] = urljoin(base_url, ordered_recipe['source']['url']) - ordered_recipe['source']['sha256'] = d['hash_value'] + ordered_recipe["source"]["url"] = urljoin( + base_url, ordered_recipe["source"]["url"] + ) + ordered_recipe["source"]["sha256"] = d["hash_value"] - if d['entry_points']: - ordered_recipe['build']['entry_points'] = d['entry_points'] + if d["entry_points"]: + ordered_recipe["build"]["entry_points"] = d["entry_points"] if noarch_python: - ordered_recipe['build']['noarch'] = 'python' + ordered_recipe["build"]["noarch"] = "python" recipe_script_cmd = ["{{ PYTHON }} -m pip install . -vv"] - ordered_recipe['build']['script'] = ' '.join(recipe_script_cmd + setup_options) + ordered_recipe["build"]["script"] = " ".join( + recipe_script_cmd + setup_options + ) # Always require python as a dependency. Pip is because we use pip for # the install line. - ordered_recipe['requirements'] = OrderedDict() - ordered_recipe['requirements']['host'] = sorted(set(['python', 'pip'] + - list(d['build_depends']))) - ordered_recipe['requirements']['run'] = sorted(set(['python'] + - list(d['run_depends']))) + ordered_recipe["requirements"] = OrderedDict() + ordered_recipe["requirements"]["host"] = sorted( + set(["python", "pip"] + list(d["build_depends"])) + ) + ordered_recipe["requirements"]["run"] = sorted( + set(["python"] + list(d["run_depends"])) + ) - if d['import_tests']: - ordered_recipe['test']['imports'] = d['import_tests'] + if d["import_tests"]: + ordered_recipe["test"]["imports"] = d["import_tests"] - if d['test_commands']: - ordered_recipe['test']['commands'] = d['test_commands'] + if d["test_commands"]: + ordered_recipe["test"]["commands"] = d["test_commands"] - if d['tests_require']: - ordered_recipe['test']['requires'] = d['tests_require'] + if d["tests_require"]: + ordered_recipe["test"]["requires"] = d["tests_require"] - ordered_recipe['about'] = OrderedDict() + ordered_recipe["about"] = OrderedDict() for key in ABOUT_ORDER: try: - ordered_recipe['about'][key] = d[key] + ordered_recipe["about"][key] = d[key] except KeyError: - ordered_recipe['about'][key] = '' - ordered_recipe['extra']['recipe-maintainers'] = ['your-github-id-here'] + ordered_recipe["about"][key] = "" + ordered_recipe["extra"]["recipe-maintainers"] = ["your-github-id-here"] # Prune any top-level sections that are empty rendered_recipe += _print_dict(ordered_recipe, EXPECTED_SECTION_ORDER) @@ -410,14 +479,13 @@ def skeletonize(packages, output_dir=".", version=None, recursive=False, # Fix the indents recipe_lines = [] for line in rendered_recipe.splitlines(): - match = re.search(r'^\s+(-) ', line, - flags=re.MULTILINE) + match = 
re.search(r"^\s+(-) ", line, flags=re.MULTILINE) if match: - pre, sep, post = line.partition('-') - sep = ' ' + sep + pre, sep, post = line.partition("-") + sep = " " + sep line = pre + sep + post recipe_lines.append(line) - rendered_recipe = '\n'.join(recipe_lines) + rendered_recipe = "\n".join(recipe_lines) f.write(rendered_recipe) @@ -434,7 +502,7 @@ def add_parser(repos): ) pypi.add_argument( "packages", - nargs='+', + nargs="+", help="""PyPi packages to create recipe skeletons for. You can also specify package[extra,...] features.""", ) @@ -456,7 +524,7 @@ def add_parser(repos): ) pypi.add_argument( "--pypi-url", - default='https://pypi.io/pypi/', + default="https://pypi.io/pypi/", help="URL to use for PyPI (default: %(default)s).", ) pypi.add_argument( @@ -465,7 +533,7 @@ def add_parser(repos): default=True, dest="noprompt", help="""Prompt the user on ambiguous choices. Default is to make the - best possible choice and continue.""" + best possible choice and continue.""", ) pypi.add_argument( "--all-extras", @@ -475,66 +543,70 @@ def add_parser(repos): ) pypi.add_argument( "--recursive", - action='store_true', - help='Create recipes for dependencies if they do not already exist.' + action="store_true", + help="Create recipes for dependencies if they do not already exist.", ) pypi.add_argument( "--version-compare", - action='store_true', + action="store_true", help="""Compare the package version of the recipe with all available - versions on PyPI.""" + versions on PyPI.""", ) pypi.add_argument( "--python-version", - action='store', + action="store", default=default_python, help="""Version of Python to use to run setup.py. Default is %(default)s.""", - choices=['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11'], + choices=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"], ) pypi.add_argument( "--manual-url", - action='store_true', + action="store_true", default=False, - help=("Manually choose source url when more than one urls are present." - "Default is the one with least source size.") + help=( + "Manually choose source url when more than one urls are present." + "Default is the one with least source size." + ), ) pypi.add_argument( "--noarch-python", - action='store_true', + action="store_true", default=False, - help="Creates recipe as noarch python" + help="Creates recipe as noarch python", ) pypi.add_argument( "--setup-options", - action='append', + action="append", default=[], - help='Options to be added to setup.py install in the recipe. ' - 'The same options are passed to setup.py install in both ' - 'the construction of the recipe and in the recipe itself.' - 'For options that include a double-hypen or to pass multiple ' - 'options, use the syntax ' - '--setup-options="--option1 --option-with-arg arg"' + help="Options to be added to setup.py install in the recipe. " + "The same options are passed to setup.py install in both " + "the construction of the recipe and in the recipe itself." + "For options that include a double-hypen or to pass multiple " + "options, use the syntax " + '--setup-options="--option1 --option-with-arg arg"', ) pypi.add_argument( "--pin-numpy", - action='store_true', + action="store_true", help="Ensure that the generated recipe pins the version of numpy" - "to CONDA_NPY." 
+ "to CONDA_NPY.", ) pypi.add_argument( "--extra-specs", - action='append', + action="append", default=[], help="Extra specs for the build environment to extract the skeleton.", ) -def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, manual_url): +def get_download_data( + pypi_data, package, version, is_url, all_urls, noprompt, manual_url +): """ Get at least one valid *source* download URL or fail. @@ -550,64 +622,68 @@ def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, m digest : dict Key is type of checksum, value is the checksum. """ - data = pypi_data['info'] if not is_url else {} + data = pypi_data["info"] if not is_url else {} # PyPI will typically have several downloads (source, wheels) for one # package/version. - urls = [url for url in pypi_data['releases'][version]] if not is_url else [package] + urls = [url for url in pypi_data["releases"][version]] if not is_url else [package] if not is_url and not all_urls: # Try to find source urls - urls = [url for url in urls if url['packagetype'] == 'sdist'] + urls = [url for url in urls if url["packagetype"] == "sdist"] if not urls: # Try harder for a download location - if data.get('download_url'): - urls = [defaultdict(str, {'url': data['download_url']})] - if not urls[0]['url']: + if data.get("download_url"): + urls = [defaultdict(str, {"url": data["download_url"]})] + if not urls[0]["url"]: # The package doesn't have a url, or maybe it only has a wheel. - sys.exit("Error: Could not build recipe for %s. " - "Could not find any valid urls." % package) - U = parse_url(urls[0]['url']) + sys.exit( + "Error: Could not build recipe for %s. " + "Could not find any valid urls." % package + ) + U = parse_url(urls[0]["url"]) if not U.path: - sys.exit("Error: Could not parse url for %s: %s" % - (package, U)) - urls[0]['filename'] = U.path.rsplit('/')[-1] - fragment = U.fragment or '' + sys.exit(f"Error: Could not parse url for {package}: {U}") + urls[0]["filename"] = U.path.rsplit("/")[-1] + fragment = U.fragment or "" digest = fragment.split("=") else: sys.exit("Error: No source urls found for %s" % package) if len(urls) > 1 and not noprompt: - print("More than one source version is available for %s:" % - package) + print("More than one source version is available for %s:" % package) if manual_url: for i, url in enumerate(urls): - print("%d: %s (%s) %s" % (i, url['url'], - human_bytes(url['size']), url['comment_text'])) + print( + "%d: %s (%s) %s" + % (i, url["url"], human_bytes(url["size"]), url["comment_text"]) + ) n = int(input("which version should i use? ")) else: print("Using the one with the least source size") print("use --manual-url to override this behavior") - _, n = min((url['size'], i) - for (i, url) in enumerate(urls)) + _, n = min((url["size"], i) for (i, url) in enumerate(urls)) else: n = 0 if not is_url: # Found a location from PyPI. url = urls[n] - pypiurl = url['url'] - print("Using url {} ({}) for {}.".format(pypiurl, - human_bytes(url['size'] or 0), package)) + pypiurl = url["url"] + print( + "Using url {} ({}) for {}.".format( + pypiurl, human_bytes(url["size"] or 0), package + ) + ) - if url['digests']['sha256']: - digest = ('sha256', url['digests']['sha256']) + if url["digests"]["sha256"]: + digest = ("sha256", url["digests"]["sha256"]) else: # That didn't work, even though as of 7/17/2017 some packages # have a 'digests' entry. # As a last-ditch effort, try for the md5_digest entry. 
digest = () - filename = url['filename'] or 'package' + filename = url["filename"] or "package" else: # User provided a URL, try to use it. print("Using url %s" % package) @@ -615,7 +691,7 @@ def get_download_data(pypi_data, package, version, is_url, all_urls, noprompt, m U = parse_url(package) digest = U.fragment.split("=") # TODO: 'package' won't work with unpack() - filename = U.path.rsplit('/', 1)[-1] or 'package' + filename = U.path.rsplit("/", 1)[-1] or "package" return (data, pypiurl, filename, digest) @@ -637,11 +713,10 @@ def version_compare(package, versions): local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") if local_version not in versions: - sys.exit("Error: %s %s is not available on PyPI." - % (package, local_version)) + sys.exit(f"Error: {package} {local_version} is not available on PyPI.") else: # Comparing normalized versions, displaying non normalized ones - new_versions = versions[:norm_versions.index(local_version)] + new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: print("Following new versions of %s are avaliable" % (package)) for ver in new_versions: @@ -653,25 +728,26 @@ def version_compare(package, versions): def convert_version(version): """Convert version into a pin-compatible format according to PEP440.""" - version_parts = version.split('.') - suffixes = ('post', 'pre') + version_parts = version.split(".") + suffixes = ("post", "pre") if any(suffix in version_parts[-1] for suffix in suffixes): version_parts.pop() # the max pin length is n-1, but in terms of index this is n-2 max_ver_len = len(version_parts) - 2 version_parts[max_ver_len] = int(version_parts[max_ver_len]) + 1 - max_pin = '.'.join(str(v) for v in version_parts[:max_ver_len + 1]) - pin_compatible = ' >={},<{}' .format(version, max_pin) + max_pin = ".".join(str(v) for v in version_parts[: max_ver_len + 1]) + pin_compatible = f" >={version},<{max_pin}" return pin_compatible -MARKER_RE = re.compile(r"(?P^[^=<>!~\s;]+)" - r"\s*" - r"(?P[=!><~]=?\s*[^\s;]+)?" - r"(?:\s*;\s+)?(?P[^=<>!~\s;]+)?" - r"\s*" - r"(?P[=<>!\s]+[^=<>!~\s]+)?" - ) +MARKER_RE = re.compile( + r"(?P^[^=<>!~\s;]+)" + r"\s*" + r"(?P[=!><~]=?\s*[^\s;]+)?" + r"(?:\s*;\s+)?(?P[^=<>!~\s;]+)?" + r"\s*" + r"(?P[=<>!\s]+[^=<>!~\s]+)?" +) def _get_env_marker_operator_and_value(constraint): @@ -702,7 +778,7 @@ def _translate_platform_system_constraint(constraint): def _translate_sys_platform_constraint(constraint): operator, value = _get_env_marker_operator_and_value(constraint) # Only take the "letter" part to translate, e.g., "linux2"->"linux", "win32"->"win". 
- system = re.match('^[a-z]*', value, re.I)[0] + system = re.match("^[a-z]*", value, re.I)[0] return "{}{}".format("not " if operator == "!=" else "", system) @@ -717,7 +793,7 @@ def env_mark_lookup(env_mark_name, env_mark_constraint): "sys_platform": _translate_sys_platform_constraint, } marker = env_mark_table[env_mark_name](env_mark_constraint) - return ' # [ ' + marker + ' ]' + return " # [ " + marker + " ]" def parse_dep_with_env_marker(dep_str): @@ -727,25 +803,40 @@ def parse_dep_with_env_marker(dep_str): name = " ".join((name, match.group("constraint").replace(" ", ""))) env_mark = "" if match.group("env_mark_name"): - env_mark = env_mark_lookup(match.group("env_mark_name"), - match.group("env_mark_constraint")) + env_mark = env_mark_lookup( + match.group("env_mark_name"), match.group("env_mark_constraint") + ) return name, env_mark -def get_package_metadata(package, metadata, data, output_dir, python_version, all_extras, - recursive, created_recipes, noarch_python, no_prompt, packages, - extra_specs, config, setup_options): - +def get_package_metadata( + package, + metadata, + data, + output_dir, + python_version, + all_extras, + recursive, + created_recipes, + noarch_python, + no_prompt, + packages, + extra_specs, + config, + setup_options, +): print("Downloading %s" % package) - print("PyPI URL: ", metadata['pypiurl']) - pkginfo = get_pkginfo(package, - filename=metadata['filename'], - pypiurl=metadata['pypiurl'], - digest=metadata['digest'], - python_version=python_version, - extra_specs=extra_specs, - setup_options=setup_options, - config=config) + print("PyPI URL: ", metadata["pypiurl"]) + pkginfo = get_pkginfo( + package, + filename=metadata["filename"], + pypiurl=metadata["pypiurl"], + digest=metadata["digest"], + python_version=python_version, + extra_specs=extra_specs, + setup_options=setup_options, + config=config, + ) metadata.update(get_entry_points(pkginfo)) @@ -754,21 +845,21 @@ def get_package_metadata(package, metadata, data, output_dir, python_version, al if requires or is_setuptools_enabled(pkginfo): list_deps = get_dependencies(requires, is_setuptools_enabled(pkginfo)) - metadata['build_depends'] = ['pip'] + list_deps + metadata["build_depends"] = ["pip"] + list_deps # Never add setuptools to runtime dependencies. 
- metadata['run_depends'] = list_deps + metadata["run_depends"] = list_deps if recursive: packages += get_recursive_deps(created_recipes, list_deps, output_dir) - if 'packagename' not in metadata: - metadata['packagename'] = pkginfo['name'].lower() + if "packagename" not in metadata: + metadata["packagename"] = pkginfo["name"].lower() - if metadata['version'] == 'UNKNOWN': - metadata['version'] = pkginfo['version'] + if metadata["version"] == "UNKNOWN": + metadata["version"] = pkginfo["version"] metadata["import_tests"] = get_import_tests(pkginfo, metadata.get("import_tests")) - metadata['tests_require'] = get_tests_require(pkginfo) + metadata["tests_require"] = get_tests_require(pkginfo) metadata["home"] = get_home(pkginfo, data) @@ -778,10 +869,12 @@ def get_package_metadata(package, metadata, data, output_dir, python_version, al license_name = get_license_name(package, pkginfo, no_prompt, data) metadata["license"] = clean_license_name(license_name) - metadata['license_family'] = guess_license_family(license_name, allowed_license_families) + metadata["license_family"] = guess_license_family( + license_name, allowed_license_families + ) - if 'new_hash_value' in pkginfo: - metadata['digest'] = pkginfo['new_hash_value'] + if "new_hash_value" in pkginfo: + metadata["digest"] = pkginfo["new_hash_value"] def get_recursive_deps(created_recipes, list_deps, output_dir): @@ -813,7 +906,7 @@ def get_dependencies(requires, setuptools_enabled=True): # START :: Copied from conda # These can be removed if we want to drop support for conda <= 4.9.0 def _strip_comment(line): - return line.split('#')[0].rstrip() + return line.split("#")[0].rstrip() def _spec_from_line(line): spec_pat = re.compile( @@ -831,21 +924,23 @@ def _spec_from_line(line): m = spec_pat.match(_strip_comment(line)) if m is None: return None - name, cc, pc = (m.group('name').lower(), m.group('cc'), m.group('pc')) + name, cc, pc = (m.group("name").lower(), m.group("cc"), m.group("pc")) if cc: - return name + cc.replace('=', ' ') + return name + cc.replace("=", " ") elif pc: - if pc.startswith('~= '): - assert pc.count('~=') == 1, \ - f"Overly complex 'Compatible release' spec not handled {line}" - assert pc.count('.'), f"No '.' in 'Compatible release' version {line}" - ver = pc.replace('~= ', '') - ver2 = '.'.join(ver.split('.')[:-1]) + '.*' - return name + ' >=' + ver + ',==' + ver2 + if pc.startswith("~= "): + assert ( + pc.count("~=") == 1 + ), f"Overly complex 'Compatible release' spec not handled {line}" + assert pc.count("."), f"No '.' in 'Compatible release' version {line}" + ver = pc.replace("~= ", "") + ver2 = ".".join(ver.split(".")[:-1]) + ".*" + return name + " >=" + ver + ",==" + ver2 else: - return name + ' ' + pc.replace(' ', '') + return name + " " + pc.replace(" ", "") else: return name + # END :: Copied from conda list_deps = ["setuptools"] if setuptools_enabled else [] @@ -857,7 +952,7 @@ def _spec_from_line(line): # or a multiline requirements string... for dep in dep_text: # ... and may also contain comments... 
- dep = dep.split('#')[0].strip() + dep = dep.split("#")[0].strip() if not dep: continue @@ -873,20 +968,22 @@ def _spec_from_line(line): # if spec != spec2: # print("Disagreement on PEP440 'Compatible release' {} vs {}".format(spec, spec2)) spec = spec_from_line(dep) - if '~=' in dep_orig: + if "~=" in dep_orig: spec = None if spec is None: - if '~=' in dep_orig: + if "~=" in dep_orig: log = logging.getLogger(__name__) - log.warning("Your conda is too old to handle ~= PEP440 'Compatible versions', " - "using copied implementation.") + log.warning( + "Your conda is too old to handle ~= PEP440 'Compatible versions', " + "using copied implementation." + ) spec = _spec_from_line(dep_orig) if spec is None: sys.exit("Error: Could not parse: %s" % dep) if marker: - spec = ' '.join((spec, marker)) + spec = " ".join((spec, marker)) list_deps.append(spec) return list_deps @@ -904,29 +1001,25 @@ def get_import_tests(pkginfo, import_tests_metada=""): olddeps = [] if import_tests_metada != "PLACEHOLDER": - olddeps = [ - x for x in import_tests_metada.split() if x != "-" - ] + olddeps = [x for x in import_tests_metada.split() if x != "-"] return sorted(set(olddeps) | set(pkginfo["packages"])) def get_tests_require(pkginfo): - return sorted( - spec_from_line(pkg) for pkg in ensure_list(pkginfo['tests_require']) - ) + return sorted(spec_from_line(pkg) for pkg in ensure_list(pkginfo["tests_require"])) def get_home(pkginfo, data=None): default_home = "The package home page" - if pkginfo.get('home'): - return pkginfo['home'] + if pkginfo.get("home"): + return pkginfo["home"] if data: return data.get("home", default_home) return default_home def get_summary(pkginfo): - return pkginfo.get("summary", "Summary of the package").replace('"', r'\"') + return pkginfo.get("summary", "Summary of the package").replace('"', r"\"") def get_license_name(package, pkginfo, no_prompt=False, data=None): @@ -940,7 +1033,7 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_classifier = "License :: OSI Approved :: " data_classifier = data.get("classifiers", []) if data else [] - pkg_classifier = pkginfo.get('classifiers', data_classifier) + pkg_classifier = pkginfo.get("classifiers", data_classifier) pkg_classifier = pkg_classifier if pkg_classifier else data_classifier licenses = [ @@ -950,19 +1043,19 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): ] if licenses: - return ' or '.join(licenses) + return " or ".join(licenses) - if pkginfo.get('license'): - license_name = pkginfo['license'] - elif data and 'license' in data: - license_name = data['license'] + if pkginfo.get("license"): + license_name = pkginfo["license"] + elif data and "license" in data: + license_name = data["license"] else: license_name = None if license_name: if no_prompt: return license_name - elif '\n' not in license_name: + elif "\n" not in license_name: print('Using "%s" for the license' % license_name) else: # Some projects put the whole license text in this field @@ -986,7 +1079,7 @@ def clean_license_name(license_name): :param str license_name: Receives the license name :return str: Return a string without the word ``license`` """ - return re.subn(r'(.*)\s+license', r'\1', license_name, flags=re.IGNORECASE)[0] + return re.subn(r"(.*)\s+license", r"\1", license_name, flags=re.IGNORECASE)[0] def get_entry_points(pkginfo): @@ -994,7 +1087,7 @@ def get_entry_points(pkginfo): :param pkginfo: :return dict: """ - entry_points = pkginfo.get('entry_points') + entry_points = pkginfo.get("entry_points") if not 
entry_points: return {} @@ -1011,26 +1104,22 @@ def get_entry_points(pkginfo): else: entry_points = {} for section in _config.sections(): - if section in ['console_scripts', 'gui_scripts']: + if section in ["console_scripts", "gui_scripts"]: entry_points[section] = [ - f'{option}={_config.get(section, option)}' + f"{option}={_config.get(section, option)}" for option in _config.options(section) ] if isinstance(entry_points, dict): - console_script = convert_to_flat_list( - entry_points.get('console_scripts', []) - ) - gui_scripts = convert_to_flat_list( - entry_points.get('gui_scripts', []) - ) + console_script = convert_to_flat_list(entry_points.get("console_scripts", [])) + gui_scripts = convert_to_flat_list(entry_points.get("gui_scripts", [])) # TODO: Use pythonw for gui scripts entry_list = console_script + gui_scripts if entry_list: return { "entry_points": entry_list, - "test_commands": make_entry_tests(entry_list) + "test_commands": make_entry_tests(entry_list), } else: print("WARNING: Could not add entry points. They were:") @@ -1048,7 +1137,11 @@ def convert_to_flat_list(var_scripts): """ if isinstance(var_scripts, str): var_scripts = [var_scripts] - elif var_scripts and isinstance(var_scripts, list) and isinstance(var_scripts[0], list): + elif ( + var_scripts + and isinstance(var_scripts, list) + and isinstance(var_scripts[0], list) + ): var_scripts = [item for sublist in [s for s in var_scripts] for item in sublist] return var_scripts @@ -1064,16 +1157,16 @@ def is_setuptools_enabled(pkginfo): # We have *other* kinds of entry-points so we need # setuptools at run-time - if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}: + if set(entry_points.keys()) - {"console_scripts", "gui_scripts"}: return True return False def valid(name): - if (re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) and not keyword.iskeyword(name)): + if re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) and not keyword.iskeyword(name): return name else: - return '' + return "" def unpack(src_path, tempdir): @@ -1084,8 +1177,11 @@ def unpack(src_path, tempdir): def get_dir(tempdir): - lst = [fn for fn in listdir(tempdir) if not fn.startswith('.') and - isdir(join(tempdir, fn))] + lst = [ + fn + for fn in listdir(tempdir) + if not fn.startswith(".") and isdir(join(tempdir, fn)) + ] if len(lst) == 1: dir_path = join(tempdir, lst[0]) if isdir(dir_path): @@ -1097,31 +1193,31 @@ def get_dir(tempdir): def get_requirements(package, pkginfo, all_extras=True): # Look for package[extra,...] features spec: - match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package) + match_extras = re.match(r"^([^[]+)\[([^]]+)\]$", package) if match_extras: package, extras = match_extras.groups() - extras = extras.split(',') + extras = extras.split(",") else: extras = [] # Extract requested extra feature requirements... 
if all_extras: - extras_require = list(pkginfo['extras_require'].values()) + extras_require = list(pkginfo["extras_require"].values()) else: try: - extras_require = [pkginfo['extras_require'][x] for x in extras] + extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ','.join(extras)) + sys.exit("Error: Invalid extra features: [%s]" % ",".join(extras)) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator - version_marker = re.compile(r'^:python_version(<|<=|!=|==|>=|>)(.+)$') - for extra in pkginfo['extras_require']: + version_marker = re.compile(r"^:python_version(<|<=|!=|==|>=|>)(.+)$") + for extra in pkginfo["extras_require"]: match_ver_mark = version_marker.match(extra) if match_ver_mark: op, ver = match_ver_mark.groups() try: - ver_tuple = tuple(int(x) for x in ver.strip('\'"').split(".")) + ver_tuple = tuple(int(x) for x in ver.strip("'\"").split(".")) except ValueError: pass # bad match; abort else: @@ -1138,11 +1234,11 @@ def get_requirements(package, pkginfo, all_extras=True): else: # op == ">": satisfies_ver = sys.version_info > ver_tuple if satisfies_ver: - extras_require += pkginfo['extras_require'][extra] + extras_require += pkginfo["extras_require"][extra] # ... and collect all needed requirement specs in a single list: requires = [] - for specs in [pkginfo.get('install_requires', "")] + extras_require: + for specs in [pkginfo.get("install_requires", "")] + extras_require: if isinstance(specs, str): requires.append(specs) else: @@ -1151,15 +1247,23 @@ def get_requirements(package, pkginfo, all_extras=True): return requires -def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, config, - setup_options): +def get_pkginfo( + package, + filename, + pypiurl, + digest, + python_version, + extra_specs, + config, + setup_options, +): # Unfortunately, two important pieces of metadata are only stored in # the package itself: the dependencies, and the entry points (if the # package uses distribute). Our strategy is to download the package # and "fake" distribute/setuptools's setup() function to get this # information from setup.py. If this sounds evil, keep in mind that # distribute itself already works by monkeypatching distutils. - tempdir = mkdtemp('conda_skeleton_' + filename) + tempdir = mkdtemp("conda_skeleton_" + filename) if not isdir(config.src_cache): makedirs(config.src_cache) @@ -1170,22 +1274,26 @@ def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, # Download it to the build source cache. That way, you have # it. download_path = join(config.src_cache, filename) - if not isfile(download_path) or \ - hashsum_file(download_path, hash_type) != hash_value: + if ( + not isfile(download_path) + or hashsum_file(download_path, hash_type) != hash_value + ): download(pypiurl, join(config.src_cache, filename)) if hashsum_file(download_path, hash_type) != hash_value: - raise RuntimeError(' Download of {} failed' - ' checksum type {} expected value {}. Please' - ' try again.'.format(package, hash_type, hash_value)) + raise RuntimeError( + " Download of {} failed" + " checksum type {} expected value {}. Please" + " try again.".format(package, hash_type, hash_value) + ) else: print("Using cached download") # Calculate the preferred hash type here if necessary. 
# Needs to be done in this block because this is where we have # access to the source file. - if hash_type != 'sha256': - new_hash_value = hashsum_file(download_path, 'sha256') + if hash_type != "sha256": + new_hash_value = hashsum_file(download_path, "sha256") else: - new_hash_value = '' + new_hash_value = "" print("Unpacking %s..." % package) unpack(join(config.src_cache, filename), tempdir) @@ -1193,16 +1301,24 @@ def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, print("working in %s" % tempdir) src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy - run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config, - setup_options=setup_options) + run_setuppy( + src_dir, + tempdir, + python_version, + extra_specs=extra_specs, + config=config, + setup_options=setup_options, + ) try: - with open(join(tempdir, 'pkginfo.yaml')) as fn: + with open(join(tempdir, "pkginfo.yaml")) as fn: pkg_info = yaml.safe_load(fn) except OSError: - print("WARNING: the pkginfo.yaml file was absent, falling back to pkginfo.SDist") + print( + "WARNING: the pkginfo.yaml file was absent, falling back to pkginfo.SDist" + ) pkg_info = pkginfo.SDist(download_path).__dict__ if new_hash_value: - pkg_info['new_hash_value'] = ('sha256', new_hash_value) + pkg_info["new_hash_value"] = ("sha256", new_hash_value) finally: rm_rf(tempdir) @@ -1210,54 +1326,79 @@ def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_options): - ''' + """ Patch distutils and then run setup.py in a subprocess. :param src_dir: Directory containing the source code :type src_dir: str :param temp_dir: Temporary directory for doing for storing pkginfo.yaml :type temp_dir: str - ''' + """ # TODO: we could make everyone's lives easier if we include packaging here, because setuptools # needs it in recent versions. At time of writing, it is not a package in defaults, so this # actually breaks conda-build right now. Omit it until packaging is on defaults. 
# specs = ['python %s*' % python_version, 'pyyaml', 'setuptools', 'six', 'packaging', 'appdirs'] subdir = config.host_subdir - specs = [f'python {python_version}*', - 'pip', 'pyyaml', 'setuptools'] + (['m2-patch', 'm2-gcc-libs'] if config.host_subdir.startswith('win') - else ['patch']) + specs = [f"python {python_version}*", "pip", "pyyaml", "setuptools"] + ( + ["m2-patch", "m2-gcc-libs"] + if config.host_subdir.startswith("win") + else ["patch"] + ) with open(os.path.join(src_dir, "setup.py")) as setup: text = setup.read() - if 'import numpy' in text or 'from numpy' in text: - specs.append('numpy') + if "import numpy" in text or "from numpy" in text: + specs.append("numpy") specs.extend(extra_specs) rm_rf(config.host_prefix) - create_env(config.host_prefix, specs_or_actions=specs, env='host', - subdir=subdir, clear_cache=False, config=config) - stdlib_dir = join(config.host_prefix, - 'Lib' if sys.platform == 'win32' - else 'lib/python%s' % python_version) + create_env( + config.host_prefix, + specs_or_actions=specs, + env="host", + subdir=subdir, + clear_cache=False, + config=config, + ) + stdlib_dir = join( + config.host_prefix, + "Lib" if sys.platform == "win32" else "lib/python%s" % python_version, + ) - patch = join(temp_dir, 'pypi-distutils.patch') - with open(patch, 'wb') as f: - f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')).encode('utf-8')) + patch = join(temp_dir, "pypi-distutils.patch") + with open(patch, "wb") as f: + f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")).encode("utf-8")) - if exists(join(stdlib_dir, 'distutils', 'core.py-copy')): - rm_rf(join(stdlib_dir, 'distutils', 'core.py')) - copy2(join(stdlib_dir, 'distutils', 'core.py-copy'), - join(stdlib_dir, 'distutils', 'core.py')) + if exists(join(stdlib_dir, "distutils", "core.py-copy")): + rm_rf(join(stdlib_dir, "distutils", "core.py")) + copy2( + join(stdlib_dir, "distutils", "core.py-copy"), + join(stdlib_dir, "distutils", "core.py"), + ) # Avoid race conditions. Invalidate the cache. 
- rm_rf(join(stdlib_dir, 'distutils', '__pycache__', - 'core.cpython-%s%s.pyc' % sys.version_info[:2])) - rm_rf(join(stdlib_dir, 'distutils', '__pycache__', - 'core.cpython-%s%s.pyo' % sys.version_info[:2])) + rm_rf( + join( + stdlib_dir, + "distutils", + "__pycache__", + "core.cpython-%s%s.pyc" % sys.version_info[:2], + ) + ) + rm_rf( + join( + stdlib_dir, + "distutils", + "__pycache__", + "core.cpython-%s%s.pyo" % sys.version_info[:2], + ) + ) else: - copy2(join(stdlib_dir, 'distutils', 'core.py'), join(stdlib_dir, - 'distutils', 'core.py-copy')) - apply_patch(join(stdlib_dir, 'distutils'), patch, config=config) + copy2( + join(stdlib_dir, "distutils", "core.py"), + join(stdlib_dir, "distutils", "core.py-copy"), + ) + apply_patch(join(stdlib_dir, "distutils"), patch, config=config) vendored = join(stdlib_dir, "site-packages", "setuptools", "_distutils") if os.path.isdir(vendored): @@ -1265,19 +1406,19 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op # Save PYTHONPATH for later env = os.environ.copy() - if 'PYTHONPATH' in env: - env['PYTHONPATH'] = str(src_dir + ':' + env['PYTHONPATH']) + if "PYTHONPATH" in env: + env["PYTHONPATH"] = str(src_dir + ":" + env["PYTHONPATH"]) else: - env['PYTHONPATH'] = str(src_dir) + env["PYTHONPATH"] = str(src_dir) cwd = getcwd() chdir(src_dir) - cmdargs = [config.host_python, 'setup.py', 'install'] + cmdargs = [config.host_python, "setup.py", "install"] cmdargs.extend(setup_options) try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print('$PYTHONPATH = %s' % env['PYTHONPATH']) - sys.exit('Error: command failed: %s' % ' '.join(cmdargs)) + print("$PYTHONPATH = %s" % env["PYTHONPATH"]) + sys.exit("Error: command failed: %s" % " ".join(cmdargs)) finally: chdir(cwd) @@ -1285,6 +1426,6 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op def make_entry_tests(entry_list): tests = [] for entry_point in entry_list: - entry = entry_point.partition('=')[0].strip() + entry = entry_point.partition("=")[0].strip() tests.append(entry + " --help") return tests diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index 04f73d7355..409e3aad4b 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -1,22 +1,25 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import argparse -from conda_build.source import download_to_cache -from conda_build.license_family import guess_license_family from copy import copy + +from conda_build.license_family import guess_license_family +from conda_build.source import download_to_cache + try: import cPickle as pickle except: import pickle as pickle + import gzip import hashlib -from os import (chmod, makedirs) -from os.path import (basename, dirname, exists, join, splitext) import re +from os import chmod, makedirs +from os.path import basename, dirname, exists, join, splitext from textwrap import wrap from xml.etree import ElementTree as ET -from .cran import yaml_quote_string +from .cran import yaml_quote_string try: from urllib.request import urlopen @@ -25,8 +28,8 @@ # This is used in two places -default_architecture = 'x86_64' -default_distro = 'centos6' +default_architecture = "x86_64" +default_distro = "centos6" RPM_META = """\ package: @@ -79,92 +82,104 @@ """ -CDTs = dict({'centos5': {'dirname': 'centos5', - 'short_name': 'cos5', - 'base_url': 'http://vault.centos.org/5.11/os/{base_architecture}/CentOS/', - 'sbase_url': 'http://vault.centos.org/5.11/os/Source/', - 
'repomd_url': 'http://vault.centos.org/5.11/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{architecture}-conda_cos5-linux-gnu', - 'host_subdir': 'linux-{bits}', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el5.{architecture}', - 'checksummer': hashlib.sha1, - 'checksummer_name': "sha1", - 'macros': {}}, - 'centos6': {'dirname': 'centos6', - 'short_name': 'cos6', - 'base_url': 'http://vault.centos.org/centos/6.10/os/{base_architecture}/CentOS/', # noqa - 'sbase_url': 'http://vault.centos.org/6.10/os/Source/SPackages/', - 'repomd_url': 'http://vault.centos.org/centos/6.10/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{architecture}-conda_cos6-linux-gnu', - 'host_subdir': 'linux-{bits}', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el6.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - # Some macros are defined in /etc/rpm/macros.* but I cannot find where - # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" - # gives nothing nor does rpm --showrc | grep gdk - 'macros': {'pyver': '2.6.6', - 'gdk_pixbuf_base_version': '2.24.1'}}, - 'centos7': {'dirname': 'centos7', - 'short_name': 'cos7', - 'base_url': 'http://vault.centos.org/altarch/7/os/{base_architecture}/CentOS/', # noqa - 'sbase_url': 'http://vault.centos.org/7.7.1908/os/Source/SPackages/', - 'repomd_url': 'http://vault.centos.org/altarch/7/os/{base_architecture}/repodata/repomd.xml', # noqa - 'host_machine': '{gnu_architecture}-conda_cos7-linux-gnu', - 'host_subdir': 'linux-ppc64le', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el7.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - # Some macros are defined in /etc/rpm/macros.* but I cannot find where - # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" - # gives nothing nor does rpm --showrc | grep gdk - 'macros': {'pyver': '2.6.6', - 'gdk_pixbuf_base_version': '2.24.1'}}, - 'clefos': {'dirname': 'clefos', - 'short_name': 'cos7', - 'base_url': 'http://download.sinenomine.net/clefos/7/os/{base_architecture}/', # noqa - 'sbase_url': 'http://download.sinenomine.net/clefos/7/source/srpms/', # noqa - 'repomd_url': 'http://download.sinenomine.net/clefos/7/os/repodata/repomd.xml', # noqa - 'host_machine': '{gnu_architecture}-conda-cos7-linux-gnu', - 'host_subdir': 'linux-s390x', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': 'el7.{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {'pyver': '2.7.5', - 'gdk_pixbuf_base_version': '2.36.2'}}, - 'suse_leap_rpi3': {'dirname': 'suse_leap_rpi3', - 'short_name': 'slrpi3', - # I cannot locate the src.rpms for OpenSUSE leap. The existence - # of this key tells this code to ignore missing src rpms but we - # should *never* release binaries we do not have the sources for. 
- 'allow_missing_sources': True, - 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.3-Current/repo/oss/suse/repodata/repomd.xml', # noqa - 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.3-Current/repo/oss/suse/{architecture}/', # noqa - 'sbase_url': 'http://download.opensuse.org/ports/{architecture}/source/factory/repo/oss/suse/src/', # noqa - # I even tried an older release but it was just as bad: - # 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/repodata/repomd.xml', # noqa - # 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.2/repo/oss/suse/{architecture}/', # noqa - # 'sbase_url': 'http://download.opensuse.org/source/distribution/leap/42.2/repo/oss/suse/src/', # noqa - 'host_machine': 'aarch64-conda_rpi3-linux-gnueabi', - 'host_subdir': 'linux-aarch64', - 'fname_architecture': '{architecture}', - 'rpm_filename_platform': '{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {}}, - 'raspbian_rpi2': {'dirname': 'raspbian_rpi2', - 'cdt_short_name': 'rrpi2', - 'host_machine': 'armv7a-conda_rpi2-linux-gnueabi', - 'host_subdir': 'armv7a-32', - 'fname_architecture': '{architecture}', - 'checksummer': hashlib.sha256, - 'checksummer_name': "sha256", - 'macros': {}}, - }) +CDTs = dict( + { + "centos5": { + "dirname": "centos5", + "short_name": "cos5", + "base_url": "http://vault.centos.org/5.11/os/{base_architecture}/CentOS/", + "sbase_url": "http://vault.centos.org/5.11/os/Source/", + "repomd_url": "http://vault.centos.org/5.11/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{architecture}-conda_cos5-linux-gnu", + "host_subdir": "linux-{bits}", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el5.{architecture}", + "checksummer": hashlib.sha1, + "checksummer_name": "sha1", + "macros": {}, + }, + "centos6": { + "dirname": "centos6", + "short_name": "cos6", + "base_url": "http://vault.centos.org/centos/6.10/os/{base_architecture}/CentOS/", # noqa + "sbase_url": "http://vault.centos.org/6.10/os/Source/SPackages/", + "repomd_url": "http://vault.centos.org/centos/6.10/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{architecture}-conda_cos6-linux-gnu", + "host_subdir": "linux-{bits}", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el6.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + # Some macros are defined in /etc/rpm/macros.* but I cannot find where + # these ones are defined. Also, rpm --eval "%{gdk_pixbuf_base_version}" + # gives nothing nor does rpm --showrc | grep gdk + "macros": {"pyver": "2.6.6", "gdk_pixbuf_base_version": "2.24.1"}, + }, + "centos7": { + "dirname": "centos7", + "short_name": "cos7", + "base_url": "http://vault.centos.org/altarch/7/os/{base_architecture}/CentOS/", # noqa + "sbase_url": "http://vault.centos.org/7.7.1908/os/Source/SPackages/", + "repomd_url": "http://vault.centos.org/altarch/7/os/{base_architecture}/repodata/repomd.xml", # noqa + "host_machine": "{gnu_architecture}-conda_cos7-linux-gnu", + "host_subdir": "linux-ppc64le", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el7.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + # Some macros are defined in /etc/rpm/macros.* but I cannot find where + # these ones are defined. 
Also, rpm --eval "%{gdk_pixbuf_base_version}" + # gives nothing nor does rpm --showrc | grep gdk + "macros": {"pyver": "2.6.6", "gdk_pixbuf_base_version": "2.24.1"}, + }, + "clefos": { + "dirname": "clefos", + "short_name": "cos7", + "base_url": "http://download.sinenomine.net/clefos/7/os/{base_architecture}/", # noqa + "sbase_url": "http://download.sinenomine.net/clefos/7/source/srpms/", # noqa + "repomd_url": "http://download.sinenomine.net/clefos/7/os/repodata/repomd.xml", # noqa + "host_machine": "{gnu_architecture}-conda-cos7-linux-gnu", + "host_subdir": "linux-s390x", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "el7.{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {"pyver": "2.7.5", "gdk_pixbuf_base_version": "2.36.2"}, + }, + "suse_leap_rpi3": { + "dirname": "suse_leap_rpi3", + "short_name": "slrpi3", + # I cannot locate the src.rpms for OpenSUSE leap. The existence + # of this key tells this code to ignore missing src rpms but we + # should *never* release binaries we do not have the sources for. + "allow_missing_sources": True, + "repomd_url": "http://download.opensuse.org/ports/aarch64/distribution/leap/42.3-Current/repo/oss/suse/repodata/repomd.xml", # noqa + "base_url": "http://download.opensuse.org/ports/{architecture}/distribution/leap/42.3-Current/repo/oss/suse/{architecture}/", # noqa + "sbase_url": "http://download.opensuse.org/ports/{architecture}/source/factory/repo/oss/suse/src/", # noqa + # I even tried an older release but it was just as bad: + # 'repomd_url': 'http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/repodata/repomd.xml', # noqa + # 'base_url': 'http://download.opensuse.org/ports/{architecture}/distribution/leap/42.2/repo/oss/suse/{architecture}/', # noqa + # 'sbase_url': 'http://download.opensuse.org/source/distribution/leap/42.2/repo/oss/suse/src/', # noqa + "host_machine": "aarch64-conda_rpi3-linux-gnueabi", + "host_subdir": "linux-aarch64", + "fname_architecture": "{architecture}", + "rpm_filename_platform": "{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {}, + }, + "raspbian_rpi2": { + "dirname": "raspbian_rpi2", + "cdt_short_name": "rrpi2", + "host_machine": "armv7a-conda_rpi2-linux-gnueabi", + "host_subdir": "armv7a-32", + "fname_architecture": "{architecture}", + "checksummer": hashlib.sha256, + "checksummer_name": "sha256", + "macros": {}, + }, + } +) def package_exists(package_name): @@ -175,29 +190,29 @@ def package_exists(package_name): def cache_file(src_cache, url, fn=None, checksummer=hashlib.sha256): if fn: - source = dict({'url': url, 'fn': fn}) + source = dict({"url": url, "fn": fn}) else: - source = dict({'url': url}) - cached_path, _ = download_to_cache(src_cache, '', source) + source = dict({"url": url}) + cached_path, _ = download_to_cache(src_cache, "", source) csum = checksummer() - csum.update(open(cached_path, 'rb').read()) + csum.update(open(cached_path, "rb").read()) csumstr = csum.hexdigest() return cached_path, csumstr def rpm_filename_split(rpmfilename): base, _ = splitext(rpmfilename) - release_platform = base.split('-')[-1] - parts = release_platform.split('.') + release_platform = base.split("-")[-1] + parts = release_platform.split(".") if len(parts) == 2: release, platform = parts[0], parts[1] elif len(parts) > 2: - release, platform = '.'.join(parts[0:len(parts) - 1]), '.'.join(parts[-1:]) + release, platform = ".".join(parts[0 : len(parts) - 1]), ".".join(parts[-1:]) else: 
print(f"ERROR: Cannot figure out the release and platform for {base}") - name_version = base.split('-')[0:-1] + name_version = base.split("-")[0:-1] version = name_version[-1] - rpm_name = '-'.join(name_version[0:len(name_version) - 1]) + rpm_name = "-".join(name_version[0 : len(name_version) - 1]) return rpm_name, version, release, platform @@ -208,7 +223,7 @@ def rpm_split_url_and_cache(rpm_url, src_cache): def rpm_filename_generate(rpm_name, version, release, platform): - return f'{rpm_name}-{version}-{release}.{platform}.rpm' + return f"{rpm_name}-{version}-{release}.{platform}.rpm" def rpm_url_generate(url_dirname, rpm_name, version, release, platform, src_cache): @@ -217,14 +232,14 @@ def rpm_url_generate(url_dirname, rpm_name, version, release, platform, src_cach """ result = rpm_filename_generate(rpm_name, version, release, platform) url = join(url_dirname, result) - path, _ = download_to_cache(src_cache, '', dict({'url': url})) + path, _ = download_to_cache(src_cache, "", dict({"url": url})) assert path, f"Failed to cache generated RPM url {result}" return url def find_repo_entry_and_arch(repo_primary, architectures, depend): - dep_name = depend['name'] - found_package_name = '' + dep_name = depend["name"] + found_package_name = "" try: # Try direct lookup first. found_package = repo_primary[dep_name] @@ -234,16 +249,18 @@ def find_repo_entry_and_arch(repo_primary, architectures, depend): for name, package in repo_primary.items(): for arch in architectures: if arch in package: - if 'provides' in package[arch]: - for provide in package[arch]['provides']: - if provide['name'] == dep_name: + if "provides" in package[arch]: + for provide in package[arch]["provides"]: + if provide["name"] == dep_name: print(f"Found it in {name}") found_package = package found_package_name = name break - if found_package_name == '': - print(f"WARNING: Did not find package called (or another one providing) {dep_name}") # noqa + if found_package_name == "": + print( + f"WARNING: Did not find package called (or another one providing) {dep_name}" + ) # noqa return None, None, None chosen_arch = None @@ -257,12 +274,15 @@ def find_repo_entry_and_arch(repo_primary, architectures, depend): return entry, found_package_name, chosen_arch -str_flags_to_conda_version_spec = dict({'LT': '<', - 'LE': '<=', - 'EQ': '==', - 'GE': '>=', - 'GT': '>', - }) +str_flags_to_conda_version_spec = dict( + { + "LT": "<", + "LE": "<=", + "EQ": "==", + "GE": ">=", + "GT": ">", + } +) def dictify(r, root=True): @@ -279,70 +299,78 @@ def dictify(r, root=True): def dictify_pickled(xml_file, src_cache, dict_massager=None, cdt=None): - pickled = xml_file + '.p' + pickled = xml_file + ".p" if exists(pickled): - return pickle.load(open(pickled, 'rb')) - with open(xml_file, encoding='utf-8') as xf: + return pickle.load(open(pickled, "rb")) + with open(xml_file, encoding="utf-8") as xf: xmlstring = xf.read() # Remove the global namespace. - xmlstring = re.sub(r'\sxmlns="[^"]+"', r'', xmlstring, count=1) + xmlstring = re.sub(r'\sxmlns="[^"]+"', r"", xmlstring, count=1) # Replace sub-namespaces with their names. 
xmlstring = re.sub(r'\sxmlns:([a-zA-Z]*)="[^"]+"', r' xmlns:\1="\1"', xmlstring) - root = ET.fromstring(xmlstring.encode('utf-8')) + root = ET.fromstring(xmlstring.encode("utf-8")) result = dictify(root) if dict_massager: result = dict_massager(result, src_cache, cdt) - pickle.dump(result, open(pickled, 'wb')) + pickle.dump(result, open(pickled, "wb")) return result def get_repo_dict(repomd_url, data_type, dict_massager, cdt, src_cache): xmlstring = urlopen(repomd_url).read() # Remove the default namespace definition (xmlns="http://some/namespace") - xmlstring = re.sub(br'\sxmlns="[^"]+"', b'', xmlstring, count=1) + xmlstring = re.sub(rb'\sxmlns="[^"]+"', b"", xmlstring, count=1) repomd = ET.fromstring(xmlstring) for child in repomd.findall(f"*[@type='{data_type}']"): open_csum = child.findall("open-checksum")[0].text xml_file = join(src_cache, open_csum) try: - xml_file, xml_csum = cache_file(src_cache, xml_file, None, cdt['checksummer']) + xml_file, xml_csum = cache_file( + src_cache, xml_file, None, cdt["checksummer"] + ) except: csum = child.findall("checksum")[0].text - location = child.findall("location")[0].attrib['href'] - xmlgz_file = dirname(dirname(repomd_url)) + '/' + location - cached_path, cached_csum = cache_file(src_cache, xmlgz_file, - None, cdt['checksummer']) - assert csum == cached_csum, "Checksum for {} does not match value in {}".format( - xmlgz_file, repomd_url) - with gzip.open(cached_path, 'rb') as gz: + location = child.findall("location")[0].attrib["href"] + xmlgz_file = dirname(dirname(repomd_url)) + "/" + location + cached_path, cached_csum = cache_file( + src_cache, xmlgz_file, None, cdt["checksummer"] + ) + assert ( + csum == cached_csum + ), "Checksum for {} does not match value in {}".format( + xmlgz_file, repomd_url + ) + with gzip.open(cached_path, "rb") as gz: xml_content = gz.read() - xml_csum = cdt['checksummer']() + xml_csum = cdt["checksummer"]() xml_csum.update(xml_content) xml_csum = xml_csum.hexdigest() if xml_csum == open_csum: - with open(xml_file, 'wb') as xml: + with open(xml_file, "wb") as xml: xml.write(xml_content) else: - print(f"ERROR: Checksum of uncompressed file {xmlgz_file} does not match") # noqa + print( + f"ERROR: Checksum of uncompressed file {xmlgz_file} does not match" + ) # noqa return dictify_pickled(xml_file, src_cache, dict_massager, cdt) return dict({}) def massage_primary_requires(requires, cdt): for require in requires: - require['name'] = require['name'] - if 'flags' in require: - require['flags'] = str_flags_to_conda_version_spec[require['flags']] + require["name"] = require["name"] + if "flags" in require: + require["flags"] = str_flags_to_conda_version_spec[require["flags"]] else: - require['flags'] = None - if 'ver' in require: - if '%' in require['ver']: - require['ver'] = require['ver'].replace('%', '') - if not require['ver'].startswith('{'): - require['ver'] = '{' + require['ver'] - if not require['ver'].endswith('}'): - require['ver'] = require['ver'] + '}' - require['ver'] = require['ver'].format(**cdt['macros']) + require["flags"] = None + if "ver" in require: + if "%" in require["ver"]: + require["ver"] = require["ver"].replace("%", "") + if not require["ver"].startswith("{"): + require["ver"] = "{" + require["ver"] + if not require["ver"].endswith("}"): + require["ver"] = require["ver"] + "}" + require["ver"] = require["ver"].format(**cdt["macros"]) return requires @@ -359,51 +387,55 @@ def massage_primary(repo_primary, src_cache, cdt): """ new_dict = dict({}) - for package in 
repo_primary['metadata']['package']: - name = package['name'][0]['_text'] - arch = package['arch'][0]['_text'] - if arch == 'src': + for package in repo_primary["metadata"]["package"]: + name = package["name"][0]["_text"] + arch = package["arch"][0]["_text"] + if arch == "src": continue - checksum = package['checksum'][0]['_text'] - source = package['format'][0]['{rpm}sourcerpm'][0]['_text'] + checksum = package["checksum"][0]["_text"] + source = package["format"][0]["{rpm}sourcerpm"][0]["_text"] # If you need to check if the sources exist (perhaps you've got the source URL wrong # or the distro has forgotten to copy them?): # import requests # sbase_url = cdt['sbase_url'] # surl = sbase_url + source # print("{} {}".format(requests.head(surl).status_code, surl)) - location = package['location'][0]['href'] - version = package['version'][0] - summary = package['summary'][0]['_text'] + location = package["location"][0]["href"] + version = package["version"][0] + summary = package["summary"][0]["_text"] try: - description = package['description'][0]['_text'] + description = package["description"][0]["_text"] except: description = "NA" - if '_text' in package['url'][0]: - url = package['url'][0]['_text'] + if "_text" in package["url"][0]: + url = package["url"][0]["_text"] else: - url = '' - license = package['format'][0]['{rpm}license'][0]['_text'] + url = "" + license = package["format"][0]["{rpm}license"][0]["_text"] try: - provides = package['format'][0]['{rpm}provides'][0]['{rpm}entry'] + provides = package["format"][0]["{rpm}provides"][0]["{rpm}entry"] provides = massage_primary_requires(provides, cdt) except: provides = [] try: - requires = package['format'][0]['{rpm}requires'][0]['{rpm}entry'] + requires = package["format"][0]["{rpm}requires"][0]["{rpm}entry"] requires = massage_primary_requires(requires, cdt) except: requires = [] - new_package = dict({'checksum': checksum, - 'location': location, - 'home': url, - 'source': source, - 'version': version, - 'summary': yaml_quote_string(summary), - 'description': description, - 'license': license, - 'provides': provides, - 'requires': requires}) + new_package = dict( + { + "checksum": checksum, + "location": location, + "home": url, + "source": source, + "version": version, + "summary": yaml_quote_string(summary), + "description": description, + "license": license, + "provides": provides, + "requires": requires, + } + ) if name in new_dict: if arch in new_dict[name]: print(f"WARNING: Duplicate packages exist for {name} for arch {arch}") @@ -414,41 +446,47 @@ def massage_primary(repo_primary, src_cache, cdt): def valid_depends(depends): - name = depends['name'] - str_flags = depends['flags'] - if (not name.startswith('rpmlib(') and not - name.startswith('config(') and not - name.startswith('pkgconfig(') and not - name.startswith('/') and - name != 'rtld(GNU_HASH)' and - '.so' not in name and - '(' not in name and - str_flags): + name = depends["name"] + str_flags = depends["flags"] + if ( + not name.startswith("rpmlib(") + and not name.startswith("config(") + and not name.startswith("pkgconfig(") + and not name.startswith("/") + and name != "rtld(GNU_HASH)" + and ".so" not in name + and "(" not in name + and str_flags + ): return True return False def remap_license(rpm_license): - mapping = {'lgplv2+': 'LGPL (>= 2)', - 'gplv2+': 'GPL (>= 2)', - 'public domain (uncopyrighted)': 'Public-Domain', - 'public domain': 'Public-Domain', - 'mit/x11': 'MIT', - 'the open group license': 'The Open Group License'} + mapping = { + "lgplv2+": "LGPL (>= 
2)", + "gplv2+": "GPL (>= 2)", + "public domain (uncopyrighted)": "Public-Domain", + "public domain": "Public-Domain", + "mit/x11": "MIT", + "the open group license": "The Open Group License", + } l_rpm_license = rpm_license.lower() if l_rpm_license in mapping: - license, family = mapping[l_rpm_license], guess_license_family(mapping[l_rpm_license]) + license, family = mapping[l_rpm_license], guess_license_family( + mapping[l_rpm_license] + ) else: license, family = rpm_license, guess_license_family(rpm_license) # Yuck: - if family == 'APACHE': - family = 'Apache' - elif family == 'PUBLIC-DOMAIN': - family = 'Public-Domain' - elif family == 'PROPRIETARY': - family = 'Proprietary' - elif family == 'OTHER': - family = 'Other' + if family == "APACHE": + family = "Apache" + elif family == "PUBLIC-DOMAIN": + family = "Public-Domain" + elif family == "PROPRIETARY": + family = "Proprietary" + elif family == "OTHER": + family = "Other" return license, family @@ -459,19 +497,28 @@ def tidy_text(text, wrap_at=0): return stripped -def write_conda_recipes(recursive, repo_primary, package, architectures, - cdt, output_dir, override_arch, src_cache): - entry, entry_name, arch = find_repo_entry_and_arch(repo_primary, architectures, - dict({'name': package})) +def write_conda_recipes( + recursive, + repo_primary, + package, + architectures, + cdt, + output_dir, + override_arch, + src_cache, +): + entry, entry_name, arch = find_repo_entry_and_arch( + repo_primary, architectures, dict({"name": package}) + ) if not entry: return if override_arch: arch = architectures[0] else: - arch = cdt['fname_architecture'] + arch = cdt["fname_architecture"] package = entry_name - rpm_url = dirname(dirname(cdt['base_url'])) + '/' + entry['location'] - srpm_url = cdt['sbase_url'] + entry['source'] + rpm_url = dirname(dirname(cdt["base_url"])) + "/" + entry["location"] + srpm_url = cdt["sbase_url"] + entry["source"] _, _, _, _, _, sha256str = rpm_split_url_and_cache(rpm_url, src_cache) try: # We ignore the hash of source RPMs since they @@ -479,103 +526,123 @@ def write_conda_recipes(recursive, repo_primary, package, architectures, _, _, _, _, _, _ = rpm_split_url_and_cache(srpm_url, src_cache) except: # Just pretend the binaries are sources. 
- if 'allow_missing_sources' in cdt: + if "allow_missing_sources" in cdt: srpm_url = rpm_url else: raise - depends = [required for required in entry['requires'] if valid_depends(required)] + depends = [required for required in entry["requires"] if valid_depends(required)] - if package in cdt['dependency_add']: - for missing_dep in cdt['dependency_add'][package]: - e_missing, e_name_missing, _ = find_repo_entry_and_arch(repo_primary, architectures, - dict({'name': missing_dep})) + if package in cdt["dependency_add"]: + for missing_dep in cdt["dependency_add"][package]: + e_missing, e_name_missing, _ = find_repo_entry_and_arch( + repo_primary, architectures, dict({"name": missing_dep}) + ) if e_missing: - for provides in e_missing['provides']: - if provides['name'] == e_name_missing: + for provides in e_missing["provides"]: + if provides["name"] == e_name_missing: copy_provides = copy(provides) if "rel" in copy_provides: del copy_provides["rel"] depends.append(copy_provides) else: - print('WARNING: Additional dependency of {}, {} not found'.format(package, - missing_dep)) + print( + "WARNING: Additional dependency of {}, {} not found".format( + package, missing_dep + ) + ) for depend in depends: - dep_entry, dep_name, dep_arch = find_repo_entry_and_arch(repo_primary, - architectures, - depend) + dep_entry, dep_name, dep_arch = find_repo_entry_and_arch( + repo_primary, architectures, depend + ) if override_arch: dep_arch = architectures[0] - depend['arch'] = dep_arch + depend["arch"] = dep_arch # Because something else may provide a substitute for the wanted package # we need to also overwrite the versions with those of the provider, e.g. # libjpeg 6b is provided by libjpeg-turbo 1.2.1 - if depend['name'] != dep_name and 'version' in dep_entry: - if 'ver' in dep_entry['version']: - depend['ver'] = dep_entry['version']['ver'] - if 'epoch' in dep_entry['version']: - depend['epoch'] = dep_entry['version']['epoch'] + if depend["name"] != dep_name and "version" in dep_entry: + if "ver" in dep_entry["version"]: + depend["ver"] = dep_entry["version"]["ver"] + if "epoch" in dep_entry["version"]: + depend["epoch"] = dep_entry["version"]["epoch"] if recursive: - depend['name'] = write_conda_recipes(recursive, - repo_primary, - depend['name'], - architectures, - cdt, - output_dir, - override_arch, - src_cache) - - sn = cdt['short_name'] + '-' + arch + depend["name"] = write_conda_recipes( + recursive, + repo_primary, + depend["name"], + architectures, + cdt, + output_dir, + override_arch, + src_cache, + ) + + sn = cdt["short_name"] + "-" + arch dependsstr = "" if len(depends): - depends_specs = ["{}-{}-{} {}{}".format(depend['name'].lower().replace('+', 'x'), - cdt['short_name'], depend['arch'], - depend['flags'], depend['ver']) - for depend in depends] - dependsstr_part = '\n'.join([f' - {depends_spec}' - for depends_spec in depends_specs]) - dependsstr_build = ' build:\n' + dependsstr_part + '\n' - dependsstr_host = ' host:\n' + dependsstr_part + '\n' - dependsstr_run = ' run:\n' + dependsstr_part - dependsstr = 'requirements:\n' + dependsstr_build + dependsstr_host + dependsstr_run - - package_l = package.lower().replace('+', 'x') - package_cdt_name = package_l + '-' + sn - license, license_family = remap_license(entry['license']) - d = dict({'version': entry['version']['ver'], - 'packagename': package_cdt_name, - 'hostmachine': cdt['host_machine'], - 'hostsubdir': cdt['host_subdir'], - 'depends': dependsstr, - 'rpmurl': rpm_url, - 'srcrpmurl': srpm_url, - 'home': entry['home'], - 'license': license, 
- 'license_family': license_family, - 'checksum_name': cdt['checksummer_name'], - 'checksum': entry['checksum'], - 'summary': '"(CDT) ' + tidy_text(entry['summary']) + '"', - 'description': '|\n ' + '\n '.join(tidy_text(entry['description'], 78)), # noqa - # Cheeky workaround. I use ${PREFIX}, - # ${PWD}, ${RPM} and ${RECIPE_DIR} in - # BUILDSH and they get interpreted as - # format string tokens so bounce them - # back. - 'PREFIX': '{PREFIX}', - 'RPM': '{RPM}', - 'PWD': '{PWD}', - 'RECIPE_DIR': '{RECIPE_DIR}', - 'SRC_DIR': '{SRC_DIR}'}) + depends_specs = [ + "{}-{}-{} {}{}".format( + depend["name"].lower().replace("+", "x"), + cdt["short_name"], + depend["arch"], + depend["flags"], + depend["ver"], + ) + for depend in depends + ] + dependsstr_part = "\n".join( + [f" - {depends_spec}" for depends_spec in depends_specs] + ) + dependsstr_build = " build:\n" + dependsstr_part + "\n" + dependsstr_host = " host:\n" + dependsstr_part + "\n" + dependsstr_run = " run:\n" + dependsstr_part + dependsstr = ( + "requirements:\n" + dependsstr_build + dependsstr_host + dependsstr_run + ) + + package_l = package.lower().replace("+", "x") + package_cdt_name = package_l + "-" + sn + license, license_family = remap_license(entry["license"]) + d = dict( + { + "version": entry["version"]["ver"], + "packagename": package_cdt_name, + "hostmachine": cdt["host_machine"], + "hostsubdir": cdt["host_subdir"], + "depends": dependsstr, + "rpmurl": rpm_url, + "srcrpmurl": srpm_url, + "home": entry["home"], + "license": license, + "license_family": license_family, + "checksum_name": cdt["checksummer_name"], + "checksum": entry["checksum"], + "summary": '"(CDT) ' + tidy_text(entry["summary"]) + '"', + "description": "|\n " + + "\n ".join(tidy_text(entry["description"], 78)), # noqa + # Cheeky workaround. I use ${PREFIX}, + # ${PWD}, ${RPM} and ${RECIPE_DIR} in + # BUILDSH and they get interpreted as + # format string tokens so bounce them + # back. + "PREFIX": "{PREFIX}", + "RPM": "{RPM}", + "PWD": "{PWD}", + "RECIPE_DIR": "{RECIPE_DIR}", + "SRC_DIR": "{SRC_DIR}", + } + ) odir = join(output_dir, package_cdt_name) try: makedirs(odir) except: pass - with open(join(odir, 'meta.yaml'), 'wb') as f: - f.write(RPM_META.format(**d).encode('utf-8')) - buildsh = join(odir, 'build.sh') - with open(buildsh, 'wb') as f: + with open(join(odir, "meta.yaml"), "wb") as f: + f.write(RPM_META.format(**d).encode("utf-8")) + buildsh = join(odir, "build.sh") + with open(buildsh, "wb") as f: chmod(buildsh, 0o755) - f.write(BUILDSH.format(**d).encode('utf-8')) + f.write(BUILDSH.format(**d).encode("utf-8")) return package @@ -583,15 +650,23 @@ def write_conda_recipes(recursive, repo_primary, package, architectures, # name their RPMs differently we probably want to hide that away from users # Do I want to pass just the package name, the CDT and the arch and rely on # expansion to form the URL? I have been going backwards and forwards here. 
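As an illustrative aside (not part of the patch itself), the requirement-spec construction shown in the write_conda_recipes hunk above builds strings of the form "<name>-<cdt short name>-<arch> <flags><ver>". A minimal sketch of that same format call, using a hypothetical dependency entry and CDT dict purely for demonstration:

# Hypothetical inputs; the real values come from the parsed RPM metadata.
dep = {"name": "libjpeg", "arch": "x86_64", "flags": ">=", "ver": "6b"}
cdt = {"short_name": "cos6"}

# Mirrors the format string used in write_conda_recipes (behavior is
# unchanged by the reformatting commit; only the layout of the call changed).
spec = "{}-{}-{} {}{}".format(
    dep["name"].lower().replace("+", "x"),
    cdt["short_name"],
    dep["arch"],
    dep["flags"],
    dep["ver"],
)
print(spec)  # -> libjpeg-cos6-x86_64 >=6b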
-def write_conda_recipe(packages, distro, output_dir, architecture, recursive, override_arch, - dependency_add, config): +def write_conda_recipe( + packages, + distro, + output_dir, + architecture, + recursive, + override_arch, + dependency_add, + config, +): cdt_name = distro - bits = '32' if architecture in ('armv6', 'armv7a', 'i686', 'i386') else '64' - base_architectures = dict({'i686': 'i386'}) + bits = "32" if architecture in ("armv6", "armv7a", "i686", "i386") else "64" + base_architectures = dict({"i686": "i386"}) # gnu_architectures are those recognized by the canonical config.sub / config.guess # and crosstool-ng. They are returned from ${CC} -dumpmachine and are a part of the # sysroot. - gnu_architectures = dict({'ppc64le': 'powerpc64le'}) + gnu_architectures = dict({"ppc64le": "powerpc64le"}) try: base_architecture = base_architectures[architecture] except: @@ -600,10 +675,14 @@ def write_conda_recipe(packages, distro, output_dir, architecture, recursive, ov gnu_architecture = gnu_architectures[architecture] except: gnu_architecture = architecture - architecture_bits = dict({'architecture': architecture, - 'base_architecture': base_architecture, - 'gnu_architecture': gnu_architecture, - 'bits': bits}) + architecture_bits = dict( + { + "architecture": architecture, + "base_architecture": base_architecture, + "gnu_architecture": gnu_architecture, + "bits": bits, + } + ) cdt = dict() for k, v in CDTs[cdt_name].items(): if isinstance(v, str): @@ -613,53 +692,66 @@ def write_conda_recipe(packages, distro, output_dir, architecture, recursive, ov # Add undeclared dependencies. These can be baked into the global # CDTs dict, passed in on the commandline or a mixture of both. - if 'dependency_add' not in cdt: - cdt['dependency_add'] = dict() + if "dependency_add" not in cdt: + cdt["dependency_add"] = dict() if dependency_add: for package_and_missed_deps in dependency_add: - as_list = package_and_missed_deps[0].split(',') - if as_list[0] in cdt['dependency_add']: - cdt['dependency_add'][as_list[0]].extend(as_list[1:]) + as_list = package_and_missed_deps[0].split(",") + if as_list[0] in cdt["dependency_add"]: + cdt["dependency_add"][as_list[0]].extend(as_list[1:]) else: - cdt['dependency_add'][as_list[0]] = as_list[1:] + cdt["dependency_add"][as_list[0]] = as_list[1:] - repomd_url = cdt['repomd_url'] - repo_primary = get_repo_dict(repomd_url, - "primary", massage_primary, - cdt, - config.src_cache) + repomd_url = cdt["repomd_url"] + repo_primary = get_repo_dict( + repomd_url, "primary", massage_primary, cdt, config.src_cache + ) for package in packages: - write_conda_recipes(recursive, - repo_primary, - package, - [architecture, "noarch"], - cdt, - output_dir, - override_arch, - config.src_cache) - - -def skeletonize(packages, output_dir=".", version=None, recursive=False, - architecture=default_architecture, override_arch=True, - dependency_add=[], config=None, distro=default_distro): - write_conda_recipe(packages, distro, output_dir, architecture, recursive, - override_arch, dependency_add, config) + write_conda_recipes( + recursive, + repo_primary, + package, + [architecture, "noarch"], + cdt, + output_dir, + override_arch, + config.src_cache, + ) + + +def skeletonize( + packages, + output_dir=".", + version=None, + recursive=False, + architecture=default_architecture, + override_arch=True, + dependency_add=[], + config=None, + distro=default_distro, +): + write_conda_recipe( + packages, + distro, + output_dir, + architecture, + recursive, + override_arch, + dependency_add, + config, 
+ ) def add_parser(repos): - rpm = repos.add_parser( "rpm", help=""" Create recipe skeleton for RPM files - """,) - - rpm.add_argument( - "packages", - nargs='+', - help="RPM package name(s)" + """, ) + rpm.add_argument("packages", nargs="+", help="RPM package name(s)") + rpm.add_argument( "--output-dir", help="Directory to write recipes to (default: %(default)s).", @@ -668,16 +760,16 @@ def add_parser(repos): rpm.add_argument( "--recursive", - action='store_true', - dest='recursive', - help='Create recipes for dependencies if they do not already exist', + action="store_true", + dest="recursive", + help="Create recipes for dependencies if they do not already exist", ) rpm.add_argument( "--dependency-add", - nargs='+', - action='append', - help='Add undeclared dependencies (format: package,missing_dep1,missing_dep2)', + nargs="+", + action="append", + help="Add undeclared dependencies (format: package,missing_dep1,missing_dep2)", ) rpm.add_argument( @@ -696,17 +788,24 @@ def valid_distros(): def distro(distro_name): if distro_name not in CDTs: - raise argparse.ArgumentTypeError(f"valid --distro values are {valid_distros()}") + raise argparse.ArgumentTypeError( + f"valid --distro values are {valid_distros()}" + ) return distro_name - rpm.add_argument("--distro", - type=distro, - default=default_distro, - help="Distro to use. Applies to all packages, valid values are: {}".format( - valid_distros())) - - rpm.add_argument("--no-override-arch", - help="Do not override noarch in package names", - dest="override_arch", - default=True, - action="store_false") + rpm.add_argument( + "--distro", + type=distro, + default=default_distro, + help="Distro to use. Applies to all packages, valid values are: {}".format( + valid_distros() + ), + ) + + rpm.add_argument( + "--no-override-arch", + help="Do not override noarch in package names", + dest="override_arch", + default=True, + action="store_false", + ) diff --git a/conda_build/source.py b/conda_build/source.py index 572666ccdb..25cee9ce85 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -4,26 +4,35 @@ import locale import os -from os.path import join, isdir, isfile, abspath, basename, exists, normpath, expanduser import re import shutil -from subprocess import CalledProcessError import sys import tempfile import time +from os.path import abspath, basename, exists, expanduser, isdir, isfile, join, normpath from pathlib import Path +from subprocess import CalledProcessError from typing import Iterable -from .conda_interface import download, TemporaryDirectory -from .conda_interface import hashsum_file - -from .exceptions import MissingDependency +from conda_build.conda_interface import CondaHTTPError, url_path from conda_build.os_utils import external -from conda_build.conda_interface import url_path, CondaHTTPError -from conda_build.utils import (decompressible_exts, tar_xf, safe_print_unicode, copy_into, on_win, ensure_list, - check_output_env, check_call_env, convert_path_for_cygwin_or_msys2, - get_logger, rm_rf, LoggingContext) - +from conda_build.utils import ( + LoggingContext, + check_call_env, + check_output_env, + convert_path_for_cygwin_or_msys2, + copy_into, + decompressible_exts, + ensure_list, + get_logger, + on_win, + rm_rf, + safe_print_unicode, + tar_xf, +) + +from .conda_interface import TemporaryDirectory, download, hashsum_file +from .exceptions import MissingDependency log = get_logger(__name__) if on_win: @@ -34,54 +43,58 @@ else: from urlparse import urljoin -git_submod_re = re.compile(r'(?:.+)\.(.+)\.(?:.+)\s(.+)') 
+git_submod_re = re.compile(r"(?:.+)\.(.+)\.(?:.+)\s(.+)") ext_re = re.compile(r"(.*?)(\.(?:tar\.)?[^.]+)$") def append_hash_to_fn(fn, hash_value): - return ext_re.sub(fr"\1_{hash_value[:10]}\2", fn) + return ext_re.sub(rf"\1_{hash_value[:10]}\2", fn) def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): - ''' Download a source to the local cache. ''' + """Download a source to the local cache.""" if verbose: - log.info('Source cache directory is: %s' % cache_folder) + log.info("Source cache directory is: %s" % cache_folder) if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) - source_urls = source_dict['url'] + source_urls = source_dict["url"] if not isinstance(source_urls, list): source_urls = [source_urls] - unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(source_urls[0]) + unhashed_fn = fn = ( + source_dict["fn"] if "fn" in source_dict else basename(source_urls[0]) + ) hash_added = False - for hash_type in ('md5', 'sha1', 'sha256'): + for hash_type in ("md5", "sha1", "sha256"): if hash_type in source_dict: if source_dict[hash_type] in (None, ""): - raise ValueError(f'Empty {hash_type} hash provided for {fn}') + raise ValueError(f"Empty {hash_type} hash provided for {fn}") fn = append_hash_to_fn(fn, source_dict[hash_type]) hash_added = True break else: - log.warn("No hash (md5, sha1, sha256) provided for {}. Source download forced. " - "Add hash to recipe to use source cache.".format(unhashed_fn)) + log.warn( + "No hash (md5, sha1, sha256) provided for {}. Source download forced. " + "Add hash to recipe to use source cache.".format(unhashed_fn) + ) path = join(cache_folder, fn) if isfile(path): if verbose: - log.info('Found source in cache: %s' % fn) + log.info("Found source in cache: %s" % fn) else: if verbose: - log.info('Downloading source to cache: %s' % fn) + log.info("Downloading source to cache: %s" % fn) for url in source_urls: if "://" not in url: - if url.startswith('~'): + if url.startswith("~"): url = expanduser(url) if not os.path.isabs(url): url = os.path.normpath(os.path.join(recipe_path, url)) url = url_path(url) else: - if url.startswith('file:///~'): - url = 'file:///' + expanduser(url[8:]).replace('\\', '/') + if url.startswith("file:///~"): + url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: log.info("Downloading %s" % url) @@ -102,21 +115,24 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): raise RuntimeError("Could not download %s" % url) hashed = None - for tp in ('md5', 'sha1', 'sha256'): + for tp in ("md5", "sha1", "sha256"): if tp in source_dict: expected_hash = source_dict[tp] hashed = hashsum_file(path, tp) if expected_hash != hashed: rm_rf(path) - raise RuntimeError("%s mismatch: '%s' != '%s'" % - (tp.upper(), hashed, expected_hash)) + raise RuntimeError( + "{} mismatch: '{}' != '{}'".format( + tp.upper(), hashed, expected_hash + ) + ) break # this is really a fallback. If people don't provide the hash, we still need to prevent # collisions in our source cache, but the end user will get no benefit from the cache. 
if not hash_added: if not hashed: - hashed = hashsum_file(path, 'sha256') + hashed = hashsum_file(path, "sha256") dest_path = append_hash_to_fn(path, hashed) if not os.path.isfile(dest_path): shutil.move(path, dest_path) @@ -140,10 +156,20 @@ def hoist_single_extracted_folder(nested_folder): shutil.move(os.path.join(tmpdir, entry), os.path.join(parent, entry)) -def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False, - timeout=900, locking=True): - ''' Uncompress a downloaded source. ''' - src_path, unhashed_fn = download_to_cache(cache_folder, recipe_path, source_dict, verbose) +def unpack( + source_dict, + src_dir, + cache_folder, + recipe_path, + croot, + verbose=False, + timeout=900, + locking=True, +): + """Uncompress a downloaded source.""" + src_path, unhashed_fn = download_to_cache( + cache_folder, recipe_path, source_dict, verbose + ) if not isdir(src_dir): os.makedirs(src_dir) @@ -155,9 +181,11 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False tar_xf(src_path, tmpdir) else: # In this case, the build script will need to deal with unpacking the source - print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR") + print( + "Warning: Unrecognized source format. Source file will be copied to the SRC_DIR" + ) copy_into(src_path, unhashed_dest, timeout, locking=locking) - if src_path.lower().endswith('.whl'): + if src_path.lower().endswith(".whl"): # copy wheel itself *and* unpack it # This allows test_files or about.license_file to locate files in the wheel, # as well as `pip install name-version.whl` as install command @@ -166,7 +194,7 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False folder = os.path.join(tmpdir, flist[0]) # Hoisting is destructive of information, in CDT packages, a single top level # folder of /usr64 must not be discarded. - if len(flist) == 1 and os.path.isdir(folder) and 'no_hoist' not in source_dict: + if len(flist) == 1 and os.path.isdir(folder) and "no_hoist" not in source_dict: hoist_single_extracted_folder(folder) flist = os.listdir(tmpdir) for f in flist: @@ -175,61 +203,76 @@ def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False def check_git_lfs(git, cwd): try: - lfs_list_output = check_output_env([git, 'lfs', 'ls-files', '--all'], cwd=cwd) + lfs_list_output = check_output_env([git, "lfs", "ls-files", "--all"], cwd=cwd) return lfs_list_output and lfs_list_output.strip() except CalledProcessError: return False def git_lfs_fetch(git, cwd, stdout, stderr): - lfs_version = check_output_env([git, 'lfs', 'version'], cwd=cwd) + lfs_version = check_output_env([git, "lfs", "version"], cwd=cwd) log.info(lfs_version) - check_call_env([git, 'lfs', 'fetch', 'origin', '--all'], cwd=cwd, stdout=stdout, stderr=stderr) - - -def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_cache, git_ref=None, - git_depth=-1, is_top_level=True, verbose=True): - """ Mirror (and checkout) a Git repository recursively. - - It's not possible to use `git submodule` on a bare - repository, so the checkout must be done before we - know which submodules there are. - - Worse, submodules can be identified by using either - absolute URLs or relative paths. 
If relative paths - are used those need to be relocated upon mirroring, - but you could end up with `../../../../blah` and in - that case conda-build could be tricked into writing - to the root of the drive and overwriting the system - folders unless steps are taken to prevent that. + check_call_env( + [git, "lfs", "fetch", "origin", "--all"], cwd=cwd, stdout=stdout, stderr=stderr + ) + + +def git_mirror_checkout_recursive( + git, + mirror_dir, + checkout_dir, + git_url, + git_cache, + git_ref=None, + git_depth=-1, + is_top_level=True, + verbose=True, +): + """Mirror (and checkout) a Git repository recursively. + + It's not possible to use `git submodule` on a bare + repository, so the checkout must be done before we + know which submodules there are. + + Worse, submodules can be identified by using either + absolute URLs or relative paths. If relative paths + are used those need to be relocated upon mirroring, + but you could end up with `../../../../blah` and in + that case conda-build could be tricked into writing + to the root of the drive and overwriting the system + folders unless steps are taken to prevent that. """ if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL if not mirror_dir.startswith(git_cache + os.sep): - sys.exit("Error: Attempting to mirror to %s which is outside of GIT_CACHE %s" - % (mirror_dir, git_cache)) + sys.exit( + "Error: Attempting to mirror to %s which is outside of GIT_CACHE %s" + % (mirror_dir, git_cache) + ) # This is necessary for Cygwin git and m2-git, although it is fixed in newer MSYS2. - git_mirror_dir = convert_path_for_cygwin_or_msys2(git, mirror_dir).rstrip('/') - git_checkout_dir = convert_path_for_cygwin_or_msys2(git, checkout_dir).rstrip('/') + git_mirror_dir = convert_path_for_cygwin_or_msys2(git, mirror_dir).rstrip("/") + git_checkout_dir = convert_path_for_cygwin_or_msys2(git, checkout_dir).rstrip("/") # Set default here to catch empty dicts - git_ref = git_ref or 'HEAD' + git_ref = git_ref or "HEAD" - mirror_dir = mirror_dir.rstrip('/') + mirror_dir = mirror_dir.rstrip("/") if not isdir(os.path.dirname(mirror_dir)): os.makedirs(os.path.dirname(mirror_dir)) if isdir(mirror_dir): try: - if git_ref != 'HEAD': - check_call_env([git, 'fetch'], cwd=mirror_dir, stdout=stdout, stderr=stderr) + if git_ref != "HEAD": + check_call_env( + [git, "fetch"], cwd=mirror_dir, stdout=stdout, stderr=stderr + ) if check_git_lfs(git, mirror_dir): git_lfs_fetch(git, mirror_dir, stdout, stderr) else: @@ -239,13 +282,28 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca # This is important when the git repo is a local path like "git_url: ../", # but the user is working with a branch other than 'master' without # explicitly providing git_rev. - check_call_env([git, 'fetch', 'origin', '+HEAD:_conda_cache_origin_head'], - cwd=mirror_dir, stdout=stdout, stderr=stderr) - check_call_env([git, 'symbolic-ref', 'HEAD', 'refs/heads/_conda_cache_origin_head'], - cwd=mirror_dir, stdout=stdout, stderr=stderr) + check_call_env( + [git, "fetch", "origin", "+HEAD:_conda_cache_origin_head"], + cwd=mirror_dir, + stdout=stdout, + stderr=stderr, + ) + check_call_env( + [ + git, + "symbolic-ref", + "HEAD", + "refs/heads/_conda_cache_origin_head", + ], + cwd=mirror_dir, + stdout=stdout, + stderr=stderr, + ) except CalledProcessError: - msg = ("Failed to update local git cache. 
" - "Deleting local cached repo: {} ".format(mirror_dir)) + msg = ( + "Failed to update local git cache. " + "Deleting local cached repo: {} ".format(mirror_dir) + ) print(msg) # Maybe the failure was caused by a corrupt mirror directory. @@ -253,135 +311,178 @@ def git_mirror_checkout_recursive(git, mirror_dir, checkout_dir, git_url, git_ca shutil.rmtree(mirror_dir) raise else: - args = [git, 'clone', '--mirror'] + args = [git, "clone", "--mirror"] if git_depth > 0: - args += ['--depth', str(git_depth)] + args += ["--depth", str(git_depth)] try: - check_call_env(args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr) + check_call_env( + args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr + ) if check_git_lfs(git, mirror_dir): git_lfs_fetch(git, mirror_dir, stdout, stderr) except CalledProcessError: # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. - if sys.platform == 'win32' and git_url.startswith('/'): + if sys.platform == "win32" and git_url.startswith("/"): git_url = convert_unix_path_to_win(git_url) if os.path.exists(git_url): # Local filepaths are allowed, but make sure we normalize them git_url = normpath(git_url) - check_call_env(args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr) + check_call_env( + args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr + ) assert isdir(mirror_dir) # Now clone from mirror_dir into checkout_dir. - check_call_env([git, 'clone', git_mirror_dir, git_checkout_dir], stdout=stdout, stderr=stderr) + check_call_env( + [git, "clone", git_mirror_dir, git_checkout_dir], stdout=stdout, stderr=stderr + ) if is_top_level: checkout = git_ref - if git_url.startswith('.'): - output = check_output_env([git, "rev-parse", checkout], stdout=stdout, stderr=stderr) - checkout = output.decode('utf-8') + if git_url.startswith("."): + output = check_output_env( + [git, "rev-parse", checkout], stdout=stdout, stderr=stderr + ) + checkout = output.decode("utf-8") if verbose: - print('checkout: %r' % checkout) + print("checkout: %r" % checkout) if checkout: - check_call_env([git, 'checkout', checkout], - cwd=checkout_dir, stdout=stdout, stderr=stderr) + check_call_env( + [git, "checkout", checkout], + cwd=checkout_dir, + stdout=stdout, + stderr=stderr, + ) # submodules may have been specified using relative paths. # Those paths are relative to git_url, and will not exist # relative to mirror_dir, unless we do some work to make # it so. 
try: - submodules = check_output_env([git, 'config', '--file', '.gitmodules', '--get-regexp', - 'url'], stderr=stdout, cwd=checkout_dir) - submodules = submodules.decode('utf-8').splitlines() + submodules = check_output_env( + [git, "config", "--file", ".gitmodules", "--get-regexp", "url"], + stderr=stdout, + cwd=checkout_dir, + ) + submodules = submodules.decode("utf-8").splitlines() except CalledProcessError: submodules = [] for submodule in submodules: matches = git_submod_re.match(submodule) - if matches and matches.group(2)[0] == '.': + if matches and matches.group(2)[0] == ".": submod_name = matches.group(1) submod_rel_path = matches.group(2) - submod_url = urljoin(git_url + '/', submod_rel_path) + submod_url = urljoin(git_url + "/", submod_rel_path) submod_mirror_dir = os.path.normpath( - os.path.join(mirror_dir, submod_rel_path)) + os.path.join(mirror_dir, submod_rel_path) + ) if verbose: - print('Relative submodule {} found: url is {}, submod_mirror_dir is {}'.format( - submod_name, submod_url, submod_mirror_dir)) + print( + "Relative submodule {} found: url is {}, submod_mirror_dir is {}".format( + submod_name, submod_url, submod_mirror_dir + ) + ) with TemporaryDirectory() as temp_checkout_dir: - git_mirror_checkout_recursive(git, submod_mirror_dir, temp_checkout_dir, submod_url, - git_cache=git_cache, git_ref=git_ref, - git_depth=git_depth, is_top_level=False, - verbose=verbose) + git_mirror_checkout_recursive( + git, + submod_mirror_dir, + temp_checkout_dir, + submod_url, + git_cache=git_cache, + git_ref=git_ref, + git_depth=git_depth, + is_top_level=False, + verbose=verbose, + ) if is_top_level: # Now that all relative-URL-specified submodules are locally mirrored to # relatively the same place we can go ahead and checkout the submodules. - check_call_env([git, 'submodule', 'update', '--init', - '--recursive'], cwd=checkout_dir, stdout=stdout, stderr=stderr) + check_call_env( + [git, "submodule", "update", "--init", "--recursive"], + cwd=checkout_dir, + stdout=stdout, + stderr=stderr, + ) git_info(checkout_dir, None, git=git, verbose=verbose) if not verbose: FNULL.close() def git_source(source_dict, git_cache, src_dir, recipe_path=None, verbose=True): - ''' Download a source from a Git repo (or submodule, recursively) ''' + """Download a source from a Git repo (or submodule, recursively)""" if not isdir(git_cache): os.makedirs(git_cache) - git = external.find_executable('git') + git = external.find_executable("git") if not git: - sys.exit("Error: git is not installed in your root environment or as a build requirement.") + sys.exit( + "Error: git is not installed in your root environment or as a build requirement." 
+ ) - git_depth = int(source_dict.get('git_depth', -1)) - git_ref = source_dict.get('git_rev') or 'HEAD' + git_depth = int(source_dict.get("git_depth", -1)) + git_ref = source_dict.get("git_rev") or "HEAD" - git_url = source_dict['git_url'] - if git_url.startswith('~'): + git_url = source_dict["git_url"] + if git_url.startswith("~"): git_url = os.path.expanduser(git_url) - if git_url.startswith('.'): + if git_url.startswith("."): # It's a relative path from the conda recipe git_url = abspath(normpath(os.path.join(recipe_path, git_url))) - if sys.platform == 'win32': - git_dn = git_url.replace(':', '_') + if sys.platform == "win32": + git_dn = git_url.replace(":", "_") else: git_dn = git_url[1:] else: - git_dn = git_url.split('://')[-1].replace('/', os.sep) + git_dn = git_url.split("://")[-1].replace("/", os.sep) if git_dn.startswith(os.sep): git_dn = git_dn[1:] - git_dn = git_dn.replace(':', '_') + git_dn = git_dn.replace(":", "_") mirror_dir = join(git_cache, git_dn) git_mirror_checkout_recursive( - git, mirror_dir, src_dir, git_url, git_cache=git_cache, git_ref=git_ref, - git_depth=git_depth, is_top_level=True, verbose=verbose) + git, + mirror_dir, + src_dir, + git_url, + git_cache=git_cache, + git_ref=git_ref, + git_depth=git_depth, + is_top_level=True, + verbose=verbose, + ) return git # Why not use get_git_info instead? def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): - ''' Print info about a Git repo. ''' + """Print info about a Git repo.""" assert isdir(src_dir) if not git: - git = external.find_executable('git', build_prefix) + git = external.find_executable("git", build_prefix) if not git: - log.warn("git not installed in root environment. Skipping recording of git info.") + log.warn( + "git not installed in root environment. Skipping recording of git info." + ) return if verbose: stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stderr = FNULL # Ensure to explicitly set GIT_DIR as some Linux machines will not # properly execute without it. env = os.environ.copy() - env['GIT_DIR'] = join(src_dir, '.git') + env["GIT_DIR"] = join(src_dir, ".git") env = {str(key): str(value) for key, value in env.items()} for cmd, check_error in ( - ((git, 'log', '-n1'), True), - ((git, 'describe', '--tags', '--dirty'), False), - ((git, 'status'), True)): + ((git, "log", "-n1"), True), + ((git, "describe", "--tags", "--dirty"), False), + ((git, "status"), True), + ): try: stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: @@ -390,49 +491,51 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding - encoding = encoding or 'utf-8' - if hasattr(stdout, 'decode'): - stdout = stdout.decode(encoding, 'ignore') + encoding = encoding or "utf-8" + if hasattr(stdout, "decode"): + stdout = stdout.decode(encoding, "ignore") if fo: - fo.write('==> {} <==\n'.format(' '.join(cmd))) + fo.write("==> {} <==\n".format(" ".join(cmd))) if verbose: - fo.write(stdout + '\n') + fo.write(stdout + "\n") else: if verbose: - print('==> {} <==\n'.format(' '.join(cmd))) - safe_print_unicode(stdout + '\n') + print("==> {} <==\n".format(" ".join(cmd))) + safe_print_unicode(stdout + "\n") def hg_source(source_dict, src_dir, hg_cache, verbose): - ''' Download a source from Mercurial repo. 
''' + """Download a source from Mercurial repo.""" if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL - hg_url = source_dict['hg_url'] + hg_url = source_dict["hg_url"] if not isdir(hg_cache): os.makedirs(hg_cache) - hg_dn = hg_url.split(':')[-1].replace('/', '_') + hg_dn = hg_url.split(":")[-1].replace("/", "_") cache_repo = join(hg_cache, hg_dn) if isdir(cache_repo): - check_call_env(['hg', 'pull'], cwd=cache_repo, stdout=stdout, stderr=stderr) + check_call_env(["hg", "pull"], cwd=cache_repo, stdout=stdout, stderr=stderr) else: - check_call_env(['hg', 'clone', hg_url, cache_repo], stdout=stdout, stderr=stderr) + check_call_env( + ["hg", "clone", hg_url, cache_repo], stdout=stdout, stderr=stderr + ) assert isdir(cache_repo) # now clone in to work directory - update = source_dict.get('hg_tag') or 'tip' + update = source_dict.get("hg_tag") or "tip" if verbose: - print('checkout: %r' % update) + print("checkout: %r" % update) - check_call_env(['hg', 'clone', cache_repo, src_dir], stdout=stdout, - stderr=stderr) - check_call_env(['hg', 'update', '-C', update], cwd=src_dir, stdout=stdout, - stderr=stderr) + check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) + check_call_env( + ["hg", "update", "-C", update], cwd=src_dir, stdout=stdout, stderr=stderr + ) if not verbose: FNULL.close() @@ -440,36 +543,45 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): return src_dir -def svn_source(source_dict, src_dir, svn_cache, verbose=True, timeout=900, locking=True): - ''' Download a source from SVN repo. ''' +def svn_source( + source_dict, src_dir, svn_cache, verbose=True, timeout=900, locking=True +): + """Download a source from SVN repo.""" if verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL def parse_bool(s): - return str(s).lower().strip() in ('yes', 'true', '1', 'on') + return str(s).lower().strip() in ("yes", "true", "1", "on") - svn_url = source_dict['svn_url'] - svn_revision = source_dict.get('svn_rev') or 'head' - svn_ignore_externals = parse_bool(source_dict.get('svn_ignore_externals') or 'no') + svn_url = source_dict["svn_url"] + svn_revision = source_dict.get("svn_rev") or "head" + svn_ignore_externals = parse_bool(source_dict.get("svn_ignore_externals") or "no") if not isdir(svn_cache): os.makedirs(svn_cache) - svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_') + svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_") cache_repo = join(svn_cache, svn_dn) if svn_ignore_externals: - extra_args = ['--ignore-externals'] + extra_args = ["--ignore-externals"] else: extra_args = [] if isdir(cache_repo): - check_call_env(['svn', 'up', '-r', svn_revision] + extra_args, cwd=cache_repo, - stdout=stdout, stderr=stderr) + check_call_env( + ["svn", "up", "-r", svn_revision] + extra_args, + cwd=cache_repo, + stdout=stdout, + stderr=stderr, + ) else: - check_call_env(['svn', 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo], - stdout=stdout, stderr=stderr) + check_call_env( + ["svn", "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo], + stdout=stdout, + stderr=stderr, + ) assert isdir(cache_repo) # now copy into work directory @@ -486,8 +598,9 @@ def get_repository_info(recipe_path): from the source - you can have a recipe in svn that gets source via git.""" try: if exists(join(recipe_path, ".git")): - origin = 
check_output_env(["git", "config", "--get", "remote.origin.url"], - cwd=recipe_path) + origin = check_output_env( + ["git", "config", "--get", "remote.origin.url"], cwd=recipe_path + ) rev = check_output_env(["git", "rev-parse", "HEAD"], cwd=recipe_path) return f"Origin {origin}, commit {rev}" elif isdir(join(recipe_path, ".hg")): @@ -496,19 +609,22 @@ def get_repository_info(recipe_path): return f"Origin {origin}, commit {rev}" elif isdir(join(recipe_path, ".svn")): info = check_output_env(["svn", "info"], cwd=recipe_path) - info = info.decode("utf-8") # Py3 returns a byte string, but re needs unicode or str. + info = info.decode( + "utf-8" + ) # Py3 returns a byte string, but re needs unicode or str. server = re.search("Repository Root: (.*)$", info, flags=re.M).group(1) revision = re.search("Revision: (.*)$", info, flags=re.M).group(1) return f"{server}, Revision {revision}" else: - return "{}, last modified {}".format(recipe_path, - time.ctime(os.path.getmtime( - join(recipe_path, "meta.yaml")))) + return "{}, last modified {}".format( + recipe_path, + time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))), + ) except CalledProcessError: get_logger(__name__).debug("Failed to checkout source in " + recipe_path) - return "{}, last modified {}".format(recipe_path, - time.ctime(os.path.getmtime( - join(recipe_path, "meta.yaml")))) + return "{}, last modified {}".format( + recipe_path, time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))) + ) _RE_LF = re.compile(rb"(?71} ]]".format( - 'R' if pa['reversible'] else '-', - 'A' if pa['applicable'] else '-', - 'Y' if pa['patch_exe'].startswith(build_prefix) else '-', - 'M' if not pa['amalgamated'] else '-', - 'D' if pa['dry_runnable'] else '-', - str(pa['level']), - 'L' if not pa['level_ambiguous'] else '-', - 'O' if not pa['offsets'] else '-', - 'V' if not pa['fuzzy'] else '-', - 'E' if not pa['stderr'] else '-', - rel_path[-71:]) + "R" if pa["reversible"] else "-", + "A" if pa["applicable"] else "-", + "Y" if pa["patch_exe"].startswith(build_prefix) else "-", + "M" if not pa["amalgamated"] else "-", + "D" if pa["dry_runnable"] else "-", + str(pa["level"]), + "L" if not pa["level_ambiguous"] else "-", + "O" if not pa["offsets"] else "-", + "V" if not pa["fuzzy"] else "-", + "E" if not pa["stderr"] else "-", + rel_path[-71:], + ) def _patch_attributes_debug_print(attributes): @@ -595,14 +717,18 @@ def _patch_attributes_debug_print(attributes): print("Patch analysis gives:") print("\n".join(attributes)) print("\nKey:\n") - print("R :: Reversible A :: Applicable\n" - "Y :: Build-prefix patch in use M :: Minimal, non-amalgamated\n" - "D :: Dry-runnable N :: Patch level (1 is preferred)\n" - "L :: Patch level not-ambiguous O :: Patch applies without offsets\n" - "V :: Patch applies without fuzz E :: Patch applies without emitting to stderr\n") - - -def _get_patch_attributes(path, patch_exe, git, src_dir, stdout, stderr, retained_tmpdir=None): + print( + "R :: Reversible A :: Applicable\n" + "Y :: Build-prefix patch in use M :: Minimal, non-amalgamated\n" + "D :: Dry-runnable N :: Patch level (1 is preferred)\n" + "L :: Patch level not-ambiguous O :: Patch applies without offsets\n" + "V :: Patch applies without fuzz E :: Patch applies without emitting to stderr\n" + ) + + +def _get_patch_attributes( + path, patch_exe, git, src_dir, stdout, stderr, retained_tmpdir=None +): from collections import OrderedDict files_list, is_git_format = _get_patch_file_details(path) @@ -612,41 +738,47 @@ def _get_patch_attributes(path, patch_exe, git, 
src_dir, stdout, stderr, retaine amalgamated = True strip_level, strip_level_guessed = _guess_patch_strip_level(files, src_dir) if strip_level: - files = {f.split('/', strip_level)[-1] for f in files} + files = {f.split("/", strip_level)[-1] for f in files} # Defaults - result = {'patch': path, - 'files': files, - 'patch_exe': git if (git and is_git_format) else patch_exe, - 'format': 'git' if is_git_format else 'generic', - # If these remain 'unknown' we had no patch program to test with. - 'dry_runnable': None, - 'applicable': None, - 'reversible': None, - 'amalgamated': amalgamated, - 'offsets': None, - 'fuzzy': None, - 'stderr': None, - 'level': strip_level, - 'level_ambiguous': strip_level_guessed, - 'args': []} + result = { + "patch": path, + "files": files, + "patch_exe": git if (git and is_git_format) else patch_exe, + "format": "git" if is_git_format else "generic", + # If these remain 'unknown' we had no patch program to test with. + "dry_runnable": None, + "applicable": None, + "reversible": None, + "amalgamated": amalgamated, + "offsets": None, + "fuzzy": None, + "stderr": None, + "level": strip_level, + "level_ambiguous": strip_level_guessed, + "args": [], + } crlf = False lf = False - with open(path, errors='ignore') as f: + with open(path, errors="ignore") as f: _content = f.read() - for line in _content.split('\n'): - if line.startswith((' ', '+', '-')): - if line.endswith('\r'): + for line in _content.split("\n"): + if line.startswith((" ", "+", "-")): + if line.endswith("\r"): crlf = True else: lf = True - result['line_endings'] = 'mixed' if (crlf and lf) else 'crlf' if crlf else 'lf' + result["line_endings"] = "mixed" if (crlf and lf) else "crlf" if crlf else "lf" if not patch_exe: - log.warning(f"No patch program found, cannot determine patch attributes for {path}") + log.warning( + f"No patch program found, cannot determine patch attributes for {path}" + ) if not git: - log.error("No git program found either. Please add a dependency for one of these.") + log.error( + "No git program found either. Please add a dependency for one of these." + ) return result class noop_context: @@ -661,28 +793,28 @@ def __enter__(self): def __exit__(self, exc, value, tb): return - fmts = OrderedDict(native=['--binary'], - lf=[], - crlf=[]) + fmts = OrderedDict(native=["--binary"], lf=[], crlf=[]) if patch_exe: # Good, we have a patch executable so we can perform some checks: - with noop_context(retained_tmpdir) if retained_tmpdir else TemporaryDirectory() as tmpdir: + with noop_context( + retained_tmpdir + ) if retained_tmpdir else TemporaryDirectory() as tmpdir: # Make all the fmts. 
- result['patches'] = {} + result["patches"] = {} for fmt, _ in fmts.items(): - new_patch = os.path.join(tmpdir, os.path.basename(path) + f'.{fmt}') - if fmt == 'native': + new_patch = os.path.join(tmpdir, os.path.basename(path) + f".{fmt}") + if fmt == "native": try: shutil.copy2(path, new_patch) except: shutil.copy(path, new_patch) - elif fmt == 'lf': + elif fmt == "lf": _ensure_LF(path, new_patch) - elif fmt == 'crlf': + elif fmt == "crlf": _ensure_CRLF(path, new_patch) - result['patches'][fmt] = new_patch + result["patches"][fmt] = new_patch - tmp_src_dir = os.path.join(tmpdir, 'src_dir') + tmp_src_dir = os.path.join(tmpdir, "src_dir") def copy_to_be_patched_files(src_dir, tmp_src_dir, files): try: @@ -702,26 +834,32 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): shutil.copy2(os.path.join(src_dir, file), dst) copy_to_be_patched_files(src_dir, tmp_src_dir, files) - checks = OrderedDict(dry_runnable=['--dry-run'], - applicable=[], - reversible=['-R']) + checks = OrderedDict( + dry_runnable=["--dry-run"], applicable=[], reversible=["-R"] + ) for check_name, extra_args in checks.items(): for fmt, fmt_args in fmts.items(): - patch_args = ['-Np{}'.format(result['level']), - '-i', result['patches'][fmt]] + extra_args + fmt_args + patch_args = ( + ["-Np{}".format(result["level"]), "-i", result["patches"][fmt]] + + extra_args + + fmt_args + ) try: env = os.environ.copy() - env['LC_ALL'] = 'C' - from subprocess import Popen, PIPE - process = Popen([patch_exe] + patch_args, - cwd=tmp_src_dir, - stdout=PIPE, - stderr=PIPE, - shell=False) + env["LC_ALL"] = "C" + from subprocess import PIPE, Popen + + process = Popen( + [patch_exe] + patch_args, + cwd=tmp_src_dir, + stdout=PIPE, + stderr=PIPE, + shell=False, + ) output, error = process.communicate() - result['offsets'] = b'offset' in output - result['fuzzy'] = b'fuzz' in output - result['stderr'] = bool(error) + result["offsets"] = b"offset" in output + result["fuzzy"] = b"fuzz" in output + result["stderr"] = bool(error) if stdout: stdout.write(output) if stderr: @@ -733,12 +871,12 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): else: result[check_name] = fmt # Save the first one found. - if check_name == 'applicable' and not result['args']: - result['args'] = patch_args + if check_name == "applicable" and not result["args"]: + result["args"] = patch_args break - if not retained_tmpdir and 'patches' in result: - del result['patches'] + if not retained_tmpdir and "patches" in result: + del result["patches"] return result @@ -746,7 +884,7 @@ def copy_to_be_patched_files(src_dir, tmp_src_dir, files): def apply_one_patch(src_dir, recipe_dir, rel_path, config, git=None): path = os.path.join(recipe_dir, rel_path) if config.verbose: - print(f'Applying patch: {path}') + print(f"Applying patch: {path}") def try_apply_patch(patch, patch_args, cwd, stdout, stderr): # An old reference: https://unix.stackexchange.com/a/243748/34459 @@ -775,32 +913,38 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): # Some may bemoan the loss of patch failure artifacts, but it is fairly random which # patch and patch attempt they apply to so their informational value is low, besides that, # they are ugly. 
- temp_name = os.path.join(tempfile.gettempdir(), next(tempfile._get_candidate_names())) - base_patch_args = ['--no-backup-if-mismatch', '--batch'] + patch_args + temp_name = os.path.join( + tempfile.gettempdir(), next(tempfile._get_candidate_names()) + ) + base_patch_args = ["--no-backup-if-mismatch", "--batch"] + patch_args try: try_patch_args = base_patch_args[:] - try_patch_args.append('--dry-run') + try_patch_args.append("--dry-run") log.debug(f"dry-run applying with\n{patch} {try_patch_args}") - check_call_env([patch] + try_patch_args, cwd=cwd, stdout=stdout, stderr=stderr) + check_call_env( + [patch] + try_patch_args, cwd=cwd, stdout=stdout, stderr=stderr + ) # You can use this to pretend the patch failed so as to test reversal! # raise CalledProcessError(-1, ' '.join([patch] + patch_args)) except Exception as e: raise e else: - check_call_env([patch] + base_patch_args, cwd=cwd, stdout=stdout, stderr=stderr) + check_call_env( + [patch] + base_patch_args, cwd=cwd, stdout=stdout, stderr=stderr + ) finally: if os.path.exists(temp_name): os.unlink(temp_name) exception = None if not isfile(path): - raise RuntimeError('Error: no such patch: %s' % path) + raise RuntimeError("Error: no such patch: %s" % path) if config.verbose: stdout = None stderr = None else: - FNULL = open(os.devnull, 'wb') + FNULL = open(os.devnull, "wb") stdout = FNULL stderr = FNULL @@ -812,29 +956,39 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): if not patch_exe: raise MissingDependency("Failed to find conda-build dependency: 'patch'") with TemporaryDirectory() as tmpdir: - patch_attributes = _get_patch_attributes(path, patch_exe, git, src_dir, stdout, stderr, tmpdir) - attributes_output += _patch_attributes_debug(patch_attributes, rel_path, config.build_prefix) - if git and patch_attributes['format'] == 'git': + patch_attributes = _get_patch_attributes( + path, patch_exe, git, src_dir, stdout, stderr, tmpdir + ) + attributes_output += _patch_attributes_debug( + patch_attributes, rel_path, config.build_prefix + ) + if git and patch_attributes["format"] == "git": # Prevents git from asking interactive questions, # also necessary to achieve sha1 reproducibility; # as is --committer-date-is-author-date. By this, # we mean a round-trip of git am/git format-patch # gives the same file. 
git_env = os.environ - git_env['GIT_COMMITTER_NAME'] = 'conda-build' - git_env['GIT_COMMITTER_EMAIL'] = 'conda@conda-build.org' - check_call_env([git, 'am', '-3', '--committer-date-is-author-date', path], - cwd=src_dir, stdout=stdout, stderr=stderr, env=git_env) + git_env["GIT_COMMITTER_NAME"] = "conda-build" + git_env["GIT_COMMITTER_EMAIL"] = "conda@conda-build.org" + check_call_env( + [git, "am", "-3", "--committer-date-is-author-date", path], + cwd=src_dir, + stdout=stdout, + stderr=stderr, + env=git_env, + ) config.git_commits_since_tag += 1 else: - patch_args = patch_attributes['args'] + patch_args = patch_attributes["args"] if config.verbose: - print(f'Applying patch: {path} with args:\n{patch_args}') + print(f"Applying patch: {path} with args:\n{patch_args}") try: - try_apply_patch(patch_exe, patch_args, - cwd=src_dir, stdout=stdout, stderr=stderr) + try_apply_patch( + patch_exe, patch_args, cwd=src_dir, stdout=stdout, stderr=stderr + ) except Exception as e: exception = e if exception: @@ -843,7 +997,9 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): def apply_patch(src_dir, patch, config, git=None): - apply_one_patch(src_dir, os.path.dirname(patch), os.path.basename(patch), config, git) + apply_one_patch( + src_dir, os.path.dirname(patch), os.path.basename(patch), config, git + ) def provide(metadata): @@ -853,47 +1009,70 @@ def provide(metadata): - unpack - apply patches (if any) """ - meta = metadata.get_section('source') + meta = metadata.get_section("source") if not os.path.isdir(metadata.config.build_folder): os.makedirs(metadata.config.build_folder) git = None - if hasattr(meta, 'keys'): + if hasattr(meta, "keys"): dicts = [meta] else: dicts = meta try: for source_dict in dicts: - folder = source_dict.get('folder') - src_dir = os.path.join(metadata.config.work_dir, folder if folder else '') - if any(k in source_dict for k in ('fn', 'url')): - unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path, - croot=metadata.config.croot, verbose=metadata.config.verbose, - timeout=metadata.config.timeout, locking=metadata.config.locking) - elif 'git_url' in source_dict: - git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path, - verbose=metadata.config.verbose) + folder = source_dict.get("folder") + src_dir = os.path.join(metadata.config.work_dir, folder if folder else "") + if any(k in source_dict for k in ("fn", "url")): + unpack( + source_dict, + src_dir, + metadata.config.src_cache, + recipe_path=metadata.path, + croot=metadata.config.croot, + verbose=metadata.config.verbose, + timeout=metadata.config.timeout, + locking=metadata.config.locking, + ) + elif "git_url" in source_dict: + git = git_source( + source_dict, + metadata.config.git_cache, + src_dir, + metadata.path, + verbose=metadata.config.verbose, + ) # build to make sure we have a work directory with source in it. We # want to make sure that whatever version that is does not # interfere with the test we run next. 
- elif 'hg_url' in source_dict: - hg_source(source_dict, src_dir, metadata.config.hg_cache, - verbose=metadata.config.verbose) - elif 'svn_url' in source_dict: - svn_source(source_dict, src_dir, metadata.config.svn_cache, - verbose=metadata.config.verbose, timeout=metadata.config.timeout, - locking=metadata.config.locking) - elif 'path' in source_dict: - source_path = os.path.expanduser(source_dict['path']) + elif "hg_url" in source_dict: + hg_source( + source_dict, + src_dir, + metadata.config.hg_cache, + verbose=metadata.config.verbose, + ) + elif "svn_url" in source_dict: + svn_source( + source_dict, + src_dir, + metadata.config.svn_cache, + verbose=metadata.config.verbose, + timeout=metadata.config.timeout, + locking=metadata.config.locking, + ) + elif "path" in source_dict: + source_path = os.path.expanduser(source_dict["path"]) path = normpath(abspath(join(metadata.path, source_path))) - path_via_symlink = 'path_via_symlink' in source_dict + path_via_symlink = "path_via_symlink" in source_dict if path_via_symlink and not folder: - print("WARNING: `path_via_symlink` is too dangerous without specifying a folder,\n" - " conda could end up changing - or deleting - your local source code!\n" - " Going to make copies instead. When using `path_via_symlink` you should\n" - " also take care to run the build outside of your local source code folder(s)\n" - " unless that is your intention.") + print( + "WARNING: `path_via_symlink` is too dangerous without specifying a folder,\n" + " conda could end up changing - or deleting - your local source code!\n" + " Going to make copies instead. When using `path_via_symlink` you should\n" + " also take care to run the build outside of your local source code folder(s)\n" + " unless that is your intention." + ) path_via_symlink = False sys.exit(1) if path_via_symlink: @@ -908,20 +1087,30 @@ def provide(metadata): print(f"Copying {path} to {src_dir}") # careful here: we set test path to be outside of conda-build root in setup.cfg. 
# If you don't do that, this is a recursive function - copy_into(path, src_dir, metadata.config.timeout, symlinks=True, - locking=metadata.config.locking, clobber=True) + copy_into( + path, + src_dir, + metadata.config.timeout, + symlinks=True, + locking=metadata.config.locking, + clobber=True, + ) else: # no source if not isdir(src_dir): os.makedirs(src_dir) - patches = ensure_list(source_dict.get('patches', [])) + patches = ensure_list(source_dict.get("patches", [])) patch_attributes_output = [] for patch in patches: - patch_attributes_output += [apply_one_patch(src_dir, metadata.path, patch, metadata.config, git)] + patch_attributes_output += [ + apply_one_patch(src_dir, metadata.path, patch, metadata.config, git) + ] _patch_attributes_debug_print(patch_attributes_output) except CalledProcessError: - shutil.move(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide') + shutil.move( + metadata.config.work_dir, metadata.config.work_dir + "_failed_provide" + ) raise return metadata.config.work_dir diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index ac8cebd769..7884066014 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -1,19 +1,19 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from os.path import basename, normpath import tarfile +from os.path import basename, normpath from conda_build.utils import codec, filter_info_files def dist_fn(fn): - if fn.endswith('.tar'): + if fn.endswith(".tar"): return fn[:-4] - elif fn.endswith('.tar.bz2'): + elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception('did not expect filename: %r' % fn) + raise Exception("did not expect filename: %r" % fn) class TarCheck: @@ -21,7 +21,9 @@ def __init__(self, path, config): self.t = tarfile.open(path) self.paths = {m.path for m in self.t.getmembers()} self.dist = dist_fn(basename(path)) - self.name, self.version, self.build = self.dist.split('::', 1)[-1].rsplit('-', 2) + self.name, self.version, self.build = self.dist.split("::", 1)[-1].rsplit( + "-", 2 + ) self.config = config def __enter__(self): @@ -31,57 +33,67 @@ def __exit__(self, e_type, e_value, traceback): self.t.close() def info_files(self): - lista = [normpath(p.strip().decode('utf-8')) for p in - self.t.extractfile('info/files').readlines()] + lista = [ + normpath(p.strip().decode("utf-8")) + for p in self.t.extractfile("info/files").readlines() + ] seta = set(lista) if len(lista) != len(seta): - raise Exception('info/files: duplicates') + raise Exception("info/files: duplicates") files_in_tar = [normpath(m.path) for m in self.t.getmembers()] - files_in_tar = filter_info_files(files_in_tar, '') + files_in_tar = filter_info_files(files_in_tar, "") setb = set(files_in_tar) if len(files_in_tar) != len(setb): - raise Exception('info_files: duplicate members') + raise Exception("info_files: duplicate members") if seta == setb: return for p in sorted(seta | setb): if p not in seta: - print('%r not in info/files' % p) + print("%r not in info/files" % p) if p not in setb: - print('%r not in tarball' % p) - raise Exception('info/files') + print("%r not in tarball" % p) + raise Exception("info/files") def index_json(self): - info = json.loads(self.t.extractfile('info/index.json').read().decode('utf-8')) - for varname in 'name', 'version': + info = json.loads(self.t.extractfile("info/index.json").read().decode("utf-8")) + for varname in "name", "version": if info[varname] != getattr(self, varname): - raise Exception('{}: {!r} != {!r}'.format(varname, 
info[varname], - getattr(self, varname))) - assert isinstance(info['build_number'], int) + raise Exception( + "{}: {!r} != {!r}".format( + varname, info[varname], getattr(self, varname) + ) + ) + assert isinstance(info["build_number"], int) def prefix_length(self): prefix_length = None - if 'info/has_prefix' in self.t.getnames(): - prefix_files = self.t.extractfile('info/has_prefix').readlines() + if "info/has_prefix" in self.t.getnames(): + prefix_files = self.t.extractfile("info/has_prefix").readlines() for line in prefix_files: try: prefix, file_type, _ = line.split() # lines not conforming to the split except ValueError: continue - if hasattr(file_type, 'decode'): + if hasattr(file_type, "decode"): file_type = file_type.decode(codec) - if file_type == 'binary': + if file_type == "binary": prefix_length = len(prefix) break return prefix_length def correct_subdir(self): - info = json.loads(self.t.extractfile('info/index.json').read().decode('utf-8')) - assert info['subdir'] in [self.config.host_subdir, 'noarch', self.config.target_subdir], \ - ("Inconsistent subdir in package - index.json expecting {}," - " got {}".format(self.config.host_subdir, info['subdir'])) + info = json.loads(self.t.extractfile("info/index.json").read().decode("utf-8")) + assert info["subdir"] in [ + self.config.host_subdir, + "noarch", + self.config.target_subdir, + ], ( + "Inconsistent subdir in package - index.json expecting {}," + " got {}".format(self.config.host_subdir, info["subdir"]) + ) def check_all(path, config): diff --git a/conda_build/utils.py b/conda_build/utils.py index 5c8affae92..58f33eecde 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1,56 +1,56 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import OrderedDict, defaultdict import contextlib import fnmatch -from functools import lru_cache import hashlib -from itertools import filterfalse import json -from locale import getpreferredencoding -import libarchive import logging import logging.config import mmap import os +import re +import shutil +import stat +import subprocess +import sys +import tarfile +import tempfile +import time +from collections import OrderedDict, defaultdict +from functools import lru_cache +from itertools import filterfalse +from locale import getpreferredencoding from os.path import ( + abspath, dirname, + expanduser, + expandvars, getmtime, getsize, isdir, - join, isfile, - abspath, islink, - expanduser, - expandvars, + join, ) -import re -import stat -import subprocess -import sys -import shutil -import tarfile -import tempfile -from threading import Thread -import time from pathlib import Path +from threading import Thread + +import libarchive try: from json.decoder import JSONDecodeError except ImportError: JSONDecodeError = ValueError -import yaml - -import filelock import conda_package_handling.api +import filelock +import yaml try: from conda.base.constants import ( - CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, + CONDA_PACKAGE_EXTENSIONS, ) except Exception: from conda.base.constants import ( @@ -60,36 +60,35 @@ CONDA_PACKAGE_EXTENSION_V2 = ".conda" CONDA_PACKAGE_EXTENSIONS = (CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) -from conda.api import PackageCacheData # noqa +import urllib.parse as urlparse +import urllib.request as urllib +from glob import glob as glob_glob -from .conda_interface import ( - hashsum_file, - md5_file, - unix_path_to_win, - win_path_to_unix, -) # noqa -from .conda_interface 
import root_dir, pkgs_dirs # noqa -from .conda_interface import StringIO # noqa -from .conda_interface import VersionOrder, MatchSpec # noqa -from .conda_interface import cc_conda_build # noqa -from .conda_interface import Dist # noqa -from .conda_interface import context # noqa -from .conda_interface import ( - download, - TemporaryDirectory, - get_conda_channel, - CondaHTTPError, -) # noqa +from conda.api import PackageCacheData # noqa # NOQA because it is not used in this file. from conda_build.conda_interface import rm_rf as _rm_rf # noqa from conda_build.exceptions import BuildLockError # noqa from conda_build.os_utils import external # noqa -import urllib.parse as urlparse -import urllib.request as urllib - -from glob import glob as glob_glob +from .conda_interface import Dist # noqa +from .conda_interface import StringIO # noqa +from .conda_interface import cc_conda_build # noqa +from .conda_interface import context # noqa +from .conda_interface import ( # noqa + CondaHTTPError, + MatchSpec, + TemporaryDirectory, + VersionOrder, + download, + get_conda_channel, + hashsum_file, + md5_file, + pkgs_dirs, + root_dir, + unix_path_to_win, + win_path_to_unix, +) # stdlib glob is less feature-rich but considerably faster than glob2 @@ -1343,7 +1342,8 @@ def find_recipe(path): Returns full path to meta file to be built. - If we have a base level meta file and other supplemental (nested) ones, use the base level.""" + If we have a base level meta file and other supplemental (nested) ones, use the base level. + """ # if initial path is absolute then any path we find (via rec_glob) # will also be absolute if not os.path.isabs(path): diff --git a/conda_build/variants.py b/conda_build/variants.py index 6e1295459c..4e6f47eba8 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -3,115 +3,130 @@ """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" +import os.path +import re +import sys from collections import OrderedDict from copy import copy from functools import lru_cache from itertools import product -import os.path -import re -import sys import yaml -from conda_build.conda_interface import subdir -from conda_build.conda_interface import cc_conda_build -from conda_build.version import _parse as parse_version +from conda_build.conda_interface import cc_conda_build, subdir from conda_build.utils import ensure_list, get_logger, islist, on_win, trim_empty_keys +from conda_build.version import _parse as parse_version DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", "numpy": "1.21", # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. 
- 'perl': '5.26.2', - 'lua': '5', - 'r_base': '3.4' if on_win else '3.5', - 'cpu_optimization_target': 'nocona', - 'pin_run_as_build': OrderedDict(python=OrderedDict(min_pin='x.x', max_pin='x.x')), - 'ignore_version': [], - 'ignore_build_only_deps': ['python', 'numpy'], - 'extend_keys': ['pin_run_as_build', 'ignore_version', 'ignore_build_only_deps', 'extend_keys'], - 'cran_mirror': "https://cran.r-project.org", + "perl": "5.26.2", + "lua": "5", + "r_base": "3.4" if on_win else "3.5", + "cpu_optimization_target": "nocona", + "pin_run_as_build": OrderedDict(python=OrderedDict(min_pin="x.x", max_pin="x.x")), + "ignore_version": [], + "ignore_build_only_deps": ["python", "numpy"], + "extend_keys": [ + "pin_run_as_build", + "ignore_version", + "ignore_build_only_deps", + "extend_keys", + ], + "cran_mirror": "https://cran.r-project.org", } # set this outside the initialization because of the dash in the key -DEFAULT_VARIANTS['pin_run_as_build']['r-base'] = OrderedDict(min_pin='x.x', max_pin='x.x') +DEFAULT_VARIANTS["pin_run_as_build"]["r-base"] = OrderedDict( + min_pin="x.x", max_pin="x.x" +) # map python version to default compiler on windows, to match upstream python # This mapping only sets the "native" compiler, and can be overridden by specifying a compiler # in the conda-build variant configuration DEFAULT_COMPILERS = { - 'win': { - 'c': { - '2.7': 'vs2008', - '3.3': 'vs2010', - '3.4': 'vs2010', - '3.5': 'vs2017', + "win": { + "c": { + "2.7": "vs2008", + "3.3": "vs2010", + "3.4": "vs2010", + "3.5": "vs2017", }, - 'cxx': { - '2.7': 'vs2008', - '3.3': 'vs2010', - '3.4': 'vs2010', - '3.5': 'vs2017', + "cxx": { + "2.7": "vs2008", + "3.3": "vs2010", + "3.4": "vs2010", + "3.5": "vs2017", }, - 'vc': { - '2.7': '9', - '3.3': '10', - '3.4': '10', - '3.5': '14', + "vc": { + "2.7": "9", + "3.3": "10", + "3.4": "10", + "3.5": "14", }, - 'fortran': 'gfortran', + "fortran": "gfortran", }, - 'linux': { - 'c': 'gcc', - 'cxx': 'gxx', - 'fortran': 'gfortran', + "linux": { + "c": "gcc", + "cxx": "gxx", + "fortran": "gfortran", }, - 'osx': { - 'c': 'clang', - 'cxx': 'clangxx', - 'fortran': 'gfortran', + "osx": { + "c": "clang", + "cxx": "clangxx", + "fortran": "gfortran", }, } -arch_name = subdir.rsplit('-', 1)[-1] +arch_name = subdir.rsplit("-", 1)[-1] -SUFFIX_MAP = {'PY': 'python', - 'NPY': 'numpy', - 'LUA': 'lua', - 'PERL': 'perl', - 'R': 'r_base'} +SUFFIX_MAP = { + "PY": "python", + "NPY": "numpy", + "LUA": "lua", + "PERL": "perl", + "R": "r_base", +} @lru_cache(maxsize=None) def _get_default_compilers(platform, py_ver): compilers = DEFAULT_COMPILERS[platform].copy() - if platform == 'win': - if parse_version(py_ver) >= parse_version('3.5'): - py_ver = '3.5' - elif parse_version(py_ver) <= parse_version('3.2'): - py_ver = '2.7' - compilers['c'] = compilers['c'][py_ver] - compilers['cxx'] = compilers['cxx'][py_ver] - compilers = {lang + '_compiler': pkg_name - for lang, pkg_name in compilers.items() if lang != 'vc'} + if platform == "win": + if parse_version(py_ver) >= parse_version("3.5"): + py_ver = "3.5" + elif parse_version(py_ver) <= parse_version("3.2"): + py_ver = "2.7" + compilers["c"] = compilers["c"][py_ver] + compilers["cxx"] = compilers["cxx"][py_ver] + compilers = { + lang + "_compiler": pkg_name + for lang, pkg_name in compilers.items() + if lang != "vc" + } # this one comes after, because it's not a _compiler key - if platform == 'win': - compilers['vc'] = DEFAULT_COMPILERS[platform]['vc'][py_ver] + if platform == "win": + compilers["vc"] = DEFAULT_COMPILERS[platform]["vc"][py_ver] return 
compilers def get_default_variant(config): base = DEFAULT_VARIANTS.copy() - base['target_platform'] = config.subdir - python = base['python'] if (not hasattr(config, 'variant') or - not config.variant.get('python')) else config.variant['python'] + base["target_platform"] = config.subdir + python = ( + base["python"] + if (not hasattr(config, "variant") or not config.variant.get("python")) + else config.variant["python"] + ) base.update(_get_default_compilers(config.platform, python)) return base def parse_config_file(path, config): - from conda_build.metadata import select_lines, ns_cfg + from conda_build.metadata import ns_cfg, select_lines + with open(path) as f: contents = f.read() contents = select_lines(contents, ns_cfg(config), variants_in_place=False) @@ -125,9 +140,7 @@ def validate_spec(src, spec): # check for invalid characters errors.extend( - f" {k} key contains an invalid character '-'" - for k in spec - if "-" in k + f" {k} key contains an invalid character '-'" for k in spec if "-" in k ) # check for properly formatted zip_key @@ -162,13 +175,18 @@ def validate_spec(src, spec): for zg in zip_keys # include error if all zip fields in a zip_group are the same size, # ignore missing fields - if len({len(ensure_list(spec[k])) if k in spec else None for k in zg} - {None}) > 1 + if len( + {len(ensure_list(spec[k])) if k in spec else None for k in zg} - {None} + ) + > 1 ) # filter out None values that were potentially added above errors = list(filter(None, errors)) if errors: - raise ValueError("Variant configuration errors in {}:\n{}".format(src, "\n".join(errors))) + raise ValueError( + "Variant configuration errors in {}:\n{}".format(src, "\n".join(errors)) + ) def find_config_files(metadata_or_path, config): @@ -199,14 +217,14 @@ def find_config_files(metadata_or_path, config): if not files and not config.ignore_system_variants: # user config - if cc_conda_build.get('config_file'): - cfg = resolve(cc_conda_build['config_file']) + if cc_conda_build.get("config_file"): + cfg = resolve(cc_conda_build["config_file"]) else: - cfg = resolve(os.path.join('~', "conda_build_config.yaml")) + cfg = resolve(os.path.join("~", "conda_build_config.yaml")) if os.path.isfile(cfg): files.append(cfg) - cfg = resolve('conda_build_config.yaml') + cfg = resolve("conda_build_config.yaml") if os.path.isfile(cfg): files.append(cfg) @@ -220,8 +238,9 @@ def find_config_files(metadata_or_path, config): return files -def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_keys=None, - log_output=True): +def _combine_spec_dictionaries( + specs, extend_keys=None, filter_keys=None, zip_keys=None, log_output=True +): # each spec is a dictionary. Each subsequent spec replaces the previous one. # Only the last one with the key stays. 
values = {} @@ -237,8 +256,8 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke if not keys or k in keys: if k in extend_keys: # update dictionaries, extend lists - if hasattr(v, 'keys'): - if k in values and hasattr(values[k], 'keys'): + if hasattr(v, "keys"): + if k in values and hasattr(values[k], "keys"): values[k].update(v) else: values[k] = v.copy() @@ -247,17 +266,19 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke values[k].extend(ensure_list(v)) # uniquify values[k] = list(set(values[k])) - elif k == 'zip_keys': + elif k == "zip_keys": v = [subval for subval in v if subval] if not isinstance(v[0], list) and not isinstance(v[0], tuple): v = [v] # should always be a list of lists, but users may specify as just a list values[k] = values.get(k, []) values[k].extend(v) - values[k] = list(list(set_group) for set_group in {tuple(group) - for group in values[k]}) + values[k] = list( + list(set_group) + for set_group in {tuple(group) for group in values[k]} + ) else: - if hasattr(v, 'keys'): + if hasattr(v, "keys"): values[k] = v.copy() else: # default "group" is just this one key. We latch onto other groups if @@ -273,31 +294,57 @@ def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_ke # Otherwise, we filter later. if all(group_item in spec for group_item in keys_in_group): for group_item in keys_in_group: - if len(ensure_list(spec[group_item])) != len(ensure_list(v)): - raise ValueError("All entries associated by a zip_key " - "field must be the same length. In {}, {} and {} are " - "different ({} and {})".format(spec_source, k, group_item, - len(ensure_list(v)), - len(ensure_list(spec[group_item])))) + if len(ensure_list(spec[group_item])) != len( + ensure_list(v) + ): + raise ValueError( + "All entries associated by a zip_key " + "field must be the same length. In {}, {} and {} are " + "different ({} and {})".format( + spec_source, + k, + group_item, + len(ensure_list(v)), + len(ensure_list(spec[group_item])), + ) + ) values[group_item] = ensure_list(spec[group_item]) elif k in values: for group_item in keys_in_group: - if group_item in spec and \ - len(ensure_list(spec[group_item])) != len(ensure_list(v)): + if group_item in spec and len( + ensure_list(spec[group_item]) + ) != len(ensure_list(v)): break - if group_item in values and \ - len(ensure_list(values[group_item])) != len(ensure_list(v)): + if group_item in values and len( + ensure_list(values[group_item]) + ) != len(ensure_list(v)): break else: values[k] = v.copy() - missing_subvalues = [subvalue for subvalue in ensure_list(v) if subvalue not in values[k]] - missing_group_items = [group_item for group_item in keys_in_group if group_item not in spec] + missing_subvalues = [ + subvalue + for subvalue in ensure_list(v) + if subvalue not in values[k] + ] + missing_group_items = [ + group_item + for group_item in keys_in_group + if group_item not in spec + ] if len(missing_subvalues): - raise ValueError("variant config in {} is ambiguous because it\n" + raise ValueError( + "variant config in {} is ambiguous because it\n" "does not fully implement all zipped keys (To be clear: missing {})\n" "or specifies a subspace that is not fully implemented (To be clear:\n" - ".. we did not find {} from {} in {}:{}).". - format(spec_source, missing_group_items, missing_subvalues, spec, k, values[k])) + ".. 
we did not find {} from {} in {}:{}).".format( + spec_source, + missing_group_items, + missing_subvalues, + spec, + k, + values[k], + ) + ) return values @@ -311,18 +358,25 @@ def combine_specs(specs, log_output=True): names used in Jinja2 templated recipes. Values can be either single values (strings or integers), or collections (lists, tuples, sets). """ - extend_keys = DEFAULT_VARIANTS['extend_keys'][:] - extend_keys.extend([key for spec in specs.values() if spec - for key in ensure_list(spec.get('extend_keys'))]) + extend_keys = DEFAULT_VARIANTS["extend_keys"][:] + extend_keys.extend( + [ + key + for spec in specs.values() + if spec + for key in ensure_list(spec.get("extend_keys")) + ] + ) # first pass gets zip_keys entries from each and merges them. We treat these specially # below, keeping the size of related fields identical, or else the zipping makes no sense - zip_keys = _combine_spec_dictionaries(specs, extend_keys=extend_keys, - filter_keys=['zip_keys'], - log_output=log_output).get('zip_keys', []) - values = _combine_spec_dictionaries(specs, extend_keys=extend_keys, zip_keys=zip_keys, - log_output=log_output) + zip_keys = _combine_spec_dictionaries( + specs, extend_keys=extend_keys, filter_keys=["zip_keys"], log_output=log_output + ).get("zip_keys", []) + values = _combine_spec_dictionaries( + specs, extend_keys=extend_keys, zip_keys=zip_keys, log_output=log_output + ) return values @@ -337,9 +391,9 @@ def set_language_env_vars(variant): if variant_name in variant: value = str(variant[variant_name]) # legacy compatibility: python should be just first - if env_var_name == 'PY': - value = ''.join(value.split('.')[:2]) - env['CONDA_' + env_var_name] = value + if env_var_name == "PY": + value = "".join(value.split(".")[:2]) + env["CONDA_" + env_var_name] = value return env @@ -354,12 +408,14 @@ def _get_zip_keys(spec): :rtype: set :raise ValueError: 'zip_keys' cannot be standardized """ - zip_keys = spec.get('zip_keys') + zip_keys = spec.get("zip_keys") if not zip_keys: return set() elif islist(zip_keys, uniform=lambda e: isinstance(e, str)): return {frozenset(zip_keys)} - elif islist(zip_keys, uniform=lambda e: islist(e, uniform=lambda e: isinstance(e, str))): + elif islist( + zip_keys, uniform=lambda e: islist(e, uniform=lambda e: isinstance(e, str)) + ): return {frozenset(zg) for zg in zip_keys} raise ValueError("'zip_keys' expect list of string or list of lists of string") @@ -376,10 +432,10 @@ def _get_extend_keys(spec, include_defaults=True): :return: Standardized 'extend_keys' value :rtype: set """ - extend_keys = {'zip_keys', 'extend_keys'} + extend_keys = {"zip_keys", "extend_keys"} if include_defaults: - extend_keys.update(DEFAULT_VARIANTS['extend_keys']) - return extend_keys.union(ensure_list(spec.get('extend_keys'))) + extend_keys.update(DEFAULT_VARIANTS["extend_keys"]) + return extend_keys.union(ensure_list(spec.get("extend_keys"))) def _get_passthru_keys(spec, zip_keys=None, extend_keys=None): @@ -400,7 +456,7 @@ def _get_passthru_keys(spec, zip_keys=None, extend_keys=None): zip_keys = _get_zip_keys(spec) if extend_keys is None: extend_keys = _get_extend_keys(spec) - passthru_keys = {'replacements', 'extend_keys', 'zip_keys'} + passthru_keys = {"replacements", "extend_keys", "zip_keys"} return passthru_keys.union(extend_keys).difference(*zip_keys).intersection(spec) @@ -431,7 +487,7 @@ def filter_by_key_value(variants, key, values, source_name): """variants is the exploded out list of dicts, with one value per key in each dict. 
key and values come from subsequent variants before they are exploded out.""" reduced_variants = [] - if hasattr(values, 'keys'): + if hasattr(values, "keys"): reduced_variants = variants else: # break this out into a full loop so that we can show filtering output @@ -440,10 +496,15 @@ def filter_by_key_value(variants, key, values, source_name): reduced_variants.append(variant) else: log = get_logger(__name__) - log.debug('Filtering variant with key {key} not matching target value(s) ' - '({tgt_vals}) from {source_name}, actual {actual_val}'.format( - key=key, tgt_vals=values, source_name=source_name, - actual_val=variant.get(key))) + log.debug( + "Filtering variant with key {key} not matching target value(s) " + "({tgt_vals}) from {source_name}, actual {actual_val}".format( + key=key, + tgt_vals=values, + source_name=source_name, + actual_val=variant.get(key), + ) + ) return reduced_variants @@ -505,7 +566,9 @@ def explode_variants(spec): (k,): [ensure_list(v, include_dict=False) for v in ensure_list(spec[k])] for k in explode_keys.difference(*zip_keys) } - explode.update({zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys}) + explode.update( + {zg: list(zip(*(ensure_list(spec[k]) for k in zg))) for zg in zip_keys} + ) trim_empty_keys(explode) # Cartesian Product of dict of lists @@ -514,7 +577,9 @@ def explode_variants(spec): variants = [] for values in product(*explode.values()): variant = {k: copy(v) for k, v in passthru.items()} - variant.update({k: v for zg, zv in zip(explode, values) for k, v in zip(zg, zv)}) + variant.update( + {k: v for zg, zv in zip(explode, values) for k, v in zip(zg, zv)} + ) variants.append(variant) return variants @@ -534,22 +599,26 @@ def list_of_dicts_to_dict_of_lists(list_of_dicts): squished = OrderedDict() all_zip_keys = set() groups = None - zip_key_groups = (list_of_dicts[0]['zip_keys'] if 'zip_keys' in list_of_dicts[0] and - list_of_dicts[0]['zip_keys'] else []) + zip_key_groups = ( + list_of_dicts[0]["zip_keys"] + if "zip_keys" in list_of_dicts[0] and list_of_dicts[0]["zip_keys"] + else [] + ) if zip_key_groups: - if (isinstance(list_of_dicts[0]['zip_keys'][0], list) or - isinstance(list_of_dicts[0]['zip_keys'][0], tuple)): - groups = list_of_dicts[0]['zip_keys'] + if isinstance(list_of_dicts[0]["zip_keys"][0], list) or isinstance( + list_of_dicts[0]["zip_keys"][0], tuple + ): + groups = list_of_dicts[0]["zip_keys"] else: - groups = [list_of_dicts[0]['zip_keys']] + groups = [list_of_dicts[0]["zip_keys"]] for group in groups: for item in group: all_zip_keys.add(item) for variant in list_of_dicts: for k, v in variant.items(): - if k == 'zip_keys': + if k == "zip_keys": continue - if hasattr(v, 'keys'): + if hasattr(v, "keys"): existing_value = squished.get(k, OrderedDict()) existing_value.update(v) squished[k] = existing_value @@ -565,7 +634,7 @@ def list_of_dicts_to_dict_of_lists(list_of_dicts): values = list(zip(*set(zip(*(squished[key] for key in group))))) for idx, key in enumerate(group): squished[key] = values[idx] - squished['zip_keys'] = zip_key_groups + squished["zip_keys"] = zip_key_groups return squished @@ -573,10 +642,11 @@ def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None) # outputs a tuple of (combined_spec_dict_of_lists, used_spec_file_dict) # # The output of this function is order preserving, unlike get_package_variants - if hasattr(recipedir_or_metadata, 'config'): + if hasattr(recipedir_or_metadata, "config"): config = recipedir_or_metadata.config if not config: from conda_build.config 
import Config + config = Config() files = find_config_files(recipedir_or_metadata, config) @@ -586,10 +656,10 @@ def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None) specs[f] = parse_config_file(f, config) # this is the override of the variants from files and args with values from CLI or env vars - if hasattr(config, 'variant') and config.variant: - specs['config.variant'] = config.variant + if hasattr(config, "variant") and config.variant: + specs["config.variant"] = config.variant if variants: - specs['argument_variants'] = variants + specs["argument_variants"] = variants for f, spec in specs.items(): validate_spec(f, spec) @@ -605,7 +675,7 @@ def filter_combined_spec_to_used_keys(combined_spec, specs): # delete the default specs, so that they don't unnecessarily limit the matrix specs = specs.copy() - del specs['internal_defaults'] + del specs["internal_defaults"] # TODO: act here? combined_spec = explode_variants(combined_spec) @@ -615,24 +685,34 @@ def filter_combined_spec_to_used_keys(combined_spec, specs): # when filtering ends up killing off all variants, we just ignore that. Generally, # this arises when a later variant config overrides, rather than selects a # subspace of earlier configs - combined_spec = (filter_by_key_value(combined_spec, k, vs, source_name=source) or - combined_spec) + combined_spec = ( + filter_by_key_value(combined_spec, k, vs, source_name=source) + or combined_spec + ) return combined_spec def get_package_variants(recipedir_or_metadata, config=None, variants=None): - combined_spec, specs = get_package_combined_spec(recipedir_or_metadata, config=config, variants=variants) + combined_spec, specs = get_package_combined_spec( + recipedir_or_metadata, config=config, variants=variants + ) return filter_combined_spec_to_used_keys(combined_spec, specs=specs) def get_vars(variants, loop_only=False): """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" - special_keys = {'pin_run_as_build', 'zip_keys', 'ignore_version'} - special_keys.update(set(ensure_list(variants[0].get('extend_keys')))) - loop_vars = [k for k in variants[0] if k not in special_keys and - (not loop_only or - any(variant[k] != variants[0][k] for variant in variants[1:]))] + special_keys = {"pin_run_as_build", "zip_keys", "ignore_version"} + special_keys.update(set(ensure_list(variants[0].get("extend_keys")))) + loop_vars = [ + k + for k in variants[0] + if k not in special_keys + and ( + not loop_only + or any(variant[k] != variants[0][k] for variant in variants[1:]) + ) + ] return loop_vars @@ -642,23 +722,29 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): recipe_lines = recipe_text.splitlines() for v in variant: all_res = [] - compiler_match = re.match(r'(.*?)_compiler(_version)?$', v) + compiler_match = re.match(r"(.*?)_compiler(_version)?$", v) if compiler_match and not selectors_only: compiler_lang = compiler_match.group(1) - compiler_regex = ( - r"\{\s*compiler\([\'\"]%s[\"\'][^\{]*?\}" % re.escape(compiler_lang) + compiler_regex = r"\{\s*compiler\([\'\"]%s[\"\'][^\{]*?\}" % re.escape( + compiler_lang ) all_res.append(compiler_regex) - variant_lines = [line for line in recipe_lines if v in line or compiler_lang in line] + variant_lines = [ + line for line in recipe_lines if v in line or compiler_lang in line + ] else: - variant_lines = [line for line in recipe_lines if v in line.replace('-', '_')] + variant_lines = [ + line for line in recipe_lines if v in 
line.replace("-", "_") + ] if not variant_lines: continue v_regex = re.escape(v) - v_req_regex = '[-_]'.join(map(re.escape, v.split('_'))) + v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" % v_regex - conditional_regex = r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" + conditional_regex = ( + r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" + ) # plain req name, no version spec. Look for end of line after name, or comment or selector requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex if selectors_only: @@ -669,9 +755,9 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): all_res = r"|".join(all_res) if any(re.search(all_res, line) for line in variant_lines): used_variables.add(v) - if v in ('c_compiler', 'cxx_compiler'): - if 'CONDA_BUILD_SYSROOT' in variant: - used_variables.add('CONDA_BUILD_SYSROOT') + if v in ("c_compiler", "cxx_compiler"): + if "CONDA_BUILD_SYSROOT" in variant: + used_variables.add("CONDA_BUILD_SYSROOT") return used_variables diff --git a/conda_build/version.py b/conda_build/version.py index 880d0d2c8a..15d88478dc 100644 --- a/conda_build/version.py +++ b/conda_build/version.py @@ -31,7 +31,8 @@ """ import re from typing import Iterator, List, Tuple, Union -from packaging.version import _BaseVersion, Version, InvalidVersion + +from packaging.version import InvalidVersion, Version, _BaseVersion LegacyCmpKey = Tuple[int, Tuple[str, ...]] @@ -49,7 +50,6 @@ def _parse(version: str) -> Union["_LegacyVersion", "Version"]: class _LegacyVersion(_BaseVersion): - def __init__(self, version: str) -> None: self._version = str(version) self._key = _legacy_cmpkey(self._version) @@ -134,7 +134,6 @@ def _parse_version_parts(s: str) -> Iterator[str]: def _legacy_cmpkey(version: str) -> LegacyCmpKey: - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, diff --git a/conda_build/windows.py b/conda_build/windows.py index c365cc60db..ffaad7f5ca 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -2,32 +2,37 @@ # SPDX-License-Identifier: BSD-3-Clause import os import pprint -from os.path import isdir, join, dirname, isfile +from os.path import dirname, isdir, isfile, join # importing setuptools patches distutils so that it knows how to find VC for python 2.7 import setuptools # noqa + # Leverage the hard work done by setuptools/distutils to find vcvarsall using # either the registry or the VS**COMNTOOLS environment variable try: + from distutils.msvc9compiler import WINSDK_BASE, Reg from distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall - from distutils.msvc9compiler import Reg, WINSDK_BASE except: # Allow some imports to work for cross or CONDA_SUBDIR usage. 
pass from conda_build import environ -from conda_build.utils import (check_call_env, path_prepended, - copy_into, get_logger, write_bat_activation_text) -from conda_build.variants import set_language_env_vars, get_default_variant - +from conda_build.utils import ( + check_call_env, + copy_into, + get_logger, + path_prepended, + write_bat_activation_text, +) +from conda_build.variants import get_default_variant, set_language_env_vars VS_VERSION_STRING = { - '8.0': 'Visual Studio 8 2005', - '9.0': 'Visual Studio 9 2008', - '10.0': 'Visual Studio 10 2010', - '11.0': 'Visual Studio 11 2012', - '12.0': 'Visual Studio 12 2013', - '14.0': 'Visual Studio 14 2015' + "8.0": "Visual Studio 8 2005", + "9.0": "Visual Studio 9 2008", + "10.0": "Visual Studio 10 2010", + "11.0": "Visual Studio 11 2012", + "12.0": "Visual Studio 12 2013", + "14.0": "Visual Studio 14 2015", } @@ -40,23 +45,27 @@ def fix_staged_scripts(scripts_dir, config): return for fn in os.listdir(scripts_dir): # process all the extensionless files - if not isfile(join(scripts_dir, fn)) or '.' in fn: + if not isfile(join(scripts_dir, fn)) or "." in fn: continue # read as binary file to ensure we don't run into encoding errors, see #1632 - with open(join(scripts_dir, fn), 'rb') as f: + with open(join(scripts_dir, fn), "rb") as f: line = f.readline() # If it's a #!python script - if not (line.startswith(b'#!') and b'python' in line.lower()): + if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print('Adjusting unix-style #! script %s, ' - 'and adding a .bat file for it' % fn) + print( + "Adjusting unix-style #! script %s, " + "and adding a .bat file for it" % fn + ) # copy it with a .py extension (skipping that first #! line) - with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo: + with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file - copy_into(join(dirname(__file__), 'cli-%s.exe' % config.host_arch), - join(scripts_dir, fn + '.exe')) + copy_into( + join(dirname(__file__), "cli-%s.exe" % config.host_arch), + join(scripts_dir, fn + ".exe"), + ) # remove the original script os.remove(join(scripts_dir, fn)) @@ -69,21 +78,24 @@ def build_vcvarsall_vs_path(version): Expected versions are of the form {9.0, 10.0, 12.0, 14.0} """ # Set up a load of paths that can be imported from the tests - if 'ProgramFiles(x86)' in os.environ: - PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)'] + if "ProgramFiles(x86)" in os.environ: + PROGRAM_FILES_PATH = os.environ["ProgramFiles(x86)"] else: - PROGRAM_FILES_PATH = os.environ['ProgramFiles'] + PROGRAM_FILES_PATH = os.environ["ProgramFiles"] - flatversion = str(version).replace('.', '') + flatversion = str(version).replace(".", "") vstools = f"VS{flatversion}COMNTOOLS" if vstools in os.environ: - return os.path.join(os.environ[vstools], '..\\..\\VC\\vcvarsall.bat') + return os.path.join(os.environ[vstools], "..\\..\\VC\\vcvarsall.bat") else: # prefer looking at env var; fall back to program files defaults - return os.path.join(PROGRAM_FILES_PATH, - f'Microsoft Visual Studio {version}', 'VC', - 'vcvarsall.bat') + return os.path.join( + PROGRAM_FILES_PATH, + f"Microsoft Visual Studio {version}", + "VC", + "vcvarsall.bat", + ) def msvc_env_cmd(bits, config, override=None): @@ -91,16 +103,20 @@ def msvc_env_cmd(bits, config, override=None): # there's clear user demand, it's not clear that we should invest the # effort into updating a known deprecated function for a new platform. 
log = get_logger(__name__) - log.warn("Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. " - "If this recipe does not use a compiler, this message is safe to ignore. " - "Otherwise, use {{compiler('')}} jinja2 in requirements/build.") + log.warn( + "Using legacy MSVC compiler setup. This will be removed in conda-build 4.0. " + "If this recipe does not use a compiler, this message is safe to ignore. " + "Otherwise, use {{compiler('')}} jinja2 in requirements/build." + ) if override: - log.warn("msvc_compiler key in meta.yaml is deprecated. Use the new" - "variant-powered compiler configuration instead. Note that msvc_compiler" - "is incompatible with the new {{{{compiler('c')}}}} jinja scheme.") + log.warn( + "msvc_compiler key in meta.yaml is deprecated. Use the new" + "variant-powered compiler configuration instead. Note that msvc_compiler" + "is incompatible with the new {{{{compiler('c')}}}} jinja scheme." + ) # this has been an int at times. Make sure it's a string for consistency. bits = str(bits) - arch_selector = 'x86' if bits == '32' else 'amd64' + arch_selector = "x86" if bits == "32" else "amd64" msvc_env_lines = [] @@ -113,25 +129,28 @@ def msvc_env_cmd(bits, config, override=None): # For > 3.5 it literally just skips the validation logic. # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py # for more information. - msvc_env_lines.append('set DISTUTILS_USE_SDK=1') + msvc_env_lines.append("set DISTUTILS_USE_SDK=1") # This is also required to hit the 'don't validate' logic on < 3.5. # For > 3.5 this is ignored. - msvc_env_lines.append('set MSSdk=1') + msvc_env_lines.append("set MSSdk=1") if not version: - py_ver = config.variant.get('python', get_default_variant(config)['python']) + py_ver = config.variant.get("python", get_default_variant(config)["python"]) if int(py_ver[0]) >= 3: - if int(py_ver.split('.')[1]) < 5: - version = '10.0' - version = '14.0' + if int(py_ver.split(".")[1]) < 5: + version = "10.0" + version = "14.0" else: - version = '9.0' + version = "9.0" if float(version) >= 14.0: # For Python 3.5+, ensure that we link with the dynamic runtime. See # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info - msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{}.dll'.format( - version.replace('.', ''))) + msvc_env_lines.append( + "set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{}.dll".format( + version.replace(".", "") + ) + ) vcvarsall_vs_path = build_vcvarsall_vs_path(version) @@ -139,7 +158,7 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # Default argument `arch_selector` is defined above return f'call "{cmd}" {arch}' - vs_major = version.split('.')[0] + vs_major = version.split(".")[0] msvc_env_lines.append(f'set "VS_VERSION={version}"') msvc_env_lines.append(f'set "VS_MAJOR={vs_major}"') msvc_env_lines.append(f'set "VS_YEAR={VS_VERSION_STRING[version][-4:]}"') @@ -147,19 +166,23 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # No Win64 for VS 2019. 
msvc_env_lines.append(f'set "CMAKE_GENERATOR={VS_VERSION_STRING[version]}"') else: - msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] + - {'64': ' Win64', '32': ''}[bits])) + msvc_env_lines.append( + 'set "CMAKE_GENERATOR={}"'.format( + VS_VERSION_STRING[version] + {"64": " Win64", "32": ""}[bits] + ) + ) # tell msys2 to ignore path conversions for issue-causing windows-style flags in build # See https://github.com/conda-forge/icu-feedstock/pull/5 msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"') msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"') - if version == '10.0': + if version == "10.0": try: - WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'), - 'installationfolder') - WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd') + WIN_SDK_71_PATH = Reg.get_value( + os.path.join(WINSDK_BASE, "v7.1"), "installationfolder" + ) + WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, "Bin", "SetEnv.cmd") - win_sdk_arch = '/Release /x86' if bits == '32' else '/Release /x64' + win_sdk_arch = "/Release /x86" if bits == "32" else "/Release /x64" win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch) # There are two methods of building Python 3.3 and 3.4 extensions (both @@ -173,27 +196,31 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # up the environment does **not EXIT 1** and therefore we must fall # back to attempting to set up VS2010. # DelayedExpansion is required for the SetEnv.cmd - msvc_env_lines.append('Setlocal EnableDelayedExpansion') + msvc_env_lines.append("Setlocal EnableDelayedExpansion") msvc_env_lines.append(win_sdk_cmd) # If the WindowsSDKDir environment variable has not been successfully # set then try activating VS2010 - msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format( - WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path))) + msvc_env_lines.append( + 'if not "%WindowsSDKDir%" == "{}" ( {} )'.format( + WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path) + ) + ) # sdk is not installed. Fall back to only trying VS 2010 except KeyError: msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) - elif version == '9.0': + elif version == "9.0": # Get the Visual Studio 2008 path (not the Visual C++ for Python path) # and get the 'vcvars64.bat' from inside the bin (in the directory above # that returned by distutils_find_vcvarsall) try: - VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)), - 'bin', 'vcvars64.bat') + VCVARS64_VS9_BAT_PATH = os.path.join( + os.path.dirname(distutils_find_vcvarsall(9)), "bin", "vcvars64.bat" + ) # there's an exception if VS or the VC compiler for python are not actually installed. except (KeyError, TypeError): VCVARS64_VS9_BAT_PATH = None - error1 = 'IF %ERRORLEVEL% NEQ 0 {}' + error1 = "IF %ERRORLEVEL% NEQ 0 {}" # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7 msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) @@ -201,49 +228,57 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # the amd64 build files, so we call the vcvars64.bat manually, # rather than using the vcvarsall.bat which would try and call the # missing bat file. 
- if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH: - msvc_env_lines.append(error1.format( - build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH))) + if arch_selector == "amd64" and VCVARS64_VS9_BAT_PATH: + msvc_env_lines.append( + error1.format(build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)) + ) # Otherwise, fall back to icrosoft Visual C++ Compiler for Python 2.7+ # by using the logic provided by setuptools - msvc_env_lines.append(error1.format( - build_vcvarsall_cmd(distutils_find_vcvarsall(9)))) + msvc_env_lines.append( + error1.format(build_vcvarsall_cmd(distutils_find_vcvarsall(9))) + ) else: # Visual Studio 14 or otherwise msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) - return '\n'.join(msvc_env_lines) + '\n' + return "\n".join(msvc_env_lines) + "\n" def write_build_scripts(m, env, bld_bat): - env_script = join(m.config.work_dir, 'build_env_setup.bat') + env_script = join(m.config.work_dir, "build_env_setup.bat") if m.noarch == "python": env["PYTHONDONTWRITEBYTECODE"] = True import codecs - with codecs.getwriter('utf-8')(open(env_script, 'wb')) as fo: + + with codecs.getwriter("utf-8")(open(env_script, "wb")) as fo: # more debuggable with echo on - fo.write('@echo on\n') + fo.write("@echo on\n") for key, value in env.items(): - if value != '' and value is not None: + if value != "" and value is not None: fo.write(f'set "{key}={value}"\n') if not m.uses_new_style_compiler_activation: - fo.write(msvc_env_cmd(bits=m.config.host_arch, config=m.config, - override=m.get_value('build/msvc_compiler', None))) + fo.write( + msvc_env_cmd( + bits=m.config.host_arch, + config=m.config, + override=m.get_value("build/msvc_compiler", None), + ) + ) # Reset echo on, because MSVC scripts might have turned it off - fo.write('@echo on\n') + fo.write("@echo on\n") fo.write('set "INCLUDE={};%INCLUDE%"\n'.format(env["LIBRARY_INC"])) fo.write('set "LIB={};%LIB%"\n'.format(env["LIBRARY_LIB"])) - if m.config.activate and m.name() != 'conda': + if m.config.activate and m.name() != "conda": write_bat_activation_text(fo, m) # bld_bat may have been generated elsewhere with contents of build/script - work_script = join(m.config.work_dir, 'conda_build.bat') + work_script = join(m.config.work_dir, "conda_build.bat") if os.path.isfile(bld_bat): with open(bld_bat) as fi: data = fi.read() - with codecs.getwriter('utf-8')(open(work_script, 'wb')) as fo: + with codecs.getwriter("utf-8")(open(work_script, "wb")) as fo: fo.write('IF "%CONDA_BUILD%" == "" (\n') fo.write(f" call {env_script}\n") - fo.write(')\n') + fo.write(")\n") fo.write("REM ===== end generated header =====\n") fo.write(data) return work_script, env_script @@ -264,7 +299,7 @@ def build(m, bld_bat, stats, provision_only=False): # Note that pip env "NO" variables are inverted logic. # PIP_NO_BUILD_ISOLATION=False means don't use build isolation. # - env["PIP_NO_BUILD_ISOLATION"] = 'False' + env["PIP_NO_BUILD_ISOLATION"] = "False" # some other env vars to have pip ignore dependencies. # we supply them ourselves instead. # See note above about inverted logic on "NO" variables @@ -275,7 +310,7 @@ def build(m, bld_bat, stats, provision_only=False): # disabled as this results in .egg-info rather than # .dist-info directories being created, see gh-3094 # set PIP_CACHE_DIR to a path in the work dir that does not exist. - env['PIP_CACHE_DIR'] = m.config.pip_cache_dir + env["PIP_CACHE_DIR"] = m.config.pip_cache_dir # tell pip to not get anything from PyPI, please. We have everything we need # locally, and if we don't, it's a problem. 
@@ -284,23 +319,24 @@ def build(m, bld_bat, stats, provision_only=False): # set variables like CONDA_PY in the test environment env.update(set_language_env_vars(m.config.variant)) - for name in 'BIN', 'INC', 'LIB': - path = env['LIBRARY_' + name] + for name in "BIN", "INC", "LIB": + path = env["LIBRARY_" + name] if not isdir(path): os.makedirs(path) work_script, env_script = write_build_scripts(m, env, bld_bat) if not provision_only and os.path.isfile(work_script): - cmd = ['cmd.exe', '/d', '/c', os.path.basename(work_script)] + cmd = ["cmd.exe", "/d", "/c", os.path.basename(work_script)] # rewrite long paths in stdout back to their env variables if m.config.debug or m.config.no_rewrite_stdout_env: rewrite_env = None else: rewrite_env = { - k: env[k] - for k in ['PREFIX', 'BUILD_PREFIX', 'SRC_DIR'] if k in env + k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) - check_call_env(cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env) - fix_staged_scripts(join(m.config.host_prefix, 'Scripts'), config=m.config) + check_call_env( + cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env + ) + fix_staged_scripts(join(m.config.host_prefix, "Scripts"), config=m.config) diff --git a/docs/scrape_help.py b/docs/scrape_help.py index ac7ffe0e45..2f99fbb403 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -1,21 +1,20 @@ #!/usr/bin/env python # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from subprocess import check_output, PIPE, Popen, STDOUT -from os.path import join, dirname, abspath, isdir -from os import makedirs, pathsep -from collections import OrderedDict -from shlex import quote -from concurrent.futures import ThreadPoolExecutor - -import sys import json import re +import sys +from collections import OrderedDict +from concurrent.futures import ThreadPoolExecutor +from os import makedirs, pathsep +from os.path import abspath, dirname, isdir, join +from shlex import quote +from subprocess import PIPE, STDOUT, Popen, check_output -manpath = join(dirname(__file__), 'build', 'man') +manpath = join(dirname(__file__), "build", "man") if not isdir(manpath): makedirs(manpath) -rstpath = join(dirname(__file__), 'source', 'commands') +rstpath = join(dirname(__file__), "source", "commands") if not isdir(rstpath): makedirs(rstpath) @@ -31,7 +30,7 @@ def run_command(*args, **kwargs): - include_stderr = kwargs.pop('include_stderr', False) + include_stderr = kwargs.pop("include_stderr", False) if include_stderr: stderr_pipe = STDOUT else: @@ -39,29 +38,34 @@ def run_command(*args, **kwargs): p = Popen(*args, stdout=PIPE, stderr=stderr_pipe, **kwargs) out, err = p.communicate() if err is None: - err = b'' - out, err = out.decode('utf-8'), err.decode('utf-8') + err = b"" + out, err = out.decode("utf-8"), err.decode("utf-8") if p.returncode != 0: - print("{!r} failed with error code {}".format(' '.join(map(quote, args[0])), p.returncode), file=sys.stderr) + print( + "{!r} failed with error code {}".format( + " ".join(map(quote, args[0])), p.returncode + ), + file=sys.stderr, + ) elif err: - print("{!r} gave stderr output: {}".format(' '.join(*args), err)) + print("{!r} gave stderr output: {}".format(" ".join(*args), err)) return out def str_check_output(*args, **kwargs): - return check_output(*args, **kwargs).decode('utf-8') + return check_output(*args, **kwargs).decode("utf-8") def conda_help(cache=[]): if cache: return cache[0] - 
cache.append(str_check_output(['conda', '--help'])) + cache.append(str_check_output(["conda", "--help"])) return cache[0] def conda_command_help(command): - return str_check_output(['conda'] + command.split() + ['--help']) + return str_check_output(["conda"] + command.split() + ["--help"]) def conda_commands(): @@ -71,14 +75,14 @@ def conda_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'command': + if line.strip() == "command": start = True continue if start: # The end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) return commands @@ -90,19 +94,19 @@ def external_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'other commands:': + if line.strip() == "other commands:": start = True continue if start: # The end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) # TODO: Parallelize this print("Getting list of external subcommands") - subcommands_re = re.compile(r'\s*\{(.*)\}\s*') + subcommands_re = re.compile(r"\s*\{(.*)\}\s*") # Check for subcommands (like conda skeleton pypi) command_help = {} @@ -124,8 +128,7 @@ def get_help(command): if start: m = subcommands_re.match(line) if m: - commands.extend([f'{command} {i}' for i in - m.group(1).split(',')]) + commands.extend([f"{command} {i}" for i in m.group(1).split(",")]) break return commands @@ -133,38 +136,45 @@ def get_help(command): def man_replacements(): # XXX: We should use conda-api for this, but it's currently annoying to set the # root prefix with. - info = json.loads(str_check_output(['conda', 'info', '--json'])) + info = json.loads(str_check_output(["conda", "info", "--json"])) # We need to use an ordered dict because the root prefix should be # replaced last, since it is typically a substring of the default prefix - r = OrderedDict([ - (info['default_prefix'], 'default prefix'), - (pathsep.join(info['envs_dirs']), 'envs dirs'), - # For whatever reason help2man won't italicize these on its own - # Note these require conda > 3.7.1 - (info['user_rc_path'], r'\fI\,user .condarc path\/\fP'), - (info['sys_rc_path'], r'\fI\,system .condarc path\/\fP'), - - (info['root_prefix'], r'root prefix'), - ]) + r = OrderedDict( + [ + (info["default_prefix"], "default prefix"), + (pathsep.join(info["envs_dirs"]), "envs dirs"), + # For whatever reason help2man won't italicize these on its own + # Note these require conda > 3.7.1 + (info["user_rc_path"], r"\fI\,user .condarc path\/\fP"), + (info["sys_rc_path"], r"\fI\,system .condarc path\/\fP"), + (info["root_prefix"], r"root prefix"), + ] + ) return r def generate_man(command): - conda_version = run_command(['conda', '--version'], include_stderr=True) + conda_version = run_command(["conda", "--version"], include_stderr=True) - manpage = '' + manpage = "" retries = 5 while not manpage and retries: - manpage = run_command([ - 'help2man', - '--name', 'conda %s' % command, - '--section', '1', - '--source', 'Anaconda, Inc.', - '--version-string', conda_version, - '--no-info', - 'conda %s' % command, - ]) + manpage = run_command( + [ + "help2man", + "--name", + "conda %s" % command, + "--section", + "1", + "--source", + "Anaconda, Inc.", + "--version-string", + conda_version, + "--no-info", + "conda %s" % command, + ] + ) retries -= 1 if not manpage: @@ -173,17 +183,19 @@ def generate_man(command): replacements = man_replacements() for text in 
replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, 'conda-%s.1' % command.replace(' ', '-')), 'w') as f: + with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: f.write(manpage) print("Generated manpage for conda %s" % command) def generate_html(command): - command_file = command.replace(' ', '-') + command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen(["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE) + man = Popen( + ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE + ) htmlpage = check_output( [ "man2html", @@ -195,16 +207,17 @@ def generate_html(command): "-botm", "0", # No bottom margin ], - stdin=man.stdout) + stdin=man.stdout, + ) - with open(join(manpath, 'conda-%s.html' % command_file), 'wb') as f: + with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: f.write(htmlpage) print("Generated html for conda %s" % command) def write_rst(command, sep=None): - command_file = command.replace(' ', '-') - with open(join(manpath, 'conda-%s.html' % command_file)) as f: + command_file = command.replace(" ", "-") + with open(join(manpath, "conda-%s.html" % command_file)) as f: html = f.read() rp = rstpath @@ -212,12 +225,12 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, 'conda-%s.rst' % command_file), 'w') as f: + with open(join(rp, "conda-%s.rst" % command_file), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): - f.write(' ') + f.write(" ") f.write(line) - f.write('\n') + f.write("\n") print("Generated rst for conda %s" % command) @@ -227,21 +240,21 @@ def main(): # let's just hard-code this for now # build_commands = () build_commands = [ - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'inspect channels', - 'inspect linkages', - 'inspect objects', - 'metapackage', - 'render', - 'skeleton', - 'skeleton cpan', - 'skeleton cran', - 'skeleton luarocks', - 'skeleton pypi', + "build", + "convert", + "develop", + "index", + "inspect", + "inspect channels", + "inspect linkages", + "inspect objects", + "metapackage", + "render", + "skeleton", + "skeleton cpan", + "skeleton cran", + "skeleton luarocks", + "skeleton pypi", ] commands = sys.argv[1:] or core_commands + build_commands @@ -258,5 +271,5 @@ def gen_command(command): write_rst(command) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/recipe/run_test.py b/recipe/run_test.py index 42a35beb73..d9a40df806 100644 --- a/recipe/run_test.py +++ b/recipe/run_test.py @@ -2,4 +2,4 @@ # SPDX-License-Identifier: BSD-3-Clause import conda_build -print('conda_build.__version__: %s' % conda_build.__version__) +print("conda_build.__version__: %s" % conda_build.__version__) diff --git a/recipe/test_bdist_conda_setup.py b/recipe/test_bdist_conda_setup.py index 3f26e18b06..c7b3d34abf 100644 --- a/recipe/test_bdist_conda_setup.py +++ b/recipe/test_bdist_conda_setup.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from setuptools import setup + import conda_build.bdist_conda setup( diff --git a/setup.py b/setup.py index a634b5b7d2..19370a3001 100755 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from pathlib import Path + from setuptools import setup import versioneer @@ -53,7 +54,7 @@ "Programming Language :: Python :: 3", 
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10" + "Programming Language :: Python :: 3.10", ], python_requires=">=3.8", description="tools for building conda packages", diff --git a/tests/bdist-recipe/bin/test-script-setup.py b/tests/bdist-recipe/bin/test-script-setup.py index f125947783..c515fb849e 100644 --- a/tests/bdist-recipe/bin/test-script-setup.py +++ b/tests/bdist-recipe/bin/test-script-setup.py @@ -2,10 +2,12 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import conda_build_test + conda_build_test print("Test script setup.py") if __name__ == "__main__": from conda_build_test import manual_entry + manual_entry.main() diff --git a/tests/bdist-recipe/conda_build_test/manual_entry.py b/tests/bdist-recipe/conda_build_test/manual_entry.py index fb15342cd3..1a63c8a8e9 100644 --- a/tests/bdist-recipe/conda_build_test/manual_entry.py +++ b/tests/bdist-recipe/conda_build_test/manual_entry.py @@ -4,9 +4,7 @@ def main(): import argparse # Just picks them up from `sys.argv`. - parser = argparse.ArgumentParser( - description="Basic parser." - ) + parser = argparse.ArgumentParser(description="Basic parser.") parser.parse_args() print("Manual entry point") diff --git a/tests/bdist-recipe/setup.py b/tests/bdist-recipe/setup.py index d9aac68996..74982e5865 100644 --- a/tests/bdist-recipe/setup.py +++ b/tests/bdist-recipe/setup.py @@ -1,11 +1,12 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from setuptools import setup + import conda_build.bdist_conda setup( name="conda-build-test-project", - version='1.0', + version="1.0", distclass=conda_build.bdist_conda.CondaDistribution, conda_buildnum=1, conda_features=[], @@ -23,8 +24,8 @@ "Programming Language :: Python :: 3.4", ], description="test package for testing conda-build", - packages=['conda_build_test'], + packages=["conda_build_test"], scripts=[ - 'bin/test-script-setup.py', + "bin/test-script-setup.py", ], ) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index f35fdaa4de..3f91d42d8c 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -6,15 +6,19 @@ import pytest -from conda_build.conda_interface import cc_conda_build, context, reset_context - import conda_build from conda_build import api from conda_build.cli import main_build, main_render +from conda_build.conda_interface import ( + TemporaryDirectory, + cc_conda_build, + context, + reset_context, +) from conda_build.config import Config, zstd_compression_level_default -from conda_build.utils import on_win, get_build_folders, package_has_file -from conda_build.conda_interface import TemporaryDirectory from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.utils import get_build_folders, on_win, package_has_file + from ..utils import metadata_dir diff --git a/tests/cli/test_main_debug.py b/tests/cli/test_main_debug.py index 056b2723b6..22011bedf1 100644 --- a/tests/cli/test_main_debug.py +++ b/tests/cli/test_main_debug.py @@ -1,13 +1,14 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from pathlib import Path import sys +from pathlib import Path from unittest import mock import pytest from pytest import CaptureFixture, MonkeyPatch -from conda_build.cli import main_debug as debug, validators as valid +from conda_build.cli import main_debug as debug +from conda_build.cli import validators as valid def test_main_debug_help_message(capsys: 
CaptureFixture, monkeypatch: MonkeyPatch): @@ -47,6 +48,6 @@ def test_main_debug_happy_path( debug.main() captured = capsys.readouterr() - assert captured.err == '' + assert captured.err == "" assert len(mock_debug.mock_calls) == 2 diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py index 9e25986609..94ede317dc 100644 --- a/tests/cli/test_main_inspect.py +++ b/tests/cli/test_main_inspect.py @@ -3,13 +3,14 @@ import os import re import sys -import yaml import pytest +import yaml from conda_build import api from conda_build.cli import main_inspect from conda_build.utils import on_win + from ..utils import metadata_dir diff --git a/tests/cli/test_main_metapackage.py b/tests/cli/test_main_metapackage.py index e56d2b7452..19312ae539 100644 --- a/tests/cli/test_main_metapackage.py +++ b/tests/cli/test_main_metapackage.py @@ -1,9 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from glob import glob import json import os import sys +from glob import glob from conda_build.cli import main_metapackage from conda_build.utils import package_has_file diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 33e0345c6d..6c946c68ba 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -2,13 +2,14 @@ # SPDX-License-Identifier: BSD-3-Clause import os import sys -import yaml import pytest +import yaml from conda_build import api from conda_build.cli import main_render from conda_build.conda_interface import TemporaryDirectory + from ..utils import metadata_dir diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py index 807e42b763..64eb300878 100644 --- a/tests/cli/test_main_skeleton.py +++ b/tests/cli/test_main_skeleton.py @@ -5,7 +5,6 @@ import pytest from conda_build import api - from conda_build.cli import main_build, main_skeleton diff --git a/tests/cli/test_validators.py b/tests/cli/test_validators.py index 7de6c87d56..5048494c46 100644 --- a/tests/cli/test_validators.py +++ b/tests/cli/test_validators.py @@ -10,14 +10,24 @@ @pytest.mark.parametrize( - 'file_or_folder,expected,is_dir,create', + "file_or_folder,expected,is_dir,create", [ # Happy path cases - ('aws-c-common-0.4.57-hb1e8313_1.tar.bz2', 'aws-c-common-0.4.57-hb1e8313_1.tar.bz2', False, True), - ('aws-c-common-0.4.57-hb1e8313_1.conda', 'aws-c-common-0.4.57-hb1e8313_1.conda', False, True), - ('somedir', 'somedir', True, True), + ( + "aws-c-common-0.4.57-hb1e8313_1.tar.bz2", + "aws-c-common-0.4.57-hb1e8313_1.tar.bz2", + False, + True, + ), + ( + "aws-c-common-0.4.57-hb1e8313_1.conda", + "aws-c-common-0.4.57-hb1e8313_1.conda", + False, + True, + ), + ("somedir", "somedir", True, True), # Error case (i.e. 
the file or directory does not exist - ('aws-c-common-0.4.57-hb1e8313_1.conda', False, False, False), + ("aws-c-common-0.4.57-hb1e8313_1.conda", False, False, False), ], ) def test_validate_is_conda_pkg_or_recipe_dir( @@ -34,7 +44,10 @@ def test_validate_is_conda_pkg_or_recipe_dir( try: received = valid.validate_is_conda_pkg_or_recipe_dir(file_or_folder) - except (ArgumentError, SystemExit): # if we get these errors, we know it's not valid + except ( + ArgumentError, + SystemExit, + ): # if we get these errors, we know it's not valid received = False assert received == expected diff --git a/tests/conftest.py b/tests/conftest.py index 11f1603311..80ac8d4ba4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,18 +1,17 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from collections import defaultdict -from pathlib import Path import os import subprocess import sys +from collections import defaultdict +from pathlib import Path import pytest - from conda.common.compat import on_mac + import conda_build.config from conda_build.config import ( Config, - get_or_merge_config, _src_cache_root_default, conda_pkg_format_default, enable_static_default, @@ -20,6 +19,7 @@ error_overlinking_default, exit_on_verify_error_default, filename_hashing_default, + get_or_merge_config, ignore_verify_codes_default, no_rewrite_stdout_env_default, noarch_python_build_age_default, diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 3896b08511..80fd24dc6c 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -3,46 +3,63 @@ """ This module tests the build API. These are high-level integration tests. """ -from collections import OrderedDict -from glob import glob +import json import logging import os -from pathlib import Path import re import subprocess import sys -import json +import tarfile import uuid +from collections import OrderedDict +from glob import glob +from pathlib import Path # for version import conda - -from conda_build.conda_interface import url_path, LinkError, CondaError, cc_conda_build - -import conda_build - -from binstar_client.commands import remove, show -from binstar_client.errors import NotFound import pytest import yaml -import tarfile +from binstar_client.commands import remove, show +from binstar_client.errors import NotFound +from conda.exceptions import ClobberError, CondaMultiError -from conda_build import api, exceptions, __version__ -from conda_build.render import finalize_metadata -from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, - package_has_file, check_output_env, get_conda_operation_locks, rm_rf, - prepend_bin_path, walk, env_var, FileNotFoundError) +import conda_build +from conda_build import __version__, api, exceptions +from conda_build.conda_interface import ( + CondaError, + LinkError, + cc_conda_build, + reset_context, + url_path, +) +from conda_build.exceptions import ( + CondaBuildException, + DependencyNeedsBuildingError, + OverDependingError, + OverLinkingError, +) from conda_build.os_utils.external import find_executable -from conda_build.exceptions import (DependencyNeedsBuildingError, CondaBuildException, - OverLinkingError, OverDependingError) -from conda_build.conda_interface import reset_context -from conda.exceptions import ClobberError, CondaMultiError +from conda_build.render import finalize_metadata +from conda_build.utils import ( + FileNotFoundError, + check_call_env, + check_output_env, + convert_path_for_cygwin_or_msys2, + copy_into, + 
env_var, + get_conda_operation_locks, + on_win, + package_has_file, + prepend_bin_path, + rm_rf, + walk, +) from .utils import ( + add_mangling, + fail_dir, get_valid_recipes, metadata_dir, - fail_dir, - add_mangling, metadata_path, ) @@ -56,15 +73,18 @@ def represent_ordereddict(dumper, data): value.append((node_key, node_value)) - return yaml.nodes.MappingNode('tag:yaml.org,2002:map', value) + return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) yaml.add_representer(OrderedDict, represent_ordereddict) class AnacondaClientArgs: - def __init__(self, specs, token=None, site=None, log_level=logging.INFO, force=False): + def __init__( + self, specs, token=None, site=None, log_level=logging.INFO, force=False + ): from binstar_client.utils import parse_specs + self.specs = [parse_specs(specs)] self.spec = self.specs[0] self.token = token @@ -110,10 +130,12 @@ def test_recipe_builds( @pytest.mark.serial -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. @@ -126,10 +148,12 @@ def test_ignore_prefix_files(testing_config, monkeypatch): @pytest.mark.serial -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. 
@@ -148,14 +172,18 @@ def test_token_upload(testing_metadata): folder_uuid = uuid.uuid4().hex # generated with conda_test_account user, command: # anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda' - args = AnacondaClientArgs(specs="conda_build_test/test_token_upload_" + folder_uuid, - token="co-143399b8-276e-48db-b43f-4a3de839a024", - force=True) + args = AnacondaClientArgs( + specs="conda_build_test/test_token_upload_" + folder_uuid, + token="co-143399b8-276e-48db-b43f-4a3de839a024", + force=True, + ) with pytest.raises(NotFound): show.main(args) - testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid]) + testing_metadata.meta["package"]["name"] = "_".join( + [testing_metadata.name(), folder_uuid] + ) testing_metadata.config.token = args.token # the folder with the test recipe to upload @@ -201,8 +229,11 @@ def test_git_describe_info_on_branch(testing_config): m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_path(m)[0] # missing hash because we set custom build string in meta.yaml - test_path = os.path.join(testing_config.croot, testing_config.host_subdir, - "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2") + test_path = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2", + ) assert test_path == output @@ -215,7 +246,7 @@ def test_no_include_recipe_config_arg(testing_metadata): # make sure that it is not there when the command line flag is passed testing_metadata.config.include_recipe = False - testing_metadata.meta['build']['number'] = 2 + testing_metadata.meta["build"]["number"] = 2 # We cannot test packages without recipes as we cannot render them output_file = api.build(testing_metadata, notest=True)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") @@ -228,14 +259,18 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): outputs = api.build(testing_metadata, notest=True) assert package_has_file(outputs[0], "info/recipe/meta.yaml") - output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), - config=testing_config, notest=True)[0] + output_file = api.build( + os.path.join(metadata_dir, "_no_include_recipe"), + config=testing_config, + notest=True, + )[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") with pytest.raises(SystemExit): # we are testing that even with the recipe excluded, we still get the tests in place - output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), - config=testing_config)[0] + output_file = api.build( + os.path.join(metadata_dir, "_no_include_recipe"), config=testing_config + )[0] @pytest.mark.serial @@ -243,7 +278,7 @@ def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): def test_early_abort(testing_config, capfd): """There have been some problems with conda-build dropping out early. 
Make sure we aren't causing them""" - api.build(os.path.join(metadata_dir, '_test_early_abort'), config=testing_config) + api.build(os.path.join(metadata_dir, "_test_early_abort"), config=testing_config) output, error = capfd.readouterr() assert "Hello World" in output @@ -253,40 +288,59 @@ def test_output_build_path_git_source(testing_config): m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_paths(m)[0] _hash = m.hash_dependencies() - test_path = os.path.join(testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash)) + test_path = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( + sys.version_info.major, sys.version_info.minor, _hash + ), + ) assert output == test_path @pytest.mark.sanity @pytest.mark.serial def test_build_with_no_activate_does_not_activate(): - api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False, - anaconda_upload=False) + api.build( + os.path.join(metadata_dir, "_set_env_var_no_activate_build"), + activate=False, + anaconda_upload=False, + ) @pytest.mark.sanity @pytest.mark.serial -@pytest.mark.xfail(on_win and len(os.getenv('PATH')) > 1024, reason="Long PATHs make activation fail with obscure messages") +@pytest.mark.xfail( + on_win and len(os.getenv("PATH")) > 1024, + reason="Long PATHs make activation fail with obscure messages", +) def test_build_with_activate_does_activate(): - api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True, - anaconda_upload=False) + api.build( + os.path.join(metadata_dir, "_set_env_var_activate_build"), + activate=True, + anaconda_upload=False, + ) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform == "win32", - reason="no binary prefix manipulation done on windows.") +@pytest.mark.skipif( + sys.platform == "win32", reason="no binary prefix manipulation done on windows." +) def test_binary_has_prefix_files(testing_config): - api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config) + api.build( + os.path.join(metadata_dir, "_binary_has_prefix_files"), config=testing_config + ) @pytest.mark.xfail @pytest.mark.sanity -@pytest.mark.skipif(sys.platform == "win32", - reason="no binary prefix manipulation done on windows.") +@pytest.mark.skipif( + sys.platform == "win32", reason="no binary prefix manipulation done on windows." +) def test_binary_has_prefix_files_non_utf8(testing_config): - api.build(os.path.join(metadata_dir, '_binary_has_utf_non_8'), config=testing_config) + api.build( + os.path.join(metadata_dir, "_binary_has_utf_non_8"), config=testing_config + ) def test_relative_path_git_versioning( @@ -334,15 +388,21 @@ def dummy_executable(folder, exename): prefix = "@echo off\n" else: prefix = "#!/bin/bash\nexec 1>&2\n" - with open(dummyfile, 'w') as f: - f.write(prefix + """ + with open(dummyfile, "w") as f: + f.write( + prefix + + """ echo ******* You have reached the dummy {}. 
It is likely there is a bug in echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 - """.format(exename)) + """.format( + exename + ) + ) if sys.platform != "win32": import stat + st = os.stat(dummyfile) os.chmod(dummyfile, st.st_mode | stat.S_IEXEC) return exename @@ -352,14 +412,17 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc # "hide" svn by putting a known bad one on PATH exename = dummy_executable(testing_workdir, "svn") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) - FNULL = open(os.devnull, 'w') + FNULL = open(os.devnull, "w") with pytest.raises(subprocess.CalledProcessError): - check_call_env([exename, '--version'], stderr=FNULL) + check_call_env([exename, "--version"], stderr=FNULL) FNULL.close() env = os.environ.copy() env["PATH"] = os.pathsep.join([testing_workdir, env["PATH"]]) testing_config.activate = True - api.build(os.path.join(metadata_dir, '_checkout_tool_as_dependency'), config=testing_config) + api.build( + os.path.join(metadata_dir, "_checkout_tool_as_dependency"), + config=testing_config, + ) platforms = ["64" if sys.maxsize > 2**32 else "32"] @@ -376,36 +439,31 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc @pytest.mark.parametrize("msvc_ver", msvc_vers) def test_build_msvc_compiler(msvc_ver, monkeypatch): # verify that the correct compiler is available - cl_versions = {"9.0": 15, - "10.0": 16, - "11.0": 17, - "12.0": 18, - "14.0": 19} + cl_versions = {"9.0": 15, "10.0": 16, "11.0": 17, "12.0": 18, "14.0": 19} - monkeypatch.setenv('CONDATEST_MSVC_VER', msvc_ver) - monkeypatch.setenv('CL_EXE_VERSION', str(cl_versions[msvc_ver])) + monkeypatch.setenv("CONDATEST_MSVC_VER", msvc_ver) + monkeypatch.setenv("CL_EXE_VERSION", str(cl_versions[msvc_ver])) try: # Always build Python 2.7 - but set MSVC version manually via Jinja template - api.build(os.path.join(metadata_dir, '_build_msvc_compiler'), python="2.7") + api.build(os.path.join(metadata_dir, "_build_msvc_compiler"), python="2.7") except: raise finally: - del os.environ['CONDATEST_MSVC_VER'] - del os.environ['CL_EXE_VERSION'] + del os.environ["CONDATEST_MSVC_VER"] + del os.environ["CL_EXE_VERSION"] @pytest.mark.sanity @pytest.mark.parametrize("platform", platforms) @pytest.mark.parametrize("target_compiler", compilers) def test_cmake_generator(platform, target_compiler, testing_config): - testing_config.variant['python'] = target_compiler + testing_config.variant["python"] = target_compiler testing_config.activate = True - api.build(os.path.join(metadata_dir, '_cmake_generator'), config=testing_config) + api.build(os.path.join(metadata_dir, "_cmake_generator"), config=testing_config) -@pytest.mark.skipif(sys.platform == "win32", - reason="No windows symlinks") +@pytest.mark.skipif(sys.platform == "win32", reason="No windows symlinks") def test_symlink_fail(testing_config): with pytest.raises((SystemExit, FileNotFoundError)): api.build(os.path.join(fail_dir, "symlinks"), config=testing_config) @@ -413,14 +471,18 @@ def test_symlink_fail(testing_config): @pytest.mark.sanity def test_pip_in_meta_yaml_fail(testing_config): - with pytest.raises(ValueError, match='environment.yml'): - api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config) + with pytest.raises(ValueError, match="environment.yml"): + api.build( + os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config + ) @pytest.mark.sanity 
def test_recursive_fail(testing_config): - with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError), - match="recursive-build2"): + with pytest.raises( + (RuntimeError, exceptions.DependencyNeedsBuildingError), + match="recursive-build2", + ): api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config) # indentation critical here. If you indent this, and the exception is not raised, then # the exc variable here isn't really completely created and shows really strange errors: @@ -430,7 +492,9 @@ def test_recursive_fail(testing_config): @pytest.mark.sanity def test_jinja_typo(testing_config): with pytest.raises(SystemExit, match="GIT_DSECRIBE_TAG"): - api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config) + api.build( + os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config + ) @pytest.mark.sanity @@ -450,7 +514,7 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): outputs = api.build(testing_metadata) # Copy our package into some new folder - output_dir = os.path.join(testing_workdir, 'someoutput') + output_dir = os.path.join(testing_workdir, "someoutput") platform = os.path.join(output_dir, testing_metadata.config.host_subdir) os.makedirs(platform) copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0]))) @@ -470,7 +534,9 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): def test_failed_tests_exit_build(testing_config): """https://github.com/conda/conda-build/issues/1112""" with pytest.raises(SystemExit, match="TESTS FAILED"): - api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config) + api.build( + os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config + ) @pytest.mark.sanity @@ -484,8 +550,10 @@ def test_requirements_txt_for_run_reqs(testing_config): This test attempts to reproduce those conditions: a channel other than defaults with this requirements.txt """ - testing_config.channel_urls = ('conda_build_test', ) - api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config) + testing_config.channel_urls = ("conda_build_test",) + api.build( + os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config + ) @pytest.mark.skipif( @@ -493,9 +561,11 @@ def test_requirements_txt_for_run_reqs(testing_config): reason="Python 3.10+, py_compile terminates once it finds an invalid file", ) def test_compileall_compiles_all_good_files(testing_config): - output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] - good_files = ['f1.py', 'f3.py'] - bad_file = 'f2_bad.py' + output = api.build( + os.path.join(metadata_dir, "_compile-test"), config=testing_config + )[0] + good_files = ["f1.py", "f3.py"] + bad_file = "f2_bad.py" for f in good_files: assert package_has_file(output, f) # look for the compiled file also @@ -505,11 +575,13 @@ def test_compileall_compiles_all_good_files(testing_config): @pytest.mark.sanity -@pytest.mark.skipif(not on_win, reason="only Windows is insane enough to have backslashes in paths") +@pytest.mark.skipif( + not on_win, reason="only Windows is insane enough to have backslashes in paths" +) def test_backslash_in_always_include_files_path(): - api.build(os.path.join(metadata_dir, '_backslash_in_include_files')) + api.build(os.path.join(metadata_dir, "_backslash_in_include_files")) with pytest.raises(RuntimeError): - api.build(os.path.join(fail_dir, 'backslash_in_include_files')) + 
api.build(os.path.join(fail_dir, "backslash_in_include_files")) @pytest.mark.sanity @@ -519,22 +591,24 @@ def test_build_metadata_object(testing_metadata): @pytest.mark.serial def test_numpy_setup_py_data(testing_config): - recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') + recipe_path = os.path.join(metadata_dir, "_numpy_setup_py_data") # this shows an error that is OK to ignore: # (Is this Error still relevant) # PackagesNotFoundError: The following packages are missing from the target environment: # - cython - subprocess.call('conda remove -y cython'.split()) + subprocess.call("conda remove -y cython".split()) with pytest.raises(CondaBuildException) as exc_info: api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] assert exc_info.match("Cython") subprocess.check_call(["conda", "install", "-y", "cython"]) m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] _hash = m.hash_dependencies() - assert os.path.basename(api.get_output_file_path(m)[0]) == \ - "load_setup_py_test-0.1.0-np116py{}{}{}_0.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash) + assert os.path.basename( + api.get_output_file_path(m)[0] + ) == "load_setup_py_test-0.1.0-np116py{}{}{}_0.tar.bz2".format( + sys.version_info.major, sys.version_info.minor, _hash + ) @pytest.mark.slow @@ -552,76 +626,84 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke functions are using tools from the build env. """ - toplevel = os.path.join(testing_workdir, 'toplevel') + toplevel = os.path.join(testing_workdir, "toplevel") os.mkdir(toplevel) - relative_sub = os.path.join(testing_workdir, 'relative_sub') + relative_sub = os.path.join(testing_workdir, "relative_sub") os.mkdir(relative_sub) - absolute_sub = os.path.join(testing_workdir, 'absolute_sub') + absolute_sub = os.path.join(testing_workdir, "absolute_sub") os.mkdir(absolute_sub) sys_git_env = os.environ.copy() - sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build' - sys_git_env['GIT_AUTHOR_EMAIL'] = 'conda@conda-build.org' - sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build' - sys_git_env['GIT_COMMITTER_EMAIL'] = 'conda@conda-build.org' + sys_git_env["GIT_AUTHOR_NAME"] = "conda-build" + sys_git_env["GIT_AUTHOR_EMAIL"] = "conda@conda-build.org" + sys_git_env["GIT_COMMITTER_NAME"] = "conda-build" + sys_git_env["GIT_COMMITTER_EMAIL"] = "conda@conda-build.org" # Find the git executable before putting our dummy one on PATH. - git = find_executable('git') + git = find_executable("git") # Put the broken git on os.environ["PATH"] - exename = dummy_executable(testing_workdir, 'git') + exename = dummy_executable(testing_workdir, "git") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) # .. and ensure it gets run (and fails). - FNULL = open(os.devnull, 'w') + FNULL = open(os.devnull, "w") # Strangely .. 
# stderr=FNULL suppresses the output from echo on OS X whereas # stdout=FNULL suppresses the output from echo on Windows with pytest.raises(subprocess.CalledProcessError): - check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL) + check_call_env([exename, "--version"], stdout=FNULL, stderr=FNULL) FNULL.close() for tag in range(2): os.chdir(absolute_sub) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('absolute', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("absolute", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'absolute'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'absolute{tag}'], - env=sys_git_env) + check_call_env([git, "add", "absolute"], env=sys_git_env) + check_call_env([git, "commit", "-m", f"absolute{tag}"], env=sys_git_env) os.chdir(relative_sub) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('relative', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("relative", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'relative'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'relative{tag}'], - env=sys_git_env) + check_call_env([git, "add", "relative"], env=sys_git_env) + check_call_env([git, "commit", "-m", f"relative{tag}"], env=sys_git_env) os.chdir(toplevel) if tag == 0: - check_call_env([git, 'init'], env=sys_git_env) - with open('toplevel', 'w') as f: + check_call_env([git, "init"], env=sys_git_env) + with open("toplevel", "w") as f: f.write(str(tag)) - check_call_env([git, 'add', 'toplevel'], env=sys_git_env) - check_call_env([git, 'commit', '-m', f'toplevel{tag}'], - env=sys_git_env) + check_call_env([git, "add", "toplevel"], env=sys_git_env) + check_call_env([git, "commit", "-m", f"toplevel{tag}"], env=sys_git_env) if tag == 0: - check_call_env([git, 'submodule', 'add', - convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'], - env=sys_git_env) - check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'], - env=sys_git_env) + check_call_env( + [ + git, + "submodule", + "add", + convert_path_for_cygwin_or_msys2(git, absolute_sub), + "absolute", + ], + env=sys_git_env, + ) + check_call_env( + [git, "submodule", "add", "../relative_sub", "relative"], + env=sys_git_env, + ) else: # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we # can change this to `git submodule update --recursive`. - gits = git.replace('\\', '/') - check_call_env([git, 'submodule', 'foreach', gits, 'pull'], env=sys_git_env) - check_call_env([git, 'commit', '-am', f'added submodules@{tag}'], - env=sys_git_env) - check_call_env([git, 'tag', '-a', str(tag), '-m', f'tag {tag}'], - env=sys_git_env) + gits = git.replace("\\", "/") + check_call_env([git, "submodule", "foreach", gits, "pull"], env=sys_git_env) + check_call_env( + [git, "commit", "-am", f"added submodules@{tag}"], env=sys_git_env + ) + check_call_env( + [git, "tag", "-a", str(tag), "-m", f"tag {tag}"], env=sys_git_env + ) # It is possible to use `Git for Windows` here too, though you *must* not use a different # (type of) git than the one used above to add the absolute submodule, because .gitmodules @@ -629,43 +711,79 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As # things stand, my _b_env folder for this test contains more than 80 characters. 
- requirements = ('requirements', OrderedDict([ - ('build', - ['git # [False]', - 'm2-git # [win]', - 'm2-filesystem # [win]'])])) - - recipe_dir = os.path.join(testing_workdir, 'recipe') + requirements = ( + "requirements", + OrderedDict( + [ + ( + "build", + [ + "git # [False]", + "m2-git # [win]", + "m2-filesystem # [win]", + ], + ) + ] + ), + ) + + recipe_dir = os.path.join(testing_workdir, "recipe") if not os.path.exists(recipe_dir): os.makedirs(recipe_dir) - filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml') - data = OrderedDict([ - ('package', OrderedDict([ - ('name', 'relative_submodules'), - ('version', '{{ GIT_DESCRIBE_TAG }}')])), - ('source', OrderedDict([ - ('git_url', toplevel), - ('git_tag', str(tag))])), - requirements, - ('build', OrderedDict([ - ('script', - ['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > ' - '%PREFIX%\\summaries.txt # [win]', - 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > ' - '$PREFIX/summaries.txt # [not win]']) - ])), - ('test', OrderedDict([ - ('commands', - ['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]' - .format(tag, tag), - 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]', - 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]' - .format(tag, tag), - 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]']) - ])) - ]) - - with open(filename, 'w') as outfile: + filename = os.path.join(testing_workdir, "recipe", "meta.yaml") + data = OrderedDict( + [ + ( + "package", + OrderedDict( + [ + ("name", "relative_submodules"), + ("version", "{{ GIT_DESCRIBE_TAG }}"), + ] + ), + ), + ("source", OrderedDict([("git_url", toplevel), ("git_tag", str(tag))])), + requirements, + ( + "build", + OrderedDict( + [ + ( + "script", + [ + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > " + "%PREFIX%\\summaries.txt # [win]", + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > " + "$PREFIX/summaries.txt # [not win]", + ], + ) + ] + ), + ), + ( + "test", + OrderedDict( + [ + ( + "commands", + [ + "echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]".format( + tag, tag + ), + "fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]", + "echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]".format( + tag, tag + ), + "diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]", + ], + ) + ] + ), + ), + ] + ) + + with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) # Reset the path because our broken, dummy `git` would cause `render_recipe` # to fail, while no `git` will cause the build_dependencies to be installed. 
@@ -674,44 +792,46 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # build env prepended to os.environ[] metadata = api.render(testing_workdir, config=testing_config)[0][0] output = api.get_output_file_path(metadata, config=testing_config)[0] - assert (f"relative_submodules-{tag}-" in output) + assert f"relative_submodules-{tag}-" in output api.build(metadata, config=testing_config) def test_noarch(testing_workdir): - filename = os.path.join(testing_workdir, 'meta.yaml') + filename = os.path.join(testing_workdir, "meta.yaml") for noarch in (False, True): - data = OrderedDict([ - ('package', OrderedDict([ - ('name', 'test'), - ('version', '0.0.0')])), - ('build', OrderedDict([ - ('noarch', noarch)])) - ]) - with open(filename, 'w') as outfile: + data = OrderedDict( + [ + ("package", OrderedDict([("name", "test"), ("version", "0.0.0")])), + ("build", OrderedDict([("noarch", noarch)])), + ] + ) + with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) output = api.get_output_file_path(testing_workdir)[0] - assert (os.path.sep + "noarch" + os.path.sep in output or not noarch) - assert (os.path.sep + "noarch" + os.path.sep not in output or noarch) + assert os.path.sep + "noarch" + os.path.sep in output or not noarch + assert os.path.sep + "noarch" + os.path.sep not in output or noarch def test_disable_pip(testing_metadata): testing_metadata.config.disable_pip = True - testing_metadata.meta['requirements'] = {'host': ['python'], - 'run': ['python']} - testing_metadata.meta['build']['script'] = 'python -c "import pip; print(pip.__version__)"' + testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} + testing_metadata.meta["build"][ + "script" + ] = 'python -c "import pip; print(pip.__version__)"' with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) - testing_metadata.meta['build']['script'] = ('python -c "import setuptools; ' - 'print(setuptools.__version__)"') + testing_metadata.meta["build"]["script"] = ( + 'python -c "import setuptools; ' 'print(setuptools.__version__)"' + ) with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform.startswith('win'), - reason="rpath fixup not done on Windows.") +@pytest.mark.skipif( + sys.platform.startswith("win"), reason="rpath fixup not done on Windows." 
+) def test_rpath_unix(testing_config, variants_conda_build_sysroot): testing_config.activate = True api.build( @@ -729,23 +849,27 @@ def test_noarch_none_value(testing_config): @pytest.mark.sanity def test_noarch_foo_value(testing_config): - outputs = api.build(os.path.join(metadata_dir, "noarch_generic"), config=testing_config) - metadata = json.loads(package_has_file(outputs[0], 'info/index.json')) - assert metadata['noarch'] == "generic" + outputs = api.build( + os.path.join(metadata_dir, "noarch_generic"), config=testing_config + ) + metadata = json.loads(package_has_file(outputs[0], "info/index.json")) + assert metadata["noarch"] == "generic" def test_about_json_content(testing_metadata): outputs = api.build(testing_metadata) - about = json.loads(package_has_file(outputs[0], 'info/about.json')) - assert 'conda_version' in about and about['conda_version'] == conda.__version__ - assert 'conda_build_version' in about and about['conda_build_version'] == __version__ - assert 'channels' in about and about['channels'] - assert 'tags' in about and about['tags'] == ["a", "b"] + about = json.loads(package_has_file(outputs[0], "info/about.json")) + assert "conda_version" in about and about["conda_version"] == conda.__version__ + assert ( + "conda_build_version" in about and about["conda_build_version"] == __version__ + ) + assert "channels" in about and about["channels"] + assert "tags" in about and about["tags"] == ["a", "b"] # this one comes in as a string - test type coercion - assert 'identifiers' in about and about['identifiers'] == ["a"] - assert 'env_vars' in about and about['env_vars'] + assert "identifiers" in about and about["identifiers"] == ["a"] + assert "env_vars" in about and about["env_vars"] - assert 'root_pkgs' in about and about['root_pkgs'] + assert "root_pkgs" in about and about["root_pkgs"] @pytest.mark.parametrize( @@ -765,10 +889,18 @@ def test_about_license_file_and_prelink_message(testing_config, name, field): recipe = os.path.join(base_dir, "dir") outputs = api.build(recipe, config=testing_config) - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-source/first-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-source/second-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-recipe/first-{name}.txt") - assert package_has_file(outputs[0], f"info/{name}s/{name}-dir-from-recipe/second-{name}.txt") + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-source/first-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-source/second-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-recipe/first-{name}.txt" + ) + assert package_has_file( + outputs[0], f"info/{name}s/{name}-dir-from-recipe/second-{name}.txt" + ) recipe = os.path.join(base_dir, "dir-no-slash-suffix") assert os.path.isdir(recipe) @@ -778,10 +910,12 @@ def test_about_license_file_and_prelink_message(testing_config, name, field): @pytest.mark.slow -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. 
The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) # Regardless of the reason for skipping, we should definitely find a better way for tests to look for the packages # Rather than assuming they will be at $ROOT/pkgs since that can change and we don't care where they are in terms of the # tests. @@ -789,51 +923,68 @@ def test_noarch_python_with_tests(testing_config): recipe = os.path.join(metadata_dir, "_noarch_python_with_tests") pkg = api.build(recipe, config=testing_config)[0] # noarch recipes with commands should generate both .bat and .sh files. - assert package_has_file(pkg, 'info/test/run_test.bat') - assert package_has_file(pkg, 'info/test/run_test.sh') + assert package_has_file(pkg, "info/test/run_test.bat") + assert package_has_file(pkg, "info/test/run_test.sh") @pytest.mark.sanity def test_noarch_python_1(testing_config): - output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0] - assert package_has_file(output, 'info/files') != '' - extra = json.loads(package_has_file(output, 'info/link.json')) - assert 'noarch' in extra - assert 'entry_points' in extra['noarch'] - assert 'type' in extra['noarch'] - assert 'package_metadata_version' in extra + output = api.build( + os.path.join(metadata_dir, "_noarch_python"), config=testing_config + )[0] + assert package_has_file(output, "info/files") != "" + extra = json.loads(package_has_file(output, "info/link.json")) + assert "noarch" in extra + assert "entry_points" in extra["noarch"] + assert "type" in extra["noarch"] + assert "package_metadata_version" in extra @pytest.mark.sanity def test_skip_compile_pyc(testing_config): - outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config + ) tf = tarfile.open(outputs[0]) pyc_count = 0 for f in tf.getmembers(): filename = os.path.basename(f.name) _, ext = os.path.splitext(filename) - basename = filename.split('.', 1)[0] - if basename == 'skip_compile_pyc': - assert not ext == '.pyc', f"a skip_compile_pyc .pyc was compiled: {filename}" - if ext == '.pyc': - assert basename == 'compile_pyc', f"an unexpected .pyc was compiled: {filename}" + basename = filename.split(".", 1)[0] + if basename == "skip_compile_pyc": + assert ( + not ext == ".pyc" + ), f"a skip_compile_pyc .pyc was compiled: {filename}" + if ext == ".pyc": + assert ( + basename == "compile_pyc" + ), f"an unexpected .pyc was compiled: {filename}" pyc_count = pyc_count + 1 - assert pyc_count == 2, f"there should be 2 .pyc files, instead there were {pyc_count}" + assert ( + pyc_count == 2 + ), f"there should be 2 .pyc files, instead there were {pyc_count}" def test_detect_binary_files_with_prefix(testing_config): - outputs = api.build(os.path.join(metadata_dir, "_detect_binary_files_with_prefix"), - config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "_detect_binary_files_with_prefix"), + config=testing_config, + ) matches = [] with tarfile.open(outputs[0]) as tf: - has_prefix = tf.extractfile('info/has_prefix') - contents = [p.strip().decode('utf-8') for p in - has_prefix.readlines()] + has_prefix = tf.extractfile("info/has_prefix") + contents = [p.strip().decode("utf-8") for p in has_prefix.readlines()] has_prefix.close() - matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or - entry.endswith('"binary-has-prefix"')] + matches = [ + entry + for entry in contents + if 
entry.endswith("binary-has-prefix") + or entry.endswith('"binary-has-prefix"') + ] assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix" - assert ' binary ' in matches[0], "binary-has-prefix not recorded as binary in info/has_prefix" + assert ( + " binary " in matches[0] + ), "binary-has-prefix not recorded as binary in info/has_prefix" def test_skip_detect_binary_files_with_prefix(testing_config): @@ -842,16 +993,21 @@ def test_skip_detect_binary_files_with_prefix(testing_config): matches = [] with tarfile.open(outputs[0]) as tf: try: - has_prefix = tf.extractfile('info/has_prefix') - contents = [p.strip().decode('utf-8') for p in - has_prefix.readlines()] + has_prefix = tf.extractfile("info/has_prefix") + contents = [p.strip().decode("utf-8") for p in has_prefix.readlines()] has_prefix.close() - matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or - entry.endswith('"binary-has-prefix"')] + matches = [ + entry + for entry in contents + if entry.endswith("binary-has-prefix") + or entry.endswith('"binary-has-prefix"') + ] except: pass - assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite:" \ - "build/detect_binary_files_with_prefix: false" + assert len(matches) == 0, ( + "binary-has-prefix recorded in info/has_prefix despite:" + "build/detect_binary_files_with_prefix: false" + ) def test_fix_permissions(testing_config): @@ -859,18 +1015,21 @@ def test_fix_permissions(testing_config): outputs = api.build(recipe, config=testing_config) with tarfile.open(outputs[0]) as tf: for f in tf.getmembers(): - assert f.mode & 0o444 == 0o444, f"tar member '{f.name}' has invalid (read) mode" + assert ( + f.mode & 0o444 == 0o444 + ), f"tar member '{f.name}' has invalid (read) mode" @pytest.mark.sanity @pytest.mark.skipif(not on_win, reason="windows-only functionality") -@pytest.mark.parametrize('recipe_name', ["_script_win_creates_exe", - "_script_win_creates_exe_garbled"]) +@pytest.mark.parametrize( + "recipe_name", ["_script_win_creates_exe", "_script_win_creates_exe_garbled"] +) def test_script_win_creates_exe(testing_config, recipe_name): recipe = os.path.join(metadata_dir, recipe_name) outputs = api.build(recipe, config=testing_config) - assert package_has_file(outputs[0], 'Scripts/test-script.exe') - assert package_has_file(outputs[0], 'Scripts/test-script-script.py') + assert package_has_file(outputs[0], "Scripts/test-script.exe") + assert package_has_file(outputs[0], "Scripts/test-script-script.py") @pytest.mark.sanity @@ -881,25 +1040,36 @@ def test_output_folder_moves_file(testing_metadata, testing_workdir): @pytest.mark.sanity -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. 
The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) def test_info_files_json(testing_config): - outputs = api.build(os.path.join(metadata_dir, "_ignore_some_prefix_files"), - config=testing_config) + outputs = api.build( + os.path.join(metadata_dir, "_ignore_some_prefix_files"), config=testing_config + ) assert package_has_file(outputs[0], "info/paths.json") with tarfile.open(outputs[0]) as tf: - data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8')) - fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link", - "prefix_placeholder", "inode_paths"] + data = json.loads(tf.extractfile("info/paths.json").read().decode("utf-8")) + fields = [ + "_path", + "sha256", + "size_in_bytes", + "path_type", + "file_mode", + "no_link", + "prefix_placeholder", + "inode_paths", + ] for key in data.keys(): - assert key in ['paths', 'paths_version'] - for paths in data.get('paths'): + assert key in ["paths", "paths_version"] + for paths in data.get("paths"): for field in paths.keys(): assert field in fields - assert len(data.get('paths')) == 2 - for file in data.get('paths'): + assert len(data.get("paths")) == 2 + for file in data.get("paths"): for key in file.keys(): assert key in fields short_path = file.get("_path") @@ -914,49 +1084,56 @@ def test_info_files_json(testing_config): def test_build_expands_wildcards(mocker): build_tree = mocker.patch("conda_build.build.build_tree") config = api.Config() - files = ['abc', 'acb'] + files = ["abc", "acb"] for f in files: os.makedirs(f) - with open(os.path.join(f, 'meta.yaml'), 'w') as fh: - fh.write('\n') + with open(os.path.join(f, "meta.yaml"), "w") as fh: + fh.write("\n") api.build(["a*"], config=config) - output = sorted(os.path.join(os.getcwd(), path, 'meta.yaml') for path in files) - - build_tree.assert_called_once_with(output, - config=mocker.ANY, - stats=mocker.ANY, - build_only=False, - post=None, notest=False, - variants=None) + output = sorted(os.path.join(os.getcwd(), path, "meta.yaml") for path in files) + + build_tree.assert_called_once_with( + output, + config=mocker.ANY, + stats=mocker.ANY, + build_only=False, + post=None, + notest=False, + variants=None, + ) -@pytest.mark.parametrize('set_build_id', [True, False]) +@pytest.mark.parametrize("set_build_id", [True, False]) def test_remove_workdir_default(testing_config, caplog, set_build_id): - recipe = os.path.join(metadata_dir, '_keep_work_dir') + recipe = os.path.join(metadata_dir, "_keep_work_dir") # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, set_build_id=set_build_id) - assert not glob(os.path.join(metadata.config.work_dir, '*')) + assert not glob(os.path.join(metadata.config.work_dir, "*")) def test_keep_workdir_and_dirty_reuse(testing_config, capfd): - recipe = os.path.join(metadata_dir, '_keep_work_dir') + recipe = os.path.join(metadata_dir, "_keep_work_dir") # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. 
This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another - metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] + metadata = api.render( + recipe, config=testing_config, dirty=True, remove_work_dir=False + )[0][0] workdir = metadata.config.work_dir api.build(metadata) out, err = capfd.readouterr() - assert glob(os.path.join(metadata.config.work_dir, '*')) + assert glob(os.path.join(metadata.config.work_dir, "*")) # test that --dirty reuses the same old folder - metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] + metadata = api.render( + recipe, config=testing_config, dirty=True, remove_work_dir=False + )[0][0] assert workdir == metadata.config.work_dir # test that without --dirty, we don't reuse the folder @@ -968,19 +1145,19 @@ def test_keep_workdir_and_dirty_reuse(testing_config, capfd): @pytest.mark.sanity def test_workdir_removal_warning(testing_config, caplog): - recipe = os.path.join(metadata_dir, '_test_uses_src_dir') + recipe = os.path.join(metadata_dir, "_test_uses_src_dir") with pytest.raises(ValueError) as exc: api.build(recipe, config=testing_config) assert "work dir is removed" in str(exc) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") +@pytest.mark.skipif(sys.platform != "darwin", reason="relevant to mac only") def test_append_python_app_osx(testing_config, conda_build_test_recipe_envvar: str): """Recipes that use osx_is_app need to have python.app in their runtime requirements. conda-build will add it if it's missing.""" - recipe = os.path.join(metadata_dir, '_osx_is_app_missing_python_app') + recipe = os.path.join(metadata_dir, "_osx_is_app_missing_python_app") # tests will fail here if python.app is not added to the run reqs by conda-build, because # without it, pythonw will be missing. api.build(recipe, config=testing_config) @@ -988,168 +1165,218 @@ def test_append_python_app_osx(testing_config, conda_build_test_recipe_envvar: s @pytest.mark.sanity def test_run_exports(testing_metadata, testing_config, testing_workdir): - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, notest=True) - api.build(os.path.join(metadata_dir, '_run_exports_implicit_weak'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) + api.build( + os.path.join(metadata_dir, "_run_exports_implicit_weak"), + config=testing_config, + notest=True, + ) # run_exports is tricky. We mostly only ever want things in "host". Here are the conditions: # 1. only build section present (legacy recipe). Here, use run_exports from build. Because build and host # will be merged when build subdir == host_subdir, the weak run_exports should be present. - testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] - api.output_yaml(testing_metadata, 'meta.yaml') + testing_metadata.meta["requirements"]["build"] = ["test_has_run_exports"] + api.output_yaml(testing_metadata, "meta.yaml") m = api.render(testing_workdir, config=testing_config)[0][0] - assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] - assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run'] + assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" in m.meta["requirements"]["run"] # 2. host present. 
Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. - testing_metadata.meta['requirements']['build'] = ['test_has_run_exports_implicit_weak', - '{{ compiler("c") }}'] - testing_metadata.meta['requirements']['host'] = ['python'] - api.output_yaml(testing_metadata, 'host_present_weak/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'host_present_weak'), config=testing_config)[0][0] - assert 'weak_pinned_package 2.0.*' not in m.meta['requirements'].get('run', []) + testing_metadata.meta["requirements"]["build"] = [ + "test_has_run_exports_implicit_weak", + '{{ compiler("c") }}', + ] + testing_metadata.meta["requirements"]["host"] = ["python"] + api.output_yaml(testing_metadata, "host_present_weak/meta.yaml") + m = api.render( + os.path.join(testing_workdir, "host_present_weak"), config=testing_config + )[0][0] + assert "weak_pinned_package 2.0.*" not in m.meta["requirements"].get("run", []) # 3. host present, and deps in build have "strong" run_exports section. use host, add # in "strong" from build. - testing_metadata.meta['requirements']['build'] = ['test_has_run_exports', '{{ compiler("c") }}'] - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports_implicit_weak'] - api.output_yaml(testing_metadata, 'host_present_strong/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'host_present_strong'), - config=testing_config)[0][0] - assert 'strong_pinned_package 1.0 0' in m.meta['requirements']['host'] - assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] + testing_metadata.meta["requirements"]["build"] = [ + "test_has_run_exports", + '{{ compiler("c") }}', + ] + testing_metadata.meta["requirements"]["host"] = [ + "test_has_run_exports_implicit_weak" + ] + api.output_yaml(testing_metadata, "host_present_strong/meta.yaml") + m = api.render( + os.path.join(testing_workdir, "host_present_strong"), config=testing_config + )[0][0] + assert "strong_pinned_package 1.0 0" in m.meta["requirements"]["host"] + assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] # weak one from test_has_run_exports should be excluded, since it is a build dep - assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run'] + assert "weak_pinned_package 1.0.*" not in m.meta["requirements"]["run"] # weak one from test_has_run_exports_implicit_weak should be present, since it is a host dep - assert 'weak_pinned_package 2.0.*' in m.meta['requirements']['run'] + assert "weak_pinned_package 2.0.*" in m.meta["requirements"]["run"] @pytest.mark.sanity def test_ignore_run_exports(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports'] - testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package'] + testing_metadata.meta["requirements"]["host"] = ["test_has_run_exports"] + testing_metadata.meta["build"]["ignore_run_exports"] = ["downstream_pinned_package"] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) - assert 'downstream_pinned_package 1.0' not in m.meta['requirements'].get('run', []) + assert "downstream_pinned_package 1.0" not in m.meta["requirements"].get("run", []) @pytest.mark.sanity def 
test_ignore_run_exports_from(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports"), config=testing_config, notest=True + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['test_has_run_exports'] - testing_metadata.meta['build']['ignore_run_exports_from'] = ['test_has_run_exports'] + testing_metadata.meta["requirements"]["host"] = ["test_has_run_exports"] + testing_metadata.meta["build"]["ignore_run_exports_from"] = ["test_has_run_exports"] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) - assert 'downstream_pinned_package 1.0' not in m.meta['requirements'].get('run', []) + assert "downstream_pinned_package 1.0" not in m.meta["requirements"].get("run", []) -@pytest.mark.skipif("CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, - reason="This test does not run on Github Actions yet. We will need to adjust " - "where to look for the pkgs. The github action for setup-miniconda sets " - "pkg_dirs to conda_pkgs_dir.") +@pytest.mark.skipif( + "CI" in os.environ and "GITHUB_WORKFLOW" in os.environ, + reason="This test does not run on Github Actions yet. We will need to adjust " + "where to look for the pkgs. The github action for setup-miniconda sets " + "pkg_dirs to conda_pkgs_dir.", +) def test_run_exports_noarch_python(testing_metadata, testing_config): # build the package with run exports for ensuring that we ignore it - api.build(os.path.join(metadata_dir, '_run_exports_noarch'), config=testing_config, - notest=True) + api.build( + os.path.join(metadata_dir, "_run_exports_noarch"), + config=testing_config, + notest=True, + ) # customize our fixture metadata with our desired changes - testing_metadata.meta['requirements']['host'] = ['python'] - testing_metadata.meta['requirements']['run'] = ['python'] - testing_metadata.meta['build']['noarch'] = 'python' + testing_metadata.meta["requirements"]["host"] = ["python"] + testing_metadata.meta["requirements"]["run"] = ["python"] + testing_metadata.meta["build"]["noarch"] = "python" testing_metadata.config.index = None testing_metadata.config.variant["python"] = "3.8 with_run_exports" m = finalize_metadata(testing_metadata) - assert 'python 3.6 with_run_exports' in m.meta['requirements'].get('host', []) - assert 'python 3.6 with_run_exports' not in m.meta['requirements'].get('run', []) + assert "python 3.6 with_run_exports" in m.meta["requirements"].get("host", []) + assert "python 3.6 with_run_exports" not in m.meta["requirements"].get("run", []) def test_run_exports_constrains(testing_metadata, testing_config, testing_workdir): - api.build(os.path.join(metadata_dir, '_run_exports_constrains'), config=testing_config, - notest=True) - - testing_metadata.meta['requirements']['build'] = ['run_exports_constrains'] - testing_metadata.meta['requirements']['host'] = [] - api.output_yaml(testing_metadata, 'in_build/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'in_build'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'strong_run_export'} == reqs_set('run') - assert {'strong_constrains_export'} == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = [] - testing_metadata.meta['requirements']['host'] = ['run_exports_constrains'] - 
api.output_yaml(testing_metadata, 'in_host/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'in_host'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'strong_run_export', 'weak_run_export'} == reqs_set('run') - assert {'strong_constrains_export', 'weak_constrains_export'} == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = ['run_exports_constrains_only_weak'] - testing_metadata.meta['requirements']['host'] = [] - api.output_yaml(testing_metadata, 'only_weak_in_build/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'only_weak_in_build'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert set() == reqs_set('run') - assert set() == reqs_set('run_constrained') - - testing_metadata.meta['requirements']['build'] = [] - testing_metadata.meta['requirements']['host'] = ['run_exports_constrains_only_weak'] - api.output_yaml(testing_metadata, 'only_weak_in_host/meta.yaml') - m = api.render(os.path.join(testing_workdir, 'only_weak_in_host'), config=testing_config)[0][0] - reqs_set = lambda section: set(m.meta['requirements'].get(section, [])) - assert {'weak_run_export'} == reqs_set('run') - assert {'weak_constrains_export'} == reqs_set('run_constrained') + api.build( + os.path.join(metadata_dir, "_run_exports_constrains"), + config=testing_config, + notest=True, + ) + + testing_metadata.meta["requirements"]["build"] = ["run_exports_constrains"] + testing_metadata.meta["requirements"]["host"] = [] + api.output_yaml(testing_metadata, "in_build/meta.yaml") + m = api.render(os.path.join(testing_workdir, "in_build"), config=testing_config)[0][ + 0 + ] + reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + assert {"strong_run_export"} == reqs_set("run") + assert {"strong_constrains_export"} == reqs_set("run_constrained") + + testing_metadata.meta["requirements"]["build"] = [] + testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains"] + api.output_yaml(testing_metadata, "in_host/meta.yaml") + m = api.render(os.path.join(testing_workdir, "in_host"), config=testing_config)[0][ + 0 + ] + reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + assert {"strong_run_export", "weak_run_export"} == reqs_set("run") + assert {"strong_constrains_export", "weak_constrains_export"} == reqs_set( + "run_constrained" + ) + + testing_metadata.meta["requirements"]["build"] = [ + "run_exports_constrains_only_weak" + ] + testing_metadata.meta["requirements"]["host"] = [] + api.output_yaml(testing_metadata, "only_weak_in_build/meta.yaml") + m = api.render( + os.path.join(testing_workdir, "only_weak_in_build"), config=testing_config + )[0][0] + reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + assert set() == reqs_set("run") + assert set() == reqs_set("run_constrained") + + testing_metadata.meta["requirements"]["build"] = [] + testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains_only_weak"] + api.output_yaml(testing_metadata, "only_weak_in_host/meta.yaml") + m = api.render( + os.path.join(testing_workdir, "only_weak_in_host"), config=testing_config + )[0][0] + reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + assert {"weak_run_export"} == reqs_set("run") + assert {"weak_constrains_export"} == reqs_set("run_constrained") def test_pin_subpackage_exact(testing_config): - recipe = os.path.join(metadata_dir, '_pin_subpackage_exact') + 
recipe = os.path.join(metadata_dir, "_pin_subpackage_exact") ms = api.render(recipe, config=testing_config) assert len(ms) == 2 - assert any(re.match(r'run_exports_subpkg\ 1\.0\ 0', req) - for (m, _, _) in ms for req in m.meta.get('requirements', {}).get('run', [])) + assert any( + re.match(r"run_exports_subpkg\ 1\.0\ 0", req) + for (m, _, _) in ms + for req in m.meta.get("requirements", {}).get("run", []) + ) @pytest.mark.sanity @pytest.mark.serial -@pytest.mark.skipif(sys.platform != 'linux', reason="xattr code written here is specific to linux") +@pytest.mark.skipif( + sys.platform != "linux", reason="xattr code written here is specific to linux" +) def test_copy_read_only_file_with_xattr(testing_config, testing_homedir): if not testing_homedir: - return pytest.xfail("could not create a temporary folder in {} (tmpfs inappropriate for xattrs)". - format('${HOME}' if sys.platform != 'win32' else '%UserProfile%')) - src_recipe = os.path.join(metadata_dir, '_xattr_copy') - recipe = os.path.join(testing_homedir, '_xattr_copy') + return pytest.xfail( + "could not create a temporary folder in {} (tmpfs inappropriate for xattrs)".format( + "${HOME}" if sys.platform != "win32" else "%UserProfile%" + ) + ) + src_recipe = os.path.join(metadata_dir, "_xattr_copy") + recipe = os.path.join(testing_homedir, "_xattr_copy") copy_into(src_recipe, recipe) # file is r/w for owner, but we change it to 400 after setting the attribute - ro_file = os.path.join(recipe, 'mode_400_file') + ro_file = os.path.join(recipe, "mode_400_file") # tmpfs on modern Linux does not support xattr in general. # https://stackoverflow.com/a/46598063 # tmpfs can support extended attributes if you enable CONFIG_TMPFS_XATTR in Kernel config. # But Currently this enables support for the trusted.* and security.* namespaces try: - subprocess.check_call(f'setfattr -n user.attrib -v somevalue {ro_file}', shell=True) + subprocess.check_call( + f"setfattr -n user.attrib -v somevalue {ro_file}", shell=True + ) except: - return pytest.xfail("setfattr not possible in {}, see https://stackoverflow.com/a/46598063".format( - testing_homedir)) - subprocess.check_call(f'chmod 400 {ro_file}', shell=True) + return pytest.xfail( + "setfattr not possible in {}, see https://stackoverflow.com/a/46598063".format( + testing_homedir + ) + ) + subprocess.check_call(f"chmod 400 {ro_file}", shell=True) api.build(recipe, config=testing_config) @pytest.mark.sanity @pytest.mark.serial def test_env_creation_fail_exits_build(testing_config): - recipe = os.path.join(metadata_dir, '_post_link_exits_after_retry') + recipe = os.path.join(metadata_dir, "_post_link_exits_after_retry") with pytest.raises((RuntimeError, LinkError, CondaError, KeyError)): api.build(recipe, config=testing_config) - recipe = os.path.join(metadata_dir, '_post_link_exits_tests') + recipe = os.path.join(metadata_dir, "_post_link_exits_tests") with pytest.raises((RuntimeError, LinkError, CondaError, KeyError)): api.build(recipe, config=testing_config) @@ -1159,20 +1386,22 @@ def test_recursion_packages(testing_config): """Two packages that need to be built are listed in the recipe make sure that both get built before the one needing them gets built.""" - recipe = os.path.join(metadata_dir, '_recursive-build-two-packages') + recipe = os.path.join(metadata_dir, "_recursive-build-two-packages") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_recursion_layers(testing_config): """go two 'hops' - try to build a, but a needs b, so build b first, then come back to a""" - recipe = 
os.path.join(metadata_dir, '_recursive-build-two-layers') + recipe = os.path.join(metadata_dir, "_recursive-build-two-layers") api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'win32', reason=("spaces break openssl prefix " - "replacement on *nix")) +@pytest.mark.skipif( + sys.platform != "win32", + reason=("spaces break openssl prefix " "replacement on *nix"), +) def test_croot_with_spaces(testing_metadata, testing_workdir): testing_metadata.config.croot = os.path.join(testing_workdir, "space path") api.build(testing_metadata) @@ -1180,7 +1409,7 @@ def test_croot_with_spaces(testing_metadata, testing_workdir): @pytest.mark.sanity def test_unknown_selectors(testing_config): - recipe = os.path.join(metadata_dir, 'unknown_selector') + recipe = os.path.join(metadata_dir, "unknown_selector") api.build(recipe, config=testing_config) @@ -1188,13 +1417,13 @@ def test_unknown_selectors(testing_config): # https://github.com/conda/conda-build/issues/4685 @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_failed_recipe_leaves_folders(testing_config): - recipe = os.path.join(fail_dir, 'recursive-build') + recipe = os.path.join(fail_dir, "recursive-build") m = api.render(recipe, config=testing_config)[0][0] locks = get_conda_operation_locks(m.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): api.build(m) - assert os.path.isdir(m.config.build_folder), 'build folder was removed' - assert os.listdir(m.config.build_folder), 'build folder has no files' + assert os.path.isdir(m.config.build_folder), "build folder was removed" + assert os.listdir(m.config.build_folder), "build folder has no files" # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations @@ -1203,141 +1432,144 @@ def test_failed_recipe_leaves_folders(testing_config): @pytest.mark.sanity def test_only_r_env_vars_defined(testing_config): - recipe = os.path.join(metadata_dir, '_r_env_defined') + recipe = os.path.join(metadata_dir, "_r_env_defined") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_only_perl_env_vars_defined(testing_config): - recipe = os.path.join(metadata_dir, '_perl_env_defined') + recipe = os.path.join(metadata_dir, "_perl_env_defined") api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(on_win, reason='no lua package on win') +@pytest.mark.skipif(on_win, reason="no lua package on win") def test_only_lua_env(testing_config): - recipe = os.path.join(metadata_dir, '_lua_env_defined') + recipe = os.path.join(metadata_dir, "_lua_env_defined") testing_config.set_build_id = False api.build(recipe, config=testing_config) def test_run_constrained_stores_constrains_info(testing_config): - recipe = os.path.join(metadata_dir, '_run_constrained') + recipe = os.path.join(metadata_dir, "_run_constrained") out_file = api.build(recipe, config=testing_config)[0] - info_contents = json.loads(package_has_file(out_file, 'info/index.json')) - assert 'constrains' in info_contents - assert len(info_contents['constrains']) == 1 - assert info_contents['constrains'][0] == 'bzip2 1.*' + info_contents = json.loads(package_has_file(out_file, "info/index.json")) + assert "constrains" in info_contents + assert len(info_contents["constrains"]) == 1 + assert info_contents["constrains"][0] == "bzip2 1.*" @pytest.mark.sanity def test_no_locking(testing_config): - recipe = os.path.join(metadata_dir, 'source_git_jinja2') + recipe = os.path.join(metadata_dir, 
"source_git_jinja2") api.update_index(os.path.join(testing_config.croot)) api.build(recipe, config=testing_config, locking=False) @pytest.mark.sanity def test_test_dependencies(testing_config): - recipe = os.path.join(fail_dir, 'check_test_dependencies') + recipe = os.path.join(fail_dir, "check_test_dependencies") with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) - assert 'Unsatisfiable dependencies for platform ' in str(e.value) - assert 'pytest-package-does-not-exist' in str(e.value) + assert "Unsatisfiable dependencies for platform " in str(e.value) + assert "pytest-package-does-not-exist" in str(e.value) @pytest.mark.sanity def test_runtime_dependencies(testing_config): - recipe = os.path.join(fail_dir, 'check_runtime_dependencies') + recipe = os.path.join(fail_dir, "check_runtime_dependencies") with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) - assert 'Unsatisfiable dependencies for platform ' in str(e.value) - assert 'some-nonexistent-package1' in str(e.value) + assert "Unsatisfiable dependencies for platform " in str(e.value) + assert "some-nonexistent-package1" in str(e.value) @pytest.mark.sanity def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata): testing_metadata.config.anaconda_upload = True - del testing_metadata.meta['test'] - api.output_yaml(testing_metadata, 'meta.yaml') - call = mocker.patch.object(conda_build.build.subprocess, 'call') - cc_conda_build['force_upload'] = False + del testing_metadata.meta["test"] + api.output_yaml(testing_metadata, "meta.yaml") + call = mocker.patch.object(conda_build.build.subprocess, "call") + cc_conda_build["force_upload"] = False pkg = api.build(testing_workdir) - assert call.called_once_with(['anaconda', 'upload', pkg]) - del cc_conda_build['force_upload'] + assert call.called_once_with(["anaconda", "upload", pkg]) + del cc_conda_build["force_upload"] pkg = api.build(testing_workdir) - assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) + assert call.called_once_with(["anaconda", "upload", "--force", pkg]) @pytest.mark.sanity def test_setup_py_data_in_env(testing_config): - recipe = os.path.join(metadata_dir, '_setup_py_data_in_env') + recipe = os.path.join(metadata_dir, "_setup_py_data_in_env") # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic with pytest.raises(subprocess.CalledProcessError): - api.build(recipe, config=testing_config, python='3.5') + api.build(recipe, config=testing_config, python="3.5") @pytest.mark.sanity def test_numpy_xx(testing_config): - recipe = os.path.join(metadata_dir, '_numpy_xx') - api.render(recipe, config=testing_config, numpy='1.15', python="3.6") + recipe = os.path.join(metadata_dir, "_numpy_xx") + api.render(recipe, config=testing_config, numpy="1.15", python="3.6") @pytest.mark.sanity def test_numpy_xx_host(testing_config): - recipe = os.path.join(metadata_dir, '_numpy_xx_host') - api.render(recipe, config=testing_config, numpy='1.15', python="3.6") + recipe = os.path.join(metadata_dir, "_numpy_xx_host") + api.render(recipe, config=testing_config, numpy="1.15", python="3.6") @pytest.mark.sanity def test_python_xx(testing_config): - recipe = os.path.join(metadata_dir, '_python_xx') - api.render(recipe, config=testing_config, python='3.5') + recipe = os.path.join(metadata_dir, "_python_xx") + api.render(recipe, config=testing_config, python="3.5") 
@pytest.mark.sanity def test_indirect_numpy_dependency(testing_metadata, testing_workdir): - testing_metadata.meta['requirements']['build'] = ['pandas'] - api.output_yaml(testing_metadata, os.path.join(testing_workdir, 'meta.yaml')) - api.render(testing_workdir, numpy='1.13', notest=True) + testing_metadata.meta["requirements"]["build"] = ["pandas"] + api.output_yaml(testing_metadata, os.path.join(testing_workdir, "meta.yaml")) + api.render(testing_workdir, numpy="1.13", notest=True) @pytest.mark.sanity def test_dependencies_with_notest(testing_config): - recipe = os.path.join(metadata_dir, '_test_dependencies') + recipe = os.path.join(metadata_dir, "_test_dependencies") api.build(recipe, config=testing_config, notest=True) with pytest.raises(DependencyNeedsBuildingError) as excinfo: api.build(recipe, config=testing_config, notest=False) - assert 'Unsatisfiable dependencies for platform' in str(excinfo.value) - assert 'somenonexistentpackage1' in str(excinfo.value) + assert "Unsatisfiable dependencies for platform" in str(excinfo.value) + assert "somenonexistentpackage1" in str(excinfo.value) @pytest.mark.sanity def test_source_cache_build(testing_workdir): - recipe = os.path.join(metadata_dir, 'source_git_jinja2') + recipe = os.path.join(metadata_dir, "source_git_jinja2") config = api.Config(src_cache_root=testing_workdir) api.build(recipe, notest=True, config=config) - git_cache_directory = f'{testing_workdir}/git_cache' + git_cache_directory = f"{testing_workdir}/git_cache" assert os.path.isdir(git_cache_directory) - files = [filename for _, _, filenames in walk(git_cache_directory) - for filename in filenames] + files = [ + filename + for _, _, filenames in walk(git_cache_directory) + for filename in filenames + ] assert len(files) > 0 @pytest.mark.slow def test_copy_test_source_files(testing_config): - recipe = os.path.join(metadata_dir, '_test_test_source_files') + recipe = os.path.join(metadata_dir, "_test_test_source_files") filenames = set() for copy in (False, True): testing_config.copy_test_source_files = copy @@ -1351,23 +1583,28 @@ def test_copy_test_source_files(testing_config): # nesting of test/test here is because info/test is the main folder # for test files, then test is the source_files folder we specify, # and text.txt is within that. - if f.name == 'info/test/test_files_folder/text.txt': + if f.name == "info/test/test_files_folder/text.txt": found = True break if found: - assert copy, "'info/test/test_files_folder/text.txt' found in tar.bz2 but not copying test source files" + assert ( + copy + ), "'info/test/test_files_folder/text.txt' found in tar.bz2 but not copying test source files" if copy: api.test(outputs[0]) else: with pytest.raises(RuntimeError): api.test(outputs[0]) else: - assert not copy, "'info/test/test_files_folder/text.txt' not found in tar.bz2 but copying test source files. File list: %r" % files + assert not copy, ( + "'info/test/test_files_folder/text.txt' not found in tar.bz2 but copying test source files. File list: %r" + % files + ) @pytest.mark.sanity def test_copy_test_source_files_deps(testing_config): - recipe = os.path.join(metadata_dir, '_test_test_source_files') + recipe = os.path.join(metadata_dir, "_test_test_source_files") for copy in (False, True): testing_config.copy_test_source_files = copy # test is that pytest is a dep either way. Builds will fail if it's not. 
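A minimal sketch of the behaviour the copy_test_source_files tests above rely on (illustrative only, not part of the patch; `metadata_dir` is the test-suite helper imported from `.utils` as elsewhere in these tests): when the flag is enabled, the recipe's declared test source files are packaged under info/test/.

    import os
    from conda_build import api
    from conda_build.utils import package_has_file
    from .utils import metadata_dir  # test-suite helper, as used throughout these tests

    config = api.Config()
    config.copy_test_source_files = True  # the flag these tests toggle
    recipe = os.path.join(metadata_dir, "_test_test_source_files")
    outputs = api.build(recipe, config=config)
    # With the flag enabled, the recipe's test source_files folder ends up inside the package:
    assert package_has_file(outputs[0], "info/test/test_files_folder/text.txt")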
@@ -1378,46 +1615,50 @@ def test_pin_depends(testing_config): """purpose of 'record' argument is to put a 'requires' file that records pinned run dependencies """ - recipe = os.path.join(metadata_dir, '_pin_depends_record') + recipe = os.path.join(metadata_dir, "_pin_depends_record") m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record # will not show it in record - assert not any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) + assert not any( + re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + ) output = api.build(m, config=testing_config)[0] - requires = package_has_file(output, 'info/requires') + requires = package_has_file(output, "info/requires") assert requires - if hasattr(requires, 'decode'): + if hasattr(requires, "decode"): requires = requires.decode() - assert re.search(r'python\=[23]\.', requires), "didn't find pinned python in info/requires" + assert re.search( + r"python\=[23]\.", requires + ), "didn't find pinned python in info/requires" @pytest.mark.sanity def test_failed_patch_exits_build(testing_config): with pytest.raises(RuntimeError): - api.build(os.path.join(metadata_dir, '_bad_patch'), config=testing_config) + api.build(os.path.join(metadata_dir, "_bad_patch"), config=testing_config) @pytest.mark.sanity def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config): # unsatisfiable; also not buildable (test_a recipe version is 2.0) - testing_config.variant['test_a'] = "1.0" - recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b') + testing_config.variant["test_a"] = "1.0" + recipe = os.path.join(metadata_dir, "_build_deps_no_infinite_loop", "test_b") with pytest.raises(DependencyNeedsBuildingError): api.build(recipe, config=testing_config) # passes now, because package can be built, or is already built. Doesn't matter which. 
- testing_config.variant['test_a'] = "2.0" + testing_config.variant["test_a"] = "2.0" api.build(recipe, config=testing_config) @pytest.mark.sanity def test_provides_features_metadata(testing_config): - recipe = os.path.join(metadata_dir, '_requires_provides_features') + recipe = os.path.join(metadata_dir, "_requires_provides_features") out = api.build(recipe, config=testing_config)[0] - index = json.loads(package_has_file(out, 'info/index.json')) - assert 'requires_features' in index - assert index['requires_features'] == {'test': 'ok'} - assert 'provides_features' in index - assert index['provides_features'] == {'test2': 'also_ok'} + index = json.loads(package_has_file(out, "info/index.json")) + assert "requires_features" in index + assert index["requires_features"] == {"test": "ok"} + assert "provides_features" in index + assert index["provides_features"] == {"test2": "also_ok"} # using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition @@ -1428,14 +1669,22 @@ def test_overlinking_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overlinking_detection') - dest_sh = os.path.join(recipe, 'build.sh') - dest_bat = os.path.join(recipe, 'bld.bat') - copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) + recipe = os.path.join(metadata_dir, "_overlinking_detection") + dest_sh = os.path.join(recipe, "build.sh") + dest_bat = os.path.join(recipe, "bld.bat") + copy_into( + os.path.join(recipe, "build_scripts", "default.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "default.bat"), dest_bat, clobber=True + ) api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) - copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) + copy_into( + os.path.join(recipe, "build_scripts", "no_as_needed.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "with_bzip2.bat"), dest_bat, clobber=True + ) with pytest.raises(OverLinkingError): api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) @@ -1452,14 +1701,22 @@ def test_overlinking_detection_ignore_patterns( testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overlinking_detection_ignore_patterns') - dest_sh = os.path.join(recipe, 'build.sh') - dest_bat = os.path.join(recipe, 'bld.bat') - copy_into(os.path.join(recipe, 'build_scripts', 'default.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 'build_scripts', 'default.bat'), dest_bat, clobber=True) + recipe = os.path.join(metadata_dir, "_overlinking_detection_ignore_patterns") + dest_sh = os.path.join(recipe, "build.sh") + dest_bat = os.path.join(recipe, "bld.bat") + copy_into( + os.path.join(recipe, "build_scripts", "default.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "default.bat"), dest_bat, clobber=True + ) api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) - copy_into(os.path.join(recipe, 'build_scripts', 'no_as_needed.sh'), dest_sh, clobber=True) - copy_into(os.path.join(recipe, 
'build_scripts', 'with_bzip2.bat'), dest_bat, clobber=True) + copy_into( + os.path.join(recipe, "build_scripts", "no_as_needed.sh"), dest_sh, clobber=True + ) + copy_into( + os.path.join(recipe, "build_scripts", "with_bzip2.bat"), dest_bat, clobber=True + ) api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) rm_rf(dest_sh) rm_rf(dest_bat) @@ -1470,13 +1727,12 @@ def test_overdepending_detection(testing_config, variants_conda_build_sysroot): testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_overdepending_detection') + recipe = os.path.join(metadata_dir, "_overdepending_detection") with pytest.raises(OverDependingError): api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) -@pytest.mark.skipif(sys.platform != "darwin", - reason="macOS-only test (at present)") +@pytest.mark.skipif(sys.platform != "darwin", reason="macOS-only test (at present)") def test_macos_tbd_handling(testing_config, variants_conda_build_sysroot): """ Test path handling after installation... The test case uses a Hello World @@ -1486,28 +1742,28 @@ def test_macos_tbd_handling(testing_config, variants_conda_build_sysroot): testing_config.error_overlinking = True testing_config.error_overdepending = True testing_config.verify = False - recipe = os.path.join(metadata_dir, '_macos_tbd_handling') + recipe = os.path.join(metadata_dir, "_macos_tbd_handling") api.build(recipe, config=testing_config, variants=variants_conda_build_sysroot) @pytest.mark.sanity def test_empty_package_with_python_in_build_and_host_barfs(testing_config): - recipe = os.path.join(metadata_dir, '_empty_pkg_with_python_build_host') + recipe = os.path.join(metadata_dir, "_empty_pkg_with_python_build_host") with pytest.raises(CondaBuildException): api.build(recipe, config=testing_config) @pytest.mark.sanity def test_empty_package_with_python_and_compiler_in_build_barfs(testing_config): - recipe = os.path.join(metadata_dir, '_compiler_python_build_section') + recipe = os.path.join(metadata_dir, "_compiler_python_build_section") with pytest.raises(CondaBuildException): api.build(recipe, config=testing_config) @pytest.mark.sanity def test_downstream_tests(testing_config): - upstream = os.path.join(metadata_dir, '_test_downstreams/upstream') - downstream = os.path.join(metadata_dir, '_test_downstreams/downstream') + upstream = os.path.join(metadata_dir, "_test_downstreams/upstream") + downstream = os.path.join(metadata_dir, "_test_downstreams/downstream") api.build(downstream, config=testing_config, notest=True) with pytest.raises(SystemExit): api.build(upstream, config=testing_config) @@ -1515,35 +1771,48 @@ def test_downstream_tests(testing_config): @pytest.mark.sanity def test_warning_on_file_clobbering(testing_config, capfd): - recipe_dir = os.path.join(metadata_dir, '_overlapping_files_warning') + recipe_dir = os.path.join(metadata_dir, "_overlapping_files_warning") - api.build(os.path.join(recipe_dir, 'a', ), config=testing_config) - api.build(os.path.join(recipe_dir, 'b', ), config=testing_config) + api.build( + os.path.join( + recipe_dir, + "a", + ), + config=testing_config, + ) + api.build( + os.path.join( + recipe_dir, + "b", + ), + config=testing_config, + ) # The clobber warning here is raised when creating the test environment for b out, err = capfd.readouterr() assert "ClobberWarning" in err with pytest.raises((ClobberError, CondaMultiError)): - with env_var('CONDA_PATH_CONFLICT', 'prevent', 
reset_context): - api.build(os.path.join(recipe_dir, 'b'), config=testing_config) + with env_var("CONDA_PATH_CONFLICT", "prevent", reset_context): + api.build(os.path.join(recipe_dir, "b"), config=testing_config) @pytest.mark.sanity @pytest.mark.skip(reason="conda-verify is deprecated because it is unsupported") def test_verify_bad_package(testing_config): from conda_verify.errors import PackageError - recipe_dir = os.path.join(fail_dir, 'create_bad_folder_for_conda_verify') + + recipe_dir = os.path.join(fail_dir, "create_bad_folder_for_conda_verify") api.build(recipe_dir, config=testing_config) with pytest.raises(PackageError): testing_config.exit_on_verify_error = True api.build(recipe_dir, config=testing_config) # ignore the error that we know should be raised, and re-run to make sure it is actually ignored - testing_config.ignore_verify_codes = ['C1125', 'C1115'] + testing_config.ignore_verify_codes = ["C1125", "C1115"] api.build(recipe_dir, config=testing_config) @pytest.mark.sanity def test_ignore_verify_codes(testing_config): - recipe_dir = os.path.join(metadata_dir, '_ignore_verify_codes') + recipe_dir = os.path.join(metadata_dir, "_ignore_verify_codes") testing_config.exit_on_verify_error = True # this recipe intentionally has a license error. If ignore_verify_codes works, # it will build OK. If not, it will error out. @@ -1552,26 +1821,26 @@ def test_ignore_verify_codes(testing_config): @pytest.mark.sanity def test_extra_meta(testing_config): - recipe_dir = os.path.join(metadata_dir, '_extra_meta') - testing_config.extra_meta = {'foo': 'bar'} + recipe_dir = os.path.join(metadata_dir, "_extra_meta") + testing_config.extra_meta = {"foo": "bar"} outputs = api.build(recipe_dir, config=testing_config) - about = json.loads(package_has_file(outputs[0], 'info/about.json')) - assert 'foo' in about['extra'] and about['extra']['foo'] == 'bar' + about = json.loads(package_has_file(outputs[0], "info/about.json")) + assert "foo" in about["extra"] and about["extra"]["foo"] == "bar" def test_symlink_dirs_in_always_include_files(testing_config): - recipe = os.path.join(metadata_dir, '_symlink_dirs_in_always_include_files') + recipe = os.path.join(metadata_dir, "_symlink_dirs_in_always_include_files") api.build(recipe, config=testing_config) def test_clean_rpaths(testing_config): - recipe = os.path.join(metadata_dir, '_clean_rpaths') + recipe = os.path.join(metadata_dir, "_clean_rpaths") api.build(recipe, config=testing_config, activate=True) def test_script_env_warnings(testing_config, recwarn): - recipe_dir = os.path.join(metadata_dir, '_script_env_warnings') - token = 'CONDA_BUILD_PYTEST_SCRIPT_ENV_TEST_TOKEN' + recipe_dir = os.path.join(metadata_dir, "_script_env_warnings") + token = "CONDA_BUILD_PYTEST_SCRIPT_ENV_TEST_TOKEN" def assert_keyword(keyword): messages = [str(w.message) for w in recwarn.list] @@ -1579,16 +1848,16 @@ def assert_keyword(keyword): recwarn.clear() api.build(recipe_dir, config=testing_config) - assert_keyword('undefined') + assert_keyword("undefined") os.environ[token] = "SECRET" try: api.build(recipe_dir, config=testing_config) - assert_keyword('SECRET') + assert_keyword("SECRET") testing_config.suppress_variables = True api.build(recipe_dir, config=testing_config) - assert_keyword('') + assert_keyword("") finally: os.environ.pop(token) diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 328c340ea7..25e7915848 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -10,9 +10,7 @@ 
@pytest.mark.parametrize("pkg_format,pkg_ext", [(None, ".tar.bz2"), ("2", ".conda")]) -def test_conda_pkg_format( - pkg_format, pkg_ext, testing_config, monkeypatch, capfd -): +def test_conda_pkg_format(pkg_format, pkg_ext, testing_config, monkeypatch, capfd): """Conda package format "2" builds .conda packages.""" # Build the "entry_points" recipe, which contains a test pass for package. @@ -25,7 +23,7 @@ def test_conda_pkg_format( monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - output_file, = api.get_output_file_paths(recipe, config=testing_config) + (output_file,) = api.get_output_file_paths(recipe, config=testing_config) assert output_file.endswith(pkg_ext) api.build(recipe, config=testing_config) diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 7931b99b55..15e4321d5d 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -4,63 +4,85 @@ # happens, we should bump our major build number, because we may have broken someone. import sys +from inspect import getfullargspec as getargspec import pytest from conda_build import api -from inspect import getfullargspec as getargspec - pytestmark = pytest.mark.no_default_testing_config def test_api_config(): - assert hasattr(api, 'Config') - assert hasattr(api, 'get_or_merge_config') + assert hasattr(api, "Config") + assert hasattr(api, "get_or_merge_config") def test_api_get_or_merge_config(): argspec = getargspec(api.get_or_merge_config) - assert argspec.args == ['config', 'variant'] - assert argspec.defaults == (None, ) + assert argspec.args == ["config", "variant"] + assert argspec.defaults == (None,) def test_api_render(): argspec = getargspec(api.render) - assert argspec.args == ['recipe_path', 'config', 'variants', - 'permit_unsatisfiable_variants', 'finalize', - 'bypass_env_check'] + assert argspec.args == [ + "recipe_path", + "config", + "variants", + "permit_unsatisfiable_variants", + "finalize", + "bypass_env_check", + ] assert argspec.defaults == (None, None, True, True, False) def test_api_output_yaml(): argspec = getargspec(api.output_yaml) - assert argspec.args == ['metadata', 'file_path', 'suppress_outputs'] + assert argspec.args == ["metadata", "file_path", "suppress_outputs"] assert argspec.defaults == (None, False) def test_api_get_output_file_path(): argspec = getargspec(api.get_output_file_path) - assert argspec.args == ['recipe_path_or_metadata', 'no_download_source', 'config', 'variants'] + assert argspec.args == [ + "recipe_path_or_metadata", + "no_download_source", + "config", + "variants", + ] assert argspec.defaults == (False, None, None) def test_api_check(): argspec = getargspec(api.check) - assert argspec.args == ['recipe_path', 'no_download_source', 'config', 'variants'] + assert argspec.args == ["recipe_path", "no_download_source", "config", "variants"] assert argspec.defaults == (False, None, None) def test_api_build(): argspec = getargspec(api.build) - assert argspec.args == ['recipe_paths_or_metadata', 'post', 'need_source_download', - 'build_only', 'notest', 'config', 'variants', 'stats'] + assert argspec.args == [ + "recipe_paths_or_metadata", + "post", + "need_source_download", + "build_only", + "notest", + "config", + "variants", + "stats", + ] assert argspec.defaults == (None, True, False, False, None, None, None) def test_api_test(): argspec = getargspec(api.test) - assert argspec.args == ['recipedir_or_package_or_metadata', 'move_broken', 'config', 'stats'] + assert argspec.args == [ + 
"recipedir_or_package_or_metadata", + "move_broken", + "config", + "stats", + ] assert argspec.defaults == (True, None, None) @@ -72,60 +94,124 @@ def test_api_list_skeletons(): def test_api_skeletonize(): argspec = getargspec(api.skeletonize) - assert argspec.args == ['packages', 'repo', 'output_dir', 'version', 'recursive', 'config'] - assert argspec.defaults == ('.', None, False, None) + assert argspec.args == [ + "packages", + "repo", + "output_dir", + "version", + "recursive", + "config", + ] + assert argspec.defaults == (".", None, False, None) def test_api_develop(): argspec = getargspec(api.develop) - assert argspec.args == ['recipe_dir', 'prefix', 'no_pth_file', 'build_ext', - 'clean', 'uninstall'] + assert argspec.args == [ + "recipe_dir", + "prefix", + "no_pth_file", + "build_ext", + "clean", + "uninstall", + ] assert argspec.defaults == (sys.prefix, False, False, False, False) def test_api_convert(): argspec = getargspec(api.convert) - assert argspec.args == ['package_file', 'output_dir', 'show_imports', 'platforms', 'force', - 'dependencies', 'verbose', 'quiet', 'dry_run'] - assert argspec.defaults == ('.', False, None, False, None, False, True, False) + assert argspec.args == [ + "package_file", + "output_dir", + "show_imports", + "platforms", + "force", + "dependencies", + "verbose", + "quiet", + "dry_run", + ] + assert argspec.defaults == (".", False, None, False, None, False, True, False) def test_api_installable(): argspec = getargspec(api.test_installable) - assert argspec.args == ['channel'] - assert argspec.defaults == ('defaults',) + assert argspec.args == ["channel"] + assert argspec.defaults == ("defaults",) def test_api_inspect_linkages(): argspec = getargspec(api.inspect_linkages) - assert argspec.args == ['packages', 'prefix', 'untracked', 'all_packages', - 'show_files', 'groupby', 'sysroot'] - assert argspec.defaults == (sys.prefix, False, False, False, 'package', '') + assert argspec.args == [ + "packages", + "prefix", + "untracked", + "all_packages", + "show_files", + "groupby", + "sysroot", + ] + assert argspec.defaults == (sys.prefix, False, False, False, "package", "") def test_api_inspect_objects(): argspec = getargspec(api.inspect_objects) - assert argspec.args == ['packages', 'prefix', 'groupby'] - assert argspec.defaults == (sys.prefix, 'filename') + assert argspec.args == ["packages", "prefix", "groupby"] + assert argspec.defaults == (sys.prefix, "filename") def test_api_inspect_prefix_length(): argspec = getargspec(api.inspect_prefix_length) - assert argspec.args == ['packages', 'min_prefix_length'] + assert argspec.args == ["packages", "min_prefix_length"] # hard-coded prefix length as intentional check here assert argspec.defaults == (255,) def test_api_create_metapackage(): argspec = getargspec(api.create_metapackage) - assert argspec.args == ['name', 'version', 'entry_points', 'build_string', 'build_number', - 'dependencies', 'home', 'license_name', 'summary', 'config'] + assert argspec.args == [ + "name", + "version", + "entry_points", + "build_string", + "build_number", + "dependencies", + "home", + "license_name", + "summary", + "config", + ] assert argspec.defaults == ((), None, 0, (), None, None, None, None) def test_api_update_index(): argspec = getargspec(api.update_index) - assert argspec.args == ['dir_paths', 'config', 'force', 'check_md5', 'remove', 'channel_name', 'subdir', - 'threads', 'patch_generator', "verbose", "progress", "hotfix_source_repo", - 'current_index_versions'] - assert argspec.defaults == (None, False, False, False, 
None, None, None, None, False, False, None, None) + assert argspec.args == [ + "dir_paths", + "config", + "force", + "check_md5", + "remove", + "channel_name", + "subdir", + "threads", + "patch_generator", + "verbose", + "progress", + "hotfix_source_repo", + "current_index_versions", + ] + assert argspec.defaults == ( + None, + False, + False, + False, + None, + None, + None, + None, + False, + False, + None, + None, + ) diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index de41fc9ca7..8eacad1475 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -1,18 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import csv -import os +import hashlib import json +import os import tarfile -import hashlib import pytest -from conda_build.conda_interface import download from conda_build import api -from conda_build.utils import package_has_file, on_win +from conda_build.conda_interface import download +from conda_build.utils import on_win, package_has_file -from .utils import metadata_dir, assert_package_consistency +from .utils import assert_package_consistency, metadata_dir def test_convert_wheel_raises(): @@ -30,31 +30,34 @@ def test_convert_exe_raises(): def assert_package_paths_matches_files(package_path): """Ensure that info/paths.json matches info/files""" with tarfile.open(package_path) as t: - files_content = t.extractfile('info/files').read().decode('utf-8') + files_content = t.extractfile("info/files").read().decode("utf-8") files_set = {line for line in files_content.splitlines() if line} - paths_content = json.loads(t.extractfile('info/paths.json').read().decode('utf-8')) + paths_content = json.loads( + t.extractfile("info/paths.json").read().decode("utf-8") + ) - for path_entry in paths_content['paths']: - assert path_entry['_path'] in files_set - files_set.remove(path_entry['_path']) + for path_entry in paths_content["paths"]: + assert path_entry["_path"] in files_set + files_set.remove(path_entry["_path"]) assert not files_set # Check that we've seen all the entries in files -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) def test_show_imports(base_platform, package, capfd): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -65,18 +68,19 @@ def test_show_imports(base_platform, package, capfd): output, error = capfd.readouterr() # there will be four duplicate outputs since we're converting to four platforms - assert 'import cryptography.hazmat.bindings._constant_time' in output - assert 'import cryptography.hazmat.bindings._openssl' in output - assert 'import cryptography.hazmat.bindings._padding' in output + assert "import 
cryptography.hazmat.bindings._constant_time" in output + assert "import cryptography.hazmat.bindings._openssl" in output + assert "import cryptography.hazmat.bindings._padding" in output -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("itsdangerous-0.24", "itsdangerous.py")]) def test_no_imports_found(base_platform, package, capfd): package_name, example_file = package - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -84,39 +88,43 @@ def test_no_imports_found(base_platform, package, capfd): api.convert(fn, platforms=None, show_imports=True) output, error = capfd.readouterr() - assert 'No imports found.' in output + assert "No imports found." in output -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) def test_no_platform(base_platform, package): package_name, example_file = package - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) with pytest.raises(SystemExit) as e: api.convert(fn, platforms=None) - assert 'Error: --platform option required for conda package conversion.' in str(e.value) + assert "Error: --platform option required for conda package conversion." in str( + e.value + ) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) def test_c_extension_error(base_platform, package): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -124,187 +132,211 @@ def test_c_extension_error(base_platform, package): with pytest.raises(SystemExit) as e: api.convert(fn, platforms=platform) - assert ('WARNING: Package {} contains C extensions; skipping conversion. ' - 'Use -f to force conversion.' .format(fn)) in str(e.value) + assert ( + "WARNING: Package {} contains C extensions; skipping conversion. 
" + "Use -f to force conversion.".format(fn) + ) in str(e.value) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('cryptography-1.8.1', '__about__.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("cryptography-1.8.1", "__about__.py")]) def test_c_extension_conversion(base_platform, package): package_name, example_file = package - platforms = ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32'] + platforms = ["osx-64", "win-64", "win-32", "linux-64", "linux-32"] # skip building on the same platform as the source platform for platform in platforms: - source_platform = '{}-64' .format(base_platform) + source_platform = f"{base_platform}-64" if platform == source_platform: platforms.remove(platform) - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) for platform in platforms: api.convert(fn, platforms=platform, force=True) - assert os.path.exists('{}/{}' .format(platform, fn)) + assert os.path.exists(f"{platform}/{fn}") -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('itsdangerous-0.24', 'itsdangerous.py'), - ('py-1.4.32', 'py/__init__.py')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize( + "package", + [("itsdangerous-0.24", "itsdangerous.py"), ("py-1.4.32", "py/__init__.py")], +) def test_convert_platform_to_others(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = "http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2".format( + subdir, package_name + ) fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if subdir != platform: - python_folder = 'lib/python2.7' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python2.7" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/site-packages/{example_file}') + assert package_has_file( + package, f"{python_folder}/site-packages/{example_file}" + ) if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) @pytest.mark.slow -@pytest.mark.skipif(on_win, reason="we create the pkg to be converted in *nix; don't run on win.") +@pytest.mark.skipif( + on_win, reason="we create the pkg to be converted in *nix; don't run on win." 
+) def test_convert_from_unix_to_win_creates_entry_points(testing_config): recipe_dir = os.path.join(metadata_dir, "entry_points") fn = api.build(recipe_dir, config=testing_config)[0] - for platform in ['win-64', 'win-32']: + for platform in ["win-64", "win-32"]: api.convert(fn, platforms=[platform], force=True) converted_fn = os.path.join(platform, os.path.basename(fn)) assert package_has_file(converted_fn, "Scripts/test-script-manual-script.py") assert package_has_file(converted_fn, "Scripts/test-script-manual.exe") - script_contents = package_has_file(converted_fn, "Scripts/test-script-setup-script.py") + script_contents = package_has_file( + converted_fn, "Scripts/test-script-setup-script.py" + ) assert script_contents assert "Test script setup" in script_contents bat_contents = package_has_file(converted_fn, "Scripts/test-script-setup.exe") assert bat_contents assert_package_consistency(converted_fn) - paths_content = json.loads(package_has_file(converted_fn, 'info/paths.json')) + paths_content = json.loads(package_has_file(converted_fn, "info/paths.json")) # Check the validity of the sha and filesize of the converted scripts with tarfile.open(converted_fn) as t: - for f in paths_content['paths']: - if f['_path'].startswith('Scripts/') and f['_path'].endswith('-script.py'): - script_content = package_has_file(converted_fn, f['_path']) - if hasattr(script_content, 'encode'): + for f in paths_content["paths"]: + if f["_path"].startswith("Scripts/") and f["_path"].endswith( + "-script.py" + ): + script_content = package_has_file(converted_fn, f["_path"]) + if hasattr(script_content, "encode"): script_content = script_content.encode() - assert f['sha256'] == hashlib.sha256(script_content).hexdigest() - assert f['size_in_bytes'] == t.getmember(f['_path']).size + assert f["sha256"] == hashlib.sha256(script_content).hexdigest() + assert f["size_in_bytes"] == t.getmember(f["_path"]).size - paths_list = {f['_path'] for f in paths_content['paths']} - files = {p for p in package_has_file(converted_fn, 'info/files').splitlines()} + paths_list = {f["_path"] for f in paths_content["paths"]} + files = {p for p in package_has_file(converted_fn, "info/files").splitlines()} assert files == paths_list - index = json.loads(package_has_file(converted_fn, 'info/index.json')) - assert index['subdir'] == platform + index = json.loads(package_has_file(converted_fn, "info/index.json")) + assert index["subdir"] == platform has_prefix_files = package_has_file(converted_fn, "info/has_prefix") - fieldnames = ['prefix', 'type', 'path'] + fieldnames = ["prefix", "type", "path"] csv_dialect = csv.Sniffer().sniff(has_prefix_files) - csv_dialect.lineterminator = '\n' - has_prefix_files = csv.DictReader(has_prefix_files.splitlines(), fieldnames=fieldnames, - dialect=csv_dialect) - has_prefix_files = {d['path']: d for d in has_prefix_files} + csv_dialect.lineterminator = "\n" + has_prefix_files = csv.DictReader( + has_prefix_files.splitlines(), fieldnames=fieldnames, dialect=csv_dialect + ) + has_prefix_files = {d["path"]: d for d in has_prefix_files} assert len(has_prefix_files) == 4 - assert 'Scripts/test-script-script.py' in has_prefix_files - assert 'Scripts/test-script-setup-script.py' in has_prefix_files - assert 'Scripts/test-script-manual-script.py' in has_prefix_files - assert 'Scripts/test-script-manual-postfix-script.py' in has_prefix_files + assert "Scripts/test-script-script.py" in has_prefix_files + assert "Scripts/test-script-setup-script.py" in has_prefix_files + assert 
"Scripts/test-script-manual-script.py" in has_prefix_files + assert "Scripts/test-script-manual-postfix-script.py" in has_prefix_files -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) def test_convert_dependencies(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = "http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2".format( + subdir, package_name + ) fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - dependencies = ['numpy 1.7.1 py36_0', 'cryptography 1.7.0 py36_0'] - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', dependencies=dependencies, quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + dependencies = ["numpy 1.7.1 py36_0", "cryptography 1.7.0 py36_0"] + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert( + fn, platforms="all", dependencies=dependencies, quiet=False, verbose=False + ) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if platform != subdir: - python_folder = 'lib/python3.6' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python3.6" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/{example_file}') + assert package_has_file(package, f"{python_folder}/{example_file}") with tarfile.open(package) as t: - info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) + info = json.loads( + t.extractfile("info/index.json").read().decode("utf-8") + ) - assert 'numpy 1.7.1 py36_0' in info['depends'] - assert 'numpy 1.12.1 py36_0' not in info['depends'] - assert 'cryptography 1.7.0 py36_0' in info['depends'] - assert 'cryptography 1.8.1 py36_0' not in info['depends'] + assert "numpy 1.7.1 py36_0" in info["depends"] + assert "numpy 1.12.1 py36_0" not in info["depends"] + assert "cryptography 1.7.0 py36_0" in info["depends"] + assert "cryptography 1.8.1 py36_0" not in info["depends"] if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) def test_convert_no_dependencies(base_platform, package): package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = "http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2".format( + subdir, package_name + ) fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', dependencies=None, quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + 
expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", dependencies=None, quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if platform != subdir: - python_folder = 'lib/python3.6' if not platform.startswith('win') else 'Lib' + python_folder = "lib/python3.6" if not platform.startswith("win") else "Lib" package = os.path.join(platform, fn) - assert package_has_file(package, - f'{python_folder}/{example_file}') + assert package_has_file(package, f"{python_folder}/{example_file}") with tarfile.open(package) as t: - info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) + info = json.loads( + t.extractfile("info/index.json").read().decode("utf-8") + ) - assert 'numpy 1.12.1 py36_0' in info['depends'] - assert 'cryptography 1.8.1 py36_0' in info['depends'] + assert "numpy 1.12.1 py36_0" in info["depends"] + assert "cryptography 1.8.1 py36_0" in info["depends"] if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) -@pytest.mark.parametrize('base_platform', ['linux', 'win', 'osx']) -@pytest.mark.parametrize('package', [('anaconda-4.4.0', 'version.txt')]) +@pytest.mark.parametrize("base_platform", ["linux", "win", "osx"]) +@pytest.mark.parametrize("package", [("anaconda-4.4.0", "version.txt")]) def test_skip_conversion(base_platform, package, capfd): package_name, example_file = package - source_plat_arch = '{}-64' .format(base_platform) + source_plat_arch = f"{base_platform}-64" - f = 'http://repo.anaconda.com/pkgs/free/{}-64/{}-np112py36_0.tar.bz2'.format(base_platform, - package_name) + f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-np112py36_0.tar.bz2".format( + base_platform, package_name + ) fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) - api.convert(fn, platforms=source_plat_arch, dependencies=None, quiet=False, verbose=False) + api.convert( + fn, platforms=source_plat_arch, dependencies=None, quiet=False, verbose=False + ) output, error = capfd.readouterr() - skip_message = ("Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion.\n" - .format(source_plat_arch, source_plat_arch)) + skip_message = ( + "Source platform '{}' and target platform '{}' are identical. " + "Skipping conversion.\n".format(source_plat_arch, source_plat_arch) + ) package = os.path.join(source_plat_arch, fn) @@ -312,8 +344,8 @@ def test_skip_conversion(base_platform, package, capfd): assert not os.path.exists(package) -@pytest.mark.parametrize('base_platform', ['linux', 'osx']) -@pytest.mark.parametrize('package', [('sparkmagic-0.12.1', '')]) +@pytest.mark.parametrize("base_platform", ["linux", "osx"]) +@pytest.mark.parametrize("package", [("sparkmagic-0.12.1", "")]) def test_renaming_executables(base_platform, package): """Test that the files in /bin are properly renamed. @@ -325,17 +357,18 @@ def test_renaming_executables(base_platform, package): the same. 
""" package_name, example_file = package - subdir = f'{base_platform}-64' - f = 'http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2'.format(subdir, - package_name) + subdir = f"{base_platform}-64" + f = "http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2".format( + subdir, package_name + ) fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) - expected_paths_json = package_has_file(fn, 'info/paths.json') - api.convert(fn, platforms='all', quiet=False, verbose=False) - for platform in ['osx-64', 'win-64', 'win-32', 'linux-64', 'linux-32']: + expected_paths_json = package_has_file(fn, "info/paths.json") + api.convert(fn, platforms="all", quiet=False, verbose=False) + for platform in ["osx-64", "win-64", "win-32", "linux-64", "linux-32"]: if subdir != platform: package = os.path.join(platform, fn) if expected_paths_json: - assert package_has_file(package, 'info/paths.json') + assert package_has_file(package, "info/paths.json") assert_package_paths_matches_files(package) diff --git a/tests/test_api_debug.py b/tests/test_api_debug.py index c41cc8f062..42fa1275fe 100644 --- a/tests/test_api_debug.py +++ b/tests/test_api_debug.py @@ -6,17 +6,16 @@ """ from __future__ import annotations +import subprocess from contextlib import nullcontext from pathlib import Path -import subprocess -from conda.common.compat import on_win import pytest +from conda.common.compat import on_win from conda_build.api import debug -from .utils import metadata_path, archive_path - +from .utils import archive_path, metadata_path DEBUG_PKG = metadata_path / "_debug_pkg" MULTI_OUT = metadata_path / "_debug_pkg_multiple_outputs" diff --git a/tests/test_api_inspect.py b/tests/test_api_inspect.py index 7e6e72d388..f667867610 100644 --- a/tests/test_api_inspect.py +++ b/tests/test_api_inspect.py @@ -5,6 +5,7 @@ import pytest from conda_build import api + from .utils import metadata_dir diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 2e0c400474..c2cd8999e2 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -9,11 +9,10 @@ import pytest import yaml - from conda.common.compat import on_win from conda_build import api, render -from conda_build.conda_interface import subdir, cc_conda_build +from conda_build.conda_interface import cc_conda_build, subdir from .utils import metadata_dir, variants_dir @@ -26,7 +25,8 @@ def test_render_need_download(testing_config): metadata, need_download, need_reparse_in_env = api.render( os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config, - no_download_source=True)[0] + no_download_source=True, + )[0] assert need_download assert need_reparse_in_env @@ -35,15 +35,16 @@ def test_render_need_download(testing_config): os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config, no_download_source=False, - finalize=False)[0] + finalize=False, + )[0] assert not need_download assert metadata.meta["package"]["version"] == "1.20.2" def test_render_yaml_output(testing_workdir, testing_config): metadata, need_download, need_reparse_in_env = api.render( - os.path.join(metadata_dir, "source_git_jinja2"), - config=testing_config)[0] + os.path.join(metadata_dir, "source_git_jinja2"), config=testing_config + )[0] yaml_metadata = api.output_yaml(metadata) assert "package:" in yaml_metadata @@ -54,22 +55,28 @@ def test_render_yaml_output(testing_workdir, testing_config): def test_get_output_file_path(testing_workdir, testing_metadata): testing_metadata = render.finalize_metadata(testing_metadata) - api.output_yaml(testing_metadata, 
'recipe/meta.yaml') - - build_path = api.get_output_file_paths(os.path.join(testing_workdir, 'recipe'), - config=testing_metadata.config, - no_download_source=True)[0] - assert build_path == os.path.join(testing_metadata.config.croot, - testing_metadata.config.host_subdir, - "test_get_output_file_path-1.0-1.tar.bz2") + api.output_yaml(testing_metadata, "recipe/meta.yaml") + + build_path = api.get_output_file_paths( + os.path.join(testing_workdir, "recipe"), + config=testing_metadata.config, + no_download_source=True, + )[0] + assert build_path == os.path.join( + testing_metadata.config.croot, + testing_metadata.config.host_subdir, + "test_get_output_file_path-1.0-1.tar.bz2", + ) def test_get_output_file_path_metadata_object(testing_metadata): testing_metadata.final = True build_path = api.get_output_file_paths(testing_metadata)[0] - assert build_path == os.path.join(testing_metadata.config.croot, - testing_metadata.config.host_subdir, - "test_get_output_file_path_metadata_object-1.0-1.tar.bz2") + assert build_path == os.path.join( + testing_metadata.config.croot, + testing_metadata.config.host_subdir, + "test_get_output_file_path_metadata_object-1.0-1.tar.bz2", + ) def test_get_output_file_path_jinja2(testing_config): @@ -79,20 +86,22 @@ def test_get_output_file_path_jinja2(testing_config): # First get metadata with a recipe that is known to need a download: with pytest.raises((ValueError, SystemExit)): - build_path = api.get_output_file_paths(recipe, - config=testing_config, - no_download_source=True)[0] + build_path = api.get_output_file_paths( + recipe, config=testing_config, no_download_source=True + )[0] metadata, need_download, need_reparse_in_env = api.render( - recipe, - config=testing_config, - no_download_source=False)[0] + recipe, config=testing_config, no_download_source=False + )[0] build_path = api.get_output_file_paths(metadata)[0] _hash = metadata.hash_dependencies() - python = ''.join(metadata.config.variant['python'].split('.')[:2]) - assert build_path == os.path.join(testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-" - "py{}{}_0_g262d444.tar.bz2".format(python, _hash)) + python = "".join(metadata.config.variant["python"].split(".")[:2]) + assert build_path == os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-source-git-jinja2-1.20.2-" + "py{}{}_0_g262d444.tar.bz2".format(python, _hash), + ) def test_output_without_jinja_does_not_download(mocker, testing_config): @@ -104,57 +113,59 @@ def test_output_without_jinja_does_not_download(mocker, testing_config): def test_pin_compatible_semver(testing_config): - recipe_dir = os.path.join(metadata_dir, '_pin_compatible') + recipe_dir = os.path.join(metadata_dir, "_pin_compatible") metadata = api.render(recipe_dir, config=testing_config)[0][0] - assert 'zlib >=1.2.11,<2.0a0' in metadata.get_value('requirements/run') + assert "zlib >=1.2.11,<2.0a0" in metadata.get_value("requirements/run") @pytest.mark.slow @pytest.mark.xfail(on_win, reason="Defaults channel has conflicting vc packages") def test_resolved_packages_recipe(testing_config): - recipe_dir = os.path.join(metadata_dir, '_resolved_packages_host_build') + recipe_dir = os.path.join(metadata_dir, "_resolved_packages_host_build") metadata = api.render(recipe_dir, config=testing_config)[0][0] - assert all(len(pkg.split()) == 3 for pkg in metadata.get_value('requirements/run')) - run_requirements = {x.split()[0] for x in metadata.get_value('requirements/run')} + assert all(len(pkg.split()) == 3 
for pkg in metadata.get_value("requirements/run")) + run_requirements = {x.split()[0] for x in metadata.get_value("requirements/run")} for package in [ - 'curl', # direct dependency - 'numpy', # direct dependency - 'zlib', # indirect dependency of curl - 'python', # indirect dependency of numpy + "curl", # direct dependency + "numpy", # direct dependency + "zlib", # indirect dependency of curl + "python", # indirect dependency of numpy ]: assert package in run_requirements @pytest.mark.slow def test_host_entries_finalized(testing_config): - recipe = os.path.join(metadata_dir, '_host_entries_finalized') + recipe = os.path.join(metadata_dir, "_host_entries_finalized") metadata = api.render(recipe, config=testing_config) assert len(metadata) == 2 outputs = api.get_output_file_paths(metadata) - assert any('py27' in out for out in outputs) - assert any('py39' in out for out in outputs) + assert any("py27" in out for out in outputs) + assert any("py39" in out for out in outputs) def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir): - testing_metadata.meta['build']['string'] = 'steve' - testing_metadata.meta['requirements']['build'] = ['zlib 1.2.8'] + testing_metadata.meta["build"]["string"] = "steve" + testing_metadata.meta["requirements"]["build"] = ["zlib 1.2.8"] - api.output_yaml(testing_metadata, 'meta.yaml') + api.output_yaml(testing_metadata, "meta.yaml") metadata = api.render(testing_workdir)[0][0] - assert metadata.build_id() == 'steve' + assert metadata.build_id() == "steve" def test_pin_depends(testing_config): """This is deprecated functionality - replaced by the more general variants pinning scheme""" - recipe = os.path.join(metadata_dir, '_pin_depends_strict') + recipe = os.path.join(metadata_dir, "_pin_depends_strict") m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, but having pin_depends set will force it to be. 
- assert any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) + assert any( + re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + ) def test_cross_recipe_with_only_build_section(testing_config): - recipe = os.path.join(metadata_dir, '_cross_prefix_elision_compiler_used') + recipe = os.path.join(metadata_dir, "_cross_prefix_elision_compiler_used") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] assert metadata.config.host_subdir != subdir assert metadata.config.build_prefix != metadata.config.host_prefix @@ -162,30 +173,31 @@ def test_cross_recipe_with_only_build_section(testing_config): def test_cross_info_index_platform(testing_config): - recipe = os.path.join(metadata_dir, '_cross_build_unix_windows') + recipe = os.path.join(metadata_dir, "_cross_build_unix_windows") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] info_index = metadata.info_index() assert metadata.config.host_subdir != subdir - assert metadata.config.host_subdir == info_index['subdir'] + assert metadata.config.host_subdir == info_index["subdir"] assert metadata.config.host_platform != metadata.config.platform - assert metadata.config.host_platform == info_index['platform'] + assert metadata.config.host_platform == info_index["platform"] def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): - os.makedirs('config') + os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it - python_versions = ['2.6', '3.4', '3.10'] - config = {'python': python_versions, - 'bzip2': ['0.9', '1.0']} - with open(os.path.join('config', 'conda_build_config.yaml'), 'w') as f: + python_versions = ["2.6", "3.4", "3.10"] + config = {"python": python_versions, "bzip2": ["0.9", "1.0"]} + with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) cc_conda_build_backup = cc_conda_build.copy() # hacky equivalent of changing condarc # careful, this is global and affects other tests! make sure to clear it! 
- cc_conda_build.update({'config_file': '${TEST_WORKDIR}/config/conda_build_config.yaml'}) + cc_conda_build.update( + {"config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml"} + ) - os.environ['TEST_WORKDIR'] = testing_workdir + os.environ["TEST_WORKDIR"] = testing_workdir try: m = api.render( os.path.join(variants_dir, "19_used_variables"), @@ -193,29 +205,31 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): finalize=False, )[0][0] # this one should have gotten clobbered by the values in the recipe - assert m.config.variant['python'] not in python_versions + assert m.config.variant["python"] not in python_versions # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants['bzip2']) == 2 + assert len(m.config.squished_variants["bzip2"]) == 2 finally: cc_conda_build.clear() cc_conda_build.update(cc_conda_build_backup) def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): - recipe = os.path.join(metadata_dir, '_self_reference_run_exports') + recipe = os.path.join(metadata_dir, "_self_reference_run_exports") m = api.render(recipe)[0][0] - run_exports = m.meta.get('build', {}).get('run_exports', []) + run_exports = m.meta.get("build", {}).get("run_exports", []) assert run_exports assert len(run_exports) == 1 - assert run_exports[0].split()[1] == '>=1.0.0,<2.0a0' + assert run_exports[0].split()[1] == ">=1.0.0,<2.0a0" def test_run_exports_with_pin_compatible_in_subpackages(testing_config): - recipe = os.path.join(metadata_dir, '_run_exports_in_outputs') + recipe = os.path.join(metadata_dir, "_run_exports_in_outputs") ms = api.render(recipe, config=testing_config) for m, _, _ in ms: - if m.name().startswith('gfortran_'): - run_exports = set(m.meta.get('build', {}).get('run_exports', {}).get('strong', [])) + if m.name().startswith("gfortran_"): + run_exports = set( + m.meta.get("build", {}).get("run_exports", {}).get("strong", []) + ) assert len(run_exports) == 1 # len after splitting should be more than one because of pin_compatible. If it's only zlib, we've lost the # compatibility bound info. 
This is generally due to lack of rendering of an output, such that the @@ -233,26 +247,26 @@ def test_ignore_build_only_deps(): def test_merge_build_host_build_key(): - m = api.render(os.path.join(metadata_dir, '_no_merge_build_host'))[0][0] - assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) + m = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] + assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) def test_merge_build_host_empty_host_section(): - m = api.render(os.path.join(metadata_dir, '_empty_host_avoids_merge'))[0][0] - assert not any('bzip2' in dep for dep in m.meta['requirements']['run']) + m = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] + assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) def test_pin_expression_works_with_prereleases(testing_config): - recipe = os.path.join(metadata_dir, '_pinning_prerelease') + recipe = os.path.join(metadata_dir, "_pinning_prerelease") ms = api.render(recipe, config=testing_config) assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta['package']['name'] == 'bar') - assert 'foo >=3.10.0.rc1,<3.11.0a0' in m.meta['requirements']['run'] + m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") + assert "foo >=3.10.0.rc1,<3.11.0a0" in m.meta["requirements"]["run"] def test_pin_expression_works_with_python_prereleases(testing_config): - recipe = os.path.join(metadata_dir, '_pinning_prerelease_python') + recipe = os.path.join(metadata_dir, "_pinning_prerelease_python") ms = api.render(recipe, config=testing_config) assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta['package']['name'] == 'bar') - assert 'python >=3.10.0rc1,<3.11.0a0' in m.meta['requirements']['run'] + m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") + assert "python >=3.10.0rc1,<3.11.0a0" in m.meta["requirements"]["run"] diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 62c09933fc..10d9a6973e 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -3,23 +3,30 @@ from __future__ import annotations import os -from pathlib import Path import subprocess import sys +from pathlib import Path -from conda_build.version import _parse as parse_version import pytest import ruamel.yaml -from conda_build.skeletons.pypi import get_package_metadata, \ - get_entry_points, is_setuptools_enabled, convert_to_flat_list, \ - get_dependencies, get_import_tests, get_tests_require, get_home, \ - get_summary, get_license_name, clean_license_name - from conda_build import api from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.skeletons.pypi import ( + clean_license_name, + convert_to_flat_list, + get_dependencies, + get_entry_points, + get_home, + get_import_tests, + get_license_name, + get_package_metadata, + get_summary, + get_tests_require, + is_setuptools_enabled, +) from conda_build.utils import on_win - +from conda_build.version import _parse as parse_version SYMPY_URL = "https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29" @@ -51,48 +58,54 @@ def mock_metadata(): def pylint_pkginfo(): # Hardcoding it to avoid to use the get_pkginfo because it takes too much time return { - 'classifiers': [ - 'Development Status :: 6 - Mature', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General Public License (GPL)', - 'Operating System :: OS Independent', - 'Programming 
Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', - 'Topic :: Software Development :: Debuggers', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing' + "classifiers": [ + "Development Status :: 6 - Mature", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU General Public License (GPL)", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Debuggers", + "Topic :: Software Development :: Quality Assurance", + "Topic :: Software Development :: Testing", ], - 'entry_points': { - 'console_scripts': [ - 'pylint = pylint:run_pylint', - 'epylint = pylint:run_epylint', - 'pyreverse = pylint:run_pyreverse', - 'symilar = pylint:run_symilar' + "entry_points": { + "console_scripts": [ + "pylint = pylint:run_pylint", + "epylint = pylint:run_epylint", + "pyreverse = pylint:run_pyreverse", + "symilar = pylint:run_symilar", ] }, - 'extras_require': {':sys_platform=="win32"': ['colorama']}, - 'home': 'https://github.com/PyCQA/pylint', - 'install_requires': [ - 'astroid>=2.2.0,<3', 'isort>=4.2.5,<5', 'mccabe>=0.6,<0.7' + "extras_require": {':sys_platform=="win32"': ["colorama"]}, + "home": "https://github.com/PyCQA/pylint", + "install_requires": [ + "astroid>=2.2.0,<3", + "isort>=4.2.5,<5", + "mccabe>=0.6,<0.7", ], - 'license': 'GPL', - 'name': 'pylint', - 'packages': [ - 'pylint', 'pylint.checkers', 'pylint.pyreverse', - 'pylint.extensions', 'pylint.reporters', 'pylint.reporters.ureports' + "license": "GPL", + "name": "pylint", + "packages": [ + "pylint", + "pylint.checkers", + "pylint.pyreverse", + "pylint.extensions", + "pylint.reporters", + "pylint.reporters.ureports", ], - 'setuptools': True, - 'summary': 'python code static checker', - 'tests_require': ['pytest'], - 'version': '2.3.1' + "setuptools": True, + "summary": "python code static checker", + "tests_require": ["pytest"], + "version": "2.3.1", } @@ -213,35 +226,35 @@ def test_convert_to_flat_list(): def test_is_setuptools_enabled(): assert not is_setuptools_enabled({"entry_points": "STRING"}) - assert not is_setuptools_enabled({ - "entry_points": { - "console_scripts": ["CONSOLE"], - "gui_scripts": ["GUI"], + assert not is_setuptools_enabled( + { + "entry_points": { + "console_scripts": ["CONSOLE"], + "gui_scripts": ["GUI"], + } } - }) + ) - assert is_setuptools_enabled({ - "entry_points": { - "console_scripts": ["CONSOLE"], - "gui_scripts": ["GUI"], - "foo_scripts": ["SCRIPTS"], + assert is_setuptools_enabled( + { + "entry_points": { + "console_scripts": ["CONSOLE"], + "gui_scripts": ["GUI"], + "foo_scripts": ["SCRIPTS"], + } } - }) + ) def test_get_dependencies(): assert get_dependencies( - ['astroid >=2.2.0,<3 #COMMENTS', 'isort >=4.2.5,<5', - 'mccabe 
>=0.6,<0.7'], - False - ) == ['astroid >=2.2.0,<3', 'isort >=4.2.5,<5', 'mccabe >=0.6,<0.7'] + ["astroid >=2.2.0,<3 #COMMENTS", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], + False, + ) == ["astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"] assert get_dependencies( - ['astroid >=2.2.0,<3 #COMMENTS', 'isort >=4.2.5,<5', - 'mccabe >=0.6,<0.7'], - True - ) == ['setuptools', 'astroid >=2.2.0,<3', 'isort >=4.2.5,<5', - 'mccabe >=0.6,<0.7'] + ["astroid >=2.2.0,<3 #COMMENTS", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], True + ) == ["setuptools", "astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"] def test_get_import_tests(pylint_pkginfo, pylint_metadata): @@ -326,7 +339,7 @@ def test_pypi_pin_numpy(tmp_path: Path, testing_config): ) assert (tmp_path / "msumastro" / "meta.yaml").read_text().count("numpy x.x") == 2 with pytest.raises(DependencyNeedsBuildingError): - api.build('msumastro') + api.build("msumastro") def test_pypi_version_sorting(tmp_path: Path, testing_config): @@ -341,7 +354,7 @@ def test_pypi_version_sorting(tmp_path: Path, testing_config): def test_list_skeletons(): skeletons = api.list_skeletons() - assert set(skeletons) == {'pypi', 'cran', 'cpan', 'luarocks', 'rpm'} + assert set(skeletons) == {"pypi", "cran", "cpan", "luarocks", "rpm"} def test_pypi_with_entry_points(tmp_path: Path): @@ -360,8 +373,8 @@ def test_pypi_with_version_arg(tmp_path: Path): def test_pypi_with_extra_specs(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/1697 # For mpi4py: - testing_config.channel_urls.append('https://repo.anaconda.com/pkgs/free') - extra_specs = ['cython', 'mpi4py'] + testing_config.channel_urls.append("https://repo.anaconda.com/pkgs/free") + extra_specs = ["cython", "mpi4py"] if not on_win: extra_specs.append("nomkl") api.skeletonize( @@ -375,15 +388,15 @@ def test_pypi_with_extra_specs(tmp_path: Path, testing_config): ) m = api.render(str(tmp_path / "bigfile"))[0][0] assert parse_version(m.version()) == parse_version("0.1.24") - assert any('cython' in req for req in m.meta['requirements']['host']) - assert any('mpi4py' in req for req in m.meta['requirements']['host']) + assert any("cython" in req for req in m.meta["requirements"]["host"]) + assert any("mpi4py" in req for req in m.meta["requirements"]["host"]) @pytest.mark.slow def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): # regression test for https://github.com/conda/conda-build/issues/189 # For mpi4py: - extra_specs = ['mpi4py'] + extra_specs = ["mpi4py"] if not on_win: extra_specs.append("nomkl") testing_config.channel_urls.append("https://repo.anaconda.com/pkgs/free") @@ -405,8 +418,8 @@ def test_pypi_with_basic_environment_markers(tmp_path: Path): api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) m = api.render(tmp_path / "coconut")[0][0] - build_reqs = str(m.meta['requirements']['host']) - run_reqs = str(m.meta['requirements']['run']) + build_reqs = str(m.meta["requirements"]["host"]) + run_reqs = str(m.meta["requirements"]["run"]) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs @@ -426,9 +439,11 @@ def test_pypi_section_order_preserved(tmp_path: Path): Test whether sections have been written in the correct order. 
""" from conda_build.render import FIELDS - from conda_build.skeletons.pypi import (ABOUT_ORDER, - REQUIREMENTS_ORDER, - PYPI_META_STATIC) + from conda_build.skeletons.pypi import ( + ABOUT_ORDER, + PYPI_META_STATIC, + REQUIREMENTS_ORDER, + ) api.skeletonize(packages="sympy", repo="pypi", output_dir=tmp_path) # Since we want to check the order of items in the recipe (not whether @@ -448,8 +463,8 @@ def test_pypi_section_order_preserved(tmp_path: Path): # before comparing. pruned_fields = [f for f in FIELDS if f in major_sections] assert major_sections == pruned_fields - assert list(recipe['about']) == ABOUT_ORDER - assert list(recipe['requirements']) == REQUIREMENTS_ORDER + assert list(recipe["about"]) == ABOUT_ORDER + assert list(recipe["requirements"]) == REQUIREMENTS_ORDER for k, v in PYPI_META_STATIC.items(): assert list(v.keys()) == list(recipe[k]) diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py index 815cb43522..238635ba50 100644 --- a/tests/test_api_skeleton_cpan.py +++ b/tests/test_api_skeleton_cpan.py @@ -1,9 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Integrative tests of the CPAN skeleton that start from conda_build.api.skeletonize and check the output files -''' +""" import pytest @@ -18,6 +18,8 @@ def test_xs_needs_c_compiler(testing_config): """Perl packages with XS files need a C compiler""" # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) - m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][0] + m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][ + 0 + ] build_requirements = m.get_value("requirements/build") assert compiler("c", testing_config) in build_requirements diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 0522bde3a3..9b62b4ac30 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -1,9 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Integrative tests of the CRAN skeleton that start from conda_build.api.skeletonize and check the output files -''' +""" from pathlib import Path from typing import Sequence @@ -69,9 +69,9 @@ def test_cran_os_type(package: str, skip_text: str, tmp_path: Path, testing_conf @pytest.mark.flaky(rerun=5, reruns_delay=2) def test_cran_no_comments(tmp_path: Path, testing_config): package = "data.table" - meta_yaml_comment = ' # This is required to make R link correctly on Linux.' - build_sh_comment = '# Add more build steps here, if they are necessary.' - build_sh_shebang = '#!/bin/bash' + meta_yaml_comment = " # This is required to make R link correctly on Linux." + build_sh_comment = "# Add more build steps here, if they are necessary." 
+ build_sh_shebang = "#!/bin/bash" # Check that comments are part of the templates assert meta_yaml_comment in CRAN_META diff --git a/tests/test_api_test.py b/tests/test_api_test.py index ed432e66b4..a258bbba0d 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -8,13 +8,14 @@ import pytest from conda_build import api + from .utils import metadata_dir @pytest.mark.sanity def test_recipe_test(testing_config): """Test calling conda build -t """ - recipe = os.path.join(metadata_dir, 'has_prefix_files') + recipe = os.path.join(metadata_dir, "has_prefix_files") metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, notest=True, anaconda_upload=False) api.test(recipe, config=metadata.config) @@ -23,7 +24,7 @@ def test_recipe_test(testing_config): @pytest.mark.sanity def test_package_test(testing_config): """Test calling conda build -t - rather than """ - recipe = os.path.join(metadata_dir, 'has_prefix_files') + recipe = os.path.join(metadata_dir, "has_prefix_files") metadata = api.render(recipe, config=testing_config)[0][0] outputs = api.build(metadata, notest=True, anaconda_upload=False) api.test(outputs[0], config=metadata.config) @@ -47,18 +48,18 @@ def test_package_with_jinja2_does_not_redownload_source( # this recipe uses jinja2, which should trigger source download, except that source download # will have already happened in the build stage. # https://github.com/conda/conda-build/issues/1451 - provide = mocker.patch('conda_build.source.provide') + provide = mocker.patch("conda_build.source.provide") api.test(outputs[0], config=metadata.config) assert not provide.called @pytest.mark.sanity def test_api_extra_dep(testing_metadata): - testing_metadata.meta['test']['imports'] = ['click'] + testing_metadata.meta["test"]["imports"] = ["click"] output = api.build(testing_metadata, notest=True, anaconda_upload=False)[0] # extra_deps will add it in - api.test(output, config=testing_metadata.config, extra_deps=['click']) + api.test(output, config=testing_metadata.config, extra_deps=["click"]) # missing click dep will fail tests with pytest.raises(SystemExit): diff --git a/tests/test_build.py b/tests/test_build.py index 6f2e12aabb..f11be7727c 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -6,14 +6,14 @@ """ import json import os -from pathlib import Path import sys +from pathlib import Path from conda.common.compat import on_win -from conda_build import build, api +from conda_build import api, build -from .utils import metadata_dir, get_noarch_python_meta +from .utils import get_noarch_python_meta, metadata_dir PREFIX_TESTS = {"normal": os.path.sep} if on_win: @@ -36,17 +36,22 @@ def test_find_prefix_files(testing_workdir): def test_build_preserves_PATH(testing_config): - m = api.render(os.path.join(metadata_dir, 'source_git'), config=testing_config)[0][0] - ref_path = os.environ['PATH'] + m = api.render(os.path.join(metadata_dir, "source_git"), config=testing_config)[0][ + 0 + ] + ref_path = os.environ["PATH"] build.build(m, stats=None) - assert os.environ['PATH'] == ref_path + assert os.environ["PATH"] == ref_path def test_sanitize_channel(): - test_url = 'https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel' - assert build.sanitize_channel(test_url) == 'https://conda.anaconda.org/somechannel' - test_url_auth = 'https://myuser:mypass@conda.anaconda.org/somechannel' - assert build.sanitize_channel(test_url_auth) == 'https://conda.anaconda.org/somechannel' + test_url = 
"https://conda.anaconda.org/t/ms-534991f2-4123-473a-b512-42025291b927/somechannel" + assert build.sanitize_channel(test_url) == "https://conda.anaconda.org/somechannel" + test_url_auth = "https://myuser:mypass@conda.anaconda.org/somechannel" + assert ( + build.sanitize_channel(test_url_auth) + == "https://conda.anaconda.org/somechannel" + ) def test_get_short_path(testing_metadata): @@ -61,9 +66,14 @@ def test_get_short_path(testing_metadata): def test_has_prefix(): - files_with_prefix = [("prefix/path", "text", "short/path/1"), - ("prefix/path", "text", "short/path/2")] - assert build.has_prefix("short/path/1", files_with_prefix) == ("prefix/path", "text") + files_with_prefix = [ + ("prefix/path", "text", "short/path/1"), + ("prefix/path", "text", "short/path/2"), + ] + assert build.has_prefix("short/path/1", files_with_prefix) == ( + "prefix/path", + "text", + ) assert build.has_prefix("short/path/nope", files_with_prefix) == (None, None) @@ -97,21 +107,35 @@ def test_create_info_files_json(testing_workdir, testing_metadata): files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "foo"] - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) assert json.loads((info_dir / "paths.json").read_text()) == { - "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} + "paths": [ + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): @@ -139,38 +163,75 @@ def test_create_info_files_json_symlinks(testing_workdir, testing_metadata): os.symlink(cycle2_symlink, cycle1_symlink) files_with_prefix = [("prefix/path", "text", "foo")] - files = ["one", "two", "foo", "two_sl", "nowhere_sl", "recursive_sl", "cycle1_sl", "cycle2_sl"] - - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) + files = [ + "one", + "two", + "foo", + "two_sl", + "nowhere_sl", + "recursive_sl", + "cycle1_sl", + "cycle2_sl", + ] + + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) assert json.loads((info_dir / "paths.json").read_text()) == { "paths": [ - {"path_type": "softlink", "_path": "cycle1_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "cycle2_sl", - "sha256": 
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "nowhere_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "recursive_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "softlink", "_path": "two_sl", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} + { + "path_type": "softlink", + "_path": "cycle1_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "cycle2_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "nowhere_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "recursive_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "softlink", + "_path": "two_sl", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } def test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): @@ -188,30 +249,49 @@ def test_create_info_files_json_no_inodes(testing_workdir, testing_metadata): files_with_prefix = [("prefix/path", "text", "foo")] files = ["one", "two", "one_hl", "foo"] - build.create_info_files_json_v1(testing_metadata, info_dir, testing_workdir, files, - files_with_prefix) + build.create_info_files_json_v1( + testing_metadata, info_dir, testing_workdir, files, files_with_prefix + ) assert json.loads((info_dir / "paths.json").read_text()) == { - "paths": [{"file_mode": "text", "path_type": "hardlink", "_path": "foo", - "prefix_placeholder": "prefix/path", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one", "inode_paths": ["one", "one_hl"], - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "one_hl", "inode_paths": ["one", "one_hl"], - "sha256": 
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}, - {"path_type": "hardlink", "_path": "two", - "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "size_in_bytes": 0}], - "paths_version": 1} + "paths": [ + { + "file_mode": "text", + "path_type": "hardlink", + "_path": "foo", + "prefix_placeholder": "prefix/path", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one", + "inode_paths": ["one", "one_hl"], + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "one_hl", + "inode_paths": ["one", "one_hl"], + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + { + "path_type": "hardlink", + "_path": "two", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + } def test_rewrite_output(testing_config, capsys): api.build(os.path.join(metadata_dir, "_rewrite_env"), config=testing_config) captured = capsys.readouterr() stdout = captured.out - if sys.platform == 'win32': + if sys.platform == "win32": assert "PREFIX=%PREFIX%" in stdout assert "LIBDIR=%PREFIX%\\lib" in stdout assert "PWD=%SRC_DIR%" in stdout diff --git a/tests/test_check.py b/tests/test_check.py index 8e5109c771..ee1f2bebc6 100644 --- a/tests/test_check.py +++ b/tests/test_check.py @@ -3,9 +3,10 @@ import os from conda_build import api + from .utils import metadata_dir def test_check_multiple_sources(): - recipe = os.path.join(metadata_dir, 'multiple_sources') + recipe = os.path.join(metadata_dir, "multiple_sources") assert api.check(recipe) diff --git a/tests/test_conda_interface.py b/tests/test_conda_interface.py index ea56b3062c..1c13d6faf9 100644 --- a/tests/test_conda_interface.py +++ b/tests/test_conda_interface.py @@ -4,6 +4,6 @@ def test_get_installed_version(): - versions = ci.get_installed_version(ci.root_dir, 'conda') - assert versions.get('conda') - assert ci.VersionOrder(versions.get('conda')) + versions = ci.get_installed_version(ci.root_dir, "conda") + assert versions.get("conda") + assert ci.VersionOrder(versions.get("conda")) diff --git a/tests/test_config.py b/tests/test_config.py index 4dce1e63a2..528e4a5122 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -5,8 +5,8 @@ import pytest -from conda_build.config import Config, get_or_merge_config from conda_build.conda_interface import TemporaryDirectory +from conda_build.config import Config, get_or_merge_config from conda_build.utils import on_win @@ -25,11 +25,12 @@ def build_id(): def test_set_build_id(config, build_id): config.build_id = build_id # windows always uses the short prefix due to its limitation of 260 char paths - if sys.platform == 'win32': + if sys.platform == "win32": assert config.host_prefix == os.path.join(config.croot, build_id, "_h_env") else: - long_prefix = os.path.join(config.croot, build_id, - "_h_env" + "_placehold" * 25)[:config.prefix_length] + long_prefix = os.path.join( + config.croot, build_id, "_h_env" + "_placehold" * 25 + )[: config.prefix_length] assert config.host_prefix == long_prefix @@ -42,7 +43,7 @@ def test_keep_old_work(config, build_id): os.makedirs(work_path) # assert False assert len(os.listdir(config.work_dir)) == 0 - with open(os.path.join(work_path, 'a_touched_file.magic'), 'w') as _: + with 
open(os.path.join(work_path, "a_touched_file.magic"), "w") as _: # Touch a random file so the "work_dir" is not empty pass assert len(os.listdir(config.work_dir)) > 0 @@ -65,7 +66,7 @@ def test_long_build_prefix_length(config): def test_long_test_prefix_length(config): # defaults to True in conda-build 3.0+ assert config.long_test_prefix - assert '_plac' in config.test_prefix + assert "_plac" in config.test_prefix config.long_test_prefix = True # The length of the testing prefix is reduced by 2 characters to check if the null # byte padding causes issues @@ -81,32 +82,32 @@ def test_build_id_at_end_of_long_build_prefix(config, build_id): def test_create_config_with_subdir(): - config = Config(host_subdir='steve-128') - assert config.host_platform == 'steve' - assert config.host_subdir == 'steve-128' + config = Config(host_subdir="steve-128") + assert config.host_platform == "steve" + assert config.host_subdir == "steve-128" def test_set_platform(config): - config.host_platform = 'steve' + config.host_platform = "steve" arch = config.arch - assert config.host_subdir == 'steve-' + str(arch) + assert config.host_subdir == "steve-" + str(arch) def test_set_subdir(config): - config.host_subdir = 'steve' + config.host_subdir = "steve" arch = config.arch - assert config.host_subdir == 'steve-' + str(arch) - assert config.host_platform == 'steve' + assert config.host_subdir == "steve-" + str(arch) + assert config.host_platform == "steve" - config.host_subdir = 'steve-128' - assert config.host_subdir == 'steve-128' - assert config.host_platform == 'steve' - assert config.host_arch == '128' + config.host_subdir = "steve-128" + assert config.host_subdir == "steve-128" + assert config.host_platform == "steve" + assert config.host_arch == "128" def test_set_bits(config): config.host_arch = 128 - assert config.host_subdir == config.platform + '-' + str(128) + assert config.host_subdir == config.platform + "-" + str(128) assert config.host_arch == 128 diff --git a/tests/test_cpan_skeleton.py b/tests/test_cpan_skeleton.py index 89a6bb49cf..2b726f6c88 100644 --- a/tests/test_cpan_skeleton.py +++ b/tests/test_cpan_skeleton.py @@ -9,8 +9,8 @@ import pytest -from conda_build.variants import get_default_variant from conda_build.skeletons.cpan import get_core_modules_for_this_perl_version +from conda_build.variants import get_default_variant @pytest.mark.slow diff --git a/tests/test_cran_skeleton.py b/tests/test_cran_skeleton.py index b17fe5c59d..a0f5575114 100644 --- a/tests/test_cran_skeleton.py +++ b/tests/test_cran_skeleton.py @@ -1,16 +1,20 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -''' +""" Unit tests of the CRAN skeleton utility functions -''' +""" import os -import pytest +import pytest from conda.auxlib.ish import dals + from conda_build.license_family import allowed_license_families -from conda_build.skeletons.cran import (get_license_info, - read_description_contents, - remove_comments) +from conda_build.skeletons.cran import ( + get_license_info, + read_description_contents, + remove_comments, +) + from .utils import cran_dir @@ -119,12 +123,15 @@ def test_read_description_contents(): description = os.path.join(cran_dir, "rpart", "DESCRIPTION") with open(description, "rb") as fp: contents = read_description_contents(fp) - assert contents['Package'] == 'rpart' - assert contents['Priority'] == 'recommended' - assert contents['Title'] == 'Recursive Partitioning and Regression Trees' - assert contents['Depends'] == 'R (>= 2.15.0), graphics, stats, grDevices' - assert 
contents['License'] == 'GPL-2 | GPL-3' - assert contents['URL'] == 'https://github.com/bethatkinson/rpart, https://cran.r-project.org/package=rpart' + assert contents["Package"] == "rpart" + assert contents["Priority"] == "recommended" + assert contents["Title"] == "Recursive Partitioning and Regression Trees" + assert contents["Depends"] == "R (>= 2.15.0), graphics, stats, grDevices" + assert contents["License"] == "GPL-2 | GPL-3" + assert ( + contents["URL"] + == "https://github.com/bethatkinson/rpart, https://cran.r-project.org/package=rpart" + ) def test_remove_comments(): diff --git a/tests/test_create_test.py b/tests/test_create_test.py index 0ebf6816df..67a811a640 100644 --- a/tests/test_create_test.py +++ b/tests/test_create_test.py @@ -8,9 +8,9 @@ import pytest from conda_build.create_test import ( - create_py_files, create_lua_files, create_pl_files, + create_py_files, create_r_files, ) diff --git a/tests/test_environ.py b/tests/test_environ.py index fed4cf0a17..93311ab81b 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -6,7 +6,12 @@ def test_environment_creation_preserves_PATH(testing_workdir, testing_config): - ref_path = os.environ['PATH'] - environ.create_env(testing_workdir, ['python'], env='host', config=testing_config, - subdir=testing_config.build_subdir) - assert os.environ['PATH'] == ref_path + ref_path = os.environ["PATH"] + environ.create_env( + testing_workdir, + ["python"], + env="host", + config=testing_config, + subdir=testing_config.build_subdir, + ) + assert os.environ["PATH"] == ref_path diff --git a/tests/test_index.py b/tests/test_index.py index 4375cfe7b5..263147fd87 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -9,15 +9,15 @@ import tarfile from logging import getLogger from os.path import dirname, isdir, isfile, join +from unittest import mock import conda_package_handling.api import pytest -from unittest import mock -from conda_build.conda_interface import context -from conda_build.utils import copy_into, rm_rf import conda_build.api import conda_build.index +from conda_build.conda_interface import context +from conda_build.utils import copy_into, rm_rf from .utils import archive_dir @@ -900,7 +900,6 @@ def test_patch_from_tarball(testing_workdir): def test_index_of_removed_pkg(testing_metadata): - archive_name = "test_index_of_removed_pkg-1.0-1.tar.bz2" archive_destination = os.path.join( testing_metadata.config.croot, TEST_SUBDIR, archive_name diff --git a/tests/test_inspect.py b/tests/test_inspect.py index 238f09240a..cd90ba98ae 100644 --- a/tests/test_inspect.py +++ b/tests/test_inspect.py @@ -9,25 +9,25 @@ def test_inspect_linkages(): - if sys.platform == 'win32': + if sys.platform == "win32": with pytest.raises(SystemExit) as exc: out_string = api.inspect_linkages("python") - assert 'conda inspect linkages is only implemented in Linux and OS X' in exc + assert "conda inspect linkages is only implemented in Linux and OS X" in exc else: out_string = api.inspect_linkages("python") - assert 'libncursesw' in out_string + assert "libncursesw" in out_string def test_inspect_objects(): - if sys.platform != 'darwin': + if sys.platform != "darwin": with pytest.raises(SystemExit) as exc: out_string = api.inspect_objects("python") - assert 'conda inspect objects is only implemented in OS X' in exc + assert "conda inspect objects is only implemented in OS X" in exc else: out_string = api.inspect_objects("python") - assert re.search('rpath:.*@loader_path', out_string) + assert re.search("rpath:.*@loader_path", out_string) def 
test_channel_installable(): # make sure the default channel is installable as a reference - assert api.test_installable('conda-team') + assert api.test_installable("conda-team") diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 8654da5842..3a82ed0227 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -12,133 +12,134 @@ def test_pin_default(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test') - assert pin == 'test >=1.2.3,<2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test") + assert pin == "test >=1.2.3,<2.0a0" def test_pin_compatible_exact(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3 abc_0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', exact=True) - assert pin == 'test 1.2.3 abc_0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3 abc_0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", exact=True) + assert pin == "test 1.2.3 abc_0" def test_pin_jpeg_style_default(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['jpeg 9d 0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'jpeg') - assert pin == 'jpeg >=9d,<10a' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["jpeg 9d 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "jpeg") + assert pin == "jpeg >=9d,<10a" def test_pin_jpeg_style_minor(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['jpeg 9d 0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'jpeg', max_pin='x.x') - assert pin == 'jpeg >=9d,<9e' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["jpeg 9d 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "jpeg", max_pin="x.x") + assert pin == "jpeg >=9d,<9e" def test_pin_openssl_style_bugfix(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['openssl 1.0.2j 0'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'openssl', max_pin='x.x.x') - assert pin == 'openssl >=1.0.2j,<1.0.3a' - pin = jinja_context.pin_compatible(testing_metadata, 'openssl', max_pin='x.x.x.x') - assert pin == 'openssl >=1.0.2j,<1.0.2k' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["openssl 1.0.2j 0"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "openssl", max_pin="x.x.x") + assert pin == "openssl >=1.0.2j,<1.0.3a" + pin = jinja_context.pin_compatible(testing_metadata, "openssl", max_pin="x.x.x.x") + assert pin == "openssl >=1.0.2j,<1.0.2k" def test_pin_major_minor(testing_metadata, mocker): - 
get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin='x.x') - assert pin == 'test >=1.2.3,<1.3.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin="x.x") + assert pin == "test >=1.2.3,<1.3.0a0" def test_pin_excessive_max_pin(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin='x.x.x.x.x.x') - assert pin == 'test >=1.2.3,<1.2.4.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin="x.x.x.x.x.x") + assert pin == "test >=1.2.3,<1.2.4.0a0" def test_pin_upper_bound(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', upper_bound="3.0") - assert pin == 'test >=1.2.3,<3.0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", upper_bound="3.0") + assert pin == "test >=1.2.3,<3.0" def test_pin_lower_bound(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', lower_bound=1.0) - assert pin == 'test >=1.0,<2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", lower_bound=1.0) + assert pin == "test >=1.0,<2.0a0" def test_pin_none_min(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', min_pin=None) - assert pin == 'test <2.0a0' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", min_pin=None) + assert pin == "test <2.0a0" def test_pin_none_max(testing_metadata, mocker): - get_env_dependencies = mocker.patch.object(jinja_context, 'get_env_dependencies') - get_env_dependencies.return_value = ['test 1.2.3'], [], None - pin = jinja_context.pin_compatible(testing_metadata, 'test', max_pin=None) - assert pin == 'test >=1.2.3' + get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") + get_env_dependencies.return_value = ["test 1.2.3"], [], None + pin = jinja_context.pin_compatible(testing_metadata, "test", max_pin=None) + assert pin == "test >=1.2.3" def test_pin_subpackage_exact(testing_metadata): name = testing_metadata.name() - output_dict = {'name': name} - 
testing_metadata.meta['outputs'] = [output_dict] + output_dict = {"name": name} + testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) - testing_metadata.other_outputs = {(name, HashableDict(testing_metadata.config.variant)): - (output_dict, fm)} + testing_metadata.other_outputs = { + (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + } pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True) assert len(pin.split()) == 3 def test_pin_subpackage_expression(testing_metadata): name = testing_metadata.name() - output_dict = {'name': name} - testing_metadata.meta['outputs'] = [output_dict] + output_dict = {"name": name} + testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) - testing_metadata.other_outputs = {(name, HashableDict(testing_metadata.config.variant)): - (output_dict, fm)} + testing_metadata.other_outputs = { + (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + } pin = jinja_context.pin_subpackage(testing_metadata, name) assert len(pin.split()) == 2 def test_resolved_packages(testing_metadata): - testing_metadata.meta['requirements']['build'] = ['numpy'] - packages = jinja_context.resolved_packages(testing_metadata, 'build') + testing_metadata.meta["requirements"]["build"] = ["numpy"] + packages = jinja_context.resolved_packages(testing_metadata, "build") assert all(len(pkg.split()) == 3 for pkg in packages) - assert any('numpy' == pkg.split()[0] for pkg in packages) - assert any('python' == pkg.split()[0] for pkg in packages) + assert any("numpy" == pkg.split()[0] for pkg in packages) + assert any("python" == pkg.split()[0] for pkg in packages) def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmp_path: Path): setup_py = tmp_path / "setup.py" setup_cfg = tmp_path / "setup.cfg" setup_py.write_text( - 'from setuptools import setup\n' - 'setup(name="name_from_setup_py")\n' + "from setuptools import setup\n" 'setup(name="name_from_setup_py")\n' ) setup_cfg.write_text( - '[metadata]\n' - 'name = name_from_setup_cfg\n' - 'version = version_from_setup_cfg\n' - '[options.extras_require]\n' - 'extra = extra_package\n' + "[metadata]\n" + "name = name_from_setup_cfg\n" + "version = version_from_setup_cfg\n" + "[options.extras_require]\n" + "extra = extra_package\n" ) setuptools_data = jinja_context.load_setup_py_data(testing_metadata, str(setup_py)) # ensure that setup.cfg has priority over setup.py - assert setuptools_data['name'] == 'name_from_setup_cfg' - assert setuptools_data['version'] == 'version_from_setup_cfg' - assert setuptools_data['extras_require'] == {'extra': ['extra_package']} + assert setuptools_data["name"] == "name_from_setup_cfg" + assert setuptools_data["version"] == "version_from_setup_cfg" + assert setuptools_data["extras_require"] == {"extra": ["extra_package"]} @pytest.mark.parametrize( diff --git a/tests/test_license_family.py b/tests/test_license_family.py index bbc4a1a880..fc0882f7cc 100644 --- a/tests/test_license_family.py +++ b/tests/test_license_family.py @@ -1,8 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from conda_build.license_family import guess_license_family, ensure_valid_license_family import pytest +from conda_build.license_family import ensure_valid_license_family, guess_license_family + LICENSE_FAMILY = { # AGPL "Affero GPL": "AGPL", @@ -66,8 +67,8 @@ def test_guess_license_family(license, family): def 
test_ensure_valid_family(testing_metadata): - testing_metadata.meta['about']['license_family'] = 'public-domain' + testing_metadata.meta["about"]["license_family"] = "public-domain" ensure_valid_license_family(testing_metadata.meta) with pytest.raises(RuntimeError): - testing_metadata.meta['about']['license_family'] = 'local H' + testing_metadata.meta["about"]["license_family"] = "local H" ensure_valid_license_family(testing_metadata.meta) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 30ec75f50a..99545c50c9 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -7,31 +7,30 @@ import pytest -from conda_build.metadata import select_lines, MetaData from conda_build import api -from .utils import thisdir, metadata_dir - +from conda_build.metadata import MetaData, _hash_dependencies, select_lines from conda_build.utils import DEFAULT_SUBDIRS -from conda_build.metadata import _hash_dependencies + +from .utils import metadata_dir, thisdir def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): - testing_metadata._meta_path = os.path.join(testing_workdir, 'meta.yaml') - testing_metadata._meta_name = 'meta.yaml' - with open(testing_metadata.meta_path, 'w') as f: - f.write('http://hg.something.com') + testing_metadata._meta_path = os.path.join(testing_workdir, "meta.yaml") + testing_metadata._meta_name = "meta.yaml" + with open(testing_metadata.meta_path, "w") as f: + f.write("http://hg.something.com") assert not testing_metadata.uses_vcs_in_meta assert not testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('hg something something') + with open(testing_metadata.meta_path, "w") as f: + f.write("hg something something") assert not testing_metadata.uses_vcs_in_meta assert testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('hg.exe something something') + with open(testing_metadata.meta_path, "w") as f: + f.write("hg.exe something something") assert not testing_metadata.uses_vcs_in_meta assert testing_metadata.uses_vcs_in_build - with open(testing_metadata.meta_path, 'w') as f: - f.write('HG_WEEEEE') + with open(testing_metadata.meta_path, "w") as f: + f.write("HG_WEEEEE") assert testing_metadata.uses_vcs_in_meta assert not testing_metadata.uses_vcs_in_build @@ -53,7 +52,9 @@ def test_select_lines(): {{ environ["test"] }} # [abc] """ - assert select_lines(lines, {'abc': True}, variants_in_place=True) == """ + assert ( + select_lines(lines, {"abc": True}, variants_in_place=True) + == """ test test [abc] no test [abc] # no @@ -68,71 +69,82 @@ def test_select_lines(): test {{ JINJA_VAR[:2] }} {{ environ["test"] }} """ - assert select_lines(lines, {'abc': False}, variants_in_place=True) == """ + ) + assert ( + select_lines(lines, {"abc": False}, variants_in_place=True) + == """ test test [abc] no test [abc] # no test {{ JINJA_VAR[:2] }} """ + ) def test_disallow_leading_period_in_version(testing_metadata): - testing_metadata.meta['package']['version'] = '.ste.ve' + testing_metadata.meta["package"]["version"] = ".ste.ve" testing_metadata.final = True with pytest.raises(ValueError): testing_metadata.version() def test_disallow_dash_in_features(testing_metadata): - testing_metadata.meta['build']['features'] = ['abc'] + testing_metadata.meta["build"]["features"] = ["abc"] testing_metadata.parse_again() with pytest.raises(ValueError): - testing_metadata.meta['build']['features'] = ['ab-c'] + testing_metadata.meta["build"]["features"] = ["ab-c"] testing_metadata.parse_again() def 
test_append_section_data(testing_metadata): testing_metadata.final = False testing_metadata.parse_again() - requirements_len = len(testing_metadata.meta['requirements'].get('build', [])) - testing_metadata.config.append_sections_file = os.path.join(thisdir, 'test-append.yaml') + requirements_len = len(testing_metadata.meta["requirements"].get("build", [])) + testing_metadata.config.append_sections_file = os.path.join( + thisdir, "test-append.yaml" + ) testing_metadata.final = False testing_metadata.parse_again() - assert len(testing_metadata.meta['requirements']['build']) == requirements_len + 1 - assert 'frank' in testing_metadata.meta['requirements']['build'] + assert len(testing_metadata.meta["requirements"]["build"]) == requirements_len + 1 + assert "frank" in testing_metadata.meta["requirements"]["build"] def test_clobber_section_data(testing_metadata): - testing_metadata.config.clobber_sections_file = os.path.join(thisdir, 'test-clobber.yaml') + testing_metadata.config.clobber_sections_file = os.path.join( + thisdir, "test-clobber.yaml" + ) testing_metadata.final = False testing_metadata.parse_again() # a field that should be clobbered - testing_metadata.meta['about']['summary'] = 'yep' + testing_metadata.meta["about"]["summary"] = "yep" # a field that should stay the same - testing_metadata.meta['about']['home'] = 'sweet home' + testing_metadata.meta["about"]["home"] = "sweet home" @pytest.mark.serial def test_build_bootstrap_env_by_name(testing_metadata): - assert not any("git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])), \ - testing_metadata.meta["requirements"].get("build", []) + assert not any( + "git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", []) + ), testing_metadata.meta["requirements"].get("build", []) try: cmd = "conda create -y -n conda_build_bootstrap_test git" subprocess.check_call(cmd.split()) testing_metadata.config.bootstrap = "conda_build_bootstrap_test" testing_metadata.final = False testing_metadata.parse_again() - assert any("git" in pkg for pkg in testing_metadata.meta["requirements"]["build"]), \ - testing_metadata.meta["requirements"]["build"] + assert any( + "git" in pkg for pkg in testing_metadata.meta["requirements"]["build"] + ), testing_metadata.meta["requirements"]["build"] finally: cmd = "conda remove -y -n conda_build_bootstrap_test --all" subprocess.check_call(cmd.split()) def test_build_bootstrap_env_by_path(testing_metadata): - assert not any("git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", [])), \ - testing_metadata.meta["requirements"].get("build", []) + assert not any( + "git" in pkg for pkg in testing_metadata.meta["requirements"].get("build", []) + ), testing_metadata.meta["requirements"].get("build", []) path = os.path.join(thisdir, "conda_build_bootstrap_test") try: cmd = f"conda create -y -p {path} git" @@ -140,8 +152,9 @@ def test_build_bootstrap_env_by_path(testing_metadata): testing_metadata.config.bootstrap = path testing_metadata.final = False testing_metadata.parse_again() - assert any("git" in pkg for pkg in testing_metadata.meta["requirements"]["build"]), \ - testing_metadata.meta["requirements"]["build"] + assert any( + "git" in pkg for pkg in testing_metadata.meta["requirements"]["build"] + ), testing_metadata.meta["requirements"]["build"] finally: cmd = f"conda remove -y -p {path} --all" subprocess.check_call(cmd.split()) @@ -185,48 +198,57 @@ def test_native_compiler_metadata( def test_compiler_metadata_cross_compiler(): - variant = {'c_compiler': 
'c-compiler-linux', - 'cxx_compiler': 'cxx-compiler-linux', - 'fortran_compiler': 'fortran-compiler-linux', - 'target_platform': 'osx-109-x86_64'} - metadata = MetaData(os.path.join(metadata_dir, '_compiler_jinja2'), variant=variant) - assert 'c-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] - assert 'cxx-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] - assert 'fortran-compiler-linux_osx-109-x86_64' in metadata.meta['requirements']['build'] + variant = { + "c_compiler": "c-compiler-linux", + "cxx_compiler": "cxx-compiler-linux", + "fortran_compiler": "fortran-compiler-linux", + "target_platform": "osx-109-x86_64", + } + metadata = MetaData(os.path.join(metadata_dir, "_compiler_jinja2"), variant=variant) + assert "c-compiler-linux_osx-109-x86_64" in metadata.meta["requirements"]["build"] + assert "cxx-compiler-linux_osx-109-x86_64" in metadata.meta["requirements"]["build"] + assert ( + "fortran-compiler-linux_osx-109-x86_64" + in metadata.meta["requirements"]["build"] + ) def test_hash_build_id(testing_metadata): - testing_metadata.config.variant['zlib'] = '1.2' - testing_metadata.meta['requirements']['host'] = ['zlib'] + testing_metadata.config.variant["zlib"] = "1.2" + testing_metadata.meta["requirements"]["host"] = ["zlib"] testing_metadata.final = True hash_contents = testing_metadata.get_hash_contents() - assert hash_contents['zlib'] == '1.2' + assert hash_contents["zlib"] == "1.2" hdeps = testing_metadata.hash_dependencies() hash_contents_tp = hash_contents.copy() found = False for subdir in DEFAULT_SUBDIRS: - hash_contents_tp['target_platform'] = subdir - hdeps_tp = _hash_dependencies(hash_contents_tp, testing_metadata.config.hash_length) + hash_contents_tp["target_platform"] = subdir + hdeps_tp = _hash_dependencies( + hash_contents_tp, testing_metadata.config.hash_length + ) if hdeps_tp == hdeps: found = True break - assert found, f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS" - assert testing_metadata.build_id() == hdeps + '_1' + assert ( + found + ), f"Did not find build that matched {hdeps} when testing each of DEFAULT_SUBDIRS" + assert testing_metadata.build_id() == hdeps + "_1" def test_hash_build_id_key_order(testing_metadata): - deps = testing_metadata.meta['requirements'].get('build', [])[:] + deps = testing_metadata.meta["requirements"].get("build", [])[:] # first, prepend newdeps = deps[:] - newdeps.insert(0, 'steve') - testing_metadata.meta['requirements']['build'] = newdeps + newdeps.insert(0, "steve") + testing_metadata.meta["requirements"]["build"] = newdeps hash_pre = testing_metadata.hash_dependencies() # next, append newdeps = deps[:] - newdeps.append('steve') - testing_metadata.meta['requirements']['build'] = newdeps + newdeps.append("steve") + testing_metadata.meta["requirements"]["build"] = newdeps hash_post = testing_metadata.hash_dependencies() # make sure they match @@ -234,7 +256,7 @@ def test_hash_build_id_key_order(testing_metadata): def test_config_member_decoupling(testing_metadata): - testing_metadata.config.some_member = 'abc' + testing_metadata.config.some_member = "abc" b = testing_metadata.copy() - b.config.some_member = '123' + b.config.some_member = "123" assert b.config.some_member != testing_metadata.config.some_member diff --git a/tests/test_misc.py b/tests/test_misc.py index 27cd9b430c..7a8b56dced 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -5,8 +5,9 @@ from pathlib import Path import pytest + from conda_build._link import pyc_f -from 
conda_build.conda_interface import PathType, EntityEncoder, CrossPlatformStLink +from conda_build.conda_interface import CrossPlatformStLink, EntityEncoder, PathType @pytest.mark.parametrize( @@ -25,7 +26,7 @@ def test_pyc_f(source, python, compiled): def test_pathtype(): hardlink = PathType("hardlink") assert str(hardlink) == "hardlink" - assert hardlink.__json__() == 'hardlink' + assert hardlink.__json__() == "hardlink" softlink = PathType("softlink") assert str(softlink) == "softlink" diff --git a/tests/test_os_utils_external.py b/tests/test_os_utils_external.py index f964b13d38..a4f88d913a 100644 --- a/tests/test_os_utils_external.py +++ b/tests/test_os_utils_external.py @@ -4,6 +4,7 @@ from pathlib import Path from conda.common.compat import on_win + from conda_build.os_utils.external import find_executable diff --git a/tests/test_patch.py b/tests/test_patch.py index e5e69beeaf..7b1764ce58 100644 --- a/tests/test_patch.py +++ b/tests/test_patch.py @@ -3,15 +3,15 @@ from __future__ import annotations from pathlib import Path +from subprocess import CalledProcessError from textwrap import dedent from types import SimpleNamespace -from subprocess import CalledProcessError import pytest from conda_build.source import ( - _ensure_LF, _ensure_CRLF, + _ensure_LF, _guess_patch_strip_level, apply_patch, ) diff --git a/tests/test_post.py b/tests/test_post.py index ed50b9dde9..3fa808fad5 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -6,8 +6,8 @@ import pytest -from conda_build import post, api -from conda_build.utils import on_win, package_has_file, get_site_packages +from conda_build import api, post +from conda_build.utils import get_site_packages, on_win, package_has_file from .utils import add_mangling, metadata_dir @@ -17,69 +17,77 @@ reason="Python 3.10+, py_compile terminates once it finds an invalid file", ) def test_compile_missing_pyc(testing_workdir): - good_files = ['f1.py', 'f3.py'] - bad_file = 'f2_bad.py' - tmp = os.path.join(testing_workdir, 'tmp') - shutil.copytree(os.path.join(os.path.dirname(__file__), 'test-recipes', - 'metadata', '_compile-test'), tmp) - post.compile_missing_pyc(os.listdir(tmp), cwd=tmp, - python_exe=sys.executable) + good_files = ["f1.py", "f3.py"] + bad_file = "f2_bad.py" + tmp = os.path.join(testing_workdir, "tmp") + shutil.copytree( + os.path.join( + os.path.dirname(__file__), "test-recipes", "metadata", "_compile-test" + ), + tmp, + ) + post.compile_missing_pyc(os.listdir(tmp), cwd=tmp, python_exe=sys.executable) for f in good_files: assert os.path.isfile(os.path.join(tmp, add_mangling(f))) assert not os.path.isfile(os.path.join(tmp, add_mangling(bad_file))) def test_hardlinks_to_copies(): - with open('test1', 'w') as f: + with open("test1", "w") as f: f.write("\n") - os.link('test1', 'test2') - assert os.lstat('test1').st_nlink == 2 - assert os.lstat('test2').st_nlink == 2 + os.link("test1", "test2") + assert os.lstat("test1").st_nlink == 2 + assert os.lstat("test2").st_nlink == 2 - post.make_hardlink_copy('test1', os.getcwd()) - post.make_hardlink_copy('test2', os.getcwd()) + post.make_hardlink_copy("test1", os.getcwd()) + post.make_hardlink_copy("test2", os.getcwd()) - assert os.lstat('test1').st_nlink == 1 - assert os.lstat('test2').st_nlink == 1 + assert os.lstat("test1").st_nlink == 1 + assert os.lstat("test2").st_nlink == 1 def test_postbuild_files_raise(testing_metadata): - fn = 'buildstr', 'buildnum', 'version' + fn = "buildstr", "buildnum", "version" for f in fn: - with open(os.path.join(testing_metadata.config.work_dir, - 
f'__conda_{f}__.txt'), 'w') as fh: - fh.write('123') + with open( + os.path.join(testing_metadata.config.work_dir, f"__conda_{f}__.txt"), "w" + ) as fh: + fh.write("123") with pytest.raises(ValueError, match=f): post.get_build_metadata(testing_metadata) @pytest.mark.skipif(on_win, reason="fix_shebang is not executed on win32") def test_fix_shebang(): - fname = 'test1' - with open(fname, 'w') as f: + fname = "test1" + with open(fname, "w") as f: f.write("\n") os.chmod(fname, 0o000) - post.fix_shebang(fname, '.', '/test/python') + post.fix_shebang(fname, ".", "/test/python") assert os.stat(fname).st_mode == 33277 # file with permissions 0o775 def test_postlink_script_in_output_explicit(testing_config): - recipe = os.path.join(metadata_dir, '_post_link_in_output') + recipe = os.path.join(metadata_dir, "_post_link_in_output") pkg = api.build(recipe, config=testing_config, notest=True)[0] - assert (package_has_file(pkg, 'bin/.out1-post-link.sh') or - package_has_file(pkg, 'Scripts/.out1-post-link.bat')) + assert package_has_file(pkg, "bin/.out1-post-link.sh") or package_has_file( + pkg, "Scripts/.out1-post-link.bat" + ) def test_postlink_script_in_output_implicit(testing_config): - recipe = os.path.join(metadata_dir, '_post_link_in_output_implicit') + recipe = os.path.join(metadata_dir, "_post_link_in_output_implicit") pkg = api.build(recipe, config=testing_config, notest=True)[0] - assert (package_has_file(pkg, 'bin/.out1-post-link.sh') or - package_has_file(pkg, 'Scripts/.out1-post-link.bat')) + assert package_has_file(pkg, "bin/.out1-post-link.sh") or package_has_file( + pkg, "Scripts/.out1-post-link.bat" + ) def test_pypi_installer_metadata(testing_config): - recipe = os.path.join(metadata_dir, '_pypi_installer_metadata') + recipe = os.path.join(metadata_dir, "_pypi_installer_metadata") pkg = api.build(recipe, config=testing_config, notest=True)[0] - expected_installer = '{}/imagesize-1.1.0.dist-info/INSTALLER'.format(get_site_packages('', '3.9')) - assert 'conda' == (package_has_file(pkg, expected_installer, refresh_mode='forced')) + expected_installer = "{}/imagesize-1.1.0.dist-info/INSTALLER".format( + get_site_packages("", "3.9") + ) + assert "conda" == (package_has_file(pkg, expected_installer, refresh_mode="forced")) diff --git a/tests/test_published_examples.py b/tests/test_published_examples.py index 28390f28d2..95b2efc301 100644 --- a/tests/test_published_examples.py +++ b/tests/test_published_examples.py @@ -1,15 +1,16 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os -from pathlib import Path import sys +from pathlib import Path import pytest - from conda.testing.integration import BIN_DIRECTORY + from conda_build.api import build from conda_build.utils import check_call_env -from .utils import published_path, get_valid_recipes + +from .utils import get_valid_recipes, published_path @pytest.mark.sanity diff --git a/tests/test_pypi_skeleton.py b/tests/test_pypi_skeleton.py index 5d4a347d42..6562a50f14 100644 --- a/tests/test_pypi_skeleton.py +++ b/tests/test_pypi_skeleton.py @@ -1,10 +1,10 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import pytest - from conda.auxlib.ish import dals + from conda_build.skeletons import pypi -from conda_build.skeletons.pypi import _print_dict, _formating_value +from conda_build.skeletons.pypi import _formating_value, _print_dict @pytest.mark.parametrize( diff --git a/tests/test_render.py b/tests/test_render.py index ff1e67666b..e400d45e87 100644 --- a/tests/test_render.py +++ 
b/tests/test_render.py @@ -9,8 +9,7 @@ import pytest -from conda_build import api -from conda_build import render +from conda_build import api, render from conda_build.metadata import MetaData from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1 @@ -43,15 +42,11 @@ def test_reduce_duplicate_specs(testing_metadata): def test_pin_run_as_build_preserve_string(testing_metadata): m = testing_metadata - m.config.variant['pin_run_as_build']['pkg'] = { - 'max_pin': 'x.x' - } + m.config.variant["pin_run_as_build"]["pkg"] = {"max_pin": "x.x"} dep = render.get_pin_from_build( - m, - 'pkg * somestring*', - {'pkg': '1.2.3 somestring_h1234'} + m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"} ) - assert dep == 'pkg >=1.2.3,<1.3.0a0 somestring*' + assert dep == "pkg >=1.2.3,<1.3.0a0 somestring*" @pytest.mark.parametrize( diff --git a/tests/test_source.py b/tests/test_source.py index 8c33062d3d..e32a133b84 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -7,154 +7,195 @@ import pytest from conda_build import source -from conda_build.conda_interface import hashsum_file, TemporaryDirectory +from conda_build.conda_interface import TemporaryDirectory, hashsum_file from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator + from .utils import thisdir def test_alternative_url_no_fn(testing_metadata): - testing_metadata.meta['source'] = {'url': [ - os.path.join(thisdir, 'archives', 'a.tar.bz2'), - os.path.join(thisdir, 'archives', 'a.tar.bz2'), - ]} + testing_metadata.meta["source"] = { + "url": [ + os.path.join(thisdir, "archives", "a.tar.bz2"), + os.path.join(thisdir, "archives", "a.tar.bz2"), + ] + } source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'a')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "a")) def test_multiple_url_sources(testing_metadata): - - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f2', 'url': os.path.join(thisdir, 'archives', 'b.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f2", "url": os.path.join(thisdir, "archives", "b.tar.bz2")}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'b')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f2")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f2", "b")) def test_multiple_url_sources_into_same_folder(testing_metadata): - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'b.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f1", "url": os.path.join(thisdir, "archives", "b.tar.bz2")}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1')) - assert 
os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'b')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "b")) def test_extract_tarball_with_subfolders_moves_files(testing_metadata): """Ensure that tarballs that contain only a single folder get their contents hoisted up one level""" - testing_metadata.meta['source'] = { - 'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')} + testing_metadata.meta["source"] = { + "url": os.path.join(thisdir, "archives", "subfolder.tar.bz2") + } source.provide(testing_metadata) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'abc')) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder") + ) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "abc")) def test_extract_multiple_tarballs_with_subfolders_flattens_all(testing_metadata): """Ensure that tarballs that contain only a single folder get their contents hoisted up one level""" - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')}, - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder2.tar.bz2')}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "subfolder.tar.bz2")}, + { + "folder": "f1", + "url": os.path.join(thisdir, "archives", "subfolder2.tar.bz2"), + }, + ] source.provide(testing_metadata) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder')) - assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder2')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'abc')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'def')) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder") + ) + assert not os.path.exists( + os.path.join(testing_metadata.config.work_dir, "subfolder2") + ) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "abc")) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "def")) def test_multiple_different_sources(testing_metadata): - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f2', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f2", "git_url": "https://github.com/conda/conda_build_test_recipe"}, + ] source.provide(testing_metadata) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a')) - assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'README.md')) + assert os.path.exists(os.path.join(testing_metadata.config.work_dir, "f1", "a")) + assert os.path.exists( + os.path.join(testing_metadata.config.work_dir, "f2", "README.md") + ) # Test get_value() indexing syntax. 
- assert testing_metadata.get_value('source/url') == testing_metadata.meta['source'][0]['url'] - assert testing_metadata.get_value('source/0/url') == testing_metadata.meta['source'][0]['url'] - assert (testing_metadata.get_value('source/1/git_url') == - testing_metadata.meta['source'][1]['git_url']) + assert ( + testing_metadata.get_value("source/url") + == testing_metadata.meta["source"][0]["url"] + ) + assert ( + testing_metadata.get_value("source/0/url") + == testing_metadata.meta["source"][0]["url"] + ) + assert ( + testing_metadata.get_value("source/1/git_url") + == testing_metadata.meta["source"][1]["git_url"] + ) def test_git_into_existing_populated_folder_raises(testing_metadata): """Git will not clone into a non-empty folder. This should raise an exception.""" - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}, - {'folder': 'f1', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}] + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")}, + {"folder": "f1", "git_url": "https://github.com/conda/conda_build_test_recipe"}, + ] with pytest.raises(subprocess.CalledProcessError): source.provide(testing_metadata) def test_git_repo_with_single_subdir_does_not_enter_subdir(testing_metadata): - """Regression test for https://github.com/conda/conda-build/issues/1910 """ - testing_metadata.meta['source'] = { - 'git_url': 'https://github.com/conda/conda_build_single_folder_test'} + """Regression test for https://github.com/conda/conda-build/issues/1910""" + testing_metadata.meta["source"] = { + "git_url": "https://github.com/conda/conda_build_single_folder_test" + } source.provide(testing_metadata) - assert os.path.basename(testing_metadata.config.work_dir) != 'one_folder' + assert os.path.basename(testing_metadata.config.work_dir) != "one_folder" @pytest.mark.sanity def test_source_user_expand(): - with TemporaryDirectory(dir=os.path.expanduser('~')) as tmp: + with TemporaryDirectory(dir=os.path.expanduser("~")) as tmp: with TemporaryDirectory() as tbz_srcdir: file_txt = os.path.join(tbz_srcdir, "file.txt") - with open(file_txt, 'w') as f: + with open(file_txt, "w") as f: f.write("hello") tbz_name = os.path.join(tmp, "cb-test.tar.bz2") with tarfile.open(tbz_name, "w:bz2") as tar: tar.add(tbz_srcdir, arcname=os.path.sep) - for prefix in ('~', 'file:///~'): - source_dict = {"url": os.path.join(prefix, os.path.basename(tmp), "cb-test.tar.bz2"), - "sha256": hashsum_file(tbz_name, 'sha256')} + for prefix in ("~", "file:///~"): + source_dict = { + "url": os.path.join( + prefix, os.path.basename(tmp), "cb-test.tar.bz2" + ), + "sha256": hashsum_file(tbz_name, "sha256"), + } with TemporaryDirectory() as tmp2: - download_to_cache(tmp2, '', source_dict) + download_to_cache(tmp2, "", source_dict) def test_hoist_same_name(testing_workdir): - testdir = os.path.join(testing_workdir, 'test', 'test') - outer_dir = os.path.join(testing_workdir, 'test') + testdir = os.path.join(testing_workdir, "test", "test") + outer_dir = os.path.join(testing_workdir, "test") os.makedirs(testdir) - with open(os.path.join(testdir, 'somefile'), 'w') as f: - f.write('weeeee') + with open(os.path.join(testdir, "somefile"), "w") as f: + f.write("weeeee") source.hoist_single_extracted_folder(testdir) - assert os.path.isfile(os.path.join(outer_dir, 'somefile')) + assert os.path.isfile(os.path.join(outer_dir, "somefile")) assert not os.path.isdir(testdir) def test_hoist_different_name(testing_workdir): - 
testdir = os.path.join(testing_workdir, 'test') - nesteddir = os.path.join(testdir, 'test_name') + testdir = os.path.join(testing_workdir, "test") + nesteddir = os.path.join(testdir, "test_name") os.makedirs(nesteddir) - with open(os.path.join(nesteddir, 'somefile'), 'w') as f: - f.write('weeeee') + with open(os.path.join(nesteddir, "somefile"), "w") as f: + f.write("weeeee") source.hoist_single_extracted_folder(nesteddir) - assert os.path.isfile(os.path.join(testdir, 'somefile')) + assert os.path.isfile(os.path.join(testdir, "somefile")) assert not os.path.isdir(nesteddir) def test_append_hash_to_fn(testing_metadata): - relative_zip = 'testfn.zip' - assert source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip' - relative_tar_gz = 'testfn.tar.gz' - assert source.append_hash_to_fn(relative_tar_gz, '123') == 'testfn_123.tar.gz' - absolute_zip = '/abc/testfn.zip' - assert source.append_hash_to_fn(absolute_zip, '123') == '/abc/testfn_123.zip' - absolute_tar_gz = '/abc/testfn.tar.gz' - assert source.append_hash_to_fn(absolute_tar_gz, '123') == '/abc/testfn_123.tar.gz' - absolute_win_zip = 'C:\\abc\\testfn.zip' - assert source.append_hash_to_fn(absolute_win_zip, '123') == 'C:\\abc\\testfn_123.zip' - absolute_win_tar_gz = 'C:\\abc\\testfn.tar.gz' - assert source.append_hash_to_fn(absolute_win_tar_gz, '123') == 'C:\\abc\\testfn_123.tar.gz' - relative_whl = 'setuptools-36.4.0-py2.py3-none-any.whl' - assert source.append_hash_to_fn(relative_whl, '123') == 'setuptools-36.4.0-py2.py3-none-any_123.whl' - - testing_metadata.meta['source'] = [ - {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}] + relative_zip = "testfn.zip" + assert source.append_hash_to_fn(relative_zip, "123") == "testfn_123.zip" + relative_tar_gz = "testfn.tar.gz" + assert source.append_hash_to_fn(relative_tar_gz, "123") == "testfn_123.tar.gz" + absolute_zip = "/abc/testfn.zip" + assert source.append_hash_to_fn(absolute_zip, "123") == "/abc/testfn_123.zip" + absolute_tar_gz = "/abc/testfn.tar.gz" + assert source.append_hash_to_fn(absolute_tar_gz, "123") == "/abc/testfn_123.tar.gz" + absolute_win_zip = "C:\\abc\\testfn.zip" + assert ( + source.append_hash_to_fn(absolute_win_zip, "123") == "C:\\abc\\testfn_123.zip" + ) + absolute_win_tar_gz = "C:\\abc\\testfn.tar.gz" + assert ( + source.append_hash_to_fn(absolute_win_tar_gz, "123") + == "C:\\abc\\testfn_123.tar.gz" + ) + relative_whl = "setuptools-36.4.0-py2.py3-none-any.whl" + assert ( + source.append_hash_to_fn(relative_whl, "123") + == "setuptools-36.4.0-py2.py3-none-any_123.whl" + ) + + testing_metadata.meta["source"] = [ + {"folder": "f1", "url": os.path.join(thisdir, "archives", "a.tar.bz2")} + ] reset_deduplicator() source.provide(testing_metadata) diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 46ca19893f..a27402cb47 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -1,19 +1,19 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from glob import glob import json import os -from pathlib import Path import re import sys +from glob import glob +from pathlib import Path import pytest -from conda_build.render import finalize_metadata -from conda_build.conda_interface import subdir from conda_build import api, utils +from conda_build.conda_interface import subdir +from conda_build.render import finalize_metadata -from .utils import subpackage_dir, get_valid_recipes +from .utils import get_valid_recipes, subpackage_dir @pytest.mark.slow @@ -31,33 +31,42 @@ def 
test_subpackage_recipes(recipe: Path, testing_config): @pytest.mark.sanity def test_autodetect_raises_on_invalid_extension(testing_config): with pytest.raises(NotImplementedError): - api.build(os.path.join(subpackage_dir, '_invalid_script_extension'), config=testing_config) + api.build( + os.path.join(subpackage_dir, "_invalid_script_extension"), + config=testing_config, + ) # regression test for https://github.com/conda/conda-build/issues/1661 -def test_rm_rf_does_not_remove_relative_source_package_files(testing_config, monkeypatch): - recipe_dir = os.path.join(subpackage_dir, '_rm_rf_stays_within_prefix') +def test_rm_rf_does_not_remove_relative_source_package_files( + testing_config, monkeypatch +): + recipe_dir = os.path.join(subpackage_dir, "_rm_rf_stays_within_prefix") monkeypatch.chdir(recipe_dir) - bin_file_that_disappears = os.path.join('bin', 'lsfm') + bin_file_that_disappears = os.path.join("bin", "lsfm") if not os.path.isfile(bin_file_that_disappears): - with open(bin_file_that_disappears, 'w') as f: - f.write('weee') + with open(bin_file_that_disappears, "w") as f: + f.write("weee") assert os.path.isfile(bin_file_that_disappears) - api.build('conda', config=testing_config) + api.build("conda", config=testing_config) assert os.path.isfile(bin_file_that_disappears) def test_output_pkg_path_shows_all_subpackages(testing_metadata): - testing_metadata.meta['outputs'] = [{'name': 'a'}, {'name': 'b'}] + testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = api.get_output_file_path( + [(m, None, None) for (_, m) in out_dicts_and_metadata] + ) assert len(outputs) == 2 def test_subpackage_version_provided(testing_metadata): - testing_metadata.meta['outputs'] = [{'name': 'a', 'version': '2.0'}] + testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = api.get_output_file_path( + [(m, None, None) for (_, m) in out_dicts_and_metadata] + ) assert len(outputs) == 1 assert "a-2.0-1" in outputs[0] @@ -65,28 +74,30 @@ def test_subpackage_version_provided(testing_metadata): def test_subpackage_independent_hash(testing_metadata): # this recipe is creating 2 outputs. One is the output here, a. The other is the top-level # output, implicitly created by adding the run requirement. 
- testing_metadata.meta['outputs'] = [{'name': 'a', 'requirements': 'bzip2'}] - testing_metadata.meta['requirements']['run'] = ['a'] + testing_metadata.meta["outputs"] = [{"name": "a", "requirements": "bzip2"}] + testing_metadata.meta["requirements"]["run"] = ["a"] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 - outputs = api.get_output_file_path([(m, None, None) for (_, m) in out_dicts_and_metadata]) + outputs = api.get_output_file_path( + [(m, None, None) for (_, m) in out_dicts_and_metadata] + ) assert len(outputs) == 2 assert outputs[0][-15:] != outputs[1][-15:] def test_run_exports_in_subpackage(testing_metadata): p1 = testing_metadata.copy() - p1.meta['outputs'] = [{'name': 'has_run_exports', 'run_exports': 'bzip2 1.0'}] + p1.meta["outputs"] = [{"name": "has_run_exports", "run_exports": "bzip2 1.0"}] api.build(p1, config=testing_metadata.config)[0] # api.update_index(os.path.dirname(output), config=testing_metadata.config) p2 = testing_metadata.copy() - p2.meta['requirements']['host'] = ['has_run_exports'] + p2.meta["requirements"]["host"] = ["has_run_exports"] p2_final = finalize_metadata(p2) - assert 'bzip2 1.0.*' in p2_final.meta['requirements']['run'] + assert "bzip2 1.0.*" in p2_final.meta["requirements"]["run"] def test_subpackage_variant_override(testing_config): - recipe = os.path.join(subpackage_dir, '_variant_override') + recipe = os.path.join(subpackage_dir, "_variant_override") outputs = api.build(recipe, config=testing_config) # Three total: # one subpackage with no deps - one output @@ -95,7 +106,7 @@ def test_subpackage_variant_override(testing_config): def test_intradependencies(testing_config): - recipe = os.path.join(subpackage_dir, '_intradependencies') + recipe = os.path.join(subpackage_dir, "_intradependencies") outputs1 = api.get_output_file_paths(recipe, config=testing_config) outputs1_set = {os.path.basename(p) for p in outputs1} # 2 * abc + 1 foo + 2 * (2 * abc, 1 * lib, 1 * foo) @@ -103,121 +114,157 @@ def test_intradependencies(testing_config): outputs2 = api.build(recipe, config=testing_config) assert len(outputs2) == 11 outputs2_set = {os.path.basename(p) for p in outputs2} - assert outputs1_set == outputs2_set, 'pkgs differ :: get_output_file_paths()={} but build()={}'.format(outputs1_set, - outputs2_set) + assert ( + outputs1_set == outputs2_set + ), "pkgs differ :: get_output_file_paths()={} but build()={}".format( + outputs1_set, outputs2_set + ) def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): - recipe = os.path.join(subpackage_dir, '_git_in_output_version') - outputs = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + recipe = os.path.join(subpackage_dir, "_git_in_output_version") + outputs = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) assert len(outputs) == 1 - assert outputs[0][0].version() == '1.21.11' + assert outputs[0][0].version() == "1.21.11" def test_intradep_with_templated_output_name(testing_config): - recipe = os.path.join(subpackage_dir, '_intradep_with_templated_output_name') + recipe = os.path.join(subpackage_dir, "_intradep_with_templated_output_name") metadata = api.render(recipe, config=testing_config) assert len(metadata) == 3 - expected_names = {'test_templated_subpackage_name', 'templated_subpackage_nameabc', - 'depends_on_templated'} + expected_names = { + "test_templated_subpackage_name", + "templated_subpackage_nameabc", + "depends_on_templated", + } 
assert {m.name() for (m, _, _) in metadata} == expected_names def test_output_specific_subdir(testing_config): - recipe = os.path.join(subpackage_dir, '_output_specific_subdir') + recipe = os.path.join(subpackage_dir, "_output_specific_subdir") metadata = api.render(recipe, config=testing_config) assert len(metadata) == 3 - for (m, _, _) in metadata: - if m.name() in ('default_subdir', 'default_subdir_2'): + for m, _, _ in metadata: + if m.name() in ("default_subdir", "default_subdir_2"): assert m.config.target_subdir == subdir - elif m.name() == 'custom_subdir': - assert m.config.target_subdir == 'linux-aarch64' + elif m.name() == "custom_subdir": + assert m.config.target_subdir == "linux-aarch64" else: - raise AssertionError("Test for output_specific_subdir written incorrectly - " - "package name not recognized") + raise AssertionError( + "Test for output_specific_subdir written incorrectly - " + "package name not recognized" + ) def test_about_metadata(testing_config): - recipe = os.path.join(subpackage_dir, '_about_metadata') + recipe = os.path.join(subpackage_dir, "_about_metadata") metadata = api.render(recipe, config=testing_config) assert len(metadata) == 2 for m, _, _ in metadata: - if m.name() == 'abc': - assert 'summary' in m.meta['about'] - assert m.meta['about']['summary'] == 'weee' - assert 'home' not in m.meta['about'] - elif m.name() == 'def': - assert 'home' in m.meta['about'] - assert 'summary' not in m.meta['about'] - assert m.meta['about']['home'] == 'http://not.a.url' + if m.name() == "abc": + assert "summary" in m.meta["about"] + assert m.meta["about"]["summary"] == "weee" + assert "home" not in m.meta["about"] + elif m.name() == "def": + assert "home" in m.meta["about"] + assert "summary" not in m.meta["about"] + assert m.meta["about"]["home"] == "http://not.a.url" outs = api.build(recipe, config=testing_config) for out in outs: - about_meta = utils.package_has_file(out, 'info/about.json') + about_meta = utils.package_has_file(out, "info/about.json") assert about_meta info = json.loads(about_meta) - if os.path.basename(out).startswith('abc'): - assert 'summary' in info - assert info['summary'] == 'weee' - assert 'home' not in info - elif os.path.basename(out).startswith('def'): - assert 'home' in info - assert 'summary' not in info - assert info['home'] == 'http://not.a.url' + if os.path.basename(out).startswith("abc"): + assert "summary" in info + assert info["summary"] == "weee" + assert "home" not in info + elif os.path.basename(out).startswith("def"): + assert "home" in info + assert "summary" not in info + assert info["home"] == "http://not.a.url" @pytest.mark.slow def test_toplevel_entry_points_do_not_apply_to_subpackages(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_entry_points') + recipe_dir = os.path.join(subpackage_dir, "_entry_points") outputs = api.build(recipe_dir, config=testing_config) if utils.on_win: - script_dir = 'Scripts' - ext = '.exe' + script_dir = "Scripts" + ext = ".exe" else: - script_dir = 'bin' - ext = '' + script_dir = "bin" + ext = "" for out in outputs: fn = os.path.basename(out) - if fn.startswith('split_package_entry_points1'): - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - elif fn.startswith('split_package_entry_points2'): - assert 
utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - elif fn.startswith('test_split_package_entry_points'): + if fn.startswith("split_package_entry_points1"): + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + elif fn.startswith("split_package_entry_points2"): + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + elif fn.startswith("test_split_package_entry_points"): # python commands will make sure that these are available. - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top1', ext)) - assert utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'top2', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg1', ext)) - assert not utils.package_has_file(out, '{}/{}{}'.format(script_dir, 'pkg2', ext)) + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top1", ext) + ) + assert utils.package_has_file( + out, "{}/{}{}".format(script_dir, "top2", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg1", ext) + ) + assert not utils.package_has_file( + out, "{}/{}{}".format(script_dir, "pkg2", ext) + ) else: - raise ValueError(f"Didn't see any of the 3 expected filenames. Filename was {fn}") + raise ValueError( + f"Didn't see any of the 3 expected filenames. 
Filename was {fn}" + ) def test_subpackage_hash_inputs(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_hash_inputs') + recipe_dir = os.path.join(subpackage_dir, "_hash_inputs") outputs = api.build(recipe_dir, config=testing_config) assert len(outputs) == 2 for out in outputs: - if os.path.basename(out).startswith('test_subpackage'): - assert utils.package_has_file(out, 'info/recipe/install-script.sh') + if os.path.basename(out).startswith("test_subpackage"): + assert utils.package_has_file(out, "info/recipe/install-script.sh") # will have full parent recipe in nested folder - assert utils.package_has_file(out, 'info/recipe/parent/build.sh') - assert not utils.package_has_file(out, 'info/recipe/meta.yaml.template') - assert utils.package_has_file(out, 'info/recipe/meta.yaml') + assert utils.package_has_file(out, "info/recipe/parent/build.sh") + assert not utils.package_has_file(out, "info/recipe/meta.yaml.template") + assert utils.package_has_file(out, "info/recipe/meta.yaml") else: - assert utils.package_has_file(out, 'info/recipe/install-script.sh') - assert utils.package_has_file(out, 'info/recipe/build.sh') + assert utils.package_has_file(out, "info/recipe/install-script.sh") + assert utils.package_has_file(out, "info/recipe/build.sh") # will have full parent recipe in base recipe folder (this is an output for the top level) - assert utils.package_has_file(out, 'info/recipe/meta.yaml.template') - assert utils.package_has_file(out, 'info/recipe/meta.yaml') + assert utils.package_has_file(out, "info/recipe/meta.yaml.template") + assert utils.package_has_file(out, "info/recipe/meta.yaml") def test_overlapping_files(testing_config, caplog): - recipe_dir = os.path.join(subpackage_dir, '_overlapping_files') + recipe_dir = os.path.join(subpackage_dir, "_overlapping_files") utils.reset_deduplicator() outputs = api.build(recipe_dir, config=testing_config) assert len(outputs) == 3 @@ -226,7 +273,7 @@ def test_overlapping_files(testing_config, caplog): @pytest.mark.sanity def test_per_output_tests(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_per_output_tests') + recipe_dir = os.path.join(subpackage_dir, "_per_output_tests") api.build(recipe_dir, config=testing_config) # out, err = capfd.readouterr() # windows echoes commands, so we see the result and the command @@ -237,97 +284,111 @@ def test_per_output_tests(testing_config): @pytest.mark.sanity def test_per_output_tests_script(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_output_test_script') + recipe_dir = os.path.join(subpackage_dir, "_output_test_script") with pytest.raises(SystemExit): api.build(recipe_dir, config=testing_config) def test_pin_compatible_in_outputs(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_pin_compatible_in_output') + recipe_dir = os.path.join(subpackage_dir, "_pin_compatible_in_output") m = api.render(recipe_dir, config=testing_config)[0][0] - assert any(re.search(r'numpy\s*>=.*,<.*', req) for req in m.meta['requirements']['run']) + assert any( + re.search(r"numpy\s*>=.*,<.*", req) for req in m.meta["requirements"]["run"] + ) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config): - recipe_dir = os.path.join(subpackage_dir, '_output_named_same_as_top_level') + recipe_dir = os.path.join(subpackage_dir, "_output_named_same_as_top_level") ms = api.render(recipe_dir, config=testing_config) # TODO: need to decide what best behavior is for saying whether the # top-level build reqs or the output reqs for the similarly naemd output # 
win. I think you could have both, but it means rendering a new, extra, # build-only metadata in addition to all the outputs for m, _, _ in ms: - if m.name() == 'ipp': - for env in ('build', 'host', 'run'): - assert not m.meta.get('requirements', {}).get(env) + if m.name() == "ipp": + for env in ("build", "host", "run"): + assert not m.meta.get("requirements", {}).get(env) def test_subpackage_order_natural(testing_config): - recipe = os.path.join(subpackage_dir, '_order') + recipe = os.path.join(subpackage_dir, "_order") outputs = api.build(recipe, config=testing_config) assert len(outputs) == 2 def test_subpackage_order_bad(testing_config): - recipe = os.path.join(subpackage_dir, '_order_bad') + recipe = os.path.join(subpackage_dir, "_order_bad") outputs = api.build(recipe, config=testing_config) assert len(outputs) == 2 @pytest.mark.sanity def test_subpackage_script_and_files(testing_config): - recipe = os.path.join(subpackage_dir, '_script_and_files') + recipe = os.path.join(subpackage_dir, "_script_and_files") api.build(recipe, config=testing_config) @pytest.mark.sanity def test_build_script_and_script_env(testing_config): - recipe = os.path.join(subpackage_dir, '_build_script') - os.environ['TEST_FN'] = 'test' + recipe = os.path.join(subpackage_dir, "_build_script") + os.environ["TEST_FN"] = "test" api.build(recipe, config=testing_config) @pytest.mark.sanity -@pytest.mark.skipif(sys.platform != 'darwin', reason="only implemented for mac") +@pytest.mark.skipif(sys.platform != "darwin", reason="only implemented for mac") def test_strong_run_exports_from_build_applies_to_host(testing_config): - recipe = os.path.join(subpackage_dir, '_strong_run_exports_applies_from_build_to_host') + recipe = os.path.join( + subpackage_dir, "_strong_run_exports_applies_from_build_to_host" + ) api.build(recipe, config=testing_config) -@pytest.mark.parametrize("recipe", ('_line_up_python_compiled_libs', - '_line_up_python_compiled_libs_top_level_same_name_output')) +@pytest.mark.parametrize( + "recipe", + ( + "_line_up_python_compiled_libs", + "_line_up_python_compiled_libs_top_level_same_name_output", + ), +) def test_python_line_up_with_compiled_lib(recipe, testing_config): recipe = os.path.join(subpackage_dir, recipe) # we use windows so that we have 2 libxyz results (VS2008, VS2015) - ms = api.render(recipe, config=testing_config, platform='win', arch='64') + ms = api.render(recipe, config=testing_config, platform="win", arch="64") # 2 libxyz, 3 py-xyz, 3 xyz assert len(ms) == 8 for m, _, _ in ms: - if m.name() in ('py-xyz' or 'xyz'): - deps = m.meta['requirements']['run'] - assert any(dep.startswith('libxyz ') and len(dep.split()) == 3 for dep in deps), (m.name(), deps) - assert any(dep.startswith('python >') for dep in deps), (m.name(), deps) - assert any(dep.startswith('zlib >') for dep in deps), (m.name(), deps) - if m.name() == 'xyz': - deps = m.meta['requirements']['run'] - assert any(dep.startswith('py-xyz ') and len(dep.split()) == 3 for dep in deps), (m.name(), deps) - assert any(dep.startswith('python >') for dep in deps), (m.name(), deps) - - -@pytest.mark.xfail(sys.platform == "win32", - reason="Defaults channel has conflicting vc packages") + if m.name() in ("py-xyz" or "xyz"): + deps = m.meta["requirements"]["run"] + assert any( + dep.startswith("libxyz ") and len(dep.split()) == 3 for dep in deps + ), (m.name(), deps) + assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) + assert any(dep.startswith("zlib >") for dep in deps), (m.name(), deps) + if m.name() == "xyz": + 
deps = m.meta["requirements"]["run"] + assert any( + dep.startswith("py-xyz ") and len(dep.split()) == 3 for dep in deps + ), (m.name(), deps) + assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) + + +@pytest.mark.xfail( + sys.platform == "win32", reason="Defaults channel has conflicting vc packages" +) def test_merge_build_host_applies_in_outputs(testing_config): - recipe = os.path.join(subpackage_dir, '_merge_build_host') + recipe = os.path.join(subpackage_dir, "_merge_build_host") ms = api.render(recipe, config=testing_config) for m, _, _ in ms: # top level - if m.name() == 'test_build_host_merge': - assert not m.meta.get('requirements', {}).get('run') + if m.name() == "test_build_host_merge": + assert not m.meta.get("requirements", {}).get("run") # output else: - run_exports = set(m.meta.get('build', {}).get('run_exports', [])) + run_exports = set(m.meta.get("build", {}).get("run_exports", [])) assert len(run_exports) == 2 assert all(len(export.split()) > 1 for export in run_exports) - run_deps = set(m.meta.get('requirements', {}).get('run', [])) + run_deps = set(m.meta.get("requirements", {}).get("run", [])) assert len(run_deps) == 2 assert all(len(dep.split()) > 1 for dep in run_deps) @@ -336,41 +397,47 @@ def test_merge_build_host_applies_in_outputs(testing_config): @pytest.mark.sanity def test_activation_in_output_scripts(testing_config): - recipe = os.path.join(subpackage_dir, '_output_activation') + recipe = os.path.join(subpackage_dir, "_output_activation") testing_config.activate = True api.build(recipe, config=testing_config) def test_inherit_build_number(testing_config): - recipe = os.path.join(subpackage_dir, '_inherit_build_number') + recipe = os.path.join(subpackage_dir, "_inherit_build_number") ms = api.render(recipe, config=testing_config) for m, _, _ in ms: - assert 'number' in m.meta['build'], "build number was not inherited at all" - assert int(m.meta['build']['number']) == 1, "build number should have been inherited as '1'" + assert "number" in m.meta["build"], "build number was not inherited at all" + assert ( + int(m.meta["build"]["number"]) == 1 + ), "build number should have been inherited as '1'" def test_circular_deps_cross(testing_config): - recipe = os.path.join(subpackage_dir, '_circular_deps_cross') + recipe = os.path.join(subpackage_dir, "_circular_deps_cross") # check that this does not raise an exception api.render(recipe, config=testing_config) @pytest.mark.slow def test_loops_do_not_remove_earlier_packages(testing_config): - recipe = os.path.join(subpackage_dir, '_xgboost_example') + recipe = os.path.join(subpackage_dir, "_xgboost_example") output_files = api.get_output_file_paths(recipe, config=testing_config) api.build(recipe, config=testing_config) - assert len(output_files) == len(glob(os.path.join(testing_config.croot, testing_config.host_subdir, "*.tar.bz2"))) + assert len(output_files) == len( + glob( + os.path.join(testing_config.croot, testing_config.host_subdir, "*.tar.bz2") + ) + ) # regression test for https://github.com/conda/conda-build/issues/3248 @pytest.mark.skipif( utils.on_win and sys.version_info <= (3, 4), - reason="Skipping it on windows and vc<14" + reason="Skipping it on windows and vc<14", ) def test_build_string_does_not_incorrectly_add_hash(testing_config): - recipe = os.path.join(subpackage_dir, '_build_string_with_variant') + recipe = os.path.join(subpackage_dir, "_build_string_with_variant") output_files = api.get_output_file_paths(recipe, config=testing_config) assert len(output_files) == 4 assert 
any("clang_variant-1.0-cling.tar.bz2" in f for f in output_files) diff --git a/tests/test_utils.py b/tests/test_utils.py index 1677f0f833..f901cc6f36 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,16 +1,16 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import contextlib -import filelock import os import subprocess import sys from typing import NamedTuple +import filelock import pytest -from conda_build.exceptions import BuildLockError import conda_build.utils as utils +from conda_build.exceptions import BuildLockError def makefile(name, contents=""): @@ -20,16 +20,18 @@ def makefile(name, contents=""): if not os.path.exists(path): os.makedirs(path) - with open(name, 'w') as f: + with open(name, "w") as f: f.write(contents) -@pytest.mark.skipif(utils.on_win, reason="only unix has python version in site-packages path") +@pytest.mark.skipif( + utils.on_win, reason="only unix has python version in site-packages path" +) def test_get_site_packages(): # https://github.com/conda/conda-build/issues/1055#issuecomment-250961576 # crazy unreal python version that should show up in a second - crazy_path = os.path.join('/dummy', 'lib', 'python8.2', 'site-packages') - site_packages = utils.get_site_packages('/dummy', '8.2') + crazy_path = os.path.join("/dummy", "lib", "python8.2", "site-packages") + site_packages = utils.get_site_packages("/dummy", "8.2") assert site_packages == crazy_path @@ -41,164 +43,184 @@ def test_prepend_sys_path(): def test_copy_source_tree(namespace_setup): - dst = os.path.join(namespace_setup, 'dest') - utils.copy_into(os.path.join(namespace_setup, 'namespace'), dst) - assert os.path.isfile(os.path.join(dst, 'package', 'module.py')) + dst = os.path.join(namespace_setup, "dest") + utils.copy_into(os.path.join(namespace_setup, "namespace"), dst) + assert os.path.isfile(os.path.join(dst, "package", "module.py")) def test_merge_namespace_trees(namespace_setup): - dep = os.path.join(namespace_setup, 'other_tree', 'namespace', 'package', 'dependency.py') + dep = os.path.join( + namespace_setup, "other_tree", "namespace", "package", "dependency.py" + ) makefile(dep) - utils.copy_into(os.path.join(namespace_setup, 'other_tree'), namespace_setup) - assert os.path.isfile(os.path.join(namespace_setup, 'namespace', 'package', - 'module.py')) + utils.copy_into(os.path.join(namespace_setup, "other_tree"), namespace_setup) + assert os.path.isfile( + os.path.join(namespace_setup, "namespace", "package", "module.py") + ) assert os.path.isfile(dep) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def namespace_setup(testing_workdir): - namespace = os.path.join(testing_workdir, 'namespace') - package = os.path.join(namespace, 'package') + namespace = os.path.join(testing_workdir, "namespace") + package = os.path.join(namespace, "package") makefile(os.path.join(package, "module.py")) return testing_workdir @pytest.mark.sanity def test_disallow_merge_conflicts(namespace_setup): - duplicate = os.path.join(namespace_setup, 'dupe', 'namespace', 'package', 'module.py') + duplicate = os.path.join( + namespace_setup, "dupe", "namespace", "package", "module.py" + ) makefile(duplicate) with pytest.raises(IOError): - utils.merge_tree(os.path.dirname(duplicate), os.path.join(namespace_setup, 'namespace', - 'package')) + utils.merge_tree( + os.path.dirname(duplicate), + os.path.join(namespace_setup, "namespace", "package"), + ) @pytest.mark.sanity def test_disallow_in_tree_merge(testing_workdir): - with open('testfile', 'w') as f: + with 
open("testfile", "w") as f: f.write("test") with pytest.raises(AssertionError): - utils.merge_tree(testing_workdir, os.path.join(testing_workdir, 'subdir')) + utils.merge_tree(testing_workdir, os.path.join(testing_workdir, "subdir")) def test_relative_default(): for f, r in [ - ('bin/python', '../lib'), - ('lib/libhdf5.so', '.'), - ('lib/python2.6/foobar.so', '..'), - ('lib/python2.6/lib-dynload/zlib.so', '../..'), - ('lib/python2.6/site-packages/pyodbc.so', '../..'), - ('lib/python2.6/site-packages/bsdiff4/core.so', '../../..'), - ('xyz', './lib'), - ('bin/somedir/cmd', '../../lib'), + ("bin/python", "../lib"), + ("lib/libhdf5.so", "."), + ("lib/python2.6/foobar.so", ".."), + ("lib/python2.6/lib-dynload/zlib.so", "../.."), + ("lib/python2.6/site-packages/pyodbc.so", "../.."), + ("lib/python2.6/site-packages/bsdiff4/core.so", "../../.."), + ("xyz", "./lib"), + ("bin/somedir/cmd", "../../lib"), ]: assert utils.relative(f) == r def test_relative_lib(): for f, r in [ - ('bin/python', '../lib'), - ('lib/libhdf5.so', '.'), - ('lib/python2.6/foobar.so', '..'), - ('lib/python2.6/lib-dynload/zlib.so', '../..'), - ('lib/python2.6/site-packages/pyodbc.so', '../..'), - ('lib/python2.6/site-packages/bsdiff3/core.so', '../../..'), - ('xyz', './lib'), - ('bin/somedir/cmd', '../../lib'), - ('bin/somedir/somedir2/cmd', '../../../lib'), + ("bin/python", "../lib"), + ("lib/libhdf5.so", "."), + ("lib/python2.6/foobar.so", ".."), + ("lib/python2.6/lib-dynload/zlib.so", "../.."), + ("lib/python2.6/site-packages/pyodbc.so", "../.."), + ("lib/python2.6/site-packages/bsdiff3/core.so", "../../.."), + ("xyz", "./lib"), + ("bin/somedir/cmd", "../../lib"), + ("bin/somedir/somedir2/cmd", "../../../lib"), ]: - assert utils.relative(f, 'lib') == r + assert utils.relative(f, "lib") == r def test_relative_subdir(): for f, r in [ - ('lib/libhdf5.so', './sub'), - ('lib/sub/libhdf5.so', '.'), - ('bin/python', '../lib/sub'), - ('bin/somedir/cmd', '../../lib/sub'), + ("lib/libhdf5.so", "./sub"), + ("lib/sub/libhdf5.so", "."), + ("bin/python", "../lib/sub"), + ("bin/somedir/cmd", "../../lib/sub"), ]: - assert utils.relative(f, 'lib/sub') == r + assert utils.relative(f, "lib/sub") == r def test_relative_prefix(): for f, r in [ - ('xyz', '.'), - ('a/xyz', '..'), - ('a/b/xyz', '../..'), - ('a/b/c/xyz', '../../..'), - ('a/b/c/d/xyz', '../../../..'), + ("xyz", "."), + ("a/xyz", ".."), + ("a/b/xyz", "../.."), + ("a/b/c/xyz", "../../.."), + ("a/b/c/d/xyz", "../../../.."), ]: - assert utils.relative(f, '.') == r + assert utils.relative(f, ".") == r def test_relative_2(): for f, r in [ - ('a/b/c/d/libhdf5.so', '../..'), - ('a/b/c/libhdf5.so', '..'), - ('a/b/libhdf5.so', '.'), - ('a/libhdf5.so', './b'), - ('x/x/libhdf5.so', '../../a/b'), - ('x/b/libhdf5.so', '../../a/b'), - ('x/libhdf5.so', '../a/b'), - ('libhdf5.so', './a/b'), + ("a/b/c/d/libhdf5.so", "../.."), + ("a/b/c/libhdf5.so", ".."), + ("a/b/libhdf5.so", "."), + ("a/libhdf5.so", "./b"), + ("x/x/libhdf5.so", "../../a/b"), + ("x/b/libhdf5.so", "../../a/b"), + ("x/libhdf5.so", "../a/b"), + ("libhdf5.so", "./a/b"), ]: - assert utils.relative(f, 'a/b') == r + assert utils.relative(f, "a/b") == r def test_relative_3(): for f, r in [ - ('a/b/c/d/libhdf5.so', '..'), - ('a/b/c/libhdf5.so', '.'), - ('a/b/libhdf5.so', './c'), - ('a/libhdf5.so', './b/c'), - ('libhdf5.so', './a/b/c'), - ('a/b/x/libhdf5.so', '../c'), - ('a/x/x/libhdf5.so', '../../b/c'), - ('x/x/x/libhdf5.so', '../../../a/b/c'), - ('x/x/libhdf5.so', '../../a/b/c'), - ('x/libhdf5.so', '../a/b/c'), + ("a/b/c/d/libhdf5.so", ".."), 
+ ("a/b/c/libhdf5.so", "."), + ("a/b/libhdf5.so", "./c"), + ("a/libhdf5.so", "./b/c"), + ("libhdf5.so", "./a/b/c"), + ("a/b/x/libhdf5.so", "../c"), + ("a/x/x/libhdf5.so", "../../b/c"), + ("x/x/x/libhdf5.so", "../../../a/b/c"), + ("x/x/libhdf5.so", "../../a/b/c"), + ("x/libhdf5.so", "../a/b/c"), ]: - assert utils.relative(f, 'a/b/c') == r + assert utils.relative(f, "a/b/c") == r def test_relative_4(): for f, r in [ - ('a/b/c/d/libhdf5.so', '.'), - ('a/b/c/x/libhdf5.so', '../d'), - ('a/b/x/x/libhdf5.so', '../../c/d'), - ('a/x/x/x/libhdf5.so', '../../../b/c/d'), - ('x/x/x/x/libhdf5.so', '../../../../a/b/c/d'), + ("a/b/c/d/libhdf5.so", "."), + ("a/b/c/x/libhdf5.so", "../d"), + ("a/b/x/x/libhdf5.so", "../../c/d"), + ("a/x/x/x/libhdf5.so", "../../../b/c/d"), + ("x/x/x/x/libhdf5.so", "../../../../a/b/c/d"), ]: - assert utils.relative(f, 'a/b/c/d') == r + assert utils.relative(f, "a/b/c/d") == r def test_expand_globs(testing_workdir): - sub_dir = os.path.join(testing_workdir, 'sub1') + sub_dir = os.path.join(testing_workdir, "sub1") os.mkdir(sub_dir) - ssub_dir = os.path.join(sub_dir, 'ssub1') + ssub_dir = os.path.join(sub_dir, "ssub1") os.mkdir(ssub_dir) - files = ['abc', 'acb', - os.path.join(sub_dir, 'def'), - os.path.join(sub_dir, 'abc'), - os.path.join(ssub_dir, 'ghi'), - os.path.join(ssub_dir, 'abc')] + files = [ + "abc", + "acb", + os.path.join(sub_dir, "def"), + os.path.join(sub_dir, "abc"), + os.path.join(ssub_dir, "ghi"), + os.path.join(ssub_dir, "abc"), + ] for f in files: - with open(f, 'w') as _f: - _f.write('weee') + with open(f, "w") as _f: + _f.write("weee") # Test dirs - exp = utils.expand_globs([os.path.join('sub1', 'ssub1')], testing_workdir) - assert sorted(exp) == sorted([os.path.sep.join(('sub1', 'ssub1', 'ghi')), - os.path.sep.join(('sub1', 'ssub1', 'abc'))]) + exp = utils.expand_globs([os.path.join("sub1", "ssub1")], testing_workdir) + assert sorted(exp) == sorted( + [ + os.path.sep.join(("sub1", "ssub1", "ghi")), + os.path.sep.join(("sub1", "ssub1", "abc")), + ] + ) # Test files - exp = sorted(utils.expand_globs(['abc', files[2]], testing_workdir)) - assert exp == sorted(['abc', os.path.sep.join(('sub1', 'def'))]) + exp = sorted(utils.expand_globs(["abc", files[2]], testing_workdir)) + assert exp == sorted(["abc", os.path.sep.join(("sub1", "def"))]) # Test globs - exp = sorted(utils.expand_globs(['a*', '*/*f', '**/*i'], testing_workdir)) - assert exp == sorted(['abc', 'acb', os.path.sep.join(('sub1', 'def')), - os.path.sep.join(('sub1', 'ssub1', 'ghi'))]) + exp = sorted(utils.expand_globs(["a*", "*/*f", "**/*i"], testing_workdir)) + assert exp == sorted( + [ + "abc", + "acb", + os.path.sep.join(("sub1", "def")), + os.path.sep.join(("sub1", "ssub1", "ghi")), + ] + ) def test_filter_files(): @@ -219,41 +241,51 @@ def test_filter_files(): # Files that should *not* be filtered out. 
# Example of valid 'x.git' directory: # lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile - files_list = ['a', 'x.git/a', 'something/x.git/a', - 'x.git\\a', 'something\\x.git\\a', 'something/.gitmodules', - 'some/template/directory/.gitignore', 'another.lab', - 'miniconda_trashcan.py', 'conda_trash_avoider.py'] - assert len(utils.filter_files(files_list, '')) == len(files_list) + files_list = [ + "a", + "x.git/a", + "something/x.git/a", + "x.git\\a", + "something\\x.git\\a", + "something/.gitmodules", + "some/template/directory/.gitignore", + "another.lab", + "miniconda_trashcan.py", + "conda_trash_avoider.py", + ] + assert len(utils.filter_files(files_list, "")) == len(files_list) @pytest.mark.serial def test_logger_filtering(caplog, capfd): import logging + log = utils.get_logger(__name__, level=logging.DEBUG) - log.debug('test debug message') - log.info('test info message') - log.info('test duplicate message') - log.info('test duplicate message') - log.warn('test warn message') - log.error('test error message') + log.debug("test debug message") + log.info("test info message") + log.info("test duplicate message") + log.info("test duplicate message") + log.warn("test warn message") + log.error("test error message") out, err = capfd.readouterr() - assert 'test debug message' in out - assert 'test info message' in out - assert 'test warn message' not in out - assert 'test error message' not in out - assert 'test debug message' not in err - assert 'test info message' not in err - assert 'test warn message' in err - assert 'test error message' in err - assert caplog.text.count('duplicate') == 1 + assert "test debug message" in out + assert "test info message" in out + assert "test warn message" not in out + assert "test error message" not in out + assert "test debug message" not in err + assert "test info message" not in err + assert "test warn message" in err + assert "test error message" in err + assert caplog.text.count("duplicate") == 1 log.removeHandler(logging.StreamHandler(sys.stdout)) log.removeHandler(logging.StreamHandler(sys.stderr)) def test_logger_config_from_file(testing_workdir, capfd, mocker): - test_file = os.path.join(testing_workdir, 'build_log_config.yaml') - with open(test_file, 'w') as f: - f.write(""" + test_file = os.path.join(testing_workdir, "build_log_config.yaml") + with open(test_file, "w") as f: + f.write( + """ version: 1 formatters: simple: @@ -272,83 +304,93 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): root: level: DEBUG handlers: [console] -""".format(__name__)) - cc_conda_build = mocker.patch.object(utils, 'cc_conda_build') +""".format( + __name__ + ) + ) + cc_conda_build = mocker.patch.object(utils, "cc_conda_build") cc_conda_build.get.return_value = test_file log = utils.get_logger(__name__) # default log level is INFO, but our config file should set level to DEBUG - log.warn('test message') + log.warn("test message") # output should have gone to stdout according to config above. out, err = capfd.readouterr() - assert 'test message' in out + assert "test message" in out # make sure that it is not in stderr - this is testing override of defaults. 
- assert 'test message' not in err + assert "test message" not in err def test_ensure_valid_spec(): - assert utils.ensure_valid_spec('python') == 'python' - assert utils.ensure_valid_spec('python 3.8') == 'python 3.8.*' - assert utils.ensure_valid_spec('python 3.8.2') == 'python 3.8.2.*' - assert utils.ensure_valid_spec('python 3.8.10 0') == 'python 3.8.10 0' - assert utils.ensure_valid_spec('python >=3.8,<3.9') == 'python >=3.8,<3.9' - assert utils.ensure_valid_spec('numpy x.x') == 'numpy x.x' - assert utils.ensure_valid_spec(utils.MatchSpec('numpy x.x')) == utils.MatchSpec('numpy x.x') + assert utils.ensure_valid_spec("python") == "python" + assert utils.ensure_valid_spec("python 3.8") == "python 3.8.*" + assert utils.ensure_valid_spec("python 3.8.2") == "python 3.8.2.*" + assert utils.ensure_valid_spec("python 3.8.10 0") == "python 3.8.10 0" + assert utils.ensure_valid_spec("python >=3.8,<3.9") == "python >=3.8,<3.9" + assert utils.ensure_valid_spec("numpy x.x") == "numpy x.x" + assert utils.ensure_valid_spec(utils.MatchSpec("numpy x.x")) == utils.MatchSpec( + "numpy x.x" + ) def test_insert_variant_versions(testing_metadata): - testing_metadata.meta['requirements']['build'] = ['python', 'numpy 1.13'] - testing_metadata.config.variant = {'python': '3.8', 'numpy': '1.11'} - utils.insert_variant_versions(testing_metadata.meta.get('requirements', {}), - testing_metadata.config.variant, 'build') + testing_metadata.meta["requirements"]["build"] = ["python", "numpy 1.13"] + testing_metadata.config.variant = {"python": "3.8", "numpy": "1.11"} + utils.insert_variant_versions( + testing_metadata.meta.get("requirements", {}), + testing_metadata.config.variant, + "build", + ) # this one gets inserted - assert 'python 3.8.*' in testing_metadata.meta['requirements']['build'] + assert "python 3.8.*" in testing_metadata.meta["requirements"]["build"] # this one should not be altered - assert 'numpy 1.13' in testing_metadata.meta['requirements']['build'] + assert "numpy 1.13" in testing_metadata.meta["requirements"]["build"] # the overall length does not change - assert len(testing_metadata.meta['requirements']['build']) == 2 + assert len(testing_metadata.meta["requirements"]["build"]) == 2 def test_subprocess_stats_call(testing_workdir): stats = {} - utils.check_call_env(['hostname'], stats=stats, cwd=testing_workdir) + utils.check_call_env(["hostname"], stats=stats, cwd=testing_workdir) assert stats stats = {} - out = utils.check_output_env(['hostname'], stats=stats, cwd=testing_workdir) + out = utils.check_output_env(["hostname"], stats=stats, cwd=testing_workdir) assert out assert stats with pytest.raises(subprocess.CalledProcessError): - utils.check_call_env(['bash', '-c', 'exit 1'], cwd=testing_workdir) + utils.check_call_env(["bash", "-c", "exit 1"], cwd=testing_workdir) def test_try_acquire_locks(testing_workdir): # Acquiring two unlocked locks should succeed. - lock1 = filelock.FileLock(os.path.join(testing_workdir, 'lock1')) - lock2 = filelock.FileLock(os.path.join(testing_workdir, 'lock2')) + lock1 = filelock.FileLock(os.path.join(testing_workdir, "lock1")) + lock2 = filelock.FileLock(os.path.join(testing_workdir, "lock2")) with utils.try_acquire_locks([lock1, lock2], timeout=1): pass # Acquiring the same lock twice should fail. - lock1_copy = filelock.FileLock(os.path.join(testing_workdir, 'lock1')) + lock1_copy = filelock.FileLock(os.path.join(testing_workdir, "lock1")) # Also verify that the error message contains the word "lock", since we rely # on this elsewhere. 
- with pytest.raises(BuildLockError, match='Failed to acquire all locks'): + with pytest.raises(BuildLockError, match="Failed to acquire all locks"): with utils.try_acquire_locks([lock1, lock1_copy], timeout=1): pass def test_get_lock(testing_workdir): - lock1 = utils.get_lock(os.path.join(testing_workdir, 'lock1')) - lock2 = utils.get_lock(os.path.join(testing_workdir, 'lock2')) + lock1 = utils.get_lock(os.path.join(testing_workdir, "lock1")) + lock2 = utils.get_lock(os.path.join(testing_workdir, "lock2")) # Different folders should get different lock files. assert lock1.lock_file != lock2.lock_file # Same folder should get the same lock file. - lock1_copy = utils.get_lock(os.path.join(testing_workdir, 'lock1')) + lock1_copy = utils.get_lock(os.path.join(testing_workdir, "lock1")) assert lock1.lock_file == lock1_copy.lock_file # ...even when not normalized - lock1_unnormalized = utils.get_lock(os.path.join(testing_workdir, 'foo', '..', 'lock1')) + lock1_unnormalized = utils.get_lock( + os.path.join(testing_workdir, "foo", "..", "lock1") + ) assert lock1.lock_file == lock1_unnormalized.lock_file @@ -388,7 +430,10 @@ def _generate_tmp_tree(): def test_rec_glob(): with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): assert sorted(utils.rec_glob(tmp, "fileA")) == [f1, f3] - assert sorted(utils.rec_glob(tmp, ("fileA", "fileB"), ignores="dirB")) == [f3, f4] + assert sorted(utils.rec_glob(tmp, ("fileA", "fileB"), ignores="dirB")) == [ + f3, + f4, + ] assert sorted(utils.rec_glob(tmp, "fileB", ignores=("dirC",))) == [f2] @@ -484,27 +529,22 @@ class IsCondaPkgTestData(NamedTuple): IS_CONDA_PKG_DATA = ( IsCondaPkgTestData( - value='aws-c-common-0.4.57-hb1e8313_1.tar.bz2', + value="aws-c-common-0.4.57-hb1e8313_1.tar.bz2", expected=True, is_dir=False, - create=True + create=True, ), IsCondaPkgTestData( - value='aws-c-common-0.4.57-hb1e8313_1.tar.bz2', + value="aws-c-common-0.4.57-hb1e8313_1.tar.bz2", expected=False, is_dir=False, - create=False - ), - IsCondaPkgTestData( - value='somedir', - expected=False, - is_dir=True, - create=False + create=False, ), + IsCondaPkgTestData(value="somedir", expected=False, is_dir=True, create=False), ) -@pytest.mark.parametrize('value,expected,is_dir,create', IS_CONDA_PKG_DATA) +@pytest.mark.parametrize("value,expected,is_dir,create", IS_CONDA_PKG_DATA) def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: bool): if create: value = os.path.join(tmpdir, value) diff --git a/tests/test_variants.py b/tests/test_variants.py index e590c8a476..4df2c3f768 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -2,23 +2,23 @@ # SPDX-License-Identifier: BSD-3-Clause import json import os -from pathlib import Path import platform import re import sys +from pathlib import Path import pytest import yaml - from conda.common.compat import on_mac + from conda_build import api, exceptions +from conda_build.utils import ensure_list, package_has_file from conda_build.variants import ( combine_specs, dict_of_lists_to_list_of_dicts, get_package_variants, validate_spec, ) -from conda_build.utils import ensure_list, package_has_file from .utils import variants_dir @@ -127,9 +127,9 @@ def test_variant_with_numpy_pinned_has_matrix(): def test_pinning_in_build_requirements(): recipe = os.path.join(variants_dir, "05_compatible") metadata = api.render(recipe)[0][0] - build_requirements = metadata.meta['requirements']['build'] + build_requirements = metadata.meta["requirements"]["build"] # make sure that everything in the build deps is exactly pinned - 
assert all(len(req.split(' ')) == 3 for req in build_requirements) + assert all(len(req.split(" ")) == 3 for req in build_requirements) @pytest.mark.sanity @@ -142,22 +142,30 @@ def test_no_satisfiable_variants_raises_error(): def test_zip_fields(): """Zipping keys together allows people to tie different versions as sets of combinations.""" - variants = {'packageA': ['1.2', '3.4'], 'packageB': ['5', '6'], 'zip_keys': [('packageA', 'packageB')]} + variants = { + "packageA": ["1.2", "3.4"], + "packageB": ["5", "6"], + "zip_keys": [("packageA", "packageB")], + } zipped = dict_of_lists_to_list_of_dicts(variants) assert len(zipped) == 2 - assert zipped[0]['packageA'] == '1.2' - assert zipped[0]['packageB'] == '5' - assert zipped[1]['packageA'] == '3.4' - assert zipped[1]['packageB'] == '6' + assert zipped[0]["packageA"] == "1.2" + assert zipped[0]["packageB"] == "5" + assert zipped[1]["packageA"] == "3.4" + assert zipped[1]["packageB"] == "6" # allow duplication of values, but lengths of lists must always match - variants = {'packageA': ['1.2', '1.2'], 'packageB': ['5', '6'], 'zip_keys': [('packageA', 'packageB')]} + variants = { + "packageA": ["1.2", "1.2"], + "packageB": ["5", "6"], + "zip_keys": [("packageA", "packageB")], + } zipped = dict_of_lists_to_list_of_dicts(variants) assert len(zipped) == 2 - assert zipped[0]['packageA'] == '1.2' - assert zipped[0]['packageB'] == '5' - assert zipped[1]['packageA'] == '1.2' - assert zipped[1]['packageB'] == '6' + assert zipped[0]["packageA"] == "1.2" + assert zipped[0]["packageB"] == "5" + assert zipped[1]["packageA"] == "1.2" + assert zipped[1]["packageB"] == "6" def test_validate_spec(): @@ -230,8 +238,12 @@ def test_variants_in_versions_with_setup_py_data(): recipe = os.path.join(variants_dir, "12_variant_versions") outputs = api.get_output_file_paths(recipe) assert len(outputs) == 2 - assert any(os.path.basename(pkg).startswith('my_package-470.470') for pkg in outputs) - assert any(os.path.basename(pkg).startswith('my_package-480.480') for pkg in outputs) + assert any( + os.path.basename(pkg).startswith("my_package-470.470") for pkg in outputs + ) + assert any( + os.path.basename(pkg).startswith("my_package-480.480") for pkg in outputs + ) def test_git_variables_with_variants(testing_config): @@ -245,12 +257,12 @@ def test_git_variables_with_variants(testing_config): def test_variant_input_with_zip_keys_keeps_zip_keys_list(): spec = { - 'scipy': ['0.17', '0.19'], - 'sqlite': ['3'], - 'zlib': ['1.2'], - 'xz': ['5'], - 'zip_keys': ['sqlite', 'zlib', 'xz'], - 'pin_run_as_build': {'python': {'min_pin': 'x.x', 'max_pin': 'x.x'}} + "scipy": ["0.17", "0.19"], + "sqlite": ["3"], + "zlib": ["1.2"], + "xz": ["5"], + "zip_keys": ["sqlite", "zlib", "xz"], + "pin_run_as_build": {"python": {"min_pin": "x.x", "max_pin": "x.x"}}, } vrnts = dict_of_lists_to_list_of_dicts(spec) assert len(vrnts) == 2 @@ -272,15 +284,18 @@ def test_ensure_valid_spec_on_run_and_test(testing_config, caplog): assert "Adding .* to spec 'pytest-mock 1.6'" not in text -@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="Unsatisfiable dependencies for M1 MacOS: {'bzip2=1.0.6'}") +@pytest.mark.skipif( + on_mac and platform.machine() == "arm64", + reason="Unsatisfiable dependencies for M1 MacOS: {'bzip2=1.0.6'}", +) def test_serial_builds_have_independent_configs(testing_config): recipe = os.path.join(variants_dir, "17_multiple_recipes_independent_config") recipes = [os.path.join(recipe, dirname) for dirname in ("a", "b")] outputs = api.build(recipes, 
config=testing_config) - index_json = json.loads(package_has_file(outputs[0], 'info/index.json')) - assert 'bzip2 >=1,<1.0.7.0a0' in index_json['depends'] - index_json = json.loads(package_has_file(outputs[1], 'info/index.json')) - assert 'bzip2 >=1.0.6,<2.0a0' in index_json['depends'] + index_json = json.loads(package_has_file(outputs[0], "info/index.json")) + assert "bzip2 >=1,<1.0.7.0a0" in index_json["depends"] + index_json = json.loads(package_has_file(outputs[1], "info/index.json")) + assert "bzip2 >=1.0.6,<2.0a0" in index_json["depends"] def test_subspace_selection(testing_config): @@ -292,41 +307,47 @@ def test_subspace_selection(testing_config): # there are two entries with a==coffee, so we should end up with 2 variants assert len(ms) == 2 # ensure that the zipped keys still agree - assert sum(m.config.variant['b'] == '123' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'abc' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'concrete' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'mooo' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'baaa' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'woof' for m, _, _ in ms) == 0 + assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 + assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 1 + assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 + assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 + assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 1 + assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 # test compound selection - testing_config.variant = {'a': 'coffee', 'b': '123'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + testing_config.variant = {"a": "coffee", "b": "123"} + ms = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) # there are two entries with a==coffee, but one with both 'coffee' for a, and '123' for b, # so we should end up with 1 variants assert len(ms) == 1 # ensure that the zipped keys still agree - assert sum(m.config.variant['b'] == '123' for m, _, _ in ms) == 1 - assert sum(m.config.variant['b'] == 'abc' for m, _, _ in ms) == 0 - assert sum(m.config.variant['b'] == 'concrete' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'mooo' for m, _, _ in ms) == 1 - assert sum(m.config.variant['c'] == 'baaa' for m, _, _ in ms) == 0 - assert sum(m.config.variant['c'] == 'woof' for m, _, _ in ms) == 0 + assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 + assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 0 + assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 + assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 + assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 0 + assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 # test when configuration leads to no valid combinations - only c provided, and its value # doesn't match any other existing values of c, so it's then ambiguous which zipped # values to choose - testing_config.variant = {'c': 'not an animal'} + testing_config.variant = {"c": "not an animal"} with pytest.raises(ValueError): - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + ms = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) # all zipped keys provided by the new variant. 
It should clobber the old one. - testing_config.variant = {'a': 'some', 'b': 'new', 'c': 'animal'} - ms = api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) + testing_config.variant = {"a": "some", "b": "new", "c": "animal"} + ms = api.render( + recipe, config=testing_config, finalize=False, bypass_env_check=True + ) assert len(ms) == 1 - assert ms[0][0].config.variant['a'] == 'some' - assert ms[0][0].config.variant['b'] == 'new' - assert ms[0][0].config.variant['c'] == 'animal' + assert ms[0][0].config.variant["a"] == "some" + assert ms[0][0].config.variant["b"] == "new" + assert ms[0][0].config.variant["c"] == "animal" def test_get_used_loop_vars(): @@ -338,9 +359,15 @@ def test_get_used_loop_vars(): # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable - assert m.get_used_loop_vars() == {'python', 'some_package'} + assert m.get_used_loop_vars() == {"python", "some_package"} # these are all used vars - including those with only one value (and thus not loop vars) - assert m.get_used_vars() == {'python', 'some_package', 'zlib', 'pthread_stubs', 'target_platform'} + assert m.get_used_vars() == { + "python", + "some_package", + "zlib", + "pthread_stubs", + "target_platform", + } def test_reprovisioning_source(): @@ -380,7 +407,7 @@ def test_reduced_hashing_behavior(testing_config): bypass_env_check=True, )[0][0] assert not m.get_hash_contents() - assert not re.search('h[0-9a-f]{%d}' % testing_config.hash_length, m.build_id()) + assert not re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) def test_variants_used_in_jinja2_conditionals(): @@ -390,8 +417,8 @@ def test_variants_used_in_jinja2_conditionals(): bypass_env_check=True, ) assert len(ms) == 2 - assert sum(m.config.variant['blas_impl'] == 'mkl' for m, _, _ in ms) == 1 - assert sum(m.config.variant['blas_impl'] == 'openblas' for m, _, _ in ms) == 1 + assert sum(m.config.variant["blas_impl"] == "mkl" for m, _, _ in ms) == 1 + assert sum(m.config.variant["blas_impl"] == "openblas" for m, _, _ in ms) == 1 def test_build_run_exports_act_on_host(caplog): @@ -411,24 +438,24 @@ def test_detect_variables_in_build_and_output_scripts(): arch="64", ) for m, _, _ in ms: - if m.name() == 'test_find_used_variables_in_scripts': + if m.name() == "test_find_used_variables_in_scripts": used_vars = m.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' in used_vars - assert 'OUTPUT_SELECTOR_VAR' not in used_vars - assert 'BASH_VAR1' in used_vars - assert 'BASH_VAR2' in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' not in used_vars + assert "SELECTOR_VAR" in used_vars + assert "OUTPUT_SELECTOR_VAR" not in used_vars + assert "BASH_VAR1" in used_vars + assert "BASH_VAR2" in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" not in used_vars else: used_vars = m.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' not in used_vars - assert 'OUTPUT_SELECTOR_VAR' in used_vars - assert 'BASH_VAR1' not in used_vars - assert 'BASH_VAR2' not in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' in used_vars + assert "SELECTOR_VAR" not in used_vars + assert "OUTPUT_SELECTOR_VAR" in used_vars + assert "BASH_VAR1" not in used_vars + assert "BASH_VAR2" not in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" in used_vars # on windows, we find variables in bat scripts as well as shell scripts 
ms = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), @@ -436,25 +463,25 @@ def test_detect_variables_in_build_and_output_scripts(): arch="64", ) for m, _, _ in ms: - if m.name() == 'test_find_used_variables_in_scripts': + if m.name() == "test_find_used_variables_in_scripts": used_vars = m.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' in used_vars - assert 'OUTPUT_SELECTOR_VAR' not in used_vars - assert 'BASH_VAR1' in used_vars - assert 'BASH_VAR2' in used_vars + assert "SELECTOR_VAR" in used_vars + assert "OUTPUT_SELECTOR_VAR" not in used_vars + assert "BASH_VAR1" in used_vars + assert "BASH_VAR2" in used_vars # bat is in addition to bash, not instead of - assert 'BAT_VAR' in used_vars - assert 'OUTPUT_VAR' not in used_vars + assert "BAT_VAR" in used_vars + assert "OUTPUT_VAR" not in used_vars else: used_vars = m.get_used_vars() assert used_vars - assert 'SELECTOR_VAR' not in used_vars - assert 'OUTPUT_SELECTOR_VAR' in used_vars - assert 'BASH_VAR1' not in used_vars - assert 'BASH_VAR2' not in used_vars - assert 'BAT_VAR' not in used_vars - assert 'OUTPUT_VAR' in used_vars + assert "SELECTOR_VAR" not in used_vars + assert "OUTPUT_SELECTOR_VAR" in used_vars + assert "BASH_VAR1" not in used_vars + assert "BASH_VAR2" not in used_vars + assert "BAT_VAR" not in used_vars + assert "OUTPUT_VAR" in used_vars def test_target_platform_looping(): @@ -466,7 +493,10 @@ def test_target_platform_looping(): assert len(outputs) == 2 -@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}") +@pytest.mark.skipif( + on_mac and platform.machine() == "arm64", + reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}", +) # TODO Remove the above skip decorator once https://github.com/conda/conda-build/issues/4717 is resolved def test_numpy_used_variable_looping(): outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) @@ -474,18 +504,24 @@ def test_numpy_used_variable_looping(): def test_exclusive_config_files(): - with open('conda_build_config.yaml', 'w') as f: - yaml.dump({'abc': ['someval'], 'cwd': ['someval']}, f, default_flow_style=False) - os.makedirs('config_dir') - with open(os.path.join('config_dir', 'config-0.yaml'), 'w') as f: - yaml.dump({'abc': ['super_0'], 'exclusive_0': ['0'], 'exclusive_both': ['0']}, - f, default_flow_style=False) - with open(os.path.join('config_dir', 'config-1.yaml'), 'w') as f: - yaml.dump({'abc': ['super_1'], 'exclusive_1': ['1'], 'exclusive_both': ['1']}, - f, default_flow_style=False) + with open("conda_build_config.yaml", "w") as f: + yaml.dump({"abc": ["someval"], "cwd": ["someval"]}, f, default_flow_style=False) + os.makedirs("config_dir") + with open(os.path.join("config_dir", "config-0.yaml"), "w") as f: + yaml.dump( + {"abc": ["super_0"], "exclusive_0": ["0"], "exclusive_both": ["0"]}, + f, + default_flow_style=False, + ) + with open(os.path.join("config_dir", "config-1.yaml"), "w") as f: + yaml.dump( + {"abc": ["super_1"], "exclusive_1": ["1"], "exclusive_both": ["1"]}, + f, + default_flow_style=False, + ) exclusive_config_files = ( - os.path.join('config_dir', 'config-0.yaml'), - os.path.join('config_dir', 'config-1.yaml'), + os.path.join("config_dir", "config-0.yaml"), + os.path.join("config_dir", "config-1.yaml"), ) output = api.render( os.path.join(variants_dir, "exclusive_config_file"), @@ -493,15 +529,15 @@ def test_exclusive_config_files(): )[0][0] variant = output.config.variant # is cwd ignored? 
- assert 'cwd' not in variant + assert "cwd" not in variant # did we load the exclusive configs? - assert variant['exclusive_0'] == '0' - assert variant['exclusive_1'] == '1' + assert variant["exclusive_0"] == "0" + assert variant["exclusive_1"] == "1" # does later exclusive config override initial one? - assert variant['exclusive_both'] == '1' + assert variant["exclusive_both"] == "1" # does recipe config override exclusive? - assert 'unique_to_recipe' in variant - assert variant['abc'] == '123' + assert "unique_to_recipe" in variant + assert variant["abc"] == "123" def test_exclusive_config_file(): @@ -518,15 +554,18 @@ def test_exclusive_config_file(): )[0][0] variant = output.config.variant # is cwd ignored? - assert 'cwd' not in variant + assert "cwd" not in variant # did we load the exclusive config - assert 'exclusive' in variant + assert "exclusive" in variant # does recipe config override exclusive? - assert 'unique_to_recipe' in variant - assert variant['abc'] == '123' + assert "unique_to_recipe" in variant + assert variant["abc"] == "123" -@pytest.mark.skipif(on_mac and platform.machine() == "arm64", reason="M1 Mac-specific file system error related to this test") +@pytest.mark.skipif( + on_mac and platform.machine() == "arm64", + reason="M1 Mac-specific file system error related to this test", +) def test_inner_python_loop_with_output(testing_config): outputs = api.get_output_file_paths( os.path.join(variants_dir, "test_python_as_subpackage_loop"), @@ -534,9 +573,9 @@ def test_inner_python_loop_with_output(testing_config): ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 testing_config.variant_config_files = [ os.path.join( @@ -549,9 +588,9 @@ def test_inner_python_loop_with_output(testing_config): ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 testing_config.variant_config_files = [ os.path.join( @@ -566,9 +605,9 @@ def test_inner_python_loop_with_output(testing_config): ) outputs = [os.path.basename(out) for out in outputs] assert len(outputs) == 5 - assert len([out for out in outputs if out.startswith('tbb-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb-devel-2018')]) == 1 - assert len([out for out in outputs if out.startswith('tbb4py-2018')]) == 3 + assert len([out for out in outputs if out.startswith("tbb-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb-devel-2018")]) == 1 + assert len([out for out in outputs if out.startswith("tbb4py-2018")]) == 3 def test_variant_as_dependency_name(testing_config): @@ -597,13 +636,15 @@ def test_different_git_vars(): 
assert "1.21.11" in versions -@pytest.mark.skipif(sys.platform != "linux", reason="recipe uses a unix specific script") +@pytest.mark.skipif( + sys.platform != "linux", reason="recipe uses a unix specific script" +) def test_top_level_finalized(testing_config): # see https://github.com/conda/conda-build/issues/3618 recipe = os.path.join(variants_dir, "30_top_level_finalized") outputs = api.build(recipe, config=testing_config) - xzcat_output = package_has_file(outputs[0], 'xzcat_output') - assert '5.2.3' in xzcat_output + xzcat_output = package_has_file(outputs[0], "xzcat_output") + assert "5.2.3" in xzcat_output def test_variant_subkeys_retained(): @@ -614,6 +655,7 @@ def test_variant_subkeys_retained(): )[0][0] found_replacements = False from conda_build.build import get_all_replacements + for variant in m.config.variants: found_replacements = get_all_replacements(variant) assert len(found_replacements), "Did not find replacements" diff --git a/tests/utils.py b/tests/utils.py index 8c839a6c3e..4819d3c76b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,14 +3,14 @@ from __future__ import annotations import os -from pathlib import Path import shlex import sys +from pathlib import Path from typing import Generator from conda.common.compat import on_mac -from conda_build.metadata import MetaData +from conda_build.metadata import MetaData tests_path = Path(__file__).parent metadata_path = tests_path / "test-recipes" / "metadata" From d558fa9e216cc94cb26857fae564b8fab99ac54b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Mar 2023 12:25:11 -0500 Subject: [PATCH 094/366] Ignore auto format commit in git history --- .git-blame-ignore-revs | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..1cf332271a --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,11 @@ +# git revs to ignore when exploring the repo history with git blame +# usage: +# * via args: `git blame --ignore-revs-file .git-blame-ignore-revs $file` +# * via settings `git config --local blame.ignoreRevsFile .git-blame-ignore-revs` +# +# useful: +# * mark skipped lines with "?": `git config --global blame.markIgnoredLines true` +# * mark lines added by skipped commit with "*": `git config --global blame.markUnblamableLines true` + +# black & isort auto format (#4836) +4131968d4ae902e6419028bf6c5102f3e061a950 From aae62cf84225eb18b9e07964130311ae5a49aa6b Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 3 Apr 2023 22:32:21 +0200 Subject: [PATCH 095/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4827)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/issues.yml | 2 +- .github/workflows/lock.yml | 32 ++++---------- .github/workflows/stale.yml | 85 ++++++++++++++++++++---------------- HOW_WE_USE_GITHUB.md | 10 +++++ rever.xsh | 2 +- 5 files changed, 69 insertions(+), 62 deletions(-) diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index b4e6de62f3..ae54f5fdea 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -28,7 +28,7 @@ jobs: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} # add [pending::support], if still open - - uses: actions-ecosystem/action-add-labels@v1.1.0 + - uses: actions-ecosystem/action-add-labels@v1.1.3 if: 
github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index d46934651a..03a6e6367e 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -4,12 +4,6 @@ on: # NOTE: github.event is workflow_dispatch payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch workflow_dispatch: - # inputs: - # dryrun: - # description: "dryrun: Preview locking issues/prs without marking them (true|false)" - # required: true - # type: boolean - # default: true schedule: - cron: 0 6 * * * @@ -23,35 +17,27 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - # - id: read_yaml - # uses: conda/actions/read-yaml@v22.2.1 - # with: - # path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: dessant/lock-threads@v2 + - uses: dessant/lock-threads@v4 with: # Number of days of inactivity before a closed issue is locked - issue-lock-inactive-days: 365 + issue-inactive-days: 365 # Do not lock issues created before a given timestamp, value must follow ISO 8601 - issue-exclude-created-before: '' + exclude-issue-created-before: '' # Do not lock issues with these labels, value must be a comma separated list of labels or '' - issue-exclude-labels: '' + exclude-any-issue-labels: '' # Labels to add before locking an issue, value must be a comma separated list of labels or '' - issue-lock-labels: 'locked' - # Comment to post before locking an issue - # issue-lock-comment: ${{ fromJSON(steps.read_yaml.outputs.value)['lock-issue'] }} + add-issue-labels: 'locked' # Reason for locking an issue, value must be one of resolved, off-topic, too heated, spam or '' issue-lock-reason: 'resolved' # Number of days of inactivity before a closed pull request is locked - pr-lock-inactive-days: 365 + pr-inactive-days: 365 # Do not lock pull requests created before a given timestamp, value must follow ISO 8601 - pr-exclude-created-before: '' + exclude-pr-created-before: '' # Do not lock pull requests with these labels, value must be a comma separated list of labels or '' - pr-exclude-labels: '' + exclude-any-pr-labels: '' # Labels to add before locking a pull request, value must be a comma separated list of labels or '' - pr-lock-labels: 'locked' - # Comment to post before locking a pull request - # pr-lock-comment: ${{ fromJSON(steps.read_yaml.outputs.value)['lock-pr'] }} + add-pr-labels: 'locked' # Reason for locking a pull request, value must be one of resolved, off-topic, too heated, spam or '' pr-lock-reason: 'resolved' diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index ce974c0d05..660cd6332f 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -14,74 +14,85 @@ on: schedule: - cron: 0 4 * * * +permissions: + issues: write + pull-requests: write + jobs: stale: if: '!github.event.repository.fork' runs-on: ubuntu-latest strategy: matrix: - # The issues labeled "support" have a more aggressive stale/close timeline from the rest - only-issue-labels: ['type::support', ''] + include: + - only-issue-labels: '' + days-before-issue-stale: 365 + days-before-issue-close: 30 + # [type::support] issues have a more aggressive stale/close timeline + - only-issue-labels: type::support + days-before-issue-stale: 21 + days-before-issue-close: 7 steps: - - id: read_yaml - uses: conda/actions/read-yaml@v22.9.0 + - uses: conda/actions/read-yaml@v22.9.0 + id: read_yaml with: path: 
https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v4 + + - uses: actions/stale@v7 id: stale with: - # Idle number of days before marking issues stale (default: 60) - days-before-issue-stale: ${{ matrix.only-issue-labels && 21 || 365 }} - # Idle number of days before closing stale issues/PRs (default: 7) - days-before-issue-close: ${{ matrix.only-issue-labels && 7 || 30 }} - # Idle number of days before marking PRs stale (default: 60) + # Only issues with these labels are checked whether they are stale + only-issue-labels: ${{ matrix.only-issue-labels }} + + # Idle number of days before marking issues stale + days-before-issue-stale: ${{ matrix.days-before-issue-stale }} + # Idle number of days before closing stale issues/PRs + days-before-issue-close: ${{ matrix.days-before-issue-close }} + # Idle number of days before marking PRs stale days-before-pr-stale: 365 - # Idle number of days before closing stale PRs (default: 7) + # Idle number of days before closing stale PRs days-before-pr-close: 30 # Comment on the staled issues stale-issue-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-issue'] }} - # Comment on the staled issues while closed - # close-issue-message: ${{ fromJSON(steps.read_yaml.outputs.value)['close-issue'] }} - # Comment on the staled PRs - stale-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-pr'] }} - # Comment on the staled PRs while closed - # close-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['close-pr'] }} # Label to apply on staled issues - stale-issue-label: 'stale' + stale-issue-label: stale # Label to apply on closed issues - close-issue-label: 'stale::closed' + close-issue-label: stale::closed + # Reason to use when closing issues + close-issue-reason: not_planned + + # Comment on the staled PRs + stale-pr-message: ${{ fromJSON(steps.read_yaml.outputs.value)['stale-pr'] }} # Label to apply on staled PRs - stale-pr-label: 'stale' + stale-pr-label: stale # Label to apply on closed PRs - close-pr-label: 'stale::closed' + close-pr-label: stale::closed + # Reason to use when closing PRs + close-pr-reason: not_planned - # Issues with these labels will never be considered stale - exempt-issue-labels: 'stale::recovered,epic' - # Issues with these labels will never be considered stale - exempt-pr-labels: 'stale::recovered,epic' - # Only issues with these labels are checked whether they are stale - only-issue-labels: ${{ matrix.only-issue-labels }} - - # Max number of operations per run - operations-per-run: ${{ secrets.STALE_OPERATIONS_PER_RUN || 100 }} # Remove stale label from issues/PRs on updates/comments remove-stale-when-updated: true - # Add specified labels to issues/PRs when they become unstale - labels-to-add-when-unstale: 'stale::recovered' - labels-to-remove-when-unstale: 'stale,stale::closed' + labels-to-add-when-unstale: stale::recovered + # Remove specified labels to issues/PRs when they become unstale + labels-to-remove-when-unstale: stale,stale::closed - # Dry-run (default: false) + # Max number of operations per run + operations-per-run: ${{ secrets.STALE_OPERATIONS_PER_RUN || 100 }} + # Dry-run debug-only: ${{ github.event.inputs.dryrun || false }} - # Order to get issues/PRs (default: false) + # Order to get issues/PRs ascending: true - # Delete branch after closing a stale PR (default: false) + # Delete branch after closing a stale PR delete-branch: false + # Issues with these labels will never be considered stale + exempt-issue-labels: stale::recovered,epic + # 
Issues with these labels will never be considered stale + exempt-pr-labels: stale::recovered,epic # Exempt all issues/PRs with milestones from stale exempt-all-milestones: true - # Assignees on issues/PRs exempted from stale exempt-assignees: mingwandroid diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index 491e92aa04..12570640de 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -15,6 +15,8 @@ [workflow-sync]: https://github.com/conda/infra/blob/main/.github/workflows/sync.yml [labels-global]: https://github.com/conda/infra/blob/main/.github/global.yml +[signing-commits]: https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits + [workflow-issues]: /.github/workflows/issues.yml [workflow-project]: /.github/workflows/project.yml @@ -30,6 +32,7 @@ This document seeks to outline how we as a community use GitHub Issues to track Topics: - [What is Issue Sorting?](#what-is-issue-sorting) + - [Commit signing](#commit-signing) - [Types of tickets](#types-of-tickets) - [Normal Ticket/Issue](#normal-ticketissue) - [Epics](#epics) @@ -230,6 +233,13 @@ please post details to the [Nucleus forums](https://community.anaconda.cloud/). In order to not have to manually type or copy/paste the above repeatedly, please note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. +### Commit signing + +For all conda maintainers, we require commit signing and strongly recommend it for all others wishing to contribute to conda +related projects. More information about how to set this up within GitHub can be found here: + +- [Signing Commits][signing-commits] + ### Types of Tickets #### Standard Ticket/Issue diff --git a/rever.xsh b/rever.xsh index a0d145cebd..644107dfd9 100644 --- a/rever.xsh +++ b/rever.xsh @@ -1,7 +1,7 @@ $ACTIVITIES = ["authors", "changelog"] # Basic settings -$PROJECT = $GITHUB_REPO = $(basename $(git remote get-url origin)).split('.')[0] +$PROJECT = $GITHUB_REPO = $(basename $(git remote get-url origin)).split('.')[0].strip() $GITHUB_ORG = "conda" # Authors settings From 3d35b7a4e97acc798d9f8b192137995b8e601f43 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 22:32:32 +0200 Subject: [PATCH 096/366] [pre-commit.ci] pre-commit autoupdate (#4830) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/Lucas-C/pre-commit-hooks: v1.4.2 → v1.5.1](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.4.2...v1.5.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 964d6b446c..3141ec928b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: check-merge-conflict # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.4.2 + rev: v1.5.1 hooks: # auto inject license blurb - id: insert-license From a9a3bd5b235bd80bbbe44a8ada33824802298795 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Tue, 4 Apr 2023 15:58:15 -0400 Subject: [PATCH 097/366] Misc. 
documentation fixes (#4734) * Update conda-inspect docstring, edit conda-render command docstring, change wording for --no-deps option w/ pip install command, update Recipe documentation file, make changes to Recipe, Package Naming Convention, and Building Conda Packages pages in docs, and fix package naming conventions formatting --------- Co-authored-by: Katherine Kinnaman Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- conda_build/cli/main_inspect.py | 4 +- conda_build/cli/main_render.py | 9 +- docs/source/concepts/package-naming-conv.rst | 88 ++++++------------- docs/source/concepts/recipe.rst | 58 ++++++------ docs/source/user-guide/getting-started.rst | 16 ++-- .../tutorials/building-conda-packages.rst | 6 +- docs/source/user-guide/wheel-files.rst | 4 +- 7 files changed, 77 insertions(+), 108 deletions(-) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 9bdded9128..472bd2f3af 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -164,8 +164,8 @@ def parse_args(args): hash_inputs = subcommand.add_parser( "hash-inputs", - help="Show data used to compute hash identifier (h????) for package", - description="Show data used to compute hash identifier (h????) for package", + help="Show data used to compute hash identifier for package", + description="Show data used to compute hash identifier for package", ) hash_inputs.add_argument( "packages", diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 5aa7b8f3fd..15cb3bdc4b 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -45,11 +45,10 @@ def __call__(self, parser, namespace, values, option_string=None): def get_render_parser(): p = ArgumentParser( description=""" -Tool for building conda packages. A conda package is a binary tarball -containing system-level libraries, Python modules, executable programs, or -other components. conda keeps track of dependencies between packages and -platform specifics, making it simple to create working environments from - different sets of packages.""", +Tool for expanding the template meta.yml file (containing Jinja syntax and +selectors) into the rendered meta.yml files. The template meta.yml file is +combined with user-specified configurations, static recipe files, and +environment information to generate the rendered meta.yml files.""", conflict_handler="resolve", ) p.add_argument( diff --git a/docs/source/concepts/package-naming-conv.rst b/docs/source/concepts/package-naming-conv.rst index 89683e7b24..ef083430a9 100644 --- a/docs/source/concepts/package-naming-conv.rst +++ b/docs/source/concepts/package-naming-conv.rst @@ -5,67 +5,33 @@ Package naming conventions To facilitate communication and documentation, conda observes the package naming conventions listed below. -.. _package_name: -.. index:: - pair: terminology; package name - seealso: name; package name - -Package name -============ - -The name of a package, without any reference to a particular -version. Conda package names are normalized and they may contain -only lowercase alpha characters, numeric digits, underscores, -hyphens, or dots. In usage documentation, these are referred to -by ``package_name``. - -.. _package_version: -.. index:: - pair: terminology; package version - seealso: name; package version - -Package version -=============== - -A version number or string, often similar to ``X.Y`` or -``X.Y.Z``, but it may take other forms as well. - -.. 
_build_string: -.. index:: - pair: terminology; build string - seealso: name; build string - -Build string -============ - -An arbitrary string that identifies a particular build of a -package for conda. It may contain suggestive mnemonics, but -these are subject to change, and you should not rely on it or try -to parse it for any specific information. - -.. _canonical_name: -.. index:: - pair: terminology; canonical name - seealso: name; canonical name - -Canonical name -============== - -The package name, version, and build string joined together by -hyphens: name-version-buildstring. In usage documentation, these -are referred to by ``canonical_name``. - -.. _filename: -.. index:: - pair: terminology; filename - -Filename -======== - -Conda package filenames are canonical names, plus the suffix -``.tar.bz2`` or ``.conda``. - -The following figure compares a canonical name to a file name: +**Package name** + The name of a package, without any reference to a particular + version. Conda package names are normalized and they may contain + only lowercase alpha characters, numeric digits, underscores, + hyphens, or dots. In usage documentation, these are referred to + by ``package_name``. + +**Package version** + A version number or string, often similar to ``X.Y`` or + ``X.Y.Z``, but it may take other forms as well. + +**Build string** + An arbitrary string that identifies a particular build of a + package for conda. It may contain suggestive mnemonics, but + these are subject to change, and you should not rely on it or try + to parse it for any specific information. + +**Canonical name** + The package name, version, and build string joined together by + hyphens: name-version-buildstring. In usage documentation, these + are referred to by ``canonical_name``. + +**Filename** + Conda package filenames are canonical names, plus the suffix + ``.tar.bz2`` or ``.conda``. + +The following figure compares a canonical name to a filename: .. figure:: /img/conda_names.png :align: center diff --git a/docs/source/concepts/recipe.rst b/docs/source/concepts/recipe.rst index 91b323324f..39006e9f0c 100644 --- a/docs/source/concepts/recipe.rst +++ b/docs/source/concepts/recipe.rst @@ -32,6 +32,15 @@ is a flat directory that contains the following files: be generated by the build scripts. Examples are icon files, readme files and build notes. +Review :doc:`../resources/define-metadata` to see a breakdown of the +components of a recipe, including: + + * Package name + * Package version + * Descriptive metadata + * Where to obtain source code + * How to test the package + .. tip:: When you use the :ref:`conda skeleton ` command, the first 3 files — ``meta.yaml``, ``build.sh``, and @@ -71,8 +80,7 @@ Conda-build performs the following steps: * Deletes the build environment and source directory to ensure that the new conda package does not inadvertantly depend on artifacts not included in the package. - * Creates a test environment with the package and its - dependencies. + * Creates a test environment with the package and its dependencies. * Runs the test scripts. @@ -127,7 +135,7 @@ and install all of the build and run dependencies in that environment. Conda-build will indicate where you can successfully build the package. The prefix will take the form:: - /conda-bld//h_env_placeholder… + /conda-bld//h_env_placeholder… Conda-build downloads your package source and then builds the conda package in the context of the build environment. 
For example, you may @@ -155,22 +163,22 @@ If you do not define the script section, then you can create a Prefix replacement ------------------ -When the build environment is created, it is in a placeholder prefix. -When the package is all bundled up, the prefix is set to a "dummy" prefix. -When conda is ready to install the package, it rewrites the dummy -prefix with the correct one. +The build environment is created in a placeholder prefix. +When the package is bundled, the prefix is set to a "dummy" prefix. +Once conda is ready to install the package, it rewrites the dummy +prefix with the final one. Testing ------- -Once a package is built, conda-build will test it. To do this, it +Once a package is built, conda-build has the ability to test it. To do this, it creates another environment and installs the conda package. The form of this prefix is:: - /conda-bld//_test_env_placeholder… + /conda-bld//_test_env_placeholder… -At this point, conda-build has all of the info from ``meta.yaml`` about +At this point, conda-build has all of the information from ``meta.yaml`` about what its runtime dependencies are, so those dependencies are installed as well. This generates a test runner script with a reference to the testing ``meta.yaml`` that is created. See the :ref:`meta-test` for @@ -182,36 +190,26 @@ Output metadata After the package is built and tested, conda-build cleans up the environments created during prior steps and outputs the metadata. The recipe for the package is also added in the output metadata. The metadata directory -is on the top level of the tarball in the ``info`` directory. +is at the top level of the package contents in the ``info`` directory. The metadata contains information about the dependencies of the package and a list of where all of the files in the package go when it is installed. Conda reads that metadata when it needs to install. Running ``conda install`` causes conda to: - * reach out to the repo data containing the dependencies, - * guess the right dependencies, - * install a list of packages, - * unpack the tarball to look at the info, - * verify the file based on metadata in the package, and then - * go through each file in the package and put it in the right location. - - -More information -================ - -Review :doc:`../resources/define-metadata` to see a breakdown of the -components of a recipe, including: - - * Package name - * Package version - * Descriptive metadata - * Where to obtain source code - * How to test the package +#. Reach out to the repodata containing the dependencies for the package(s) you are installing. +#. Determine the correct dependencies. +#. Install a list of additional packages determined by those dependencies. +#. For each dependency package being installed: + #. Unpack the tarball to look at the information contained within. + #. Verify the file based on metadata in the package. + #. Go through each file in the package and put it in the right location. +For additional information on ``conda install``, please visit the conda documentation `deep dive`_ page on that topic. .. _`conda packages`: https://conda.io/projects/conda/en/latest/user-guide/concepts/packages.html .. _`conda-recipes`: https://github.com/continuumio/conda-recipes .. _`AnacondaRecipes`: https://github.com/AnacondaRecipes/aggregate .. _`conda-forge`: https://github.com/conda-forge/feedstocks/tree/main/feedstocks .. _PyPI: https://pypi.python.org/pypi +.. 
_`deep dive`: https://docs.conda.io/projects/conda/en/stable/dev-guide/deep-dives/install.html diff --git a/docs/source/user-guide/getting-started.rst b/docs/source/user-guide/getting-started.rst index 5642b0aa90..113632fe73 100644 --- a/docs/source/user-guide/getting-started.rst +++ b/docs/source/user-guide/getting-started.rst @@ -14,12 +14,18 @@ collection helpful. Prerequisites ============= -Before starting the tutorials, you will need to -install `Miniconda or Anaconda `_, -conda-build, and Git. +Before starting the tutorials, you need to install: -After you've installed Miniconda or Anaconda, you can use conda -to install conda-build and Git. +- `Miniconda or Anaconda `_ +- conda-build +- Git + +The most straightforward way to do this is to install Miniconda or +Anaconda, which contain conda, and then use conda to install conda-build +and Git. Make sure you install these packages into a new environment +and not your base environment.:: + + conda create -n my-conda-build-environment conda-build git .. _submissions: diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index c0d7093422..31ffe7d8ac 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -42,9 +42,9 @@ Before you start Before you start, make sure you have installed: - * `Conda`_. - * :ref:`Conda-build `. - * Any compilers you want. + * `Conda`_ + * :ref:`Conda-build ` + * Any compilers you want .. _toolkit: diff --git a/docs/source/user-guide/wheel-files.rst b/docs/source/user-guide/wheel-files.rst index 2226f67426..36551f3192 100644 --- a/docs/source/user-guide/wheel-files.rst +++ b/docs/source/user-guide/wheel-files.rst @@ -42,8 +42,8 @@ file does not download or list the .whl file. It is important to ``pip install`` only the one desired package. Whenever possible, install dependencies with conda and not pip. -We strongly recommend using the ``--no-deps`` option in the ``pip install`` -command. +You must use the ``--no-deps`` option in the ``pip install`` command in order +to avoid bundling dependencies into your conda-package. If you run ``pip install`` without the ``--no-deps`` option, pip will often install dependencies in your conda recipe and those dependencies will become From 26d1f3c197f399127e7be4608c633dda6980dd4b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 6 Apr 2023 22:50:50 +0200 Subject: [PATCH 098/366] `create_test.py`: Add type hints and refactor to use pathlib (#4826) Co-authored-by: Daniel Holth --- conda_build/create_test.py | 131 +++++++++++----------- docs/source/resources/define-metadata.rst | 3 +- tests/test_create_test.py | 8 +- 3 files changed, 70 insertions(+), 72 deletions(-) diff --git a/conda_build/create_test.py b/conda_build/create_test.py index b3dc6e11a4..dbad7bf083 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -3,44 +3,42 @@ """ Module to handle generating test files. """ - +from __future__ import annotations import json import os -from os.path import exists, join +from os.path import basename, exists, isfile, join +from pathlib import Path -from conda_build.utils import copy_into, ensure_list, glob, on_win, rm_rf +from .metadata import MetaData +from .utils import copy_into, ensure_list, on_win, rm_rf -def create_files(m, test_dir=None): - """ - Create the test files for pkg in the directory given. The resulting - test files are configuration (i.e. 
platform, architecture, Python and - numpy version, ...) independent. - Return False, if the package has no tests (for any configuration), and - True if it has. +def create_files(m: MetaData, test_dir: Path) -> bool: """ - if not test_dir: - test_dir = m.config.test_dir - has_files = False - if not os.path.isdir(test_dir): - os.makedirs(test_dir) + Copy all test files from recipe over into testing directory. - for pattern in ensure_list(m.get_value("test/files", [])): - has_files = True - files = glob(join(m.path, pattern.replace("/", os.sep))) - for f in files: + :param metadata: The meta.yaml object. + :param test_dir: The testing directory. + :return: Whether any test scripts copied were copied over. + """ + patterns = ensure_list(m.get_value("test/files", [])) + for pattern in patterns: + for file in Path(m.path).rglob(pattern): copy_into( - f, - f.replace(m.path, test_dir), + file, + test_dir / file.relative_to(m.path), m.config.timeout, locking=False, clobber=True, ) - return has_files + return bool(patterns) -def _get_output_script_name(m, win_status): +def _get_output_script_name( + m: MetaData, + win_status: bool, +) -> tuple[os.PathLike, os.PathLike]: # the way this works is that each output needs to explicitly define a test script to run. # They do not automatically pick up run_test.*, but can be pointed at that explicitly. @@ -58,14 +56,8 @@ def _get_output_script_name(m, win_status): return src_name, dst_name -def create_shell_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir - - win_status = [on_win] - - if m.noarch: - win_status = [False, True] +def create_shell_files(m: MetaData, test_dir: os.PathLike) -> list[str]: + win_status = [False, True] if m.noarch else [on_win] shell_files = [] for status in win_status: @@ -74,9 +66,12 @@ def create_shell_files(m, test_dir=None): if exists(join(m.path, src_name)): # disable locking to avoid locking a temporary directory (the extracted test folder) copy_into( - join(m.path, src_name), dest_file, m.config.timeout, locking=False + join(m.path, src_name), + dest_file, + m.config.timeout, + locking=False, ) - if os.path.basename(test_dir) != "test_tmp": + if basename(test_dir) != "test_tmp": commands = ensure_list(m.get_value("test/commands", [])) if commands: with open(join(dest_file), "a") as f: @@ -93,12 +88,17 @@ def create_shell_files(m, test_dir=None): f.write("exit /B 0\n") else: f.write("exit 0\n") - if os.path.isfile(dest_file): + if isfile(dest_file): shell_files.append(dest_file) return shell_files -def _create_test_files(m, test_dir, ext, comment_char="# "): +def _create_test_files( + m: MetaData, + test_dir: os.PathLike, + ext: str, + comment_char: str = "# ", +) -> tuple[os.PathLike, bool]: name = "run_test" + ext if m.is_output: name = "" @@ -113,8 +113,8 @@ def _create_test_files(m, test_dir, ext, comment_char="# "): out_file = join(test_dir, "run_test" + ext) if name: - test_file = os.path.join(m.path, name) - if os.path.isfile(test_file): + test_file = join(m.path, name) + if isfile(test_file): with open(out_file, "w") as fo: fo.write( f"{comment_char} tests for {m.dist()} (this is a generated file);\n" @@ -134,15 +134,11 @@ def _create_test_files(m, test_dir, ext, comment_char="# "): fo.write("\nprint('===== %s OK =====');\n" % m.dist()) return ( out_file, - bool(name) - and os.path.isfile(out_file) - and os.path.basename(test_file) != "no-file", + bool(name) and isfile(out_file) and basename(test_file) != "no-file", ) -def create_py_files(m, test_dir=None): - if not test_dir: - test_dir = 
m.config.test_dir +def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool: tf, tf_exists = _create_test_files(m, test_dir, ".py") # Ways in which we can mark imports as none python imports @@ -182,9 +178,7 @@ def create_py_files(m, test_dir=None): return tf if (tf_exists or imports) else False -def create_r_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir +def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool: tf, tf_exists = _create_test_files(m, test_dir, ".r") imports = None @@ -211,10 +205,9 @@ def create_r_files(m, test_dir=None): return tf if (tf_exists or imports) else False -def create_pl_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir +def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: tf, tf_exists = _create_test_files(m, test_dir, ".pl") + imports = None if m.name().startswith("perl-"): imports = ensure_list(m.get_value("test/imports", [])) @@ -252,10 +245,9 @@ def create_pl_files(m, test_dir=None): return tf if (tf_exists or imports) else False -def create_lua_files(m, test_dir=None): - if not test_dir: - test_dir = m.config.test_dir +def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool: tf, tf_exists = _create_test_files(m, test_dir, ".lua") + imports = None if m.name().startswith("lua-"): imports = ensure_list(m.get_value("test/imports", [])) @@ -276,24 +268,29 @@ def create_lua_files(m, test_dir=None): return tf if (tf_exists or imports) else False -def create_all_test_files(m, test_dir=None): +def create_all_test_files( + m: MetaData, + test_dir: os.PathLike | None = None, +) -> tuple[bool, bool, bool, bool, bool, list[str]]: if test_dir: + # this happens when we're finishing the build rm_rf(test_dir) - os.makedirs(test_dir) - # this happens when we're finishing the build. - test_deps = m.meta.get("test", {}).get("requires", []) - if test_deps: - with open(os.path.join(test_dir, "test_time_dependencies.json"), "w") as f: - json.dump(test_deps, f) + os.makedirs(test_dir, exist_ok=True) + test_requires = ensure_list(m.get_value("test/requires", [])) + if test_requires: + Path(test_dir, "test_time_dependencies.json").write_text( + json.dumps(test_requires) + ) else: # this happens when we're running a package's tests test_dir = m.config.test_dir + os.makedirs(test_dir, exist_ok=True) - files = create_files(m, test_dir) - - pl_files = create_pl_files(m, test_dir) - py_files = create_py_files(m, test_dir) - r_files = create_r_files(m, test_dir) - lua_files = create_lua_files(m, test_dir) - shell_files = create_shell_files(m, test_dir) - return files, pl_files, py_files, r_files, lua_files, shell_files + return ( + create_files(m, Path(test_dir)), + create_pl_files(m, test_dir), + create_py_files(m, test_dir), + create_r_files(m, test_dir), + create_lua_files(m, test_dir), + create_shell_files(m, test_dir), + ) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 59d9444e51..e09c4b65b6 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1111,7 +1111,8 @@ Test files ---------- Test files that are copied from the recipe into the temporary -test directory and are needed during testing. +test directory and are needed during testing. If providing a path, +forward slashes must be used. .. 
code-block:: yaml diff --git a/tests/test_create_test.py b/tests/test_create_test.py index 67a811a640..f515391802 100644 --- a/tests/test_create_test.py +++ b/tests/test_create_test.py @@ -129,10 +129,10 @@ def test_create_run_test( ): testing_metadata.meta["package"]["name"] = name testing_metadata.meta["test"]["imports"] = imports - create_py_files(testing_metadata) - create_r_files(testing_metadata) - create_pl_files(testing_metadata) - create_lua_files(testing_metadata) + create_py_files(testing_metadata, testing_metadata.config.test_dir) + create_r_files(testing_metadata, testing_metadata.config.test_dir) + create_pl_files(testing_metadata, testing_metadata.config.test_dir) + create_lua_files(testing_metadata, testing_metadata.config.test_dir) # assert expected test file exists for ext, tests in expected.items(): From f170405a3eb94cc7dd4a32a2c4db20b917c06754 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Fri, 7 Apr 2023 14:03:51 -0400 Subject: [PATCH 099/366] Rename `ns_cfg()` to `get_selectors()` in `conda_build.metadata` (#4837) --- conda_build/jinja_context.py | 4 ++-- conda_build/metadata.py | 35 +++++++++++++++++++++++++++++++---- conda_build/variants.py | 4 ++-- 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index e77de5bf8a..61219be134 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -156,7 +156,7 @@ def __init__(self, unfiltered_loader, config): def get_source(self, environment, template): # we have circular imports here. Do a local import - from .metadata import ns_cfg, select_lines + from .metadata import get_selectors, select_lines contents, filename, uptodate = self._unfiltered_loader.get_source( environment, template @@ -164,7 +164,7 @@ def get_source(self, environment, template): return ( select_lines( contents, - ns_cfg(self.config), + get_selectors(self.config), variants_in_place=bool(self.config.variant), ), filename, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index b9941df872..1f69045294 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import copy import hashlib @@ -8,6 +10,7 @@ import re import sys import time +import warnings from collections import OrderedDict from functools import lru_cache from os.path import isfile, join @@ -76,7 +79,18 @@ used_vars_cache = {} -def ns_cfg(config): +def get_selectors(config: Config) -> dict[str, bool]: + """Aggregates selectors for use in recipe templating. + + Derives selectors from the config and variants to be injected + into the Jinja environment prior to templating. + + Args: + config (Config): The config object + + Returns: + dict[str, bool]: Dictionary of on/off selectors for Jinja + """ # Remember to update the docs of any of this changes plat = config.host_subdir d = dict( @@ -159,6 +173,15 @@ def ns_cfg(config): return d +def ns_cfg(config: Config) -> dict[str, bool]: + warnings.warn( + "`conda_build.metadata.ns_cfg` is pending deprecation and will be removed in a " + "future release. 
Please use `conda_build.metadata.get_selectors` instead.", + PendingDeprecationWarning, + ) + return get_selectors(config) + + # Selectors must be either: # - at end of the line # - embedded (anywhere) within a comment @@ -381,7 +404,11 @@ def ensure_matching_hashes(output_metadata): def parse(data, config, path=None): - data = select_lines(data, ns_cfg(config), variants_in_place=bool(config.variant)) + data = select_lines( + data, + get_selectors(config), + variants_in_place=bool(config.variant), + ) res = yamlize(data) # ensure the result is a dict if res is None: @@ -1828,7 +1855,7 @@ def _get_contents( loader = FilteredLoader(jinja2.ChoiceLoader(loaders), config=self.config) env = jinja2.Environment(loader=loader, undefined=undefined_type) - env.globals.update(ns_cfg(self.config)) + env.globals.update(get_selectors(self.config)) env.globals.update(environ.get_dict(m=self, skip_build_id=skip_build_id)) env.globals.update({"CONDA_BUILD_STATE": "RENDER"}) env.globals.update( @@ -2005,7 +2032,7 @@ def get_recipe_text( if apply_selectors: recipe_text = select_lines( recipe_text, - ns_cfg(self.config), + get_selectors(self.config), variants_in_place=bool(self.config.variant), ) return recipe_text.rstrip() diff --git a/conda_build/variants.py b/conda_build/variants.py index 4e6f47eba8..289a61385c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -125,11 +125,11 @@ def get_default_variant(config): def parse_config_file(path, config): - from conda_build.metadata import ns_cfg, select_lines + from conda_build.metadata import get_selectors, select_lines with open(path) as f: contents = f.read() - contents = select_lines(contents, ns_cfg(config), variants_in_place=False) + contents = select_lines(contents, get_selectors(config), variants_in_place=False) content = yaml.load(contents, Loader=yaml.loader.BaseLoader) or {} trim_empty_keys(content) return content From 7f3011cc5b1e73a11960162bc5d7170f8726ef07 Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 11 Apr 2023 22:18:19 -0600 Subject: [PATCH 100/366] Deprecate Python 2 silliness and use `os.makedirs(exists_ok=True)` (#4843) --- conda_build/config.py | 54 +++++++++++++++++++++++----------------- conda_build/index.py | 32 +++++++++++++----------- news/4843-config-cleanup | 20 +++++++++++++++ 3 files changed, 69 insertions(+), 37 deletions(-) create mode 100644 news/4843-config-cleanup diff --git a/conda_build/config.py b/conda_build/config.py index 50d6505ce0..fad7744d29 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -11,6 +11,7 @@ import shutil import sys import time +import warnings from collections import namedtuple from os.path import abspath, expanduser, expandvars, join @@ -57,22 +58,29 @@ def set_invocation_time(): zstd_compression_level_default = 19 -# Python2 silliness: def python2_fs_encode(strin): - return ( - strin.decode(sys.getfilesystemencoding()) if hasattr(strin, "decode") else strin + warnings.warn( + "`conda_build.config.python2_fs_encode` is pending deprecation and will be removed in a future release.", + PendingDeprecationWarning, ) + return strin -def _ensure_dir(path): +def _ensure_dir(path: os.PathLike): + """Try to ensure a directory exists + + Args: + path (os.PathLike): Path to directory + """ # this can fail in parallel operation, depending on timing. Just try to make the dir, # but don't bail if fail. 
- encpath = python2_fs_encode(path) - if not os.path.isdir(encpath): - try: - os.makedirs(encpath) - except OSError: - pass + warnings.warn( + "`conda_build.config._ensure_dir` is pending deprecation and will be removed " + "in a future release. Please use `pathlib.Path.mkdir(exist_ok=True)` or " + "`os.makedirs(exist_ok=True)` instead", + PendingDeprecationWarning, + ) + os.makedirs(path, exist_ok=True) # we need this to be accessible to the CLI, so it needs to be more static. @@ -485,7 +493,7 @@ def croot(self): self._croot = join(root_dir, "conda-bld") else: self._croot = abspath(expanduser("~/conda-bld")) - return python2_fs_encode(self._croot) + return self._croot @croot.setter def croot(self, croot): @@ -672,7 +680,7 @@ def build_id(self, _build_id): "build_id should not be an absolute path, " "to preserve croot during path joins" ) - self._build_id = python2_fs_encode(_build_id) + self._build_id = _build_id @property def prefix_length(self): @@ -766,7 +774,7 @@ def rscript_bin(self, prefix, platform): def info_dir(self): """Path to the info dir in the build prefix, where recipe metadata is stored""" path = join(self.host_prefix, "info") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property @@ -774,21 +782,21 @@ def meta_dir(self): """Path to the conda-meta dir in the build prefix, where package index json files are stored""" path = join(self.host_prefix, "conda-meta") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def broken_dir(self): """Where packages that fail the test phase are placed""" path = join(self.croot, "broken") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def bldpkgs_dir(self): """Dir where the package is saved.""" path = join(self.croot, self.host_subdir) - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property @@ -806,41 +814,41 @@ def bldpkgs_dirs(self): def src_cache(self): """Where tarballs and zip files are downloaded and stored""" path = join(self.src_cache_root, "src_cache") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def git_cache(self): """Where local clones of git sources are stored""" path = join(self.src_cache_root, "git_cache") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def hg_cache(self): """Where local clones of hg sources are stored""" path = join(self.src_cache_root, "hg_cache") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def svn_cache(self): """Where local checkouts of svn sources are stored""" path = join(self.src_cache_root, "svn_cache") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def work_dir(self): """Where the source for the build is extracted/copied to.""" path = join(self.build_folder, "work") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property def pip_cache_dir(self): path = self._pip_cache_dir or join(self.build_folder, "pip_cache") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @pip_cache_dir.setter @@ -851,7 +859,7 @@ def pip_cache_dir(self, path): def test_dir(self): """The temporary folder where test files are copied to, and where tests start execution""" path = join(self.build_folder, "test_tmp") - _ensure_dir(path) + os.makedirs(path, exist_ok=True) return path @property diff --git a/conda_build/index.py b/conda_build/index.py index 088e895e77..32eea4bc8e 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -23,11 +23,11 @@ dirname, getmtime, 
getsize, - isdir, isfile, join, splitext, ) +from pathlib import Path from uuid import uuid4 import conda_package_handling.api @@ -1179,20 +1179,24 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): json.dump(stat_cache, fh) return new_repodata - def _ensure_dirs(self, subdir): + def _ensure_dirs(self, subdir: str): + """Create cache directories within a subdir. + + Args: + subdir (str): name of the subdirectory + """ # Create all cache directories in the subdir. - ensure = lambda path: isdir(path) or os.makedirs(path) - cache_path = join(self.channel_root, subdir, ".cache") - ensure(cache_path) - ensure(join(cache_path, "index")) - ensure(join(cache_path, "about")) - ensure(join(cache_path, "paths")) - ensure(join(cache_path, "recipe")) - ensure(join(cache_path, "run_exports")) - ensure(join(cache_path, "post_install")) - ensure(join(cache_path, "icon")) - ensure(join(self.channel_root, "icons")) - ensure(join(cache_path, "recipe_log")) + cache_path = Path(self.channel_root, subdir, ".cache") + cache_path.mkdir(parents=True, exist_ok=True) + (cache_path / "index").mkdir(exist_ok=True) + (cache_path / "about").mkdir(exist_ok=True) + (cache_path / "paths").mkdir(exist_ok=True) + (cache_path / "recipe").mkdir(exist_ok=True) + (cache_path / "run_exports").mkdir(exist_ok=True) + (cache_path / "post_install").mkdir(exist_ok=True) + (cache_path / "icon").mkdir(exist_ok=True) + (cache_path / "recipe_log").mkdir(exist_ok=True) + Path(self.channel_root, "icons").mkdir(exist_ok=True) def _calculate_update_set( self, diff --git a/news/4843-config-cleanup b/news/4843-config-cleanup new file mode 100644 index 0000000000..9db85efc15 --- /dev/null +++ b/news/4843-config-cleanup @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) +* Mark `conda_build.config._ensure_dir` as pending deprecation. Use stdlib's `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) + +### Docs + +* + +### Other + +* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. 
(#4843) From 19ca75a4ebe1721fd9648fe752e8efd2b3c9419e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 12 Apr 2023 08:17:49 +0200 Subject: [PATCH 101/366] Replace makefile with Path.touch (#4844) --- tests/test_utils.py | 53 +++++++++++++++++++-------------------------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index f901cc6f36..7423bf6931 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,6 +4,7 @@ import os import subprocess import sys +from pathlib import Path from typing import NamedTuple import filelock @@ -13,17 +14,6 @@ from conda_build.exceptions import BuildLockError -def makefile(name, contents=""): - name = os.path.abspath(name) - path = os.path.dirname(name) - - if not os.path.exists(path): - os.makedirs(path) - - with open(name, "w") as f: - f.write(contents) - - @pytest.mark.skipif( utils.on_win, reason="only unix has python version in site-packages path" ) @@ -49,10 +39,9 @@ def test_copy_source_tree(namespace_setup): def test_merge_namespace_trees(namespace_setup): - dep = os.path.join( - namespace_setup, "other_tree", "namespace", "package", "dependency.py" - ) - makefile(dep) + dep = Path(namespace_setup, "other_tree", "namespace", "package", "dependency.py") + dep.parent.mkdir(parents=True, exist_ok=True) + dep.touch() utils.copy_into(os.path.join(namespace_setup, "other_tree"), namespace_setup) assert os.path.isfile( @@ -62,19 +51,19 @@ def test_merge_namespace_trees(namespace_setup): @pytest.fixture(scope="function") -def namespace_setup(testing_workdir): - namespace = os.path.join(testing_workdir, "namespace") - package = os.path.join(namespace, "package") - makefile(os.path.join(package, "module.py")) +def namespace_setup(testing_workdir: os.PathLike) -> os.PathLike: + module = Path(testing_workdir, "namespace", "package", "module.py") + module.parent.mkdir(parents=True, exist_ok=True) + module.touch() return testing_workdir @pytest.mark.sanity -def test_disallow_merge_conflicts(namespace_setup): - duplicate = os.path.join( - namespace_setup, "dupe", "namespace", "package", "module.py" - ) - makefile(duplicate) +def test_disallow_merge_conflicts(namespace_setup: os.PathLike): + duplicate = Path(namespace_setup, "dupe", "namespace", "package", "module.py") + duplicate.parent.mkdir(parents=True, exist_ok=True) + duplicate.touch() + with pytest.raises(IOError): utils.merge_tree( os.path.dirname(duplicate), @@ -420,7 +409,7 @@ def _generate_tmp_tree(): f3 = os.path.join(dC, "fileA") f4 = os.path.join(dC, "fileB") for f in (f1, f2, f3, f4): - makefile(f) + Path(f).touch() yield tmp, (dA, dB, dC), (f1, f2, f3, f4) finally: @@ -446,7 +435,7 @@ def test_find_recipe(): # check that each of these are valid recipes for f in (f5, f6, f7, f8): - makefile(f) + Path(f).touch() assert utils.find_recipe(tmp) == f os.remove(f) @@ -454,7 +443,7 @@ def test_find_recipe(): def test_find_recipe_relative(): with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): f5 = os.path.join(dA, "meta.yaml") - makefile(f5) + Path(f5).touch() # check that even when given a relative recipe path we still return # the absolute path @@ -476,7 +465,8 @@ def test_find_recipe_no_meta(): def test_find_recipe_file(): with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): f5 = os.path.join(tmp, "meta.yaml") - makefile(f5) + Path(f5).touch() + # file provided is valid meta assert utils.find_recipe(f5) == f5 @@ -494,7 +484,8 @@ def test_find_recipe_multipe_base(): f6 = os.path.join(dB, "meta.yaml") f7 = os.path.join(dC, 
"conda.yaml") for f in (f5, f6, f7): - makefile(f) + Path(f).touch() + # multiple meta files, use the one in base level assert utils.find_recipe(tmp) == f5 @@ -504,7 +495,7 @@ def test_find_recipe_multipe_bad(): f5 = os.path.join(dB, "meta.yaml") f6 = os.path.join(dC, "conda.yaml") for f in (f5, f6): - makefile(f) + Path(f).touch() # nothing in base with pytest.raises(IOError): @@ -513,7 +504,7 @@ def test_find_recipe_multipe_bad(): f7 = os.path.join(tmp, "meta.yaml") f8 = os.path.join(tmp, "conda.yaml") for f in (f7, f8): - makefile(f) + Path(f).touch() # too many in base with pytest.raises(IOError): From d65d98a7b72d39ab729d26ffeac91d6f15b5d394 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 12 Apr 2023 10:57:23 +0200 Subject: [PATCH 102/366] Convert `setup.py` to `pyproject.toml` using hatch build system (#4840) --- .git_archival.txt | 4 + .gitattributes | 2 +- LICENSE.txt => LICENSE | 0 conda_build/__init__.py | 5 +- conda_build/__version__.py | 16 + conda_build/_version.py | 657 --------- docs/source/conf.py | 11 +- news/4840-hatchling-build-system | 19 + pyproject.toml | 79 ++ recipe/bld.bat | 5 +- recipe/build.sh | 2 +- recipe/meta.yaml | 36 +- setup.cfg | 10 - setup.py | 87 -- tests/requirements.txt | 5 +- versioneer.py | 2140 ------------------------------ 16 files changed, 146 insertions(+), 2932 deletions(-) create mode 100644 .git_archival.txt rename LICENSE.txt => LICENSE (100%) create mode 100644 conda_build/__version__.py delete mode 100644 conda_build/_version.py create mode 100644 news/4840-hatchling-build-system delete mode 100755 setup.py delete mode 100644 versioneer.py diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000000..8fb235d704 --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes index 10174d0d33..cf9df9bc03 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,2 @@ -conda_build/_version.py export-subst * text=auto eol=lf +.git_archival.txt export-subst diff --git a/LICENSE.txt b/LICENSE similarity index 100% rename from LICENSE.txt rename to LICENSE diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 6f2d2f37b6..943084b6f4 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -1,9 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from . import _version - -__version__ = _version.get_versions()["version"] +from .__version__ import __version__ +__all__ = ["__version__"] # Sub commands added by conda-build to the conda command sub_commands = [ diff --git a/conda_build/__version__.py b/conda_build/__version__.py new file mode 100644 index 0000000000..72533fc63e --- /dev/null +++ b/conda_build/__version__.py @@ -0,0 +1,16 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Placeholder for the actual version code injected by hatch-vcs. + +The logic here is used during development installs only so keep it simple. 
Conda-build +uses SemVer so our development version is simply: + MAJOR.MINOR.MICRO.devN+gHASH[.dirty] +""" +try: + from setuptools_scm import get_version + + __version__ = get_version(root="..", relative_to=__file__) +except (ImportError, OSError): + # ImportError: setuptools_scm isn't installed + # OSError: git isn't installed + __version__ = "0.0.0.dev0+placeholder" diff --git a/conda_build/_version.py b/conda_build/_version.py deleted file mode 100644 index 06b9cf8f38..0000000000 --- a/conda_build/_version.py +++ /dev/null @@ -1,657 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "conda-build-" - cfg.versionfile_source = "conda_build/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - 
print(e) - return None, None - else: - if verbose: - print(f"unable to find command, tried {commands}") - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs) as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. 
We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. 
If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} diff --git a/docs/source/conf.py b/docs/source/conf.py index c654dff3e8..eba0f759da 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,21 +19,18 @@ sys.path.insert(0, os.path.abspath("../..")) +import conda_build # noqa: E402 -os.chdir("../..") -import versioneer # noqa: E402 - -version = versioneer.get_versions()["version"] - -os.chdir("docs") # -- Project information ----------------------------------------------------- project = "conda-build" copyright = "2018, Anaconda, Inc." author = "Anaconda, Inc." +# The short X.Y version. +version = conda_build.__version__ or "dev" # The full version, including alpha/beta/rc tags -release = version +release = conda_build.__version__ or "dev" # -- General configuration --------------------------------------------------- diff --git a/news/4840-hatchling-build-system b/news/4840-hatchling-build-system new file mode 100644 index 0000000000..40c849137b --- /dev/null +++ b/news/4840-hatchling-build-system @@ -0,0 +1,19 @@ +### Enhancements + +* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index ac3c265a6c..d71c386c2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,82 @@ +[build-system] +requires = [ + "hatchling >=1.12.2", + "hatch-vcs >=0.2.0", +] +build-backend = "hatchling.build" + +[project] +name = "conda-build" +description="tools for building conda packages" +readme = "README.md" +authors = [{name = "Anaconda, Inc.", email = "conda@continuum.io"}] +license = {file = "LICENSE"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy" +] +requires-python = ">=3.8" +dependencies = [ + "beautifulsoup4", + "chardet", + "conda >=4.13", + "conda-package-handling >=1.3", + "filelock", + "glob2 >=0.6", + "jinja2", + "libarchive-c", + "packaging", + "pkginfo", + "psutil", + "pytz", + "pyyaml", + "requests", + "six", + "tomli ; python_version<'3.11'", + "tqdm", +] +dynamic = ["version"] + +[project.urls] +documentation = "https://docs.conda.io/projects/conda-build/en/stable/" +repository = "https://github.com/conda/conda-build" +changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" + +[project.scripts] +conda-build = "conda_build.cli.main_build:main" +conda-convert = "conda_build.cli.main_convert:main" 
+conda-develop = "conda_build.cli.main_develop:main" +conda-index = "conda_build.cli.main_index:main" +conda-inspect = "conda_build.cli.main_inspect:main" +conda-metapackage = "conda_build.cli.main_metapackage:main" +conda-render = "conda_build.cli.main_render:main" +conda-skeleton = "conda_build.cli.main_skeleton:main" +conda-debug = "conda_build.cli.main_debug:main" + +[project.entry-points."distutils.commands"] +bdist_conda = "conda_build.bdist_conda:bdist_conda" + +[tool.hatch.version] +source = "vcs" + +[tool.hatch.version.raw-options] +local_scheme = "dirty-tag" + +[tool.hatch.build] +include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] + +[tool.hatch.build.hooks.vcs] +version-file = "conda_build/__version__.py" + [tool.black] target-version = ['py38', 'py39', 'py310'] diff --git a/recipe/bld.bat b/recipe/bld.bat index 2eb627d075..ea289b187c 100644 --- a/recipe/bld.bat +++ b/recipe/bld.bat @@ -1,4 +1 @@ -python setup.py install --single-version-externally-managed --record=record.txt -IF %ERRORLEVEL% NEQ 0 exit 1 - -del %SCRIPTS%\conda-init +"$PYTHON" -m pip install . -vv diff --git a/recipe/build.sh b/recipe/build.sh index cbff269d18..9d7b162c92 100644 --- a/recipe/build.sh +++ b/recipe/build.sh @@ -1,3 +1,3 @@ #!/bin/bash -python setup.py install --single-version-externally-managed --record=record.txt +"$PYTHON" -m pip install . -vv diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 67bab3e894..0be0f99167 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -6,7 +6,6 @@ source: git_url: ../ build: - number: 0 entry_points: - conda-build = conda_build.cli.main_build:main - conda-convert = conda_build.cli.main_convert:main @@ -21,37 +20,36 @@ build: requirements: build: - git + host: - python - setuptools + - hatchling >=1.12.2 + - hatch-vcs >=0.2.0 run: - beautifulsoup4 - chardet - - conda >=4.13 - # - conda-verify >=3.0.2 # optional as of CB 3.12.0 - - contextlib2 # [py<34] - - enum34 # [py<34] - - pathlib2 # [py<3] + - conda >=4.13 - filelock - - futures # [py<3] - jinja2 - packaging - - patchelf # [linux] - - patch >=2.6 # [not win] - - m2-patch >=2.6 # [win] + - patchelf # [linux] + - patch >=2.6 # [not win] + - m2-patch >=2.6 # [win] - pkginfo - psutil - - py-lief # [not win] + - py-lief # [not win] - python - pyyaml - requests - - scandir # [py<34] - six - - glob2 >=0.6 + - glob2 >=0.6 - pytz - - tomli # [py<311] + - tomli # [py<311] - tqdm - - conda-package-handling >=1.3 + - conda-package-handling >=1.3 - python-libarchive-c + run_constrained: + - conda-verify >=3.0.2 test: files: @@ -103,10 +101,8 @@ test: source_files: - tests -outputs: - - type: wheel - - name: {{ PKG_NAME }} - about: home: https://github.com/conda/conda-build - license: BSD 3-clause + license: BSD-3-Clause + license_file: LICENSE + summary: Canary release of conda-build diff --git a/setup.cfg b/setup.cfg index 91017e0bac..867ecb1f15 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,13 +35,3 @@ markers = slow: execute the slow tests if active sanity: execute the sanity tests no_default_testing_config: used internally to disable monkeypatching for testing_config - -[versioneer] -VCS = git -versionfile_source = conda_build/_version.py -versionfile_build = conda_build/_version.py -tag_prefix = -parentdir_prefix = conda-build- - -[bdist_wheel] -universal=1 diff --git a/setup.py b/setup.py deleted file mode 100755 index 19370a3001..0000000000 --- a/setup.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: 
BSD-3-Clause -from pathlib import Path - -from setuptools import setup - -import versioneer - -# Don't proceed with 'unknown' in version -version_dict = versioneer.get_versions() -if version_dict["error"]: - raise RuntimeError(version_dict["error"]) - -deps = [ - "conda", - "requests", - "filelock", - "pyyaml", - "jinja2", - "pkginfo", - "beautifulsoup4", - "chardet", - "pytz", - "tomli", - "tqdm", - "psutil", - "six", - "libarchive-c", - "packaging", - # "conda-package-handling", # remove comment once released on PyPI - "glob2", -] - -# We cannot build lief for Python 2.7 on Windows (unless we use mingw-w64 for it, which -# would be a non-trivial amount of work). -# .. lief is missing the egg-info directory so we cannot do this .. besides it is not on -# pypi. -# if sys.platform != 'win-32' or sys.version_info >= (3, 0): -# deps.extend(['lief']) - -setup( - name="conda-build", - version=version_dict["version"], - cmdclass=versioneer.get_cmdclass(), - author="Continuum Analytics, Inc.", - author_email="conda@continuum.io", - url="https://github.com/conda/conda-build", - license="BSD-3-Clause", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - ], - python_requires=">=3.8", - description="tools for building conda packages", - long_description=Path("README.md").read_text(), - packages=[ - "conda_build", - "conda_build.cli", - "conda_build.skeletons", - "conda_build.os_utils", - ], - entry_points={ - "console_scripts": [ - "conda-build = conda_build.cli.main_build:main", - "conda-convert = conda_build.cli.main_convert:main", - "conda-develop = conda_build.cli.main_develop:main", - "conda-index = conda_build.cli.main_index:main", - "conda-inspect = conda_build.cli.main_inspect:main", - "conda-metapackage = conda_build.cli.main_metapackage:main", - "conda-render = conda_build.cli.main_render:main", - "conda-skeleton = conda_build.cli.main_skeleton:main", - "conda-debug = conda_build.cli.main_debug:main", - ], - "distutils.commands": [ - "bdist_conda = conda_build.bdist_conda:bdist_conda", - ], - }, - install_requires=deps, - package_data={"conda_build": ["templates/*", "cli-*.exe"]}, - zip_safe=False, -) diff --git a/tests/requirements.txt b/tests/requirements.txt index e5d8e6c11b..fe7c767c60 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -3,14 +3,14 @@ anaconda-client beautifulsoup4 chardet -conda +conda >=4.13 conda-package-handling conda-verify contextlib2 cytoolz filelock git -glob2 +glob2 >=0.6 jinja2 numpy perl @@ -32,5 +32,6 @@ pytz requests ripgrep ruamel.yaml +setuptools_scm # needed for devenv version detection tomli tqdm diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index a142bf53e8..0000000000 --- a/versioneer.py +++ /dev/null @@ -1,2140 +0,0 @@ - -# Version: 0.22 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible with: Python 3.6, 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in distutils/setuptools-based -python projects. 
The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -* `pip install versioneer` to somewhere in your $PATH -* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) -* run `versioneer install` in your source tree, commit the results -* Verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 
- -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. 
More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 
-* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. 
" - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . - setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.ConfigParser() - with open(setup_cfg, "r") as cfg_file: - parser.read_file(cfg_file) - VCS = parser.get("versioneer", "VCS") # mandatory - - # Dict-like interface for non-mandatory entries - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - 
print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) 
- return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%%s*" %% tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". 
- tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", *MATCH_ARGS], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - my_path = __file__ - if my_path.endswith(".pyc") or my_path.endswith(".pyo"): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. 
- - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.22) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. 
This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools/distutils subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to both distutils and setuptools - try: - from setuptools import Command - except ImportError: - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - elif "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - elif "setuptools" in sys.modules: - from setuptools.command.build_ext import build_ext as _build_ext - else: - from distutils.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 
- # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - elif "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except OSError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1) From f5051cb454686f109e93fb2031d37345dbf9dfff Mon Sep 17 00:00:00 2001 From: "Jonathan J. Helmus" Date: Wed, 12 Apr 2023 06:17:35 -0500 Subject: [PATCH 103/366] Require the source when rendering if load_file_data is used (#4817) --- conda_build/metadata.py | 9 +++++++++ ...17-require-source-when-load-file-data-used | 19 +++++++++++++++++++ .../jinja_load_toml_from_source/meta.yaml | 8 ++++++++ .../src/pyproject.toml | 2 ++ 4 files changed, 38 insertions(+) create mode 100644 news/4817-require-source-when-load-file-data-used create mode 100644 tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml create mode 100644 tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 1f69045294..e347e782ce 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1951,12 +1951,21 @@ def uses_regex_in_meta(self): meta_text = UnicodeDammit(f.read()).unicode_markup return "load_file_regex" in meta_text + @property + def uses_load_file_data_in_meta(self): + meta_text = "" + if self.meta_path: + with open(self.meta_path, "rb") as f: + meta_text = UnicodeDammit(f.read()).unicode_markup + return "load_file_data" in meta_text + @property def needs_source_for_render(self): return ( self.uses_vcs_in_meta or self.uses_setup_py_in_meta or self.uses_regex_in_meta + or self.uses_load_file_data_in_meta ) @property diff --git a/news/4817-require-source-when-load-file-data-used b/news/4817-require-source-when-load-file-data-used new file mode 100644 index 0000000000..a1c5ac52cd --- /dev/null +++ b/news/4817-require-source-when-load-file-data-used @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Require the source when rendering a recipe that uses the load_file_data function (#4817, fixes #4807) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml b/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml new file mode 100644 index 0000000000..ab5d0ba6d8 --- /dev/null +++ b/tests/test-recipes/metadata/jinja_load_toml_from_source/meta.yaml @@ -0,0 +1,8 @@ 
+{% set pyproject = load_file_data("pyproject.toml") %} + +package: + name: example + version: {{ pyproject['project']['version'] }} + +source: + path: ./src diff --git a/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml b/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml new file mode 100644 index 0000000000..e55cde5d10 --- /dev/null +++ b/tests/test-recipes/metadata/jinja_load_toml_from_source/src/pyproject.toml @@ -0,0 +1,2 @@ +[project] +version = "1.test" From fbb36806d16a1fe5c6ee2c7f7d131e484d074853 Mon Sep 17 00:00:00 2001 From: Ferry Firmansjah <103191403+ffirmanff@users.noreply.github.com> Date: Fri, 14 Apr 2023 12:54:03 -0400 Subject: [PATCH 104/366] Add svn source credential support (#4692) * Add support for username and password for svn source --------- Co-authored-by: Ken Odegard --- conda_build/metadata.py | 2 ++ conda_build/source.py | 16 +++++++++++++--- docs/source/resources/define-metadata.rst | 15 ++++++++++++++- news/4692-add-svn-source-credential-support | 19 +++++++++++++++++++ 4 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 news/4692-add-svn-source-credential-support diff --git a/conda_build/metadata.py b/conda_build/metadata.py index e347e782ce..ed2b716084 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -470,6 +470,8 @@ def parse(data, config, path=None): "svn_url": str, "svn_rev": None, "svn_ignore_externals": None, + "svn_username": None, + "svn_password": None, "folder": None, "no_hoist": None, "patches": list, diff --git a/conda_build/source.py b/conda_build/source.py index 25cee9ce85..dc90054744 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -565,10 +565,20 @@ def parse_bool(s): os.makedirs(svn_cache) svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_") cache_repo = join(svn_cache, svn_dn) + extra_args = [] if svn_ignore_externals: - extra_args = ["--ignore-externals"] - else: - extra_args = [] + extra_args.append("--ignore-externals") + if "svn_username" in source_dict and "svn_password" in source_dict: + extra_args.extend( + [ + "--non-interactive", + "--no-auth-cache", + "--username", + source_dict.get("svn_username"), + "--password", + source_dict.get("svn_password"), + ] + ) if isdir(cache_repo): check_call_env( ["svn", "up", "-r", svn_revision] + extra_args, diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index e09c4b65b6..891dd759ef 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -162,9 +162,22 @@ Source from svn source: svn_url: https://github.com/ilanschnell/bsdiff - svn_rev: 1.1.4 + svn_rev: 1.1.4 # (defaults to head) svn_ignore_externals: True # (defaults to False) + svn_username: username # Optional, if set must also have svn_password + svn_password: password # Optional, if set must also have svn_username +To access a restricted SVN repository, specify both ``svn_username`` and ``svn_password``. + +.. caution:: + Storing credentials in plaintext carries risks. Alternatively, consider + using environment variables: + + .. 
code-block:: yaml + + source: + svn_username: {{ environ["SVN_USERNAME"] }} + svn_password: {{ environ["SVN_PASSWORD"] }} Source from a local path ------------------------- diff --git a/news/4692-add-svn-source-credential-support b/news/4692-add-svn-source-credential-support new file mode 100644 index 0000000000..d8ac4c37d8 --- /dev/null +++ b/news/4692-add-svn-source-credential-support @@ -0,0 +1,19 @@ +### Enhancements + +* Add support for svn source credential (`svn_username` and `svn_password`). (#4692) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 572b57ca94359694b4bfbdd9187e65196e06602e Mon Sep 17 00:00:00 2001 From: jakirkham Date: Tue, 25 Apr 2023 10:14:23 -0700 Subject: [PATCH 105/366] Fix typo in `error_overdepending` field (#4849) --- tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml index 709e24afd3..12b4442977 100644 --- a/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml +++ b/tests/test-recipes/metadata/_macos_tbd_handling/meta.yaml @@ -10,7 +10,7 @@ source: build: number: 0 error_overlinking: True - error_ocerdepending: True + error_overdepending: True requirements: build: From eab7ac426271fcb0b7a48c077a0e16c4fdc042d6 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 27 Apr 2023 17:55:53 +0200 Subject: [PATCH 106/366] Enable `xattr` test for macOS (#4845) --- news/4845-enable-xattr-test-macos | 19 +++++++++ tests/conftest.py | 45 +++++++-------------- tests/test_api_build.py | 66 +++++++++++++++++-------------- 3 files changed, 69 insertions(+), 61 deletions(-) create mode 100644 news/4845-enable-xattr-test-macos diff --git a/news/4845-enable-xattr-test-macos b/news/4845-enable-xattr-test-macos new file mode 100644 index 0000000000..a1110a6a44 --- /dev/null +++ b/news/4845-enable-xattr-test-macos @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Enable `xattr` test on macOS. 
(#4845) diff --git a/tests/conftest.py b/tests/conftest.py index 80ac8d4ba4..3aca5b4bc7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,11 +3,13 @@ import os import subprocess import sys +import tempfile from collections import defaultdict from pathlib import Path +from typing import Iterator import pytest -from conda.common.compat import on_mac +from conda.common.compat import on_mac, on_win import conda_build.config from conda_build.config import ( @@ -58,39 +60,20 @@ def return_to_saved_path(): @pytest.fixture(scope="function") -def testing_homedir(tmpdir, request): - """Create a homedir in the users home directory; cd into dir above before test, cd out after - - :param tmpdir: py.test fixture, will be injected - :param request: py.test fixture-related, will be injected (see pytest docs) - """ - - saved_path = os.getcwd() - d1 = os.path.basename(tmpdir) - d2 = os.path.basename(os.path.dirname(tmpdir)) - d3 = os.path.basename(os.path.dirname(os.path.dirname(tmpdir))) - new_dir = os.path.join(os.path.expanduser("~"), d1, d2, d3, "pytest.conda-build") - # While pytest will make sure a folder in unique - if os.path.exists(new_dir): - import shutil - - try: - shutil.rmtree(new_dir) - except: - pass +def testing_homedir() -> Iterator[Path]: + """Create a temporary testing directory in the users home directory; cd into dir before test, cd out after.""" + saved = Path.cwd() try: - os.makedirs(new_dir) - except: - print(f"Failed to create {new_dir}") - return None - os.chdir(new_dir) + with tempfile.TemporaryDirectory(dir=Path.home(), prefix=".pytest_") as home: + os.chdir(home) - def return_to_saved_path(): - os.chdir(saved_path) - - request.addfinalizer(return_to_saved_path) + yield home - return str(new_dir) + os.chdir(saved) + except OSError: + pytest.xfail( + f"failed to create temporary directory () in {'%HOME%' if on_win else '${HOME}'} (tmpfs inappropriate for xattrs)" + ) @pytest.fixture(scope="function") diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 80fd24dc6c..15e1e641d7 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -14,6 +14,7 @@ from collections import OrderedDict from glob import glob from pathlib import Path +from shutil import which # for version import conda @@ -21,6 +22,7 @@ import yaml from binstar_client.commands import remove, show from binstar_client.errors import NotFound +from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaMultiError import conda_build @@ -32,6 +34,7 @@ reset_context, url_path, ) +from conda_build.config import Config from conda_build.exceptions import ( CondaBuildException, DependencyNeedsBuildingError, @@ -48,7 +51,6 @@ copy_into, env_var, get_conda_operation_locks, - on_win, package_has_file, prepend_bin_path, rm_rf, @@ -1336,37 +1338,41 @@ def test_pin_subpackage_exact(testing_config): @pytest.mark.sanity @pytest.mark.serial -@pytest.mark.skipif( - sys.platform != "linux", reason="xattr code written here is specific to linux" -) -def test_copy_read_only_file_with_xattr(testing_config, testing_homedir): - if not testing_homedir: - return pytest.xfail( - "could not create a temporary folder in {} (tmpfs inappropriate for xattrs)".format( - "${HOME}" if sys.platform != "win32" else "%UserProfile%" - ) - ) - src_recipe = os.path.join(metadata_dir, "_xattr_copy") - recipe = os.path.join(testing_homedir, "_xattr_copy") - copy_into(src_recipe, recipe) - # file is r/w for owner, but we change it to 400 after setting the attribute - ro_file = 
os.path.join(recipe, "mode_400_file") - # tmpfs on modern Linux does not support xattr in general. - # https://stackoverflow.com/a/46598063 - # tmpfs can support extended attributes if you enable CONFIG_TMPFS_XATTR in Kernel config. - # But Currently this enables support for the trusted.* and security.* namespaces - try: - subprocess.check_call( - f"setfattr -n user.attrib -v somevalue {ro_file}", shell=True - ) - except: - return pytest.xfail( - "setfattr not possible in {}, see https://stackoverflow.com/a/46598063".format( - testing_homedir +@pytest.mark.skipif(on_mac and not which("xattr"), reason="`xattr` unavailable") +@pytest.mark.skipif(on_linux and not which("setfattr"), reason="`setfattr` unavailable") +@pytest.mark.skipif(on_win, reason="Windows doesn't support xattr") +def test_copy_read_only_file_with_xattr(testing_config: Config, testing_homedir: Path): + recipe = Path(testing_homedir, "_xattr_copy") + copy_into(metadata_path / "_xattr_copy", recipe) + + # file is u=rw,go=r (0o644) to start, change it to u=r,go= (0o400) after setting the attribute + ro_file = recipe / "mode_400_file" + + # set extended attributes + if on_linux: + # tmpfs on modern Linux does not support xattr in general. + # https://stackoverflow.com/a/46598063 + # tmpfs can support extended attributes if you enable CONFIG_TMPFS_XATTR in Kernel config. + # But Currently this enables support for the trusted.* and security.* namespaces + try: + subprocess.run( + f"setfattr -n user.attrib -v somevalue {ro_file}", + shell=True, + check=True, ) + except subprocess.CalledProcessError: + pytest.xfail("`setfattr` failed, see https://stackoverflow.com/a/46598063") + else: + subprocess.run( + f"xattr -w user.attrib somevalue {ro_file}", + shell=True, + check=True, ) - subprocess.check_call(f"chmod 400 {ro_file}", shell=True) - api.build(recipe, config=testing_config) + + # restrict file permissions + ro_file.chmod(0o400) + + api.build(str(recipe), config=testing_config) @pytest.mark.sanity From b7a85663d66ea0dd997a09a089142bdc0181b709 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 28 Apr 2023 09:11:57 +0200 Subject: [PATCH 107/366] Enable canary builds for release branches (#4857) --- .github/workflows/tests.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index da7637be2a..19062be01a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,6 +10,7 @@ on: branches: - main - feature/** + - '[0-9].*.x' # e.g., 3.24.x # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request @@ -417,16 +418,17 @@ jobs: build: name: Canary Build needs: [analyze] - # only build canary build iff + # only build canary build if # - prior steps succeeded, # - this is the main repo, and - # - we are on the main (or feature) branch + # - we are on the main, feature, or release branch if: >- success() && !github.event.repository.fork && ( github.ref_name == 'main' || startsWith(github.ref_name, 'feature/') + || endsWith(github.ref_name, '.x') ) strategy: matrix: From 85c59d44584f00a7b3688ec17bf0df5e2701bb1c Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 2 May 2023 04:27:05 -0500 Subject: [PATCH 108/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4858)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/ISSUE_TEMPLATE/0_bug.yml | 1 + .github/ISSUE_TEMPLATE/1_feature.yml | 1 + .github/ISSUE_TEMPLATE/epic.yml | 1 + .github/workflows/cla.yml | 3 +- .github/workflows/issues.yml | 1 + .github/workflows/labels.yml | 3 +- .github/workflows/lock.yml | 9 +- .github/workflows/project.yml | 3 +- .github/workflows/stale.yml | 5 +- HOW_WE_USE_GITHUB.md | 198 ++++++++++++++++++--------- RELEASE.md | 118 ++++++++++------ 11 files changed, 222 insertions(+), 121 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index 8227415512..bb4a6020c1 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -1,3 +1,4 @@ +--- name: Bug Report description: Create a bug report. labels: diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index 0759aac1cb..0064a1c53d 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -1,3 +1,4 @@ +--- name: Feature Request description: Create a feature request. labels: diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index 77dfc7ecb9..3f757e004c 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -1,3 +1,4 @@ +--- name: Epic description: A collection of related tickets. labels: diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index c4ddd1fd90..5f126f7e66 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,3 +1,4 @@ +--- name: CLA on: @@ -28,4 +29,4 @@ jobs: label: cla-signed # [required] # the GitHub Personal Access Token to comment and label with - token: "${{ secrets.CLA_ACTION_TOKEN }}" + token: ${{ secrets.CLA_ACTION_TOKEN }} diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index ae54f5fdea..52b1cfee9e 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -1,3 +1,4 @@ +--- name: Automate Issues on: diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index b1eb9db1ec..b2c2e821d8 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -1,3 +1,4 @@ +--- name: Sync Labels on: @@ -6,7 +7,7 @@ on: workflow_dispatch: inputs: dryrun: - description: "dryrun: Preview changes to labels without editing them (true|false)" + description: 'dryrun: Preview changes to labels without editing them (true|false)' required: true type: boolean default: true diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index 03a6e6367e..a1cf48bdfd 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -1,3 +1,4 @@ +--- name: Lock on: @@ -26,9 +27,9 @@ jobs: # Do not lock issues with these labels, value must be a comma separated list of labels or '' exclude-any-issue-labels: '' # Labels to add before locking an issue, value must be a comma separated list of labels or '' - add-issue-labels: 'locked' + add-issue-labels: locked # Reason for locking an issue, value must be one of resolved, off-topic, too heated, spam or '' - issue-lock-reason: 'resolved' + issue-lock-reason: resolved # Number of days of inactivity before a closed pull request is locked pr-inactive-days: 365 @@ -37,9 +38,9 @@ jobs: # Do not lock pull requests with these labels, value must be a comma separated list of labels or '' exclude-any-pr-labels: '' # Labels to add before locking a pull request, value must be a comma separated list of labels or '' - add-pr-labels: 'locked' + add-pr-labels: 
locked # Reason for locking a pull request, value must be one of resolved, off-topic, too heated, spam or '' - pr-lock-reason: 'resolved' + pr-lock-reason: resolved # Limit locking to only issues or pull requests, value must be one of issues, prs or '' process-only: '' diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 8c3f9f87ad..2bcc43c6bc 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -1,3 +1,4 @@ +--- name: Add to Project on: @@ -13,7 +14,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v0.4.1 + - uses: actions/add-to-project@v0.5.0 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 660cd6332f..63a1255f1a 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,3 +1,4 @@ +--- name: Stale on: @@ -6,7 +7,7 @@ on: workflow_dispatch: inputs: dryrun: - description: "dryrun: Preview stale issues/prs without marking them (true|false)" + description: 'dryrun: Preview stale issues/prs without marking them (true|false)' required: true type: boolean default: true @@ -38,7 +39,7 @@ jobs: with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v7 + - uses: actions/stale@v8 id: stale with: # Only issues with these labels are checked whether they are stale diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index 12570640de..507d96c35b 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -11,40 +11,47 @@ [docs-toc]: https://github.blog/changelog/2021-04-13-table-of-contents-support-in-markdown-files/ [docs-actions]: https://docs.github.com/en/actions [docs-saved-reply]: https://docs.github.com/en/get-started/writing-on-github/working-with-saved-replies/creating-a-saved-reply +[docs-commit-signing]: https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits -[workflow-sync]: https://github.com/conda/infra/blob/main/.github/workflows/sync.yml -[labels-global]: https://github.com/conda/infra/blob/main/.github/global.yml - -[signing-commits]: https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits +[infrastructure]: https://github.com/conda/infrastructure +[workflow-sync]: https://github.com/conda/infrastructure/blob/main/.github/workflows/sync.yml +[labels-global]: https://github.com/conda/infrastructure/blob/main/.github/global.yml +[workflow-cla]: /.github/workflows/cla.yml [workflow-issues]: /.github/workflows/issues.yml +[workflow-labels]: /.github/workflows/labels.yml +[workflow-lock]: /.github/workflows/lock.yml [workflow-project]: /.github/workflows/project.yml +[workflow-stale]: /.github/workflows/stale.yml [labels-local]: /.github/labels.yml +[labels-page]: ../../labels -## How We Use GitHub - -> **Note** -> For easy navigation use [GitHub's table of contents feature][docs-toc]. +# How We Use GitHub -This document seeks to outline how we as a community use GitHub Issues to track bugs and feature requests while still catering to development practices & project management (*e.g.*, release cycles, feature planning, priority sorting, etc.). +This document seeks to outline how we as a community use GitHub Issues to track bugs and feature requests while still catering to development practices & project management (_e.g._, release cycles, feature planning, priority sorting, etc.). 
-Topics: - - [What is Issue Sorting?](#what-is-issue-sorting) - - [Commit signing](#commit-signing) - - [Types of tickets](#types-of-tickets) - - [Normal Ticket/Issue](#normal-ticketissue) +**Topics:** + + - [What is "Issue Sorting"?](#what-is-issue-sorting) + - [Issue Sorting Procedures](#issue-sorting-procedures) + - [Commit Signing](#commit-signing) + - [Types of Issues](#types-of-issues) + - [Standard Issue](#standard-issue) - [Epics](#epics) - [Spikes](#spikes) + - [Working on Issues](#working-on-issues) +> **Note** +> This document is written in the style of an FAQ. For easier navigation, use [GitHub's table of contents feature][docs-toc]. -### What is "Issue Sorting"? +## What is "Issue Sorting"? > **Note** -> "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (*e.g.*, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (*e.g.*, severities may not be assigned, etc.). +> "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (_e.g._, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (_e.g._, severities may not be assigned, etc.). -"Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of tickets: +"Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of issues: ```mermaid flowchart LR @@ -77,10 +84,7 @@ flowchart LR board_progress-- resolved -->state_closed ``` -In order to explain how various `conda` issues are evaluated, the following document will provide information about our sorting process in the form of an FAQ. - - -#### Why sort issues? +### Why sort issues? At the most basic "bird's eye view" level, sorted issues will fall into the category of four main priority levels: @@ -91,104 +95,133 @@ At the most basic "bird's eye view" level, sorted issues will fall into the cate At its core, sorting enables new issues to be placed into these four categories, which helps to ensure that they will be processed at a velocity similar to or exceeding the rate at which new issues are coming in. One of the benefits of actively sorting issues is to avoid engineer burnout and to make necessary work sustainable; this is done by eliminating a never-ending backlog that has not been reviewed by any maintainers. -There will always be broad-scope design and architecture implementations that the `conda` maintainers will be interested in pursuing; by actively organizing issues, the sorting engineers will be able to more easily track and tackle both specific and big-picture goals. - -#### Who does the sorting? - -Sorting engineers are a `conda` governance [sub-team][sub-team]; they are a group of Anaconda and community members who are responsible for making decisions regarding closing issues and setting feature work priorities, amongst other sorting-related tasks. +There will always be broad-scope design and architecture implementations that the maintainers will be interested in pursuing; by actively organizing issues, the sorting engineers will be able to more easily track and tackle both specific and big-picture goals. +### Who does the sorting? -#### How do items show up for sorting? 
+Sorting engineers are a conda governance [sub-team][sub-team]; they are a group of community members who are responsible for making decisions regarding closing issues and setting feature work priorities, among other sorting-related tasks. -New issues that are opened in any of the repositories in the [`conda` GitHub project][conda-org] will show up in the `Sorting` view of the [Planning project][project-planning]. This process is executed via [GitHub Actions][docs-actions]. The two main GitHub Actions workflows utilized for this purpose are [Issues][workflow-issues] and [Project][workflow-project]. +### How do items show up for sorting? -The GitHub Actions in the `conda/infra` repository are viewed as canonical; the [Sync workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. +New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/projet.yml`][workflow-project]. +The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. -#### What is done about the issues in "sorting" mode? +### What is done about the issues in the "Sorting" tab? -Issues in the ["Sorting" tab of the project board][project-sorting] have been reviewed by a sorting engineer and are considered ready for the following procedures: +Issues in the ["Sorting" tab of the project board][project-sorting] are considered ready for the following procedures: - Mitigation via short-term workarounds and fixes - Redirection to the correct project - Determining if support can be provided for errors and questions - Closing out of any duplicate/off-topic issues -The sorting engineers on rotation are not seeking to _resolve_ issues that arise. Instead, the goal is to understand the ticket and to determine whether it is an issue in the first place, and then to collect as much relevant information as possible so that the maintainers of `conda` can make an informed decision about the appropriate resolution schedule. +The sorting engineers on rotation are not seeking to _resolve_ issues that arise. Instead, the goal is to understand the issue and to determine whether it is legitimate, and then to collect as much relevant information as possible so that the maintainers can make an informed decision about the appropriate resolution schedule. -Issues will remain in the "Sorting" tab as long as the issue is in an investigatory phase (_e.g._, querying the user for more details, asking the user to attempt other workarounds, other debugging efforts, etc.) and are likely to remain in this state the longest, but should still be progressing over the course of 1-2 weeks. +Issues will remain in the ["Sorting" tab][project-sorting] as long as the issue is in an investigatory phase (_e.g._, querying the user for more details, asking the user to attempt other workarounds, other debugging efforts, etc.) and are likely to remain in this state the longest, but should still be progressing over the course of 1-2 weeks. +For more information on the sorting process, see [Issue Sorting Procedures](#issue-sorting-procedures). -#### When do items move out of the "Sorting" tab? 
+### When do items move out of the "Sorting" tab? -The additional tabs in the project board that the issues can be moved to include the following: +Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following: -- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. All issues considered "support" should include the https://github.com/conda/infra/labels/type%3A%3Asupport label. -- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. +- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the https://github.com/conda/infrastructure/labels/type%3A%3Asupport label to move an issue to this tab. +- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the https://github.com/conda/infrastructure/labels/backlog label to move an issue to this tab. - **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc. +### Where do work issues go after being sorted? -#### Where do items go after being sorted? +Once issues are deemed ready to be worked on, they will be moved to the ["Backlog" tab of the Planning board][project-backlog]. Once actively in progress, the issues will be moved to the ["In Progress" tab of the Planning board][project-in-progress] and then closed out once the work is complete. -All sorted issues will be reviewed by sorting engineers during a weekly Refinement meeting in order to understand how those particular issues fit into the short- and long-term roadmap of `conda`. These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests. +### What is the purpose of having a "Backlog"? -Once issues are deemed ready to be worked on, they will be moved to the [`conda` Backlog tab of the Planning board][project-backlog] on GitHub. Once actively in progress, the issues will be moved to the [In Progress tab of the Planning board][project-in-progress] and then closed out once the work is complete. +Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release. +### What automation procedures are currently in place? -#### What is the purpose of having a "Backlog"? 
+Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include: -Issues are "backlogged" when they have been sorted but not yet earmarked for an upcoming release. Weekly Refinement meetings are a time when the `conda` engineers will transition issues from "[Sorting][project-sorting]" to "[Backlog][project-backlog]". Additionally, this time of handoff will include discussions around the kind of issues that were raised, which provides an opportunity to identify any patterns that may point to a larger problem. +- [Marking of issues and pull requests as stale][workflow-stale], resulting in: + - issues marked as https://github.com/conda/infrastructure/labels/type%3A%3Asupport being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) + - all other inactive issues (not labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) + - all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) +- [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days +- [Adding new issues and pull requests to the respective project boards][workflow-project] +- [Indicating an issue is ready for the sorting engineer's attention by toggling https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback with https://github.com/conda/infrastructure/labels/pending%3A%3Asupport after a contributor leaves a comment][workflow-issues] +- [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is be blocked until a manual review can be done +- [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories +## Issue Sorting Procedures -#### How does labeling work? +### How are issues sorted? + +Issues in the ["Sorting" tab of the Planning board][project-sorting] are reviewed by issue sorting engineers, who take rotational sorting shifts. In the process of sorting issues, engineers label the issues and move them to the other tabs of the project board for further action. + +Issues that require input from multiple members of the sorting team will be brought up during refinement meetings in order to understand how those particular issues fit into the short- and long-term roadmap. These meetings enable the sorting engineers to get together to collectively prioritize issues, earmark feature requests for specific future releases (versus a more open-ended backlog), tag issues as ideal for first-time contributors, as well as whether or not to close/reject specific feature requests. + +### How does labeling work? Labeling is a very important means for sorting engineers to keep track of the current state of an issue with regards to the asynchronous nature of communicating with users. Utilizing the proper labels helps to identify the severity of the issue as well as to quickly understand the current state of a discussion. 
-Generally speaking, labels with the same category are considered mutually exclusive but in some cases labels sharing the same category can occur concurrently as they indicate qualifiers as opposed to types. For example, we may have the following types, https://github.com/conda/infra/labels/type%3A%3Abug, https://github.com/conda/infra/labels/type%3A%3Afeature, and https://github.com/conda/infra/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue shouldn’t be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/infra/labels/os%3A%3Alinux, https://github.com/conda/infra/labels/os%3A%3Amacos, and https://github.com/conda/infra/labels/os%3A%3Awindows), an issue could be labeled with one or more depending on the system(s) the issue is occurring on. +Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category. + +Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, https://github.com/conda/infrastructure/labels/type%3A%3Abug, https://github.com/conda/infrastructure/labels/type%3A%3Afeature, and https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/infrastructure/labels/os%3A%3Alinux, https://github.com/conda/infrastructure/labels/os%3A%3Amacos, and https://github.com/conda/infrastructure/labels/os%3A%3Awindows), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. + +Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport, that issue will be marked https://github.com/conda/infrastructure/labels/stale after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. + +### What labels are required for each issue? + +At minimum, both `type` and `source` labels should be specified on each issue before moving it from the "Sorting" tab to the "Backlog" tab. All issues that are bugs should also be tagged with a `severity` label. + +The `type` labels are exclusive of each other: each sorted issue should have exactly one `type` label. These labels give high-level information on the issue's classification (_e.g._, bug, feature, tech debt, etc.) + +The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups. 
-Please note that there are also automation policies in place. For example, if an issue is labeled as https://github.com/conda/infra/labels/pending%3A%3Afeedback and https://github.com/conda/infra/labels/unreproducible, that issue will be auto-closed after a month of inactivity. +The `severity` labels are exclusive of each other and, while required for the https://github.com/conda/infrastructure/labels/type%3A%bug label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration. +Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use. -#### How are new labels defined? +### How are new labels defined? -Labels are defined using a scoped syntax with an optional high-level category (_e.g._, source, tag, type, etc.) and a specific topic, much like the following: +Labels are defined using a scoped syntax with an optional high-level category (_e.g._, `source`, `tag`, `type`, etc.) and a specific topic, much like the following: - `[topic]` - `[category::topic]` - `[category::topic-phrase]` -This syntax helps with issue sorting enforcement; at minimum, both `type` and `source` labels should be specified on each issue before moving it from "`Sorting`" to "`Backlog`". +This syntax helps with issue sorting enforcement, as it helps to ensure that sorted issues are, at minimum, categorized by type and source. -There are a number of labels that have been defined for the different `conda` projects. In order to create a streamlined sorting process, label terminologies are standardized using similar (if not the same) labels. +There are a number of labels that have been defined for the different repositories. In order to create a streamlined sorting process, label terminologies are standardized using similar (if not the same) labels. +### How are new labels added? -#### How are new labels added? +New **global** labels (_i.e._, labels that apply equally to all repositories within the conda GitHub organization) are added to [`conda/infrastructure`][infrastructure]'s [`.github/global.yml` file][labels-global]; new **local** labels (_i.e._, labels specific to particular repositories) are added to each repository's [`.github/labels.yml` file][labels-local]. All new labels should follow the labeling syntax described in ["How are new labels defined?"](#how-are-new-labels-defined). Global labels are combined with any local labels and these aggregated labels are used by the [`.github/workflows/labels.yml` workflow][workflow-labels] to synchronize the labels available for the repository. -New **global** labels (_i.e._, generic labels that apply equally to all `conda` repos) can be added to the `conda/infra`'s [`.github/global.yml` file][labels-global]; new **local** labels (_i.e._, labels specific to particular `conda` repos) can be added to each repository's [`.github/labels.yml`][labels-local] file. All new labels should follow the labeling syntax described in the "How are new labels defined?" section of this document. +### Are there any templates to use as responses for commonly-seen issues? - -#### Are there any templates to use as responses for commonly-seen issues? - -Some of the same types of issues appear regularly (_e.g._, issues that are duplicates of others, tickets that should be filed in the Anaconda issue tracker, errors that are due to a user's specific setup/environment, etc.). 
+Some of the same types of issues appear regularly (_e.g._, issues that are duplicates of others, issues that should be filed in the Anaconda issue tracker, errors that are due to a user's specific setup/environment, etc.). Below are some boilerplate responses for the most commonly-seen issues to be sorted:
Duplicate Issue +
 
 This is a duplicate of [link to primary issue]; please feel free to continue the discussion there.
 
> **Warning** -> Apply the https://github.com/conda/infra/labels/duplicate label to the issue being closed and https://github.com/conda/infra/labels/duplicate%3A%3Aprimary to the original issue. +> Apply the https://github.com/conda/infrastructure/labels/duplicate label to the issue being closed and https://github.com/conda/infrastructure/labels/duplicate%3A%3Aprimary to the original issue.
Requesting an Uninstall/Reinstall of conda +
 
 Please uninstall your current version of `conda` and reinstall the latest version.
@@ -202,6 +235,7 @@ whichever is more appropriate for your needs.
 
Redirect to Anaconda Issue Tracker +
 
 Thank you for filing this issue! Unfortunately, this is off-topic for this repo.
@@ -211,13 +245,14 @@ where `conda` installer/package issues are addressed.
 
> **Warning** -> Apply the https://github.com/conda/infra/labels/off-topic label to these tickets before closing them out. +> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
Redirecting to Nucleus Forums +
 
 Unfortunately, this issue is outside the scope of support we offer via GitHub;
@@ -226,30 +261,59 @@ please post details to the [Nucleus forums](https://community.anaconda.cloud/).
 
> **Warning** -> Apply the https://github.com/conda/infra/labels/off-topic label to these tickets before closing them out. +> Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out.
-In order to not have to manually type or copy/paste the above repeatedly, please note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. - +In order to not have to manually type or copy/paste the above repeatedly, note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. -### Commit signing +## Commit Signing For all conda maintainers, we require commit signing and strongly recommend it for all others wishing to contribute to conda related projects. More information about how to set this up within GitHub can be found here: -- [Signing Commits][signing-commits] +- [GitHub's signing commits docs][docs-commit-signing] -### Types of Tickets +## Types of Issues -#### Standard Ticket/Issue +### Standard Issue TODO -#### Epics +### Epics TODO -#### Spikes +### Spikes -TODO +#### What is a spike? + +"Spike" is a term that is borrowed from extreme programming and agile development. They are used when the **outcome of an issue is unknown or even optional**. For example, when first coming across a problem that has not been solved before, a project may choose to either research the problem or create a prototype in order to better understand it. + +Additionally, spikes represent work that **may or may not actually be completed or implemented**. An example of this are prototypes created to explore possible solutions. Not all prototypes are implemented and the purpose of creating a prototype is often to explore the problem space more. For research-oriented tasks, the end result of this research may be that a feature request simply is not viable at the moment and would result in putting a stop to that work. + +Finally, spikes are usually **timeboxed**. However, given the open source/volunteer nature of our contributions, we do not enforce this for our contributors. When a timebox is set, this means that we are limiting how long we want someone to work on said spike. We do this to prevent contributors from falling into a rabbit hole they may never return from. Instead, we set a time limit to perform work on the spike and then have the assignee report back. If the tasks defined in the spike have not yet been completed, a decision is made on whether it makes sense to perform further work on the spike. + +#### When do I create a spike? + +A spike should be created when we do not have enough information to move forward with solving a problem. That simply means that, whenever we are dealing with unknowns or processes the project team has never encountered before, it may be useful for us to create a spike. + +In day-to-day work, this kind of situation may appear when new bug reports or feature requests come in that deal with problems or technologies that the project team is unfamiliar with. All issues that the project team has sufficient knowledge of should instead proceed as regular issues. + +#### When do I not create a spike? + +Below are some common scenarios where creating a spike is not appropriate: + +- Writing a technical specification for a feature we know how to implement +- Design work that would go into drafting how an API is going to look and function +- Any work that must be completed or is not optional + +## Working on Issues + +### How do I assign myself to an issue I am actively reviewing? + +If you do **not** have permissions, please indicate that you are working on an issue by leaving a comment. 
Someone who has permissions will assign you to the issue. If two weeks have passed without a pull request or an additional comment requesting information, you may be removed from the issue and the issue reassigned. + +If you are assigned to an issue but will not be able to continue work on it, please comment to indicate that you will no longer be working on it and press `unassign me` next to your username in the `Assignees` section of the issue page (top right). + +If you **do** have permissions, please assign yourself to the issue by pressing `assign myself` under the `Assignees` section of the issue page (top right). diff --git a/RELEASE.md b/RELEASE.md index 533f4ed52d..ee0129400f 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,20 +1,24 @@ -## Release Process + +[epic template]: {{ repo.url }}/issues/new?assignees=&labels=epic&template=epic.yml +[infrastructure]: https://github.com/conda/infrastructure +[rever docs]: https://regro.github.io/rever-docs +[compare]: {{ repo.url }}/compare +[new release]: {{ repo.url }}/releases/new +[release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes -> **Note:** +# Release Process + +> **Note** > Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. -[epic template]: ../../issues/new?assignees=&labels=epic&template=epic.yml -[rever docs]: https://regro.github.io/rever-docs -[compare]: ../../compare -[new release]: ../../releases/new -[release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes +## 1. Open the Release Issue. (do this ~1 week prior to release) -### 1. Open the Release Issue. +> **Note** +> The [epic template][epic template] is perfect for this, just remember to remove the {{ repo.url }}/labels/epic label. -> **Note:** -> The [epic template][epic template] is perfect for this, just remember to remove the https://github.com/conda/infra/labels/epic label. +Use the issue template below to create the release issue. After creating the release issue, pin it for easy access.
GitHub Issue Template @@ -22,41 +26,58 @@ ```markdown ### Summary -Placeholder for `conda YY.M.0` release. +Placeholder for `{{ repo.name }} YY.M.0` release. + +| Pilot | | +|---|---| +| Co-pilot | | ### Tasks -[milestone]: https://github.com/conda/conda/milestone/56 -[releases]: https://github.com/conda/conda/releases -[main]: https://github.com/AnacondaRecipes/conda-feedstock -[conda-forge]: https://github.com/conda-forge/conda-feedstock +[milestone]: {{ repo.url }}/milestone/ +[process]: {{ repo.url }}/blob/main/RELEASE.md +[releases]: {{ repo.url }}/releases +[main]: https://github.com/AnacondaRecipes/{{ repo.name }}-feedstock +[conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock +[ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/ +[announcement]: https://github.com/conda/communications + +#### The week before release week +- [ ] Create release branch (named `YY.M.x`) +- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/YY.M.x`) - [ ] [Complete outstanding PRs][milestone] -- [ ] Create release PR - - See release process https://github.com/conda/infra/issues/541 -- [ ] [Publish Release][releases] -- [ ] Create/update `YY.M.x` branch +- [ ] Test release candidates + + +#### Release week + +- [ ] Create release PR (see [release process][process]) +- [ ] [Publish release][releases] +- [ ] Activate the `YY.M.x` branch on [ReadTheDocs][ReadTheDocs] - [ ] Feedstocks - - [ ] Bump version [Anaconda's main][main] - - [ ] Bump version [conda-forge][conda-forge] - - Link any other feedstock PRs that are necessary + - [ ] Bump version & update dependencies/tests in [Anaconda, Inc.'s feedstock][main] + - [ ] Bump version & update dependencies/tests in [conda-forge feedstock][conda-forge] + - [ ] Hand off to the Anaconda packaging team - [ ] Announce release - - [ ] Slack + - [ ] Create release [announcement draft][announcement] + - [ ] Discourse - [ ] Twitter + - [ ] Matrix ```
-### 2. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. +## 2. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. -These are synced from https://github.com/conda/infra. +These are synced from [`conda/infrastructure`][infrastructure].
-

3. Run Rever.

+

3. Run Rever. (ideally done on the Monday of release week)

-Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (i.e. to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. +Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. 1. Install [`rever`][rever docs] and activate the environment: @@ -69,11 +90,18 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Clone and `cd` into the repository if you haven't done so already: ```bash - (rever) $ git clone git@github.com:conda/conda.git + (rever) $ git clone git@github.com:{{ repo.user }}/{{ repo.name }}.git (rever) $ cd conda ``` -2. Create a release branch: +2. Fetch the latest changes from the remote and checkout the release branch created a week ago: + + ```bash + (rever) $ git fetch upstream + (rever) $ git checkout YY.M.x + ``` + +2. Create a versioned branch, this is where rever will make its changes: ```bash (rever) $ git checkout -b release-YY.M.0 @@ -81,7 +109,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Run `rever --activities authors`: - > **Note:** + > **Note** > Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version). ```bash @@ -162,13 +190,13 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary. - > **Note:** + > **Note** > We've found it useful to name news snippets with the following format: `-`. > > We've also found that we like to include the PR #s inline with the text itself, e.g.: > > ```markdown - > ### Enhancements + > ## Enhancements > > * Add `win-arm64` as a known platform (subdir). (#11778) > ``` @@ -195,7 +223,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 5. Run `rever --activities changelog`: - > **Note:** + > **Note** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. ```bash @@ -260,19 +288,19 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Added first contributions ``` -8. Push this release branch: +8. Push this versioned branch. ```bash (rever) $ git push -u upstream release-YY.M.0 ``` -9. Open the Release PR. +9. 
Open the Release PR targeting the `YY.M.x` branch.
GitHub PR Template ```markdown - ### Description + ## Description ✂️ snip snip ✂️ the making of a new release. @@ -285,36 +313,36 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note:** + > **Note** > Only publish the release after the Release PR is merged, until then always **save as draft**. | Field | Value | |---|---| | Choose a tag | `YY.M.0` | - | Target | `main` | + | Target | `YY.M.x` | | Body | copy/paste blurb from `CHANGELOG.md` |
-### 4. Wait for review and approval of Release PR. +## 4. Wait for review and approval of Release PR. -### 5. Merge Release PR and Publish Release. +## 5. Merge Release PR and Publish Release. -### 6. Create a new branch (`YY.M.x`) corresponding with the release. +## 6. Merge/cherry pick the release branch over to the `main` branch. -### 7. Open PRs to bump main and conda-forge feedstocks to use `YY.M.0`. +## 7. Open PRs to bump main and conda-forge feedstocks to use `YY.M.0`. -### 8. Hand off to Anaconda's packaging team. +## 8. Hand off to Anaconda's packaging team.
Internal process

-1. Open packaging request in #package_requests, include links to the Release PR and feedstock PRs.
+1. Open a packaging request in the #package_requests Slack channel and include links to the Release PR and feedstock PRs.

 2. Message packaging team/PM to let them know that a release has occurred and that you are the release manager.
-### 9. Continue championing and shepherding. +## 9. Continue championing and shepherding. Remember to continue updating the Release Issue with the latest details as tasks are completed. From 3e006a2c6e41637ae39bc38492c051ce0df2072a Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Tue, 2 May 2023 05:46:32 -0400 Subject: [PATCH 109/366] minor skeleton cleanups (#4829) --- conda_build/skeletons/cran.py | 2 +- conda_build/skeletons/pypi.py | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 0dac66631a..e3b22ef7d2 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -557,7 +557,7 @@ def yaml_quote_string(string): Note that this function is NOT general. """ return ( - yaml.dump(string, Dumper=SafeDumper) + yaml.dump(string, indent=True, Dumper=SafeDumper) .replace("\n...\n", "") .replace("\n", "\n ") .rstrip("\n ") diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index f376ad49a0..9ebcb4aa13 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -112,6 +112,7 @@ "extra": OrderedDict([("recipe-maintainers", "")]), } + # Note the {} formatting bits here DISTUTILS_PATCH = '''\ diff core.py core.py @@ -125,8 +126,8 @@ +import io +import os.path +import sys -+import yaml -+from yaml import Loader, SafeLoader ++import ruamel.yaml as yaml ++from ruamel.yaml import Loader, SafeLoader + +# Override the default string handling function to always return unicode +# objects (taken from StackOverflow) @@ -1334,12 +1335,13 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op :param temp_dir: Temporary directory for doing for storing pkginfo.yaml :type temp_dir: str """ - # TODO: we could make everyone's lives easier if we include packaging here, because setuptools - # needs it in recent versions. At time of writing, it is not a package in defaults, so this - # actually breaks conda-build right now. Omit it until packaging is on defaults. 
- # specs = ['python %s*' % python_version, 'pyyaml', 'setuptools', 'six', 'packaging', 'appdirs'] subdir = config.host_subdir - specs = [f"python {python_version}*", "pip", "pyyaml", "setuptools"] + ( + specs = [ + f"python {python_version}*", + "pip", + "ruamel.yaml", + "setuptools", + ] + ( ["m2-patch", "m2-gcc-libs"] if config.host_subdir.startswith("win") else ["patch"] From f5c8491c3dd5050b520ce408e8cddc1045a872b0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 May 2023 14:41:13 +0200 Subject: [PATCH 110/366] [pre-commit.ci] pre-commit autoupdate (#4860) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> [skip ci] --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3141ec928b..70201956a4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.3.1 + rev: v3.3.2 hooks: # upgrade standard Python codes - id: pyupgrade From addfbbb3180bcae5910daf383155d1722cb6fb77 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 3 May 2023 07:52:13 -0500 Subject: [PATCH 111/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4863)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 22 +++++++++++++--------- .github/workflows/stale.yml | 2 +- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 5f126f7e66..dcb1b839f6 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -6,27 +6,31 @@ on: types: - created pull_request_target: - types: - - reopened - - opened - - synchronize jobs: check: if: >- !github.event.repository.fork && ( - github.event.comment.body == '@conda-bot check' + github.event.issue.pull_request + && github.event.comment.body == '@conda-bot check' || github.event_name == 'pull_request_target' ) runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v22.9.0 + uses: conda/actions/check-cla@v23.4.0 with: # [required] - # label to add when actor has signed the CLA + # A token with ability to comment, label, and modify the commit status + # (`pull_request: write` and `statuses: write` for fine-grained PAT; `repo` for classic PAT) + # (default: secrets.GITHUB_TOKEN) + token: ${{ secrets.CLA_ACTION_TOKEN }} + # [required] + # Label to apply to contributor's PR once CLA is singed label: cla-signed + # [required] - # the GitHub Personal Access Token to comment and label with - token: ${{ secrets.CLA_ACTION_TOKEN }} + # Token for opening singee PR in the provided `cla_repo` + # (`pull_request: write` for fine-grained PAT; `repo` and `workflow` for classic PAT) + cla_token: ${{ secrets.CLA_FORK_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 63a1255f1a..06a07b6376 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,7 +34,7 @@ jobs: days-before-issue-stale: 21 days-before-issue-close: 7 steps: - - uses: conda/actions/read-yaml@v22.9.0 + - uses: conda/actions/read-yaml@v23.4.0 id: read_yaml with: path: 
https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From 49eb6aeb9aaa15ff430e987be9170ff2f303e751 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 3 May 2023 14:55:22 -0500 Subject: [PATCH 112/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4864)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index dcb1b839f6..a087d8e096 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.4.0 + uses: conda/actions/check-cla@v23.5.0 with: # [required] # A token with ability to comment, label, and modify the commit status From 313fb69261135db7db0ccc7a6fa26e6d3fb426ba Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 3 May 2023 15:13:09 -0500 Subject: [PATCH 113/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4865)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index a087d8e096..c7b933b6b9 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.5.0 + uses: conda/actions/check-cla@v23.5.1 with: # [required] # A token with ability to comment, label, and modify the commit status From b9370e663a5d390650c73ae896e6ceaeb6e0f361 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Thu, 4 May 2023 06:56:59 -0700 Subject: [PATCH 114/366] Ensure `test/commands` & `run_test.*` both work (#4429) Co-authored-by: Bianca Henderson --- conda_build/create_test.py | 31 +++++++++---------- ...nsure-test-commands-and-run_test-both-work | 19 ++++++++++++ 2 files changed, 34 insertions(+), 16 deletions(-) create mode 100644 news/4429-ensure-test-commands-and-run_test-both-work diff --git a/conda_build/create_test.py b/conda_build/create_test.py index dbad7bf083..45cb20ebfe 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -71,23 +71,22 @@ def create_shell_files(m: MetaData, test_dir: os.PathLike) -> list[str]: m.config.timeout, locking=False, ) - if basename(test_dir) != "test_tmp": - commands = ensure_list(m.get_value("test/commands", [])) - if commands: - with open(join(dest_file), "a") as f: - f.write("\n\n") - if not status: - f.write("set -ex\n\n") - f.write("\n\n") - for cmd in commands: - f.write(cmd) - f.write("\n") - if status: - f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") + commands = ensure_list(m.get_value("test/commands", [])) + if commands: + with open(join(dest_file), "a") as f: + f.write("\n\n") + if not status: + f.write("set -ex\n\n") + f.write("\n\n") + for cmd in commands: + f.write(cmd) + f.write("\n") if status: - f.write("exit /B 0\n") - else: - f.write("exit 0\n") + f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n") + if status: + f.write("exit /B 0\n") + else: + f.write("exit 0\n") if isfile(dest_file): shell_files.append(dest_file) return shell_files diff --git 
a/news/4429-ensure-test-commands-and-run_test-both-work b/news/4429-ensure-test-commands-and-run_test-both-work new file mode 100644 index 0000000000..0015ec7cd8 --- /dev/null +++ b/news/4429-ensure-test-commands-and-run_test-both-work @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Ensure that `tests/commands` get run also in the presence of `run_test.*` (#4427) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 778475deb1e20896b9b31efa4cf7841a80d65f57 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 4 May 2023 15:58:06 +0200 Subject: [PATCH 115/366] Always use the subdir defined in the package (#4832) Co-authored-by: Jannis Leidel --- conda_build/build.py | 3 +-- news/4832-subdir-downloads | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 news/4832-subdir-downloads diff --git a/conda_build/build.py b/conda_build/build.py index fd596919d0..c5c0e43701 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3006,8 +3006,7 @@ def _construct_metadata_for_test_from_package(package, config): with open(os.path.join(info_dir, "index.json")) as f: package_data = json.load(f) - if package_data["subdir"] != "noarch": - config.host_subdir = package_data["subdir"] + config.host_subdir = package_data["subdir"] # We may be testing an (old) package built without filename hashing. hash_input = os.path.join(info_dir, "hash_input.json") if os.path.isfile(hash_input): diff --git a/news/4832-subdir-downloads b/news/4832-subdir-downloads new file mode 100644 index 0000000000..4b0e344f4c --- /dev/null +++ b/news/4832-subdir-downloads @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Download packages during build into the correct subdir folder. (#4750) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 9fd6279cf510d34008fd0423c9efe364302e7589 Mon Sep 17 00:00:00 2001 From: Duncan Macleod Date: Thu, 4 May 2023 15:23:42 +0100 Subject: [PATCH 116/366] Use a unique subdir variable name when rebuilding the index for multi-output builds (#4862) --- conda_build/build.py | 6 +++--- news/4862-multi-output-subdir-variable | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 news/4862-multi-output-subdir-variable diff --git a/conda_build/build.py b/conda_build/build.py index c5c0e43701..cd416fb14e 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -2895,14 +2895,14 @@ def build( # must rebuild index because conda has no way to incrementally add our last # package to the index. 
- subdir = ( + index_subdir = ( "noarch" if (m.noarch or m.noarch_python) else m.config.host_subdir ) if m.is_cross: get_build_index( - subdir=subdir, + subdir=index_subdir, bldpkgs_dir=m.config.bldpkgs_dir, output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, @@ -2913,7 +2913,7 @@ def build( clear_cache=True, ) get_build_index( - subdir=subdir, + subdir=index_subdir, bldpkgs_dir=m.config.bldpkgs_dir, output_folder=m.config.output_folder, channel_urls=m.config.channel_urls, diff --git a/news/4862-multi-output-subdir-variable b/news/4862-multi-output-subdir-variable new file mode 100644 index 0000000000..811fe525ac --- /dev/null +++ b/news/4862-multi-output-subdir-variable @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Use a unique subdir variable name when rebuilding the index for multi-output builds (#4862, fixes #4855) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From ccc3e818ea2f3c2f1e1b59c993ca8fa4ee1c2f67 Mon Sep 17 00:00:00 2001 From: Isuru Fernando Date: Fri, 5 May 2023 09:54:45 -0500 Subject: [PATCH 117/366] include virtual packages in hash contents (#4606) * include virtual packages in hash contents * Update conda_build/metadata.py * Update tests/test_api_render.py * Add archspec and update test accordingly --------- Co-authored-by: Marcel Bargull Co-authored-by: Jannis Leidel Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: jaimergp Co-authored-by: Bianca Henderson --- conda_build/metadata.py | 12 ++++++-- news/4606-noarch-platform-deps | 21 ++++++++++++++ .../_noarch_with_no_platform_deps/meta.yaml | 14 ++++++++++ .../_noarch_with_platform_deps/meta.yaml | 20 +++++++++++++ tests/test_api_render.py | 28 +++++++++++++++++++ 5 files changed, 93 insertions(+), 2 deletions(-) create mode 100644 news/4606-noarch-platform-deps create mode 100644 tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml create mode 100644 tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml diff --git a/conda_build/metadata.py b/conda_build/metadata.py index ed2b716084..d158af6223 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1583,9 +1583,17 @@ def get_hash_contents(self): # if dependencies are only 'target_platform' then ignore that. if dependencies == ["target_platform"]: - return {} + hash_contents = {} else: - return {key: self.config.variant[key] for key in dependencies} + hash_contents = {key: self.config.variant[key] for key in dependencies} + + # include virtual packages in run + run_reqs = self.meta.get("requirements", {}).get("run", []) + virtual_pkgs = [req for req in run_reqs if req.startswith("__")] + + # add name -> match spec mapping for virtual packages + hash_contents.update({pkg.split(" ")[0]: pkg for pkg in virtual_pkgs}) + return hash_contents def hash_dependencies(self): """With arbitrary pinning, we can't depend on the build string as done in diff --git a/news/4606-noarch-platform-deps b/news/4606-noarch-platform-deps new file mode 100644 index 0000000000..1cace82510 --- /dev/null +++ b/news/4606-noarch-platform-deps @@ -0,0 +1,21 @@ +### Enhancements + +* Noarch packages that use virtual packages have the virtual packages added to the hash contents of the package. + This facilitates the building of noarch packages multiple times for different platforms with platform + specific dependencies. 
(#4606) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml new file mode 100644 index 0000000000..e636c4152c --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml @@ -0,0 +1,14 @@ +package: + name: test-noarch-with-no-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 diff --git a/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml new file mode 100644 index 0000000000..b0e2f804e2 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml @@ -0,0 +1,20 @@ +package: + name: test-noarch-with-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 + - colorama # [win] + - __win # [win] + - appnope # [osx] + - __osx # [osx] + - __archspec * ppc64le # [ppc64le] + - __linux # [linux] diff --git a/tests/test_api_render.py b/tests/test_api_render.py index c2cd8999e2..b96940a4cb 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -182,6 +182,34 @@ def test_cross_info_index_platform(testing_config): assert metadata.config.host_platform == info_index["platform"] +def test_noarch_with_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_platform_deps") + build_ids = {} + for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]: + platform, arch = subdir_.split("-") + m = api.render( + recipe_path, config=testing_config, platform=platform, arch=arch + )[0][0] + build_ids[subdir_] = m.build_id() + + # one hash for each platform, plus one for the archspec selector + assert len(set(build_ids.values())) == 4 + assert build_ids["linux-64"] == build_ids["linux-aarch64"] + assert ( + build_ids["linux-64"] != build_ids["linux-ppc64le"] + ) # not the same due to archspec + + +def test_noarch_with_no_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps") + build_ids = set() + for platform in ["osx", "linux", "win"]: + m = api.render(recipe_path, config=testing_config, platform=platform)[0][0] + build_ids.add(m.build_id()) + + assert len(build_ids) == 1 + + def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it From 6ecd9022b056d0d92d35cefc413bc3d09299574a Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 8 May 2023 07:16:49 -0500 Subject: [PATCH 118/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4871)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- HOW_WE_USE_GITHUB.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index 507d96c35b..df3019210f 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -103,7 +103,7 @@ Sorting engineers are a conda governance [sub-team][sub-team]; they are a group ### How do items show up for sorting? 
-New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/projet.yml`][workflow-project]. +New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/project.yml`][workflow-project]. The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. @@ -126,8 +126,8 @@ For more information on the sorting process, see [Issue Sorting Procedures](#iss Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following: -- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the https://github.com/conda/infrastructure/labels/type%3A%3Asupport label to move an issue to this tab. -- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the https://github.com/conda/infrastructure/labels/backlog label to move an issue to this tab. +- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) label to move an issue to this tab. +- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the [`backlog`](https://github.com/conda/infrastructure/labels/backlog) label to move an issue to this tab. - **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc. ### Where do work issues go after being sorted? 
@@ -143,12 +143,12 @@ Issues are "backlogged" when they have been sorted but not yet earmarked for an Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include: - [Marking of issues and pull requests as stale][workflow-stale], resulting in: - - issues marked as https://github.com/conda/infrastructure/labels/type%3A%3Asupport being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) - - all other inactive issues (not labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) + - issues marked as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) + - all other inactive issues (not labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) - all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) - [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days - [Adding new issues and pull requests to the respective project boards][workflow-project] -- [Indicating an issue is ready for the sorting engineer's attention by toggling https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback with https://github.com/conda/infrastructure/labels/pending%3A%3Asupport after a contributor leaves a comment][workflow-issues] +- [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling [`pending::feedback`](https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback) with [`pending::support`](https://github.com/conda/infrastructure/labels/pending%3A%3Asupport) after a contributor leaves a comment - [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is be blocked until a manual review can be done - [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories @@ -166,9 +166,9 @@ Labeling is a very important means for sorting engineers to keep track of the cu Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category. -Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. 
For example, we may have the following types, https://github.com/conda/infrastructure/labels/type%3A%3Abug, https://github.com/conda/infrastructure/labels/type%3A%3Afeature, and https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/infrastructure/labels/os%3A%3Alinux, https://github.com/conda/infrastructure/labels/os%3A%3Amacos, and https://github.com/conda/infrastructure/labels/os%3A%3Awindows), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. +Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug), [`type::feature`](https://github.com/conda/infrastructure/labels/type%3A%3Afeature), and [`type::documentation`](https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation), where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, [`os::linux`](https://github.com/conda/infrastructure/labels/os%3A%3Alinux), [`os::macos`](https://github.com/conda/infrastructure/labels/os%3A%3Amacos), and [`os::windows`](https://github.com/conda/infrastructure/labels/os%3A%3Awindows)), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. -Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport, that issue will be marked https://github.com/conda/infrastructure/labels/stale after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. +Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport), that issue will be marked [`stale`](https://github.com/conda/infrastructure/labels/stale) after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. ### What labels are required for each issue? @@ -178,7 +178,7 @@ The `type` labels are exclusive of each other: each sorted issue should have exa The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups. 
-The `severity` labels are exclusive of each other and, while required for the https://github.com/conda/infrastructure/labels/type%3A%bug label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration. +The `severity` labels are exclusive of each other and, while required for the [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%bug) label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration. Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use. From 07a9d43dc24e2bf12b1c61b7d7298582755c5a01 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 15:31:59 -0400 Subject: [PATCH 119/366] [pre-commit.ci] pre-commit autoupdate (#4876) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.3.2 → v3.4.0](https://github.com/asottile/pyupgrade/compare/v3.3.2...v3.4.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 70201956a4..6f03884ffe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.3.2 + rev: v3.4.0 hooks: # upgrade standard Python codes - id: pyupgrade From 69976edf5d5a584d7f357c3dcd7545fdb573a0b0 Mon Sep 17 00:00:00 2001 From: rishabh11336 <67859818+rishabh11336@users.noreply.github.com> Date: Mon, 15 May 2023 21:49:48 +0530 Subject: [PATCH 120/366] Made changes as suggested by Issue #12055 (#4782) * Made changes as suggested by Issue #12055 Line 266: ```from EXAMPLE: python=3.4 is translated to python 3.4*.``` to ```EXAMPLE: python=3.4 is translated to python 3.4.*. ``conda search 'python=3.1'`` does NOT bring up Python 3.10, only Python 3.1.*.``` Line 318: from matched as any version of a package named ``python>=2.7.0``. to ```matched as any version of a package named python 2.7.0 or python=2.7.18.``` * Update package-spec.rst changed line 317 and line 266 * Update package-spec.rst changed line 266 and line 317 * Update package-spec.rst changed line no. 266 ,317 ,320 * Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Update docs/source/resources/package-spec.rst --------- Co-authored-by: Katherine Kinnaman --- docs/source/resources/package-spec.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index 601b79b6dd..aca0b5fca9 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -263,7 +263,7 @@ the command line with ``conda install``, such as ``conda install python=3.4``. Internally, conda translates the command line syntax to the spec defined in this section. -EXAMPLE: python=3.4 is translated to python 3.4*. +EXAMPLE: python=3.4 is translated to python 3.4.*. 
``conda search 'python=3.1'`` does NOT bring up Python 3.10, only Python 3.1.*. Package dependencies are specified using a match specification. A match specification is a space-separated string of 1, 2, or 3 @@ -314,10 +314,10 @@ parts: Remember that the version specification cannot contain spaces, as spaces are used to delimit the package, version, and build string in the whole match specification. ``python >= 2.7`` is an -invalid match specification. Furthermore, ``python>=2.7`` is +invalid match specification. However,``"python >= 2.7"`` (with double or single quotes) is matched as any version of a package named ``python>=2.7``. -When using the command line, put double quotes around any package +When using the command line, put double or single quotes around any package version specification that contains the space character or any of the following characters: <, >, \*, or \|. From cdfeab1f809ed0c550e30e4f637783e4f759d980 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 15 May 2023 12:13:57 -0500 Subject: [PATCH 121/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4880)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/stale.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 06a07b6376..66a49f732e 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -31,8 +31,8 @@ jobs: days-before-issue-close: 30 # [type::support] issues have a more aggressive stale/close timeline - only-issue-labels: type::support - days-before-issue-stale: 21 - days-before-issue-close: 7 + days-before-issue-stale: 90 + days-before-issue-close: 21 steps: - uses: conda/actions/read-yaml@v23.4.0 id: read_yaml From 0e4ee90b3e24de5bdd8672b82fcf401a98100854 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Tue, 16 May 2023 11:04:01 -0400 Subject: [PATCH 122/366] Use conda-index dependency instead of obsolete built-in index (#4828) Co-authored-by: Ken Odegard Co-authored-by: Jannis Leidel --- conda_build/api.py | 8 +- conda_build/build.py | 14 +- conda_build/cli/main_index.py | 14 +- conda_build/deprecations.py | 275 ++++++++ conda_build/index.py | 57 +- conda_build/render.py | 9 +- conda_build/utils.py | 3 + news/4645-use-conda-index | 23 + recipe/meta.yaml | 1 + tests/cli/test_main_render.py | 39 +- tests/requirements.txt | 1 + tests/test_api_consistency.py | 11 +- tests/test_deprecations.py | 161 +++++ tests/test_index.py | 1197 --------------------------------- 14 files changed, 575 insertions(+), 1238 deletions(-) create mode 100644 conda_build/deprecations.py create mode 100644 news/4645-use-conda-index create mode 100644 tests/test_deprecations.py delete mode 100644 tests/test_index.py diff --git a/conda_build/api.py b/conda_build/api.py index cc31f6e339..2d5fa7ee7d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -23,6 +23,8 @@ from conda_build.utils import expand_globs as _expand_globs from conda_build.utils import get_logger as _get_logger +from .deprecations import deprecated + def render( recipe_path, @@ -518,6 +520,7 @@ def create_metapackage( ) +@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") def update_index( dir_paths, config=None, @@ -538,7 +541,7 @@ def update_index( import yaml - from conda_build.index import update_index + from conda_build.index import update_index as 
legacy_update_index from conda_build.utils import ensure_list dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)] @@ -548,7 +551,7 @@ def update_index( current_index_versions = yaml.safe_load(f) for path in dir_paths: - update_index( + legacy_update_index( path, check_md5=check_md5, channel_name=channel_name, @@ -556,7 +559,6 @@ def update_index( threads=threads, verbose=verbose, progress=progress, - hotfix_source_repo=hotfix_source_repo, subdirs=ensure_list(subdir), current_index_versions=current_index_versions, index_file=kwargs.get("index_file", None), diff --git a/conda_build/build.py b/conda_build/build.py index cd416fb14e..e2469a56e3 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -37,7 +37,7 @@ from conda_build.config import Config from conda_build.create_test import create_all_test_files from conda_build.exceptions import CondaBuildException, DependencyNeedsBuildingError -from conda_build.index import get_build_index, update_index +from conda_build.index import _delegated_update_index, get_build_index from conda_build.metadata import FIELDS, MetaData from conda_build.post import ( fix_permissions, @@ -2105,7 +2105,7 @@ def bundle_conda(output, metadata, env, stats, **kw): tmp_path, final_output, metadata.config.timeout, locking=False ) final_outputs.append(final_output) - update_index( + _delegated_update_index( os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1 ) @@ -2911,6 +2911,7 @@ def build( locking=m.config.locking, timeout=m.config.timeout, clear_cache=True, + omit_defaults=False, ) get_build_index( subdir=index_subdir, @@ -2922,6 +2923,7 @@ def build( locking=m.config.locking, timeout=m.config.timeout, clear_cache=True, + omit_defaults=False, ) else: if not provision_only: @@ -3052,7 +3054,7 @@ def _construct_metadata_for_test_from_package(package, config): local_channel = os.path.dirname(local_pkg_location) # update indices in the channel - update_index(local_channel, verbose=config.debug, threads=1) + _delegated_update_index(local_channel, verbose=config.debug, threads=1) try: metadata = render_recipe( @@ -3670,7 +3672,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): ) except OSError: pass - update_index( + _delegated_update_index( os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1 ) sys.exit("TESTS FAILED: " + os.path.basename(pkg)) @@ -4191,10 +4193,12 @@ def clean_build(config, folders=None): def is_package_built(metadata, env, include_local=True): + # bldpkgs_dirs is typically {'$ENVIRONMENT/conda-bld/noarch', '$ENVIRONMENT/conda-bld/osx-arm64'} + # could pop subdirs (last path element) and call update_index() once for d in metadata.config.bldpkgs_dirs: if not os.path.isdir(d): os.makedirs(d) - update_index(d, verbose=metadata.config.debug, warn=False, threads=1) + _delegated_update_index(d, verbose=metadata.config.debug, warn=False, threads=1) subdir = getattr(metadata.config, f"{env}_subdir") urls = [url_path(metadata.config.output_folder), "local"] if include_local else [] diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index fe504d4dc4..42998cd706 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -3,18 +3,21 @@ import logging import os import sys +import warnings + +from conda_index.index import MAX_THREADS_DEFAULT +from conda_index.utils import DEFAULT_SUBDIRS from conda_build import api from conda_build.conda_interface import ArgumentParser -from conda_build.index import MAX_THREADS_DEFAULT -from 
conda_build.utils import DEFAULT_SUBDIRS logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( - description="Update package index metadata files in given directories." + description="Update package index metadata files in given directories. " + "Deprecated; use standalone conda-index." ) p.add_argument( @@ -96,6 +99,10 @@ def parse_args(args): def execute(args): _, args = parse_args(args) + warnings.warn( + "conda-build index is deprecated. Use the standalone conda-index package instead." + ) + api.update_index( args.dir, check_md5=args.check_md5, @@ -105,7 +112,6 @@ def execute(args): patch_generator=args.patch_generator, verbose=args.verbose, progress=args.progress, - hotfix_source_repo=args.hotfix_source_repo, current_index_versions=args.current_index_versions_file, index_file=args.file, ) diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py new file mode 100644 index 0000000000..e81f6e654d --- /dev/null +++ b/conda_build/deprecations.py @@ -0,0 +1,275 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import warnings +from functools import wraps +from types import ModuleType +from typing import Any, Callable + +from packaging.version import Version, parse + +from .__version__ import __version__ + + +class DeprecatedError(RuntimeError): + pass + + +# inspired by deprecation (https://deprecation.readthedocs.io/en/latest/) and +# CPython's warnings._deprecated +class DeprecationHandler: + _version: Version + + def __init__(self, version: Version | str): + """Factory to create a deprecation handle for the specified version. + + :param version: The version to compare against when checking deprecation statuses. + """ + try: + self._version = parse(version) + except TypeError: + self._version = parse("0.0.0.dev0+placeholder") + + def __call__( + self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[(Callable), Callable]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable) -> Callable: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{func.__module__}.{func.__qualname__}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args, **kwargs): + warnings.warn(message, category, stacklevel=2 + stack) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def argument( + self, + deprecate_in: str, + remove_in: str, + argument: str, + *, + rename: str | None = None, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[(Callable), Callable]: + """Deprecation decorator for keyword arguments. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param argument: The argument to deprecate. + :param rename: Optional new argument name. 
+ :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable) -> Callable: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{func.__module__}.{func.__qualname__}({argument})", + # provide a default addendum if renaming and no addendum is provided + addendum=f"Use '{rename}' instead." + if rename and not addendum + else addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args, **kwargs): + # only warn about argument deprecations if the argument is used + if argument in kwargs: + warnings.warn(message, category, stacklevel=2 + stack) + + # rename argument deprecations as needed + value = kwargs.pop(argument, None) + if rename: + kwargs.setdefault(rename, value) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def module( + self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for modules. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + self.topic( + deprecate_in=deprecate_in, + remove_in=remove_in, + topic=self._get_module(stack)[1], + addendum=addendum, + stack=2 + stack, + ) + + def constant( + self, + deprecate_in: str, + remove_in: str, + constant: str, + value: Any, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for module constant/global. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param constant: + :param value: + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect calling module + module, fullname = self._get_module(stack) + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{fullname}.{constant}", + addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # patch module level __getattr__ to alert user that it's time to remove something + super_getattr = getattr(module, "__getattr__", None) + + def __getattr__(name: str) -> Any: + if name == constant: + warnings.warn(message, category, stacklevel=2 + stack) + return value + + if super_getattr: + return super_getattr(name) + + raise AttributeError(f"module '{fullname}' has no attribute '{name}'") + + module.__getattr__ = __getattr__ + + def topic( + self, + deprecate_in: str, + remove_in: str, + *, + topic: str, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for a topic. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param topic: The topic being deprecated. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. 
+ """ + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, remove_in, topic, addendum + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + warnings.warn(message, category, stacklevel=2 + stack) + + def _get_module(self, stack: int) -> tuple[ModuleType, str]: + """Detect the module from which we are being called. + + :param stack: The stacklevel increment. + :return: The module and module name. + """ + import inspect # expensive + + try: + frame = inspect.stack()[2 + stack] + module = inspect.getmodule(frame[0]) + return (module, module.__name__) + except (IndexError, AttributeError): + raise DeprecatedError("unable to determine the calling module") from None + + def _generate_message( + self, deprecate_in: str, remove_in: str, prefix: str, addendum: str + ) -> tuple[type[Warning] | None, str]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param prefix: The message prefix, usually the function name. + :param addendum: Additional messaging. Useful to indicate what to do instead. + :return: The warning category (if applicable) and the message. + """ + deprecate_version = parse(deprecate_in) + remove_version = parse(remove_in) + + if self._version < deprecate_version: + category = PendingDeprecationWarning + warning = f"is pending deprecation and will be removed in {remove_in}." + elif self._version < remove_version: + category = DeprecationWarning + warning = f"is deprecated and will be removed in {remove_in}." + else: + category = None + warning = f"was slated for removal in {remove_in}." + + return ( + category, + " ".join(filter(None, [prefix, warning, addendum])), # message + ) + + +deprecated = DeprecationHandler(__version__) diff --git a/conda_build/index.py b/conda_build/index.py index 32eea4bc8e..690673f0c9 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import bz2 import copy import fnmatch @@ -40,6 +39,7 @@ # BAD BAD BAD - conda internals from conda.core.subdir_data import SubdirData from conda.models.channel import Channel +from conda_index.index import update_index as _update_index from conda_package_handling.api import InvalidArchiveError from jinja2 import Environment, PackageLoader from tqdm import tqdm @@ -62,6 +62,7 @@ human_bytes, url_path, ) +from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, @@ -142,8 +143,13 @@ def get_build_index( channel_urls=None, debug=False, verbose=True, - **kwargs, + locking=None, + timeout=None, ): + """ + Used during package builds to create/get a channel including any local or + newly built packages. This function both updates and gets index data. + """ global local_index_timestamp global local_subdir global local_output_folder @@ -194,7 +200,7 @@ def get_build_index( if local_path not in urls: urls.insert(0, local_path) _ensure_valid_channel(output_folder, subdir) - update_index(output_folder, verbose=debug) + _delegated_update_index(output_folder, verbose=debug) # replace noarch with native subdir - this ends up building an index with both the # native content and the noarch content. 
@@ -202,6 +208,8 @@ def get_build_index( if subdir == "noarch": subdir = conda_interface.subdir try: + # get_index() is like conda reading the index, not conda_index + # creating a new index. cached_index = get_index( channel_urls=urls, prepend=not omit_defaults, @@ -280,6 +288,49 @@ def _ensure_valid_channel(local_folder, subdir): os.makedirs(path) +def _delegated_update_index( + dir_path, + check_md5=False, + channel_name=None, + patch_generator=None, + threads=1, + verbose=False, + progress=False, + subdirs=None, + warn=True, + current_index_versions=None, + debug=False, +): + """ + update_index as called by conda-build, delegating to standalone conda-index. + Needed to allow update_index calls on single subdir. + """ + # conda-build calls update_index on a single subdir internally, but + # conda-index expects to index every subdir under dir_path + parent_path, dirname = os.path.split(dir_path) + if dirname in utils.DEFAULT_SUBDIRS: + dir_path = parent_path + subdirs = [dirname] + + return _update_index( + dir_path, + check_md5=check_md5, + channel_name=channel_name, + patch_generator=patch_generator, + threads=threads, + verbose=verbose, + progress=progress, + subdirs=subdirs, + warn=warn, + current_index_versions=current_index_versions, + debug=debug, + ) + + +# Everything below is deprecated to maintain API/feature compatibility. + + +@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") def update_index( dir_path, check_md5=False, diff --git a/conda_build/render.py b/conda_build/render.py index 1acc0aaf8c..881898dc9d 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -18,10 +18,10 @@ import yaml +import conda_build.index import conda_build.source as source from conda_build import environ, exceptions, utils from conda_build.exceptions import DependencyNeedsBuildingError -from conda_build.index import get_build_index from conda_build.metadata import MetaData, combine_top_level_metadata_with_output from conda_build.variants import ( filter_by_key_value, @@ -318,10 +318,13 @@ def _read_specs_from_package(pkg_loc, pkg_dist): def execute_download_actions(m, actions, env, package_subset=None, require_files=False): - index, _, _ = get_build_index( - getattr(m.config, f"{env}_subdir"), + subdir = getattr(m.config, f"{env}_subdir") + index, _, _ = conda_build.index.get_build_index( + subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, output_folder=m.config.output_folder, + clear_cache=False, + omit_defaults=False, channel_urls=m.config.channel_urls, debug=m.config.debug, verbose=m.config.verbose, diff --git a/conda_build/utils.py b/conda_build/utils.py index 58f33eecde..085d15a6a0 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1404,6 +1404,9 @@ class LoggingContext: "conda_build.index", "conda_build.noarch_python", "urllib3.connectionpool", + "conda_index", + "conda_index.index", + "conda_index.index.convert_cache", ] def __init__(self, level=logging.WARN, handler=None, close=True, loggers=None): diff --git a/news/4645-use-conda-index b/news/4645-use-conda-index new file mode 100644 index 0000000000..94e015e8e6 --- /dev/null +++ b/news/4645-use-conda-index @@ -0,0 +1,23 @@ +### Enhancements + +* Depend on standalone conda-index instead of bundled indexing code. (#4690) + +### Bug fixes + +* + +### Deprecations + +* `conda-build index` is deprecated. `conda-build` still provides the + `conda-build index` a.k.a. `conda index` CLI, but uses standalone + `conda-index` during builds. 
+* Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), + instead of `conda-build index` or `conda index`, to use faster indexing code. + +### Docs + +* + +### Other + +* diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 0be0f99167..3314ccfb42 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -29,6 +29,7 @@ requirements: - beautifulsoup4 - chardet - conda >=4.13 + - conda-index - filelock - jinja2 - packaging diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 6c946c68ba..7f385118cc 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -44,27 +44,26 @@ def test_render_add_channel(): ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" -def test_render_without_channel_fails(): +def test_render_without_channel_fails(tmp_path): # do make extra channel available, so the required package should not be found - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, "out.yaml") - args = [ - "--override-channels", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - rendered_filename, - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta.get("requirements", {}).get("build", []) - if "conda_build_test_requirement" in pkg - ][0] - assert ( - required_package_string == "conda_build_test_requirement" - ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" + rendered_filename = tmp_path / "out.yaml" + args = [ + "--override-channels", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + str(rendered_filename), + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta.get("requirements", {}).get("build", []) + if "conda_build_test_requirement" in pkg + ][0] + assert ( + required_package_string == "conda_build_test_requirement" + ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): diff --git a/tests/requirements.txt b/tests/requirements.txt index fe7c767c60..cebdfd5f75 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -4,6 +4,7 @@ anaconda-client beautifulsoup4 chardet conda >=4.13 +conda-index conda-package-handling conda-verify contextlib2 diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 15e4321d5d..502046e5ad 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -3,6 +3,7 @@ # This file makes sure that our API has not changed. Doing so can not be accidental. Whenever it # happens, we should bump our major build number, because we may have broken someone. 
+import inspect import sys from inspect import getfullargspec as getargspec @@ -185,8 +186,9 @@ def test_api_create_metapackage(): def test_api_update_index(): - argspec = getargspec(api.update_index) - assert argspec.args == [ + # getfullargspec() isn't friends with functools.wraps + argspec = inspect.signature(api.update_index) + assert list(argspec.parameters) == [ "dir_paths", "config", "force", @@ -200,8 +202,10 @@ def test_api_update_index(): "progress", "hotfix_source_repo", "current_index_versions", + "kwargs", ] - assert argspec.defaults == ( + assert tuple(parameter.default for parameter in argspec.parameters.values()) == ( + inspect._empty, None, False, False, @@ -214,4 +218,5 @@ def test_api_update_index(): False, None, None, + inspect._empty, ) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py new file mode 100644 index 0000000000..3df998fe1f --- /dev/null +++ b/tests/test_deprecations.py @@ -0,0 +1,161 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import sys + +import pytest +from conda.deprecations import DeprecatedError, DeprecationHandler + + +@pytest.fixture(scope="module") +def deprecated_v1(): + return DeprecationHandler("1.0") + + +@pytest.fixture(scope="module") +def deprecated_v2(): + return DeprecationHandler("2.0") + + +@pytest.fixture(scope="module") +def deprecated_v3(): + return DeprecationHandler("3.0") + + +def test_pending(deprecated_v1): + @deprecated_v1("2.0", "3.0") + def foo(): + return True + + # alerting user that a function will be unavailable + with pytest.deprecated_call(match="pending deprecation"): + assert foo() + + +def test_deprecated(deprecated_v2): + @deprecated_v2("2.0", "3.0") + def foo(): + return True + + # alerting user that a function will be unavailable + with pytest.deprecated_call(match="deprecated"): + assert foo() + + +def test_remove(deprecated_v3): + # alerting developer that a function needs to be removed + with pytest.raises(DeprecatedError): + + @deprecated_v3("2.0", "3.0") + def foo(): + return True + + +def test_arguments_pending(deprecated_v1): + @deprecated_v1.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + assert foo(1, 2, three=3) + + # normal usage not needing deprecation + assert foo(1, 2) + + +def test_arguments_deprecated(deprecated_v2): + @deprecated_v2.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + assert foo(1, 2, three=3) + + # normal usage not needing deprecation + assert foo(1, 2) + + +def test_arguments_remove(deprecated_v3): + # alerting developer that a keyword argument needs to be removed + with pytest.raises(DeprecatedError): + + @deprecated_v3.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + +def test_module_pending(deprecated_v1): + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + deprecated_v1.module("2.0", "3.0") + + +def test_module_deprecated(deprecated_v2): + # alerting user to pending deprecation + with 
pytest.deprecated_call(match="deprecated"): + deprecated_v2.module("2.0", "3.0") + + +def test_module_remove(deprecated_v3): + # alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.module("2.0", "3.0") + + +def test_constant_pending(deprecated_v1): + deprecated_v1.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + module.SOME_CONSTANT + + +def test_constant_deprecated(deprecated_v2): + deprecated_v2.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + module.SOME_CONSTANT + + +def test_constant_remove(deprecated_v3): + # alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.constant("2.0", "3.0", "SOME_CONSTANT", 42) + + +def test_topic_pending(deprecated_v1): + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + deprecated_v1.topic("2.0", "3.0", topic="Some special topic") + + +def test_topic_deprecated(deprecated_v2): + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + deprecated_v2.topic("2.0", "3.0", topic="Some special topic") + + +def test_topic_remove(deprecated_v3): + # alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.topic("2.0", "3.0", topic="Some special topic") + + +def test_version_fallback(): + """Test that conda can run even if deprecations can't parse the version.""" + version = DeprecationHandler(None)._version # type: ignore + assert version.major == version.minor == version.micro == 0 diff --git a/tests/test_index.py b/tests/test_index.py deleted file mode 100644 index 263147fd87..0000000000 --- a/tests/test_index.py +++ /dev/null @@ -1,1197 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -See also https://github.com/conda-incubator/conda-index -""" -import json -import os -import shutil -import tarfile -from logging import getLogger -from os.path import dirname, isdir, isfile, join -from unittest import mock - -import conda_package_handling.api -import pytest - -import conda_build.api -import conda_build.index -from conda_build.conda_interface import context -from conda_build.utils import copy_into, rm_rf - -from .utils import archive_dir - -log = getLogger(__name__) - -here = os.path.dirname(__file__) - -# NOTE: The recipes for test packages used in this module are at https://github.com/kalefranz/conda-test-packages - -# match ./index_hotfix_pkgs/ -TEST_SUBDIR = "osx-64" - - -def download(url, local_path): - # NOTE: The tests in this module used to download packages from the - # conda-test channel. These packages are small and are now included. 
- if not isdir(dirname(local_path)): - os.makedirs(dirname(local_path)) - - archive_path = join(here, "archives", url.rsplit("/", 1)[-1]) - - shutil.copy(archive_path, local_path) - return local_path - - -def test_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. 
The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - } - }, - "subdirs": ["noarch", "osx-64"], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - updated_packages = expected_repodata_json.get("packages") - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "osx-64", 
"repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - }, - }, - "subdirs": ["noarch", "osx-64"], - } - - assert actual_channeldata_json == expected_channeldata_json - - -def test_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for 
noarch subdir - # ####################################### - assert isfile(join(testing_workdir, "noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "noarch", - }, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. 
This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520039, - "keywords": None, - "recipe_origin": None, - } - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - # test threads=1 flow - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", threads=1 - ) - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for noarch subdir - # ####################################### - assert isfile(join(testing_workdir, "noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": {"subdir": "noarch"}, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - } - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages per subdir here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - 
"https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "noarch", "flask-0.11.1-py_0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/noarch/flask-0.11.1-py_0.tar.bz2" - ) - download(test_package_url, test_package_path) - - # only tell index to index one of them and then assert that it was added - p = os.path.join(testing_workdir, "index_file") - with open(p, "a+") as fh: - fh.write("noarch/flask-0.11.1-py_0.tar.bz2\n") - fh.write("osx/fly-2.5.2-0.tar.bz2\n") - - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", index_file=p - ) - - updated_packages = expected_repodata_json.get("packages", {}) - updated_packages["flask-0.11.1-py_0.tar.bz2"] = { - "build": "py_0", - "build_number": 0, - "depends": [ - "click >=2.0", - "itsdangerous >=0.21", - "jinja2 >=2.4", - "python", - "werkzeug >=0.7", - ], - "license": "BSD", - "md5": "f53df88de4ba505aadbcf42ff310a18d", - "name": "flask", - "noarch": "python", - "sha256": "20bb13679a48679964cd84571c8dd1aa110f8366565f5d82a8f4efa8dd8b160c", - "size": 5334, - "subdir": "noarch", - "version": "0.11.1", - } - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.load(fh) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. 
This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520204, - "keywords": None, - "recipe_origin": None, - }, - "flask": { - "activate.d": False, - "binary_prefix": False, - "deactivate.d": False, - "description": "Flask is a microframework for Python based on Werkzeug and Jinja2. " - "It's intended for getting started very quickly and was developed with best intentions in mind.", - "dev_url": "https://github.com/mitsuhiko/flask", - "doc_source_url": None, - "doc_url": "http://flask.pocoo.org/docs/0.10/", - "home": "http://flask.pocoo.org/", - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "license": "BSD", - "post_link": False, - "pre_link": False, - "pre_unlink": False, - "recipe_origin": None, - "run_exports": {}, - "source_git_url": None, - "source_url": None, - "subdirs": ["noarch"], - "summary": "A microframework based on Werkzeug, Jinja2 and good intentions", - "tags": None, - "text_prefix": False, - "timestamp": 0, - "version": "0.11.1", - }, - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def _build_test_index(workdir): - """ - Copy repodata.json, packages to workdir for testing. - """ - # Python 3.7 workaround "no dirs_exist_ok flag" - index_hotfix_pkgs = join(here, "index_hotfix_pkgs") - for path in os.scandir(index_hotfix_pkgs): - if path.is_dir(): - shutil.copytree( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - elif path.is_file(): - shutil.copyfile( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - - with open(os.path.join(workdir, TEST_SUBDIR, "repodata.json")) as f: - original_metadata = json.load(f) - - pkg_list = original_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -# SLOW -def test_gen_patch_py(testing_workdir): - """ - This is a channel-wide file that applies to many subdirs. It must have a function with this signature: - - def _patch_repodata(repodata, subdir): - - That function must return a dictionary of patch instructions, of the form: - - { - "patch_instructions_version": 1, - "packages": defaultdict(dict), - "revoke": [], - "remove": [], - } - - revoke and remove are lists of filenames. remove makes the file not show up - in the index (it may still be downloadable with a direct URL to the file). - revoke makes packages uninstallable by adding an unsatisfiable dependency. - This can be made installable by including a channel that has that package - (to be created by @jjhelmus). 
- - packages is a dictionary, where keys are package filenames. Values are - dictionaries similar to the contents of each package in repodata.json. Any - values in provided in packages here overwrite the values in repodata.json. - Any value set to None is removed. - """ - _build_test_index(testing_workdir) - - func = """ -def _patch_repodata(repodata, subdir): - pkgs = repodata["packages"] - import fnmatch - replacement_dict = {} - if "track_features_test-1.0-0.tar.bz2" in pkgs: - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - if "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkgs: - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": pkgs["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] + ["dummy"], - "features": None} - revoke_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "revoke_test*")] - remove_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "remove_test*")] - return { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": revoke_list, - "remove": remove_list, - } -""" - patch_file = os.path.join(testing_workdir, "repodata_patch.py") - with open(patch_file, "w") as f: - f.write(func) - - # indexing a second time with the same patchset should keep the removals - for i in (1, 2): - conda_build.index.update_index( - testing_workdir, - patch_generator=patch_file, - verbose=True, - ) - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - print("pass %s track features ok" % i) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - print("pass %s hotfix ok" % i) - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - print("pass %s revoke ok" % i) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - assert "remove_test-1.0-0.tar.bz2" in patched_metadata["removed"], ( - "removed list not populated in run %d" % i - ) - print("pass %s remove ok" % i) - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_metadata = json.load(f) - - pkg_list = pkg_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert ( - pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - ) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert ( - pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - ) - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - -def test_channel_patch_instructions_json(testing_workdir): - _build_test_index(testing_workdir) - - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - 
replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "patch_instructions.json"), "w" - ) as f: - json.dump(patch, f) - - conda_build.index.update_index(testing_workdir) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - formats = (("packages", ".tar.bz2"), ("packages.conda", ".conda")) - - for key, ext in formats: - pkg_list = patched_metadata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0" + ext] - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert pkg_list["track_features_test-1.0-0" + ext]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext in pkg_list - - -def test_patch_from_tarball(testing_workdir): - """This is how we expect external communities to provide patches to us. - We can't let them just give us Python files for us to run, because of the - security risk of arbitrary code execution.""" - _build_test_index(testing_workdir) - - # our hotfix metadata can be generated any way you want. Hard-code this here, but in general, - # people will use some python file to generate this. 
- - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - with open("patch_instructions.json", "w") as f: - json.dump(patch, f) - - with tarfile.open("patch_archive.tar.bz2", "w:bz2") as archive: - archive.add( - "patch_instructions.json", "%s/patch_instructions.json" % TEST_SUBDIR - ) - - conda_build.index.update_index( - testing_workdir, patch_generator="patch_archive.tar.bz2" - ) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -def test_index_of_removed_pkg(testing_metadata): - archive_name = "test_index_of_removed_pkg-1.0-1.tar.bz2" - archive_destination = os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, archive_name - ) - - # copy the package - os.makedirs(os.path.join(testing_metadata.config.croot, TEST_SUBDIR)) - shutil.copy(os.path.join(here, "archives", archive_name), archive_destination) - - conda_build.api.update_index(testing_metadata.config.croot) - - # repodata.json should exist here - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert repodata["packages"] - - for f in [archive_destination]: - os.remove(f) - - # repodata.json should be empty here - conda_build.api.update_index(testing_metadata.config.croot) - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert not repodata["packages"] - with open( - os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, "repodata_from_packages.json" - ) - ) as f: - repodata = json.load(f) - assert not 
repodata["packages"] - - -def test_patch_instructions_with_missing_subdir(): - os.makedirs("linux-64") - os.makedirs("zos-z") - conda_build.api.update_index(".") # what is the current working directory? - # we use conda-forge's patch instructions because they don't have zos-z data, and that triggers an error - pkg = "conda-forge-repodata-patches" - url = "https://anaconda.org/conda-forge/{0}/20180828/download/noarch/{0}-20180828-0.tar.bz2".format( - pkg - ) - patch_instructions = download(url, os.path.join(os.getcwd(), "patches.tar.bz2")) - conda_build.api.update_index(".", patch_generator=patch_instructions) - - -def test_stat_cache_used(testing_workdir, mocker): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - cph_extract.assert_not_called() - - -def test_new_pkg_format_preferred(testing_workdir, mocker): - """Test that in one pass, the .conda file is extracted before the .tar.bz2, and the .tar.bz2 uses the cache""" - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - exts = (".tar.bz2", ".conda") - for ext in exts: - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ext), - test_package_path + ext, - ) - # mock the extract function, so that we can assert that it is not called - # with the .tar.bz2, because the .conda should be preferred - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - # extract should get called once by default. Within a channel, we assume that a .tar.bz2 and .conda have the same contents. - cph_extract.assert_called_once_with(test_package_path + ".conda", mock.ANY, "info") - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - # if we clear the stat cache, we force a re-examination. This re-examination will load files - # from the cache. 
This has been a source of bugs in the past, where the wrong cached file - # being loaded resulted in incorrect hashes/sizes for either the .tar.bz2 or .conda, depending - # on which of those 2 existed in the cache. - rm_rf(os.path.join(testing_workdir, "osx-64", "stat.json")) - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", verbose=True, debug=True - ) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - assert actual_repodata_json == expected_repodata_json - - -def test_new_pkg_format_stat_cache_used(testing_workdir, mocker): - # if we have old .tar.bz2 index cache stuff, assert that we pick up correct md5, sha26 and size for .conda - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".tar.bz2"), - test_package_path + ".tar.bz2", - ) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # mock the extract function, so that we can assert that it is not called, because the stat cache should exist - # if this doesn't work, something about the stat cache is confused. It's a little convoluted, because - # the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available - # because extracting them is much, much faster. - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".conda"), - test_package_path + ".conda", - ) - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - cph_extract.assert_not_called() - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - -@pytest.mark.skipif( - not hasattr(context, "use_only_tar_bz2") or getattr(context, "use_only_tar_bz2"), - reason="conda is set to auto-disable .conda for old conda-build.", -) -def test_current_index_reduces_space(): - repodata = os.path.join( - os.path.dirname(__file__), "index_data", "time_cut", "repodata.json" - ) - with open(repodata) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 7 - assert len(repodata["packages.conda"]) == 3 - trimmed_repodata = conda_build.index._build_current_repodata( - "linux-64", repodata, None - ) - - tar_bz2_keys = { - "two-because-satisfiability-1.2.11-h7b6447c_3.tar.bz2", 
- "two-because-satisfiability-1.2.10-h7b6447c_3.tar.bz2", - "depends-on-older-1.2.10-h7b6447c_3.tar.bz2", - "ancient-package-1.2.10-h7b6447c_3.tar.bz2", - "one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2", - } - # conda 4.7 removes .tar.bz2 files in favor of .conda files - tar_bz2_keys.remove("one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2") - - # .conda files will replace .tar.bz2 files. Older packages that are necessary for satisfiability will remain - assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.3.10-h7b6447c_3.conda" - } - - # we can keep more than one version series using a collection of keys - trimmed_repodata = conda_build.index._build_current_repodata( - "linux-64", repodata, {"one-gets-filtered": ["1.2", "1.3"]} - ) - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.2.11-h7b6447c_3.conda", - "one-gets-filtered-1.3.10-h7b6447c_3.conda", - } - - -def test_current_index_version_keys_keep_older_packages(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - - # pass no version file - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - # only the newest version is kept - assert len(repodata["packages"]) == 1 - assert list(repodata["packages"].values())[0]["version"] == "2.0" - - # pass version file - conda_build.api.update_index( - pkg_dir, current_index_versions=os.path.join(pkg_dir, "versions.yml") - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 2 - - # pass dict that is equivalent to version file - conda_build.api.update_index( - pkg_dir, current_index_versions={"dummy-package": ["1.0"]} - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert list(repodata["packages"].values())[0]["version"] == "1.0" - - -def test_channeldata_picks_up_all_versions_of_run_exports(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - run_exports = repodata["packages"]["run_exports_versions"]["run_exports"] - assert len(run_exports) == 2 - assert "1.0" in run_exports - assert "2.0" in run_exports - - -def test_index_invalid_packages(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "corrupt") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 0 From 6d7cf1c3edfc6dbdd65d13a31e8dd2b78d9cc5b8 Mon Sep 17 00:00:00 2001 From: rishabh11336 <67859818+rishabh11336@users.noreply.github.com> Date: Tue, 16 May 2023 20:51:43 +0530 Subject: [PATCH 123/366] Correct error in package spec documentation (#4884) --- docs/source/resources/package-spec.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index aca0b5fca9..f3b0643cc2 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -314,7 +314,7 @@ parts: Remember that the version specification cannot contain spaces, as spaces are used to delimit the package, version, and build string in the whole match specification. 
``python >= 2.7`` is an -invalid match specification. However,``"python >= 2.7"`` (with double or single quotes) is +invalid match specification. However, ``"python >= 2.7"`` (with double or single quotes) is matched as any version of a package named ``python>=2.7``. When using the command line, put double or single quotes around any package From e368c53865e513131e41ce155af892942da35157 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Wed, 17 May 2023 01:21:48 +0200 Subject: [PATCH 124/366] Add py as a dependency to work around a packaging issue. (#4888) --- tests/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/requirements.txt b/tests/requirements.txt index cebdfd5f75..65b5a0ea46 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -18,6 +18,7 @@ perl pip pkginfo psutil +py # https://github.com/ContinuumIO/anaconda-issues/issues/13198 py-lief pycrypto pyflakes From c4cf1ae1f05fd668feaecceac6d909e7e1f759e1 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 May 2023 18:08:31 +0200 Subject: [PATCH 125/366] Add Python 3.11 support (#4852) * Remove unused pycrypto * Update bdist_conda command * Fix a few import paths to setuptools * Resolve #4717 * Bump minimum numpy --------- Co-authored-by: Jannis Leidel Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 14 ++++++------- conda_build/bdist_conda.py | 20 ++++++++++--------- conda_build/variants.py | 2 +- conda_build/windows.py | 6 ++++-- news/4852-python-3.11 | 19 ++++++++++++++++++ pyproject.toml | 1 + tests/requirements.txt | 1 - .../metadata/_script_win_creates_exe/setup.py | 2 +- .../_script_win_creates_exe_garbled/setup.py | 2 +- .../metadata/jinja_load_setuptools/setup.py | 2 +- .../metadata/jinja_load_yaml/environment.yml | 2 +- .../metadata/state_variables/setup.py | 1 - tests/test-recipes/test-package/setup.py | 1 - .../numpy_used/conda_build_config.yaml | 2 +- tests/test_api_build.py | 2 +- tests/test_api_render.py | 2 +- tests/test_variants.py | 9 ++------- 17 files changed, 52 insertions(+), 36 deletions(-) create mode 100644 news/4852-python-3.11 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 19062be01a..dff9a41151 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -63,14 +63,14 @@ jobs: fail-fast: false matrix: # test all lower versions (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.8', '3.9'] + python-version: ['3.8', '3.9', '3.10'] conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: @@ -172,10 +172,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: @@ -284,10 +284,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 9e9d29e162..6e4a5335b9 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -8,9 
+8,10 @@ import sys import time from collections import defaultdict -from distutils.command.install import install -from distutils.dist import Distribution -from distutils.errors import DistutilsGetoptError, DistutilsOptionError + +from setuptools.command.install import install +from setuptools.dist import Distribution +from setuptools.errors import BaseError, OptionError from conda_build import api from conda_build.build import handle_anaconda_upload @@ -22,6 +23,10 @@ # TODO: Add support for all the options that conda build has +class GetoptError(BaseError): + """The option table provided to 'fancy_getopt()' is bogus.""" + + class CondaDistribution(Distribution): """ Distribution subclass that supports bdist_conda options @@ -29,9 +34,6 @@ class CondaDistribution(Distribution): This class is required if you want to pass any bdist_conda specific options to setup(). To use, set distclass=CondaDistribution in setup(). - **NOTE**: If you use setuptools, you must import setuptools before - importing distutils.commands.bdist_conda. - Options that can be passed to setup() (must include distclass=CondaDistribution): @@ -115,7 +117,7 @@ def initialize_options(self): def finalize_options(self): opt_dict = self.distribution.get_option_dict("install") if self.prefix: - raise DistutilsOptionError("--prefix is not allowed") + raise OptionError("--prefix is not allowed") opt_dict["prefix"] = ("bdist_conda", self.config.host_prefix) super().finalize_options() @@ -184,7 +186,7 @@ def run(self): c.read_file(StringIO(newstr)) except Exception as err: # This seems to be the best error here - raise DistutilsGetoptError( + raise GetoptError( "ERROR: entry-points not understood: " + str(err) + "\nThe string was" @@ -203,7 +205,7 @@ def run(self): entry_points[section] = None if not isinstance(entry_points, dict): - raise DistutilsGetoptError( + raise GetoptError( "ERROR: Could not add entry points. They were:\n" + entry_points ) else: diff --git a/conda_build/variants.py b/conda_build/variants.py index 289a61385c..d7c6841238 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -19,7 +19,7 @@ DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", - "numpy": "1.21", + "numpy": "1.22", # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. "perl": "5.26.2", diff --git a/conda_build/windows.py b/conda_build/windows.py index ffaad7f5ca..84da4a0f0d 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -10,8 +10,10 @@ # Leverage the hard work done by setuptools/distutils to find vcvarsall using # either the registry or the VS**COMNTOOLS environment variable try: - from distutils.msvc9compiler import WINSDK_BASE, Reg - from distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall + from setuptools._distutils.msvc9compiler import WINSDK_BASE, Reg + from setuptools._distutils.msvc9compiler import ( + find_vcvarsall as distutils_find_vcvarsall, + ) except: # Allow some imports to work for cross or CONDA_SUBDIR usage. pass diff --git a/news/4852-python-3.11 b/news/4852-python-3.11 new file mode 100644 index 0000000000..58f87f9796 --- /dev/null +++ b/news/4852-python-3.11 @@ -0,0 +1,19 @@ +### Enhancements + +* Add Python 3.11 support. 
(#4852) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index d71c386c2b..0d7db9122b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy" ] diff --git a/tests/requirements.txt b/tests/requirements.txt index 65b5a0ea46..2bb2fe7019 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -20,7 +20,6 @@ pkginfo psutil py # https://github.com/ContinuumIO/anaconda-issues/issues/13198 py-lief -pycrypto pyflakes pytest pytest-cov diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py index 5a17ae9b0a..8e09c8abe4 100644 --- a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py +++ b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup VERSION = '1.test' diff --git a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml index 79859474e3..3bf9010a3b 100644 --- a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml +++ b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml @@ -2,6 +2,6 @@ name: foo channels: - defaults dependencies: - - python=3.10 + - python - tqdm prefix: /home/abraham/.conda/envs/foo diff --git a/tests/test-recipes/metadata/state_variables/setup.py b/tests/test-recipes/metadata/state_variables/setup.py index 2f8660659e..ecd50c54c6 100644 --- a/tests/test-recipes/metadata/state_variables/setup.py +++ b/tests/test-recipes/metadata/state_variables/setup.py @@ -1,6 +1,5 @@ import os from setuptools import setup -# from distutils.core import setup if not os.getenv("CONDA_BUILD_STATE") == "RENDER": raise ValueError("Conda build state not set correctly") diff --git a/tests/test-recipes/test-package/setup.py b/tests/test-recipes/test-package/setup.py index f3ec4e663c..b0f90841cb 100644 --- a/tests/test-recipes/test-package/setup.py +++ b/tests/test-recipes/test-package/setup.py @@ -1,6 +1,5 @@ import sys from setuptools import setup -# from distutils.core import setup # test with an old version of Python that we'll never normally use if sys.version_info[:2] == (3, 5): diff --git 
a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml index 6a2ce5d722..56a761a011 100644 --- a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml +++ b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml @@ -2,5 +2,5 @@ python: - 3.8 - 3.9 numpy: - - 1.16 - 1.19 + - 1.22 diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 15e1e641d7..e838f17361 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -430,7 +430,7 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = sorted({"32", *platforms}) - compilers = ["3.9", "3.10"] + compilers = ["3.10", "3.11"] msvc_vers = ["14.0"] else: msvc_vers = [] diff --git a/tests/test_api_render.py b/tests/test_api_render.py index b96940a4cb..a68f69135e 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -213,7 +213,7 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config): def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it - python_versions = ["2.6", "3.4", "3.10"] + python_versions = ["2.6", "3.4", "3.11"] config = {"python": python_versions, "bzip2": ["0.9", "1.0"]} with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) diff --git a/tests/test_variants.py b/tests/test_variants.py index 4df2c3f768..3e7ba621a5 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -59,7 +59,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): python 3.5 -> python >=3.5,<3.6.0a0 otherPackages 3.5 -> otherPackages 3.5 """ - variants = {"python": ["3.9", "3.10"]} + variants = {"python": ["3.10", "3.11"]} testing_config.ignore_system_config = True # write variants to disk @@ -86,7 +86,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): assert { *metadata[0][0].meta["requirements"]["run"], *metadata[1][0].meta["requirements"]["run"], - } == {"python >=3.9,<3.10.0a0", "python >=3.10,<3.11.0a0"} + } == {"python >=3.10,<3.11.0a0", "python >=3.11,<3.12.0a0"} def test_use_selectors_in_variants(testing_workdir, testing_config): @@ -493,11 +493,6 @@ def test_target_platform_looping(): assert len(outputs) == 2 -@pytest.mark.skipif( - on_mac and platform.machine() == "arm64", - reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}", -) -# TODO Remove the above skip decorator once https://github.com/conda/conda-build/issues/4717 is resolved def test_numpy_used_variable_looping(): outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4 From 25b859a4d60112c61bcadc97a284fd0db019ba21 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Wed, 17 May 2023 19:49:50 +0200 Subject: [PATCH 126/366] Use `deprecated.topic` for conda-build index deprecation. 
(#4885) --- conda_build/cli/main_index.py | 15 +++++++++------ news/4645-use-conda-index | 4 ++-- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index 42998cd706..1f596b5d37 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -3,13 +3,13 @@ import logging import os import sys -import warnings from conda_index.index import MAX_THREADS_DEFAULT from conda_index.utils import DEFAULT_SUBDIRS -from conda_build import api -from conda_build.conda_interface import ArgumentParser +from .. import api +from ..conda_interface import ArgumentParser +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) @@ -17,7 +17,7 @@ def parse_args(args): p = ArgumentParser( description="Update package index metadata files in given directories. " - "Deprecated; use standalone conda-index." + "Pending deprecated, please use the standalone conda-index project." ) p.add_argument( @@ -99,8 +99,11 @@ def parse_args(args): def execute(args): _, args = parse_args(args) - warnings.warn( - "conda-build index is deprecated. Use the standalone conda-index package instead." + deprecated.topic( + "3.25.0", + "4.0.0", + topic="`conda index` and `conda-index`", + addendum="Use the `conda-index` project instead.", ) api.update_index( diff --git a/news/4645-use-conda-index b/news/4645-use-conda-index index 94e015e8e6..cc86501f1b 100644 --- a/news/4645-use-conda-index +++ b/news/4645-use-conda-index @@ -8,8 +8,8 @@ ### Deprecations -* `conda-build index` is deprecated. `conda-build` still provides the - `conda-build index` a.k.a. `conda index` CLI, but uses standalone +* Inline `conda index` logic is pending deprecation. `conda-build` still provides + `conda-index` a.k.a. `conda index` CLI, but uses standalone `conda-index` during builds. * Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), instead of `conda-build index` or `conda index`, to use faster indexing code. 
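The deprecation in PATCH 126 above directs users to the standalone conda-index project as the replacement for `conda index` / `conda-build index`, but it does not show what the replacement call looks like. A minimal sketch follows; the `update_index` entry point and its `channel_name` keyword are taken from the conda-index documentation and are assumptions here, since the diff above only confirms the `MAX_THREADS_DEFAULT` and `DEFAULT_SUBDIRS` imports from that package.

    # Sketch only: update_index and channel_name are assumed from the
    # standalone conda-index docs; they are not shown in the patch above.
    from conda_index.index import update_index

    # Re-index an existing channel directory so repodata.json and
    # channeldata.json are regenerated by the standalone indexer.
    update_index("./my-channel", channel_name="my-channel")

The same project also documents a command-line form along the lines of `python -m conda_index ./my-channel`, which avoids the deprecated `conda index` entry point entirely.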
From 51db8e51cdba5df452ce0145361c399e7c02d7c7 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 May 2023 16:06:20 +0200 Subject: [PATCH 127/366] Always build canary when pushed to main/feature/release (#4893) --- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index dff9a41151..7551a961d2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -379,7 +379,7 @@ jobs: aggregate: # only aggregate test suite if there are code changes needs: [changes, linux, windows, macos] - if: needs.changes.outputs.code == 'true' && always() + if: always() && needs.changes.outputs.code == 'true' runs-on: ubuntu-latest steps: @@ -423,7 +423,7 @@ jobs: # - this is the main repo, and # - we are on the main, feature, or release branch if: >- - success() + always() && !github.event.repository.fork && ( github.ref_name == 'main' From a6fdb3e6717c5590a7bca68228f11274974ee809 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 May 2023 16:07:24 +0200 Subject: [PATCH 128/366] Revert py dependency (#4894) --- tests/requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/requirements.txt b/tests/requirements.txt index 2bb2fe7019..e8f64fcc10 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -18,7 +18,6 @@ perl pip pkginfo psutil -py # https://github.com/ContinuumIO/anaconda-issues/issues/13198 py-lief pyflakes pytest From fb8ea28319616b72ba13f7d96faaa8168e4587ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 16:31:01 +0200 Subject: [PATCH 129/366] Bump requests from 2.26.0 to 2.31.0 in /docs (#4897) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index c30be920a9..14557857f9 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,7 +2,7 @@ linkify-it-py==1.0.1 myst-parser==0.15.2 Pillow==9.3.0 PyYAML==5.4.1 -requests==2.26.0 +requests==2.31.0 ruamel.yaml==0.17.16 Sphinx==4.2.0 sphinx-argparse==0.3.1 From 4e57bb27cc62dce9014a7dce620bae2d8914a305 Mon Sep 17 00:00:00 2001 From: Riadh Fezzani Date: Tue, 23 May 2023 17:46:49 +0000 Subject: [PATCH 130/366] Fix "Unknown format" / "Failed to get_static_lib_exports" messages (#4850) * Disable lief logging * Revert enable_static_default value --- conda_build/config.py | 2 +- conda_build/os_utils/liefldd.py | 1 + news/4850-Fix-unintended-warnings | 5 +++++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 news/4850-Fix-unintended-warnings diff --git a/conda_build/config.py b/conda_build/config.py index fad7744d29..377d0de9eb 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -50,7 +50,7 @@ def set_invocation_time(): error_overlinking_default = "false" error_overdepending_default = "false" noarch_python_build_age_default = 0 -enable_static_default = "true" +enable_static_default = "false" no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] exit_on_verify_error_default = False diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9b01e5d07d..32c4f8cd40 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -29,6 +29,7 @@ try: import lief + lief.logging.disable() have_lief = True except: pass diff --git a/news/4850-Fix-unintended-warnings 
b/news/4850-Fix-unintended-warnings new file mode 100644 index 0000000000..f416781542 --- /dev/null +++ b/news/4850-Fix-unintended-warnings @@ -0,0 +1,5 @@ +### Bug fixes + +* Disabling LIEF logging to remove "Unknown format" warning message. +* Revert `enable_static` default value in `conda_build.config` to remove + "Failed to get_static_lib_exports" warning messages. From 06ccbea70e07da8cd890d327de9ec1a79abbb9c7 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 24 May 2023 05:49:50 +0200 Subject: [PATCH 131/366] Changelog 3.25.0 (#4896) Co-authored-by: Bianca Henderson --- .authors.yml | 39 +++++++++++---- .mailmap | 3 ++ AUTHORS.md | 3 ++ CHANGELOG.md | 49 +++++++++++++++++++ ...nsure-test-commands-and-run_test-both-work | 19 ------- news/4606-noarch-platform-deps | 21 -------- news/4645-use-conda-index | 23 --------- news/4692-add-svn-source-credential-support | 19 ------- ...17-require-source-when-load-file-data-used | 19 ------- news/4832-subdir-downloads | 19 ------- news/4836-auto-format | 20 -------- news/4840-hatchling-build-system | 19 ------- news/4843-config-cleanup | 20 -------- news/4845-enable-xattr-test-macos | 19 ------- news/4852-python-3.11 | 19 ------- news/4862-multi-output-subdir-variable | 19 ------- 16 files changed, 84 insertions(+), 246 deletions(-) delete mode 100644 news/4429-ensure-test-commands-and-run_test-both-work delete mode 100644 news/4606-noarch-platform-deps delete mode 100644 news/4645-use-conda-index delete mode 100644 news/4692-add-svn-source-credential-support delete mode 100644 news/4817-require-source-when-load-file-data-used delete mode 100644 news/4832-subdir-downloads delete mode 100644 news/4836-auto-format delete mode 100644 news/4840-hatchling-build-system delete mode 100644 news/4843-config-cleanup delete mode 100644 news/4845-enable-xattr-test-macos delete mode 100644 news/4852-python-3.11 delete mode 100644 news/4862-multi-output-subdir-variable diff --git a/.authors.yml b/.authors.yml index 626c87d60d..dc41041bbf 100644 --- a/.authors.yml +++ b/.authors.yml @@ -13,14 +13,14 @@ email: jjhelmus@gmail.com aliases: - Jonathan Helmus - num_commits: 109 + num_commits: 110 first_commit: 2014-06-09 17:25:05 github: jjhelmus - name: Isuru Fernando email: isuruf@gmail.com alternate_emails: - isuru.11@cse.mrt.ac.lk - num_commits: 82 + num_commits: 83 first_commit: 2017-06-16 15:14:34 github: isuruf - name: Dan Blanchard @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 140 + num_commits: 144 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -1061,7 +1061,7 @@ github: dbast - name: Duncan Macleod email: duncan.macleod@ligo.org - num_commits: 5 + num_commits: 6 first_commit: 2019-06-13 08:07:25 github: duncanmmacleod - name: Chris Osborn @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 106 + num_commits: 121 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 33 + num_commits: 26 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 38 + num_commits: 41 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 14 + num_commits: 16 
first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 42 + num_commits: 56 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1268,7 +1268,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 6 + num_commits: 10 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1364,3 +1364,22 @@ num_commits: 1 first_commit: 2023-03-22 00:34:22 github: johnnynunez +- name: Ryan Keith + email: rkeith@anaconda.com + aliases: + - Ryan + github: ryanskeith + num_commits: 2 + first_commit: 2023-03-22 03:11:02 +- name: Rishabh Singh + email: 67859818+rishabh11336@users.noreply.github.com + aliases: + - rishabh11336 + github: rishabh11336 + num_commits: 2 + first_commit: 2023-05-15 11:19:48 +- name: Ferry Firmansjah + email: 103191403+ffirmanff@users.noreply.github.com + github: ffirmanff + num_commits: 1 + first_commit: 2023-04-14 11:54:03 diff --git a/.mailmap b/.mailmap index 95320e0be9..f8fc95d1a0 100644 --- a/.mailmap +++ b/.mailmap @@ -88,6 +88,7 @@ Ernst Luring Evan Hubinger Evan Klitzke Felix Kühnl +Ferry Firmansjah <103191403+ffirmanff@users.noreply.github.com> Filipe Fernandes ocefpaf Floris Bruynooghe Gabriel Reis @@ -207,6 +208,7 @@ Riccardo Vianello Richard Frank Richard Hattersley Rick Izzo +Rishabh Singh <67859818+rishabh11336@users.noreply.github.com> rishabh11336 <67859818+rishabh11336@users.noreply.github.com> Robert Coop Robert Langlois Robert T. McGibbon Robert McGibbon @@ -216,6 +218,7 @@ Ruben Vorderman Ryan Dale daler Ryan Grout Ryan Grout Ryan Grout Ryan Grout +Ryan Keith Ryan Rylan Chord Satoshi Yagi satoshi Scheah diff --git a/AUTHORS.md b/AUTHORS.md index 60a06dd9fe..10fce92d1f 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -71,6 +71,7 @@ Authors are sorted alphabetically. * Evan Hubinger * Evan Klitzke * Felix Kühnl +* Ferry Firmansjah * Filipe Fernandes * Floris Bruynooghe * Gabriel Reis @@ -172,6 +173,7 @@ Authors are sorted alphabetically. * Richard Frank * Richard Hattersley * Rick Izzo +* Rishabh Singh * Robert Coop * Robert Langlois * Robert T. McGibbon @@ -180,6 +182,7 @@ Authors are sorted alphabetically. * Ruben Vorderman * Ryan Dale * Ryan Grout +* Ryan Keith * Rylan Chord * Satoshi Yagi * Scheah diff --git a/CHANGELOG.md b/CHANGELOG.md index cdcbe6e6f6..6507e65c91 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,54 @@ [//]: # (current developments) +## 3.25.0 (2023-05-22) + +### Enhancements + +* Noarch packages that use virtual packages have the virtual packages added to the hash contents of the package. This facilitates the building of noarch packages multiple times for different platforms with platform specific dependencies. (#4606) +* Add support for `svn` source credentials (`svn_username` and `svn_password`). (#4692) +* Depend on standalone `conda-index` instead of bundled indexing code. (#4828) +* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) +* Add Python 3.11 support. (#4852) + +### Bug fixes + +* Ensure `tests/commands` are also run in the presence of `run_test.*` (#4429) +* Require the source when rendering a recipe that uses the `load_file_data` function. (#4817) +* Download packages during build into the correct `subdir` folder. 
(#4832) +* Use a unique `subdir` variable name when rebuilding the index for multi-output builds. (#4862) + +### Deprecations + +* Inline `conda index` logic is pending deprecation. `conda-build` still provides `conda-index` a.k.a. `conda index` CLI, but uses standalone `conda-index` during builds. (#4828) +* Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), instead of `conda-build index` or `conda index`, to use faster indexing code. (#4828) +* Mark `conda_build.metadata.ns_cfg` as pending deprecation. Use `conda_build.get_selectors.get_selectors` instead. (#4837) +* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) +* Mark `conda_build.config._ensure_dir` as pending deprecation. Use `stdlib`'s `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) + +### Other + +* Format with `black` and replaced pre-commit's `darker` hook with `black`. (#4836) +* Format with `isort` and add pre-commit `isort` hook. (#4836) +* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. (#4843) +* Enable `xattr` test on macOS. (#4845) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @duncanmmacleod +* @ffirmanff made their first contribution in https://github.com/conda/conda-build/pull/4692 +* @isuruf +* @jezdez +* @jakirkham +* @jjhelmus +* @kenodegard +* @rishabh11336 made their first contribution in https://github.com/conda/conda-build/pull/4782 +* @ryanskeith made their first contribution in https://github.com/conda/conda-build/pull/4843 +* @pre-commit-ci[bot] + + ## 3.24.0 (2023-03-22) ### Bug fixes diff --git a/news/4429-ensure-test-commands-and-run_test-both-work b/news/4429-ensure-test-commands-and-run_test-both-work deleted file mode 100644 index 0015ec7cd8..0000000000 --- a/news/4429-ensure-test-commands-and-run_test-both-work +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Ensure that `tests/commands` get run also in the presence of `run_test.*` (#4427) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4606-noarch-platform-deps b/news/4606-noarch-platform-deps deleted file mode 100644 index 1cace82510..0000000000 --- a/news/4606-noarch-platform-deps +++ /dev/null @@ -1,21 +0,0 @@ -### Enhancements - -* Noarch packages that use virtual packages have the virtual packages added to the hash contents of the package. - This facilitates the building of noarch packages multiple times for different platforms with platform - specific dependencies. (#4606) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4645-use-conda-index b/news/4645-use-conda-index deleted file mode 100644 index cc86501f1b..0000000000 --- a/news/4645-use-conda-index +++ /dev/null @@ -1,23 +0,0 @@ -### Enhancements - -* Depend on standalone conda-index instead of bundled indexing code. (#4690) - -### Bug fixes - -* - -### Deprecations - -* Inline `conda index` logic is pending deprecation. `conda-build` still provides - `conda-index` a.k.a. `conda index` CLI, but uses standalone - `conda-index` during builds. -* Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), - instead of `conda-build index` or `conda index`, to use faster indexing code. 
- -### Docs - -* - -### Other - -* diff --git a/news/4692-add-svn-source-credential-support b/news/4692-add-svn-source-credential-support deleted file mode 100644 index d8ac4c37d8..0000000000 --- a/news/4692-add-svn-source-credential-support +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add support for svn source credential (`svn_username` and `svn_password`). (#4692) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4817-require-source-when-load-file-data-used b/news/4817-require-source-when-load-file-data-used deleted file mode 100644 index a1c5ac52cd..0000000000 --- a/news/4817-require-source-when-load-file-data-used +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Require the source when rendering a recipe that uses the load_file_data function (#4817, fixes #4807) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4832-subdir-downloads b/news/4832-subdir-downloads deleted file mode 100644 index 4b0e344f4c..0000000000 --- a/news/4832-subdir-downloads +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Download packages during build into the correct subdir folder. (#4750) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4836-auto-format b/news/4836-auto-format deleted file mode 100644 index 60660bdd5e..0000000000 --- a/news/4836-auto-format +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Format with black and replaced pre-commit's darker hook with black. (#4836) -* Format with isort and add pre-commit isort hook. (#4836) diff --git a/news/4840-hatchling-build-system b/news/4840-hatchling-build-system deleted file mode 100644 index 40c849137b..0000000000 --- a/news/4840-hatchling-build-system +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4843-config-cleanup b/news/4843-config-cleanup deleted file mode 100644 index 9db85efc15..0000000000 --- a/news/4843-config-cleanup +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) -* Mark `conda_build.config._ensure_dir` as pending deprecation. Use stdlib's `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) - -### Docs - -* - -### Other - -* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. (#4843) diff --git a/news/4845-enable-xattr-test-macos b/news/4845-enable-xattr-test-macos deleted file mode 100644 index a1110a6a44..0000000000 --- a/news/4845-enable-xattr-test-macos +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Enable `xattr` test on macOS. (#4845) diff --git a/news/4852-python-3.11 b/news/4852-python-3.11 deleted file mode 100644 index 58f87f9796..0000000000 --- a/news/4852-python-3.11 +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add Python 3.11 support. 
(#4852) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4862-multi-output-subdir-variable b/news/4862-multi-output-subdir-variable deleted file mode 100644 index 811fe525ac..0000000000 --- a/news/4862-multi-output-subdir-variable +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Use a unique subdir variable name when rebuilding the index for multi-output builds (#4862, fixes #4855) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From fcd6880b628e937e8db5ec620b80524fceb4d259 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 26 May 2023 10:11:11 +0200 Subject: [PATCH 132/366] Update recipe (#4899) --- pyproject.toml | 1 + recipe/bld.bat | 1 - recipe/build.sh | 3 -- recipe/conda_build_config.yaml | 5 ++ recipe/meta.yaml | 84 +++++++++++++--------------------- recipe/run_test.py | 5 -- 6 files changed, 38 insertions(+), 61 deletions(-) delete mode 100644 recipe/bld.bat delete mode 100644 recipe/build.sh create mode 100644 recipe/conda_build_config.yaml delete mode 100644 recipe/run_test.py diff --git a/pyproject.toml b/pyproject.toml index 0d7db9122b..33b00c3a1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ dependencies = [ "beautifulsoup4", "chardet", "conda >=4.13", + "conda-index", "conda-package-handling >=1.3", "filelock", "glob2 >=0.6", diff --git a/recipe/bld.bat b/recipe/bld.bat deleted file mode 100644 index ea289b187c..0000000000 --- a/recipe/bld.bat +++ /dev/null @@ -1 +0,0 @@ -"$PYTHON" -m pip install . -vv diff --git a/recipe/build.sh b/recipe/build.sh deleted file mode 100644 index 9d7b162c92..0000000000 --- a/recipe/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -"$PYTHON" -m pip install . -vv diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml new file mode 100644 index 0000000000..a75aff37d1 --- /dev/null +++ b/recipe/conda_build_config.yaml @@ -0,0 +1,5 @@ +python: + - 3.8 + - 3.9 + - 3.10 + - 3.11 diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 3314ccfb42..850b742901 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -3,9 +3,11 @@ package: version: {{ GIT_DESCRIBE_TAG }}.{{ GIT_BUILD_STR }} source: + # git_url only captures committed code git_url: ../ build: + script: {{ PYTHON }} -m pip install . 
--no-deps --no-build-isolation -vv entry_points: - conda-build = conda_build.cli.main_build:main - conda-convert = conda_build.cli.main_convert:main @@ -19,91 +21,69 @@ build: requirements: build: - - git + - git # for source/git_url above host: - python - - setuptools + - pip - hatchling >=1.12.2 - hatch-vcs >=0.2.0 + - wheel run: - beautifulsoup4 - chardet - conda >=4.13 - conda-index + - conda-package-handling >=1.3 - filelock + - glob2 >=0.6 - jinja2 + - m2-patch >=2.6 # [win] - packaging - - patchelf # [linux] - patch >=2.6 # [not win] - - m2-patch >=2.6 # [win] + - patchelf # [linux] - pkginfo - psutil - py-lief # [not win] - python + - python-libarchive-c + - pytz - pyyaml - requests - six - - glob2 >=0.6 - - pytz - tomli # [py<311] - tqdm - - conda-package-handling >=1.3 - - python-libarchive-c run_constrained: - conda-verify >=3.0.2 test: + imports: + # high-level import + - conda_build + # new/updated submodules (can be dropped after 1-2 releases) + - conda_build.index files: - test_bdist_conda_setup.py - requires: - - pytest - - pytest-cov - - pytest-mock - # Optional: you can use pytest-xdist to run the tests in parallel - # - pytest-env # [win] - # - pytest-xdist - # - conda-verify >=3.0.3 # todo once it is released commands: - - type -P conda-build # [unix] - - where conda-build # [win] - - conda build -h - - type -P conda-convert # [unix] - - where conda-convert # [win] - - conda convert -h - - type -P conda-develop # [unix] - - where conda-develop # [win] - - conda develop -h - - type -P conda-index # [unix] - - where conda-index # [win] - - conda index -h - - type -P conda-inspect # [unix] - - where conda-inspect # [win] - - conda inspect -h - - conda inspect linkages -h \| grep "--name ENVIRONMENT" # [unix] - - conda inspect objects -h \| grep "--name ENVIRONMENT" # [osx] - - type -P conda-metapackage # [unix] - - where conda-metapackage # [win] - - conda metapackage -h - - type -P conda-render # [unix] - - where conda-render # [win] - - conda render -h - - type -P conda-skeleton # [unix] - - where conda-skeleton # [win] - - conda skeleton -h - - where conda-debug # [win] - - conda debug -h - # test that conda sees entry points appropriately in help + # builtin subcommands - conda --help - - # Check for bdist_conda + - conda build --help + - conda convert --help + - conda develop --help + - conda index --help + - conda inspect --help + - conda inspect linkages --help # [unix] + - conda inspect objects --help # [osx] + - conda metapackage --help + - conda render --help + - conda skeleton --help + - conda debug --help + # bdist_conda - python test_bdist_conda_setup.py bdist_conda --help - imports: - - conda_build - source_files: - - tests about: - home: https://github.com/conda/conda-build + home: https://conda.org license: BSD-3-Clause license_file: LICENSE summary: Canary release of conda-build + doc_url: https://conda.io/projects/conda-build/en/latest/ + dev_url: https://github.com/conda/conda-build diff --git a/recipe/run_test.py b/recipe/run_test.py deleted file mode 100644 index d9a40df806..0000000000 --- a/recipe/run_test.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build - -print("conda_build.__version__: %s" % conda_build.__version__) From 78d2a4758be9a5b2e6c194b562ae6bd16092ade6 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Fri, 26 May 2023 01:18:11 -0700 Subject: [PATCH 133/366] Drop extra Jinja w/package names from variants.rst (#4834) Co-authored-by: Bianca Henderson --- 
docs/source/resources/variants.rst | 26 +++++++++---------- .../4834-drop-extra-jinja-pkg-names-from-docs | 19 ++++++++++++++ 2 files changed, 32 insertions(+), 13 deletions(-) create mode 100644 news/4834-drop-extra-jinja-pkg-names-from-docs diff --git a/docs/source/resources/variants.rst b/docs/source/resources/variants.rst index d46bdf13e2..3209fd3620 100644 --- a/docs/source/resources/variants.rst +++ b/docs/source/resources/variants.rst @@ -49,7 +49,7 @@ meta.yaml contents like: requirements: build: - - python {{ python }} + - python run: - python @@ -89,7 +89,7 @@ map for the content below. requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -154,9 +154,9 @@ map for the content below. requirements: build: - - numpy {{ numpy }} + - numpy run: - - numpy {{ numpy }} + - numpy For legacy compatibility, Python is pinned implicitly without specifying ``{{ python }}`` in your recipe. This is generally intractable to extend to @@ -261,12 +261,12 @@ First, the ``meta.yaml`` file: - name: py-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - python {{ python }} + - python - name: r-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - r-base {{ r_base }} + - r-base Next, the ``conda_build_config.yaml`` file, specifying our build matrix: @@ -343,9 +343,9 @@ Again, with ``meta.yaml`` contents like: requirements: build: - - python {{ python }} + - python run: - - python {{ python }} + - python You could supply a variant to build this recipe like so: @@ -569,7 +569,7 @@ requirements, and a variant that includes 2 NumPy versions: requirements: build: - - numpy {{ numpy }} + - numpy run: - numpy @@ -844,7 +844,7 @@ An example variant/recipe is shown here: requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -890,7 +890,7 @@ function. requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', max_pin='x.x') }} @@ -911,7 +911,7 @@ Each can be passed independently of the other. An example of specifying both: requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', min_pin='x.x', max_pin='x.x') }} @@ -933,7 +933,7 @@ You can also pass the minimum or maximum version directly. These arguments super requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', lower_bound='1.10', upper_bound='3.0') }} diff --git a/news/4834-drop-extra-jinja-pkg-names-from-docs b/news/4834-drop-extra-jinja-pkg-names-from-docs new file mode 100644 index 0000000000..dfa93c007e --- /dev/null +++ b/news/4834-drop-extra-jinja-pkg-names-from-docs @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Package name variables in Jinja are not necessary, dropping this from variants.rst docs file. 
(#4834) + +### Other + +* From 73734ca636a1f9a4499cc8e6156ff840bf69bb3e Mon Sep 17 00:00:00 2001 From: Srivas Venkatesh <110486050+sven6002@users.noreply.github.com> Date: Fri, 2 Jun 2023 01:37:27 -0500 Subject: [PATCH 134/366] Also run conda-build tests from a cron job to catch ecosystem breakage (#4891) * add scheduled cron job for conda-build * run daily to test feature; reduce when happy with feature --------- Co-authored-by: Daniel Holth --- .github/workflows/tests.yml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7551a961d2..92bbc10830 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -4,18 +4,24 @@ name: Tests on: - # NOTE: github.event context is push payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#push push: branches: - main - feature/** - '[0-9].*.x' # e.g., 3.24.x - # NOTE: github.event context is pull_request payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch + workflow_dispatch: + + # no payload + schedule: + # https://crontab.guru/#37_18_*_*_* + - cron: 37 18 * * * + concurrency: # Concurrency group that uses the workflow name and PR number if available # or commit SHA as a fallback. If a new build is triggered under that From b9089ed1b8b8c3432556d3ea3fafe685019bd447 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 5 Jun 2023 06:48:15 -0500 Subject: [PATCH 135/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4905)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- HOW_WE_USE_GITHUB.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index df3019210f..aa5a3691c6 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -265,6 +265,21 @@ please post details to the [Nucleus forums](https://community.anaconda.cloud/).
+
+<details>
+<summary>
+Slow solving of conda environment
+</summary>
+
+Hi [@username],
+
+Thanks for voicing your concern about the performance of our dependency solver. To fix this, our official recommendation is using the "conda-libmamba-solver" instead of the default "classic" solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
+
+In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
+
+</details>
+ + In order to not have to manually type or copy/paste the above repeatedly, note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. ## Commit Signing From b9cb695bf42e8cce9f9bef87935f7168d0d952f0 Mon Sep 17 00:00:00 2001 From: Daniel Bast <2790401+dbast@users.noreply.github.com> Date: Mon, 5 Jun 2023 20:44:43 +0200 Subject: [PATCH 136/366] Log the extra-meta data we burn into the package for better testability (#4901) Co-authored-by: jaimergp --- conda_build/build.py | 5 +++++ news/4901-log-extra-meta-data | 19 +++++++++++++++++++ tests/test_api_build.py | 9 +++++++-- 3 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 news/4901-log-extra-meta-data diff --git a/conda_build/build.py b/conda_build/build.py index e2469a56e3..81fb526011 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1445,6 +1445,11 @@ def write_about_json(m): extra = m.get_section("extra") # Add burn-in information to extra if m.config.extra_meta: + log = utils.get_logger(__name__) + log.info( + "Adding the following extra-meta data to about.json: %s", + m.config.extra_meta, + ) extra.update(m.config.extra_meta) env = environ.Environment(root_dir) d["root_pkgs"] = env.package_specs() diff --git a/news/4901-log-extra-meta-data b/news/4901-log-extra-meta-data new file mode 100644 index 0000000000..5f6325a4c3 --- /dev/null +++ b/news/4901-log-extra-meta-data @@ -0,0 +1,19 @@ +### Enhancements + +* Log extra-meta data to make it easier to verify that the right extra-meta data is burned into packages (also helps to co-relate packages and their build-log). The feature was first introduced in #4303 and is now improved via the logging call. (#4901) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index e838f17361..77e55856c3 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -1826,12 +1826,17 @@ def test_ignore_verify_codes(testing_config): @pytest.mark.sanity -def test_extra_meta(testing_config): +def test_extra_meta(testing_config, caplog): recipe_dir = os.path.join(metadata_dir, "_extra_meta") - testing_config.extra_meta = {"foo": "bar"} + extra_meta_data = {"foo": "bar"} + testing_config.extra_meta = extra_meta_data outputs = api.build(recipe_dir, config=testing_config) about = json.loads(package_has_file(outputs[0], "info/about.json")) assert "foo" in about["extra"] and about["extra"]["foo"] == "bar" + assert ( + f"Adding the following extra-meta data to about.json: {extra_meta_data}" + in caplog.text + ) def test_symlink_dirs_in_always_include_files(testing_config): From e4cc6b00786239cb4ef2977bc40008130774b4c1 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 6 Jun 2023 01:57:32 -0500 Subject: [PATCH 137/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4909)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- RELEASE.md | 82 ++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 58 insertions(+), 24 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index ee0129400f..4f6199512e 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,22 +1,26 @@ + +[epic template]: https://github.com/conda/conda/issues/new?assignees=&labels=epic&template=epic.yml +[compare]: https://github.com/conda/infrastructure/compare +[new release]: 
https://github.com/conda/infrastructure/releases/new -[epic template]: {{ repo.url }}/issues/new?assignees=&labels=epic&template=epic.yml [infrastructure]: https://github.com/conda/infrastructure [rever docs]: https://regro.github.io/rever-docs -[compare]: {{ repo.url }}/compare -[new release]: {{ repo.url }}/releases/new [release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes +[merge conflicts]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts +[Anaconda Recipes]: https://github.com/AnacondaRecipes/conda-feedstock +[conda-forge]: https://github.com/conda-forge/conda-feedstock # Release Process > **Note** > Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. -## 1. Open the Release Issue. (do this ~1 week prior to release) +## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) > **Note** -> The [epic template][epic template] is perfect for this, just remember to remove the {{ repo.url }}/labels/epic label. +> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access. @@ -40,7 +44,6 @@ Placeholder for `{{ repo.name }} YY.M.0` release. [main]: https://github.com/AnacondaRecipes/{{ repo.name }}-feedstock [conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock [ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/ -[announcement]: https://github.com/conda/communications #### The week before release week @@ -61,21 +64,31 @@ Placeholder for `{{ repo.name }} YY.M.0` release. - [ ] Hand off to the Anaconda packaging team - [ ] Announce release - - [ ] Create release [announcement draft][announcement] - - [ ] Discourse - - [ ] Twitter - - [ ] Matrix + - Blog Post (optional) + - [ ] conda.org (link to pull request) + - Long form + - [ ] Create release [announcement draft](https://github.com/conda/communications) + - [ ] [Discourse](https://conda.discourse.group/) + - [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse) + - Summary + - [ ] [Twitter](https://twitter.com/condaproject) ``` -
-## 2. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. +> **Note** +> The new release branch should adhere to the naming convention of `YY.M.x`. + +## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) + +Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue). + +## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. These are synced from [`conda/infrastructure`][infrastructure].
-

3. Run Rever. (ideally done on the Monday of release week)

+

4. Run rever. (ideally done on the Monday of release week)

Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. @@ -104,7 +117,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Create a versioned branch, this is where rever will make its changes: ```bash - (rever) $ git checkout -b release-YY.M.0 + (rever) $ git checkout -b changelog-YY.M.0 ``` 2. Run `rever --activities authors`: @@ -291,7 +304,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 8. Push this versioned branch. ```bash - (rever) $ git push -u upstream release-YY.M.0 + (rever) $ git push -u upstream changelog-YY.M.0 ``` 9. Open the Release PR targing the `YY.M.x` branch. @@ -309,12 +322,12 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-10. Update Release Issue to include a link to the Release PR. +10. Update release issue to include a link to the release PR. 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: > **Note** - > Only publish the release after the Release PR is merged, until then always **save as draft**. + > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | |---|---| @@ -324,15 +337,36 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut -## 4. Wait for review and approval of Release PR. +## 5. Wait for review and approval of release PR. + +## 6. Merge release PR and publish release. + +## 7. Merge/cherry pick the release branch over to the `main` branch. + +
+Internal process -## 5. Merge Release PR and Publish Release. +1. From the main "< > Code" page of the repository, select the drop down menu next to the `main` branch button and then select "View all branches" at the very bottom. -## 6. Merge/cherry pick the release branch over to the `main` branch. +2. Find the applicable `YY.MM.x` branch and click the "New pull request" button. + +3. "Base" should point to `main` while "Compare" should point to `YY.MM.x`. + +4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". + +> **Note** +> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.0` and `main` branches. + +5. Review and merge the pull request the same as any code change pull request. + +> **Note** +> The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action. + +
-## 7. Open PRs to bump main and conda-forge feedstocks to use `YY.M.0`. +## 8. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. -## 8. Hand off to Anaconda's packaging team. +## 9. Hand off to Anaconda's packaging team.
Internal process @@ -343,6 +377,6 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 9. Continue championing and shepherding. +## 10. Continue championing and shepherding. -Remember to continue updating the Release Issue with the latest details as tasks are completed. +Remember to make all relevant announcements and continue to update the release issue with the latest details as tasks are completed. From ef12b932eaa7200c975ffdcb08789a5829ad94e0 Mon Sep 17 00:00:00 2001 From: Kai Tietz <47363620+katietz@users.noreply.github.com> Date: Tue, 6 Jun 2023 09:56:05 +0200 Subject: [PATCH 138/366] Make sure we don't use binary on EmptyType with lief (#4900) Co-authored-by: Jannis Leidel --- conda_build/os_utils/liefldd.py | 12 ++++++++++-- news/4787-fix-leaf | 19 +++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 news/4787-fix-leaf diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 32c4f8cd40..a4739c84d0 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -232,6 +232,8 @@ def get_runpaths_or_rpaths_raw(file): def set_rpath(old_matching, new_rpath, file): binary = ensure_binary(file) + if not binary: + return if binary.format == lief.EXE_FORMATS.ELF and ( binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64 @@ -343,7 +345,9 @@ def _get_path_dirs(prefix): def get_uniqueness_key(file): binary = ensure_binary(file) - if binary.format == lief.EXE_FORMATS.MACHO: + if not binary: + return lief.EXE_FORMATS.UNKNOWN + elif binary.format == lief.EXE_FORMATS.MACHO: return binary.name elif binary.format == lief.EXE_FORMATS.ELF and ( # noqa binary.type == lief.ELF.ELF_CLASS.CLASS32 @@ -463,7 +467,9 @@ def inspect_linkages_lief( sysroot = _trim_sysroot(sysroot) default_paths = [] - if binary.format == lief.EXE_FORMATS.ELF: + if not binary: + default_paths = [] + elif binary.format == lief.EXE_FORMATS.ELF: if binary.type == lief.ELF.ELF_CLASS.CLASS64: default_paths = [ "$SYSROOT/lib64", @@ -491,6 +497,8 @@ def inspect_linkages_lief( filename2 = element[0] binary = element[1] uniqueness_key = get_uniqueness_key(binary) + if not binary: + continue if uniqueness_key not in already_seen: parent_exe_dirname = None if binary.format == lief.EXE_FORMATS.PE: diff --git a/news/4787-fix-leaf b/news/4787-fix-leaf new file mode 100644 index 0000000000..00f7f49efe --- /dev/null +++ b/news/4787-fix-leaf @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fixed handling of unknown binaries with newer (py)lief versions. 
(#4900) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From b7a5252dcfb44ae1239ad898d809f58f6162b0ec Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 15:49:03 -0500 Subject: [PATCH 139/366] [pre-commit.ci] pre-commit autoupdate (#4910) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.4.0 → v3.6.0](https://github.com/asottile/pyupgrade/compare/v3.4.0...v3.6.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6f03884ffe..48a0d33260 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.4.0 + rev: v3.6.0 hooks: # upgrade standard Python codes - id: pyupgrade From db8b17999e320f6bf1efa2fce4d174d140a98d78 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 13 Jun 2023 08:23:37 -0500 Subject: [PATCH 140/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4913)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/stale.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 66a49f732e..dbf6255cdd 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -69,8 +69,6 @@ jobs: stale-pr-label: stale # Label to apply on closed PRs close-pr-label: stale::closed - # Reason to use when closing PRs - close-pr-reason: not_planned # Remove stale label from issues/PRs on updates/comments remove-stale-when-updated: true From c1842830cf4dc8620ed96670c9d71ce19d9725ce Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 13 Jun 2023 15:46:18 +0200 Subject: [PATCH 141/366] Ensure `build` and `host` install the same OpenSSL (#4912) --- news/4912-fix-resolved_packages-test-failure | 19 +++++++++++++++++++ .../_resolved_packages_host_build/meta.yaml | 2 ++ 2 files changed, 21 insertions(+) create mode 100644 news/4912-fix-resolved_packages-test-failure diff --git a/news/4912-fix-resolved_packages-test-failure b/news/4912-fix-resolved_packages-test-failure new file mode 100644 index 0000000000..1708353057 --- /dev/null +++ b/news/4912-fix-resolved_packages-test-failure @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Fix failing `resolved_packages` test due to recent OpenSSL 3.0.8 release to defaults. 
(#4912) diff --git a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml index 7619b42085..663d173590 100644 --- a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml +++ b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml @@ -6,8 +6,10 @@ requirements: build: - numpy - nomkl # [unix] + - openssl host: - curl + - {{ pin_compatible('openssl', exact=True) }} run: {% for package in resolved_packages('build') %} - {{ package }} From dfeee27b63ffd27a714f3a8747ddf2f6b3e50b53 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 14 Jun 2023 18:27:26 +0200 Subject: [PATCH 142/366] Add `protocol.file.allow=always` to `git submodule update` (#4914) --- conda_build/source.py | 10 +++++++- ...-fix-test_relative_git_url_submodule_clone | 19 ++++++++++++++ tests/test_api_build.py | 25 +++++++++++++++++-- 3 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 news/4914-fix-test_relative_git_url_submodule_clone diff --git a/conda_build/source.py b/conda_build/source.py index dc90054744..c8d21a4c2e 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -400,7 +400,15 @@ def git_mirror_checkout_recursive( # Now that all relative-URL-specified submodules are locally mirrored to # relatively the same place we can go ahead and checkout the submodules. check_call_env( - [git, "submodule", "update", "--init", "--recursive"], + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "update", + "--init", + "--recursive", + ], cwd=checkout_dir, stdout=stdout, stderr=stderr, diff --git a/news/4914-fix-test_relative_git_url_submodule_clone b/news/4914-fix-test_relative_git_url_submodule_clone new file mode 100644 index 0000000000..b45398ccdc --- /dev/null +++ b/news/4914-fix-test_relative_git_url_submodule_clone @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix git cloning for repositories with submodules containing local relative paths. (#4914) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 77e55856c3..e0c786dcc8 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -684,6 +684,8 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke check_call_env( [ git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), "submodule", "add", convert_path_for_cygwin_or_msys2(git, absolute_sub), @@ -692,14 +694,33 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke env=sys_git_env, ) check_call_env( - [git, "submodule", "add", "../relative_sub", "relative"], + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "add", + "../relative_sub", + "relative", + ], env=sys_git_env, ) else: # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we # can change this to `git submodule update --recursive`. 
gits = git.replace("\\", "/") - check_call_env([git, "submodule", "foreach", gits, "pull"], env=sys_git_env) + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "foreach", + gits, + "pull", + ], + env=sys_git_env, + ) check_call_env( [git, "commit", "-am", f"added submodules@{tag}"], env=sys_git_env ) From 67f54833634dab4490cf4f72c483f9a300001ae6 Mon Sep 17 00:00:00 2001 From: Jose Diaz-Gonzalez Date: Wed, 14 Jun 2023 16:02:40 -0400 Subject: [PATCH 143/366] Drop duplicate get_summary() call (#3998) --- conda_build/skeletons/pypi.py | 1 - news/3998-drop-duplicate-get_summary-call | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 news/3998-drop-duplicate-get_summary-call diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 9ebcb4aa13..9693d41933 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -866,7 +866,6 @@ def get_package_metadata( if not metadata.get("summary"): metadata["summary"] = get_summary(pkginfo) - metadata["summary"] = get_summary(pkginfo) license_name = get_license_name(package, pkginfo, no_prompt, data) metadata["license"] = clean_license_name(license_name) diff --git a/news/3998-drop-duplicate-get_summary-call b/news/3998-drop-duplicate-get_summary-call new file mode 100644 index 0000000000..55514dd66c --- /dev/null +++ b/news/3998-drop-duplicate-get_summary-call @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Drop duplicate `get_summary` call in `conda_build.skeletons.pypi`. (#3998) From a84dd6ca7fa7fd3cdb8b34cb01887e69ba78f8b0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 15 Jun 2023 10:01:09 +0200 Subject: [PATCH 144/366] Clone code to detect code changes for non-PR events (#4915) --- .github/workflows/tests.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 92bbc10830..e1e510beca 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -41,9 +41,9 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - uses: actions/checkout@v3 - # dorny/paths-filter needs git clone for push events + # dorny/paths-filter needs git clone for non-PR events # https://github.com/marketplace/actions/paths-changes-filter#supported-workflows - if: github.event_name == 'push' + if: github.event_name != 'pull_request' - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 id: filter with: @@ -59,7 +59,7 @@ jobs: linux: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: ubuntu-latest defaults: @@ -167,7 +167,7 @@ jobs: windows: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: windows-2019 strategy: @@ -276,7 +276,7 @@ jobs: macos: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: macos-11 defaults: @@ -385,7 +385,7 @@ jobs: aggregate: # only aggregate test suite if there are code changes needs: [changes, linux, windows, macos] - if: always() && needs.changes.outputs.code == 'true' + if: always() && 
(github.event_name == 'schedule' || needs.changes.outputs.code == 'true') runs-on: ubuntu-latest steps: From de4a2e6aa0ae7ec05acced46269e90e386045561 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 20 Jun 2023 06:31:37 -0500 Subject: [PATCH 145/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4918)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index dbf6255cdd..7db8bc5261 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,7 +34,7 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v23.4.0 + - uses: conda/actions/read-yaml@v23.5.1 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From 3690323a9d65f876af0d1e3236d08776994655ac Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 20 Jun 2023 10:38:04 -0500 Subject: [PATCH 146/366] [pre-commit.ci] pre-commit autoupdate (#4917) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.6.0 → v3.7.0](https://github.com/asottile/pyupgrade/compare/v3.6.0...v3.7.0) - [github.com/asottile/blacken-docs: 1.13.0 → 1.14.0](https://github.com/asottile/blacken-docs/compare/1.13.0...1.14.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 48a0d33260..3eb0528e95 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.6.0 + rev: v3.7.0 hooks: # upgrade standard Python codes - id: pyupgrade @@ -63,7 +63,7 @@ repos: # auto format Python codes - id: black - repo: https://github.com/asottile/blacken-docs - rev: 1.13.0 + rev: 1.14.0 hooks: # auto format Python codes within docstrings - id: blacken-docs From cf281e0f56596b4c7fd189430c680fb31455cdcb Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 6 Jul 2023 20:08:02 +0200 Subject: [PATCH 147/366] Release candidate upload needs alpha prefix (#4927) --- .github/workflows/tests.yml | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e1e510beca..1de7375f0d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -455,11 +455,31 @@ jobs: clean: true fetch-depth: 0 + - name: Detect label + shell: python + run: | + from pathlib import Path + from re import match + + if "${{ github.ref_name }}" == "main": + # main branch commits are uploaded to the dev label + label = "dev" + elif "${{ github.ref_name }}".startswith("feature/"): + # feature branch commits are uploaded to a custom label + label = "${{ github.ref_name }}" + else: + # release branch commits are added to the rc label + # see https://github.com/conda/infrastructure/issues/760 + _, name = "${{ github.repository }}".split("/") + label = f"rc-{name}-${{ github.ref_name }}" 
+ + Path("${{ env.GITHUB_ENV }}").write_text(f"ANACONDA_ORG_LABEL={label}") + - name: Create and upload canary build uses: conda/actions/canary-release@v22.10.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} anaconda-org-channel: conda-canary - anaconda-org-label: ${{ github.ref_name == 'main' && 'dev' || github.ref_name }} + anaconda-org-label: ${{ env.ANACONDA_ORG_LABEL }} anaconda-org-token: ${{ secrets.ANACONDA_ORG_CONDA_CANARY_TOKEN }} From 31bc7f2958b28f4b4c7952b8f586a5080ae7553c Mon Sep 17 00:00:00 2001 From: Ryan Date: Mon, 10 Jul 2023 12:31:12 -0700 Subject: [PATCH 148/366] Issue/4633 (#4929) --- Makefile | 2 +- docs/source/resources/package-spec.rst | 20 ++++++++++++++++---- news/4633-update-conda-package-docs | 19 +++++++++++++++++++ 3 files changed, 36 insertions(+), 5 deletions(-) create mode 100644 news/4633-update-conda-package-docs diff --git a/Makefile b/Makefile index 649d38b766..2ad8565e7f 100644 --- a/Makefile +++ b/Makefile @@ -13,7 +13,7 @@ CONDA := $(shell which conda) # Setup env for documents env-docs: - conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) --yes + conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) pip --yes $(CONDA) run --name $(DOC_ENV_NAME) pip install -r ./docs/requirements.txt .PHONY: $(MAKECMDGOALS) diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index f3b0643cc2..d06e180043 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -6,14 +6,26 @@ Conda package specification :local: :depth: 1 -A conda package is a bzipped tar archive---.tar.bz2---that -contains: +A conda package is an archive file that contains: * Metadata under the ``info/`` directory. * A collection of files that are installed directly into an install prefix. -The format is identical across platforms and operating systems. +There are currently two formats of archives that are supported: + +.. list-table:: + :widths: 15 70 + + * - **Type** + - **Description** + + * - .tar.bz2 + - The original format of conda packages. Is the default output of conda-build. + * - .conda + - 2nd Gen. This is a more compact and thus faster. Can be outputed from conda-build by setting output in ``.condarc`` file. + +The formats are identical across platforms and operating systems. During the install process, all files are extracted into the install prefix, with the exception of the ones in ``info/``. Installing a conda package into an environment is similar to @@ -47,7 +59,7 @@ file is stored in ``repodata.json``, which is the repository index file, hence the name ``index.json``. The JSON object is a dictionary containing the keys shown below. The filename of the conda package is composed of the first 3 values, as in: -``--.tar.bz2``. +``--.tar.bz2`` or ``--.conda``. .. list-table:: :widths: 15 15 70 diff --git a/news/4633-update-conda-package-docs b/news/4633-update-conda-package-docs new file mode 100644 index 0000000000..23a2833090 --- /dev/null +++ b/news/4633-update-conda-package-docs @@ -0,0 +1,19 @@ +### Enhancements + +* Added pip to env-doc make command so function would work correctly (pip is no longer added by default with python conda package). + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Updated pkg-spec docs to mention .conda package format. 
+ +### Other + +* From 33d3d7f90869529913cca6de923cd2b27298b122 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 15:27:03 -0500 Subject: [PATCH 149/366] [pre-commit.ci] pre-commit autoupdate (#4925) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.7.0 → v3.9.0](https://github.com/asottile/pyupgrade/compare/v3.7.0...v3.9.0) - [github.com/asottile/blacken-docs: 1.14.0 → 1.15.0](https://github.com/asottile/blacken-docs/compare/1.14.0...1.15.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3eb0528e95..370d8fb9bd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.7.0 + rev: v3.9.0 hooks: # upgrade standard Python codes - id: pyupgrade @@ -63,7 +63,7 @@ repos: # auto format Python codes - id: black - repo: https://github.com/asottile/blacken-docs - rev: 1.14.0 + rev: 1.15.0 hooks: # auto format Python codes within docstrings - id: blacken-docs From 1b8c33b94862c4fe975d8635a6feb8fd16bfe459 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 12 Jul 2023 16:09:25 -0500 Subject: [PATCH 150/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4933)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- .github/workflows/stale.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index c7b933b6b9..24c7a8b967 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.5.1 + uses: conda/actions/check-cla@v23.7.0 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 7db8bc5261..1e9e46e754 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,7 +34,7 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v23.5.1 + - uses: conda/actions/read-yaml@v23.7.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From 19fb62c7e088170b547915a03a26f49b8be6bb07 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 14 Jul 2023 20:37:29 +0200 Subject: [PATCH 151/366] Install allure-pytest from conda-forge (#4923) --- .github/workflows/tests.yml | 3 --- tests/requirements.txt | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1de7375f0d..245272f834 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -116,7 +116,6 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-linux.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . 
- name: Show info @@ -222,7 +221,6 @@ jobs: --file .\tests\requirements.txt ` --file .\tests\requirements-windows.txt ` ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . - name: Show info @@ -334,7 +332,6 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-macos.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . - name: Show info diff --git a/tests/requirements.txt b/tests/requirements.txt index e8f64fcc10..1d23002d61 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -28,6 +28,7 @@ pytest-replay pytest-rerunfailures pytest-xdist python-libarchive-c +conda-forge::allure-pytest pytz requests ripgrep From b8c4b53c57b12559e25f765c3ad1d2466521a195 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Mon, 17 Jul 2023 15:03:05 +0200 Subject: [PATCH 152/366] avoid duplicate logging by preventing propagation from the conda_build logger to the root logger (#4903) * avoid duplicate logging by preventing propagation from the conda_build logger to the root logger * print level for warnings and errors * fix tests/test_utils.py::test_logger_filtering --- conda_build/utils.py | 9 ++++++++- news/4903-duplicate-logging | 19 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 news/4903-duplicate-logging diff --git a/conda_build/utils.py b/conda_build/utils.py index 085d15a6a0..49b95b7ca1 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1710,6 +1710,7 @@ def filter(self, record): dedupe_filter = DuplicateFilter() info_debug_stdout_filter = LessThanFilter(logging.WARNING) warning_error_stderr_filter = GreaterThanFilter(logging.INFO) +level_formatter = logging.Formatter("%(levelname)s: %(message)s") # set filelock's logger to only show warnings by default logging.getLogger("filelock").setLevel(logging.WARN) @@ -1746,11 +1747,17 @@ def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers log.addFilter(dedupe_filter) # these are defaults. They can be overridden by configuring a log config yaml file. - if not log.handlers and add_stdout_stderr_handlers: + top_pkg = name.split(".")[0] + if top_pkg == "conda_build": + # we don't want propagation in CLI, but we do want it in tests + # this is a pytest limitation: https://github.com/pytest-dev/pytest/issues/3697 + logging.getLogger(top_pkg).propagate = "PYTEST_CURRENT_TEST" in os.environ + if add_stdout_stderr_handlers and not log.handlers: stdout_handler = logging.StreamHandler(sys.stdout) stderr_handler = logging.StreamHandler(sys.stderr) stdout_handler.addFilter(info_debug_stdout_filter) stderr_handler.addFilter(warning_error_stderr_filter) + stderr_handler.setFormatter(level_formatter) stdout_handler.setLevel(level) stderr_handler.setLevel(level) log.addHandler(stdout_handler) diff --git a/news/4903-duplicate-logging b/news/4903-duplicate-logging new file mode 100644 index 0000000000..6ce91b269f --- /dev/null +++ b/news/4903-duplicate-logging @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Avoid duplicate logging by not propagating the top-level conda-build logger. 
(#4903) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 36ec2908f9fe0f9822aba048892dde1dcc8264db Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 17 Jul 2023 16:19:04 +0200 Subject: [PATCH 153/366] Convert conda-build entrypoints into plugins (#4921) --- conda_build/cli/main_build.py | 22 ++++----- conda_build/cli/main_convert.py | 7 ++- conda_build/cli/main_debug.py | 20 ++++---- conda_build/cli/main_develop.py | 7 ++- conda_build/cli/main_index.py | 4 +- conda_build/cli/main_inspect.py | 7 ++- conda_build/cli/main_metapackage.py | 11 ++--- conda_build/cli/main_render.py | 17 +++---- conda_build/cli/main_skeleton.py | 9 ++-- conda_build/plugin.py | 62 ++++++++++++++++++++++++ news/4921-convert-entrypoints-to-plugins | 20 ++++++++ pyproject.toml | 3 ++ recipe/meta.yaml | 2 +- 13 files changed, 142 insertions(+), 49 deletions(-) create mode 100644 conda_build/plugin.py create mode 100644 news/4921-convert-entrypoints-to-plugins diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index ca3bb8a3cf..999be96663 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -13,23 +13,18 @@ from conda.common.io import dashlist from glob2 import glob -import conda_build.api as api -import conda_build.build as build -import conda_build.source as source -import conda_build.utils as utils -from conda_build.cli.actions import KeyValueAction -from conda_build.cli.main_render import get_render_parser -from conda_build.conda_interface import ( - add_parser_channels, - binstar_upload, - cc_conda_build, -) -from conda_build.config import Config, get_channel_urls, zstd_compression_level_default -from conda_build.utils import LoggingContext +from .. import api, build, source, utils +from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build +from ..config import Config, get_channel_urls, zstd_compression_level_default +from ..deprecations import deprecated +from ..utils import LoggingContext +from .actions import KeyValueAction +from .main_render import get_render_parser def parse_args(args): p = get_render_parser() + p.prog = "conda build" p.description = dals( """ Tool for building conda packages. A conda package is a binary tarball @@ -588,6 +583,7 @@ def execute(args): return outputs +@deprecated("3.26.0", "4.0.0", addendum="Use `conda build` instead.") def main(): try: execute(sys.argv[1:]) diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 43006a1e3b..4c09cfc1da 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -4,8 +4,9 @@ import sys from os.path import abspath, expanduser -from conda_build import api -from conda_build.conda_interface import ArgumentParser +from .. import api +from ..conda_interface import ArgumentParser +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) @@ -36,6 +37,7 @@ def parse_args(args): p = ArgumentParser( + prog="conda convert", description=""" Various tools to convert conda packages. 
Takes a pure Python package build for one platform and converts it to work on one or more other platforms, or @@ -127,5 +129,6 @@ def execute(args): api.convert(f, **args.__dict__) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda convert` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 702fbfc798..00c6eeb230 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -4,13 +4,11 @@ import sys from argparse import ArgumentParser -from conda_build import api -from conda_build.cli import validators as valid - -# we extend the render parser because we basically need to render the recipe before -# we can say what env to create. This is not really true for debugging tests, but meh... -from conda_build.cli.main_render import get_render_parser -from conda_build.utils import on_win +from .. import api +from ..deprecations import deprecated +from ..utils import on_win +from . import validators as valid +from .main_render import get_render_parser logging.basicConfig(level=logging.INFO) @@ -18,6 +16,7 @@ def get_parser() -> ArgumentParser: """Returns a parser object for this command""" p = get_render_parser() + p.prog = "conda debug" p.description = """ Set up environments and activation scripts to debug your build or test phase. @@ -87,9 +86,9 @@ def get_parser() -> ArgumentParser: return p -def execute(): +def execute(args): parser = get_parser() - args = parser.parse_args() + args = parser.parse_args(args) try: activation_string = api.debug( @@ -119,5 +118,6 @@ def execute(): sys.exit(1) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda debug` instead.") def main(): - return execute() + return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index ec33555748..a7a202e5ff 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -5,14 +5,16 @@ from conda.base.context import context, determine_target_prefix -from conda_build import api -from conda_build.conda_interface import ArgumentParser, add_parser_prefix +from .. import api +from ..conda_interface import ArgumentParser, add_parser_prefix +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda develop", description=""" Install a Python package in 'development mode'. @@ -86,5 +88,6 @@ def execute(args): ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda develop` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index 1f596b5d37..b0eefa8aa7 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -16,8 +16,9 @@ def parse_args(args): p = ArgumentParser( + prog="conda index", description="Update package index metadata files in given directories. " - "Pending deprecated, please use the standalone conda-index project." 
+ "Pending deprecated, please use the standalone conda-index project.", ) p.add_argument( @@ -120,5 +121,6 @@ def execute(args): ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda index` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 472bd2f3af..79e0594a4f 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -7,14 +7,16 @@ from conda.base.context import context, determine_target_prefix -from conda_build import api -from conda_build.conda_interface import ArgumentParser, add_parser_prefix +from .. import api +from ..conda_interface import ArgumentParser, add_parser_prefix +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda inspect", description="Tools for inspecting conda packages.", epilog=""" Run --help on the subcommands like 'conda inspect linkages --help' to see the @@ -220,5 +222,6 @@ def execute(args): raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") +@deprecated("3.26.0", "4.0.0", addendum="Use `conda inspect` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 7657432fdc..71be2e7d3d 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -4,18 +4,16 @@ import logging import sys -from conda_build import api -from conda_build.conda_interface import ( - ArgumentParser, - add_parser_channels, - binstar_upload, -) +from .. import api +from ..conda_interface import ArgumentParser, add_parser_channels, binstar_upload +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda metapackage", description=""" Tool for building conda metapackages. A metapackage is a package with no files, only metadata. They are typically used to collect several packages @@ -114,5 +112,6 @@ def execute(args): api.create_metapackage(channel_urls=channel_urls, **args.__dict__) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda metapackage` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 15cb3bdc4b..a563e87c1b 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -8,15 +8,12 @@ import yaml from yaml.parser import ParserError -from conda_build import __version__, api -from conda_build.conda_interface import ( - ArgumentParser, - add_parser_channels, - cc_conda_build, -) -from conda_build.config import get_channel_urls, get_or_merge_config -from conda_build.utils import LoggingContext -from conda_build.variants import get_package_variants, set_language_env_vars +from .. import __version__, api +from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build +from ..config import get_channel_urls, get_or_merge_config +from ..deprecations import deprecated +from ..utils import LoggingContext +from ..variants import get_package_variants, set_language_env_vars on_win = sys.platform == "win32" log = logging.getLogger(__name__) @@ -44,6 +41,7 @@ def __call__(self, parser, namespace, values, option_string=None): def get_render_parser(): p = ArgumentParser( + prog="conda render", description=""" Tool for expanding the template meta.yml file (containing Jinja syntax and selectors) into the rendered meta.yml files. 
The template meta.yml file is @@ -245,6 +243,7 @@ def execute(args, print_results=True): return metadata_tuples +@deprecated("3.26.0", "4.0.0", addendum="Use `conda render` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 7642bc14da..7bb9e3369f 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -6,9 +6,10 @@ import pkgutil import sys -import conda_build.api as api -from conda_build.conda_interface import ArgumentParser -from conda_build.config import Config +from .. import api +from ..conda_interface import ArgumentParser +from ..config import Config +from ..deprecations import deprecated thisdir = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.INFO) @@ -16,6 +17,7 @@ def parse_args(args): p = ArgumentParser( + prog="conda skeleton", description=""" Generates a boilerplate/skeleton recipe, which you can then edit to create a full recipe. Some simple skeleton recipes may not even need edits. @@ -60,6 +62,7 @@ def execute(args): ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda skeleton` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/plugin.py b/conda_build/plugin.py new file mode 100644 index 0000000000..03a3949f44 --- /dev/null +++ b/conda_build/plugin.py @@ -0,0 +1,62 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import conda.plugins + +from .cli.main_build import execute as build +from .cli.main_convert import execute as convert +from .cli.main_debug import execute as debug +from .cli.main_develop import execute as develop +from .cli.main_index import execute as index +from .cli.main_inspect import execute as inspect +from .cli.main_metapackage import execute as metapackage +from .cli.main_render import execute as render +from .cli.main_skeleton import execute as skeleton + + +@conda.plugins.hookimpl +def conda_subcommands(): + yield conda.plugins.CondaSubcommand( + name="build", + summary="Build conda packages from a conda recipe.", + action=build, + ) + yield conda.plugins.CondaSubcommand( + name="convert", + summary="Convert pure Python packages to other platforms (a.k.a., subdirs).", + action=convert, + ) + yield conda.plugins.CondaSubcommand( + name="debug", + summary="Debug the build or test phases of conda recipes.", + action=debug, + ) + yield conda.plugins.CondaSubcommand( + name="develop", + summary="Install a Python package in 'development mode'. Similar to `pip install --editable`.", + action=develop, + ) + yield conda.plugins.CondaSubcommand( + name="index", + summary="Update package index metadata files. 
Pending deprecation, use https://github.com/conda/conda-index instead.", + action=index, + ) + yield conda.plugins.CondaSubcommand( + name="inspect", + summary="Tools for inspecting conda packages.", + action=inspect, + ) + yield conda.plugins.CondaSubcommand( + name="metapackage", + summary="Specialty tool for generating conda metapackage.", + action=metapackage, + ) + yield conda.plugins.CondaSubcommand( + name="render", + summary="Expand a conda recipe into a platform-specific recipe.", + action=render, + ) + yield conda.plugins.CondaSubcommand( + name="skeleton", + summary="Generate boilerplate conda recipes.", + action=skeleton, + ) diff --git a/news/4921-convert-entrypoints-to-plugins b/news/4921-convert-entrypoints-to-plugins new file mode 100644 index 0000000000..f2b9a78563 --- /dev/null +++ b/news/4921-convert-entrypoints-to-plugins @@ -0,0 +1,20 @@ +### Enhancements + +* Implement subcommands as conda plugins. (#4921) + +### Bug fixes + +* + +### Deprecations + +* Mark executable invocations (e.g., `conda-build`) as pending deprecation. (#4921) +* Mark module based invocations (e.g., `python -m conda_build.cli.main_build`) as pending deprecation. (#4921) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 33b00c3a1b..9fb3b5f222 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,9 @@ conda-debug = "conda_build.cli.main_debug:main" [project.entry-points."distutils.commands"] bdist_conda = "conda_build.bdist_conda:bdist_conda" +[project.entry-points.conda] +conda-build = "conda_build.plugin" + [tool.hatch.version] source = "vcs" diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 850b742901..bc307a23a4 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -31,7 +31,7 @@ requirements: run: - beautifulsoup4 - chardet - - conda >=4.13 + - conda >=22.11.0 - conda-index - conda-package-handling >=1.3 - filelock From 069efd54918d395f3686c6dc5a1c4193952f3a40 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 17 Jul 2023 20:42:33 -0400 Subject: [PATCH 154/366] Update tests workflow (#4936) --- .github/workflows/tests.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 245272f834..47b744cdd9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -452,11 +452,17 @@ jobs: clean: true fetch-depth: 0 + # Explicitly use Python 3.11 since each of the OSes has a different default Python + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + - name: Detect label shell: python run: | from pathlib import Path from re import match + from os import environ if "${{ github.ref_name }}" == "main": # main branch commits are uploaded to the dev label @@ -470,10 +476,10 @@ jobs: _, name = "${{ github.repository }}".split("/") label = f"rc-{name}-${{ github.ref_name }}" - Path("${{ env.GITHUB_ENV }}").write_text(f"ANACONDA_ORG_LABEL={label}") + Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create and upload canary build - uses: conda/actions/canary-release@v22.10.0 + uses: conda/actions/canary-release@v23.7.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} From 02acf153dbfd7da865b52ca1a8979fffe5643cd4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 22:03:03 -0500 Subject: [PATCH 155/366] [pre-commit.ci] pre-commit autoupdate (#4935) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.3.0 → 23.7.0](https://github.com/psf/black/compare/23.3.0...23.7.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 370d8fb9bd..3c6db02111 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto sort Python imports - id: isort - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.7.0 hooks: # auto format Python codes - id: black From 25c695ec6ebaa758b41df3d20d8c949797f45088 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 24 Jul 2023 18:44:53 +0200 Subject: [PATCH 156/366] Replace number stringification hack with custom YAML loader (#4183) * Custom yaml Loader Instead of monkeypatching the yaml Loader in an attempt to avoid parsing numbers make a custom Loader where the float/int tags are entirely removed. * Test that all int/floats are parsed as str --- conda_build/metadata.py | 55 +++++++++++++++++++--------------- tests/test_metadata.py | 65 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 96 insertions(+), 24 deletions(-) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index d158af6223..33c3230573 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import contextlib import copy import hashlib import json @@ -41,9 +40,37 @@ ) try: - loader = yaml.CLoader -except: - loader = yaml.Loader + Loader = yaml.CLoader +except AttributeError: + Loader = yaml.Loader + + +class StringifyNumbersLoader(Loader): + @classmethod + def remove_implicit_resolver(cls, tag): + if "yaml_implicit_resolvers" not in cls.__dict__: + cls.yaml_implicit_resolvers = { + k: v[:] for k, v in cls.yaml_implicit_resolvers.items() + } + for ch in tuple(cls.yaml_implicit_resolvers): + resolvers = [(t, r) for t, r in cls.yaml_implicit_resolvers[ch] if t != tag] + if resolvers: + cls.yaml_implicit_resolvers[ch] = resolvers + else: + del cls.yaml_implicit_resolvers[ch] + + @classmethod + def remove_constructor(cls, tag): + if "yaml_constructors" not in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + if tag in cls.yaml_constructors: + del cls.yaml_constructors[tag] + + +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:int") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:int") on_win = sys.platform == "win32" @@ -261,9 +288,7 @@ def select_lines(data, namespace, variants_in_place): def yamlize(data): try: - with stringify_numbers(): - loaded_data = yaml.load(data, Loader=loader) - return loaded_data + return yaml.load(data, Loader=StringifyNumbersLoader) except yaml.error.YAMLError as e: if "{{" in data: try: @@ -1056,23 +1081,7 @@ def _hash_dependencies(hashing_dependencies, hash_length): return f"h{hash_.hexdigest()}"[: hash_length + 1] -@contextlib.contextmanager -def stringify_numbers(): - # ensure that numbers are not interpreted as ints or floats. That trips up versions - # with trailing zeros. 
- implicit_resolver_backup = loader.yaml_implicit_resolvers.copy() - for ch in list("0123456789"): - if ch in loader.yaml_implicit_resolvers: - del loader.yaml_implicit_resolvers[ch] - yield - for ch in list("0123456789"): - if ch in implicit_resolver_backup: - loader.yaml_implicit_resolvers[ch] = implicit_resolver_backup[ch] - - class MetaData: - __hash__ = None # declare as non-hashable to avoid its use with memoization - def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] self.config = get_or_merge_config(config, variant=variant) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 99545c50c9..b5a696ff6f 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -8,7 +8,7 @@ import pytest from conda_build import api -from conda_build.metadata import MetaData, _hash_dependencies, select_lines +from conda_build.metadata import MetaData, _hash_dependencies, select_lines, yamlize from conda_build.utils import DEFAULT_SUBDIRS from .utils import metadata_dir, thisdir @@ -260,3 +260,66 @@ def test_config_member_decoupling(testing_metadata): b = testing_metadata.copy() b.config.some_member = "123" assert b.config.some_member != testing_metadata.config.some_member + + +# ensure that numbers are not interpreted as ints or floats, doing so trips up versions +# with trailing zeros +def test_yamlize_zero(): + yml = yamlize( + """ + - 0 + - 0. + - 0.0 + - .0 + """ + ) + + assert yml == ["0", "0.", "0.0", ".0"] + + +def test_yamlize_positive(): + yml = yamlize( + """ + - +1 + - +1. + - +1.2 + - +.2 + """ + ) + + assert yml == ["+1", "+1.", "+1.2", "+.2"] + + +def test_yamlize_negative(): + yml = yamlize( + """ + - -1 + - -1. + - -1.2 + - -.2 + """ + ) + + assert yml == ["-1", "-1.", "-1.2", "-.2"] + + +def test_yamlize_numbers(): + yml = yamlize( + """ + - 1 + - 1.2 + """ + ) + + assert yml == ["1", "1.2"] + + +def test_yamlize_versions(): + yml = yamlize( + """ + - 1.2.3 + - 1.2.3.4 + """ + ) + + assert yml == ["1.2.3", "1.2.3.4"] From ef382e4e819a7d622c798e80e70989e855643a9b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 24 Jul 2023 18:46:03 +0200 Subject: [PATCH 157/366] Add conjunction to comma_join & fix islist (#4184) * Add ability to change the conjunction used by comma_join. * Fix islist bug which caused it to incorrectly treat classes with __iter__ as lists (e.g. islist(dict)). 
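For illustration, a rough sketch of the intended behavior, assuming `comma_join` and
`islist` are imported from `conda_build.utils` as in the diff below:

    from conda_build.utils import comma_join, islist

    # comma_join() gains a `conjunction` keyword; the default remains "and".
    assert comma_join(["a", "b", "c"]) == "a, b, and c"
    assert comma_join(["a", "b", "c"], conjunction="or") == "a, b, or c"

    # islist() now checks isinstance(arg, Iterable) instead of hasattr(arg, "__iter__"),
    # so a class object such as `dict` is no longer mistaken for a list.
    assert islist(["a", "b"])
    assert not islist(dict)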
--- conda_build/utils.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/conda_build/utils.py b/conda_build/utils.py index 49b95b7ca1..60f3cec3d3 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -34,6 +34,7 @@ ) from pathlib import Path from threading import Thread +from typing import Iterable import libarchive @@ -941,7 +942,7 @@ def file_info(path): } -def comma_join(items): +def comma_join(items: Iterable[str], conjunction: str = "and") -> str: """ Like ', '.join(items) but with and @@ -954,11 +955,10 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ( - " and ".join(items) - if len(items) <= 2 - else ", ".join(items[:-1]) + ", and " + items[-1] - ) + items = tuple(items) + if len(items) <= 2: + return f"{items[0]} {conjunction} {items[1]}" + return f"{', '.join(items[:-1])}, {conjunction} {items[-1]}" def safe_print_unicode(*args, **kwargs): @@ -1268,7 +1268,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, "__iter__"): + if isinstance(arg, str) or not isinstance(arg, Iterable): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1279,6 +1279,7 @@ def islist(arg, uniform=False, include_dict=True): return True # NOTE: not checking for Falsy arg since arg may be a generator + # WARNING: if uniform != False and arg is a generator then arg will be consumed if uniform is True: arg = iter(arg) From b8a8730df5ea44ed3ba73ea551b3e598f15d250b Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Tue, 25 Jul 2023 11:08:06 -0400 Subject: [PATCH 158/366] Release 3.26.0 (#4937) --- .authors.yml | 32 +++++++----- .mailmap | 2 + AUTHORS.md | 2 + CHANGELOG.md | 49 +++++++++++++++++++ news/3998-drop-duplicate-get_summary-call | 19 ------- news/4633-update-conda-package-docs | 19 ------- news/4787-fix-leaf | 19 ------- .../4834-drop-extra-jinja-pkg-names-from-docs | 19 ------- news/4850-Fix-unintended-warnings | 5 -- news/4901-log-extra-meta-data | 19 ------- news/4903-duplicate-logging | 19 ------- news/4912-fix-resolved_packages-test-failure | 19 ------- ...-fix-test_relative_git_url_submodule_clone | 19 ------- news/4921-convert-entrypoints-to-plugins | 20 -------- 14 files changed, 74 insertions(+), 188 deletions(-) delete mode 100644 news/3998-drop-duplicate-get_summary-call delete mode 100644 news/4633-update-conda-package-docs delete mode 100644 news/4787-fix-leaf delete mode 100644 news/4834-drop-extra-jinja-pkg-names-from-docs delete mode 100644 news/4850-Fix-unintended-warnings delete mode 100644 news/4901-log-extra-meta-data delete mode 100644 news/4903-duplicate-logging delete mode 100644 news/4912-fix-resolved_packages-test-failure delete mode 100644 news/4914-fix-test_relative_git_url_submodule_clone delete mode 100644 news/4921-convert-entrypoints-to-plugins diff --git a/.authors.yml b/.authors.yml index dc41041bbf..3a4cc66964 100644 --- a/.authors.yml +++ b/.authors.yml @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 144 + num_commits: 146 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -1018,7 +1018,7 @@ github: theultimate1 - name: Kai Tietz email: ktietz@anaconda.com - num_commits: 8 + num_commits: 9 first_commit: 2019-04-04 02:38:29 github: katietz alternate_emails: @@ -1056,7 +1056,7 @@ github: spalmrot-tic - name: Daniel Bast email: 
2790401+dbast@users.noreply.github.com - num_commits: 15 + num_commits: 16 first_commit: 2019-06-07 02:44:13 github: dbast - name: Duncan Macleod @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 121 + num_commits: 128 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 41 + num_commits: 44 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 16 + num_commits: 20 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 56 + num_commits: 66 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1305,7 +1305,7 @@ - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 2 + num_commits: 3 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1322,7 +1322,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 2 + num_commits: 3 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1336,7 +1336,7 @@ first_commit: 2022-11-16 21:54:14 - name: Srivas Venkatesh email: 110486050+sven6002@users.noreply.github.com - num_commits: 1 + num_commits: 2 first_commit: 2022-12-14 19:50:36 github: sven6002 - name: Ernst Luring @@ -1369,7 +1369,7 @@ aliases: - Ryan github: ryanskeith - num_commits: 2 + num_commits: 3 first_commit: 2023-03-22 03:11:02 - name: Rishabh Singh email: 67859818+rishabh11336@users.noreply.github.com @@ -1383,3 +1383,13 @@ github: ffirmanff num_commits: 1 first_commit: 2023-04-14 11:54:03 +- name: Riadh Fezzani + email: rfezzani@gmail.com + github: rfezzani + num_commits: 1 + first_commit: 2023-05-23 13:46:49 +- name: Jose Diaz-Gonzalez + email: email@josediazgonzalez.com + github: josegonzalez + num_commits: 1 + first_commit: 2023-06-14 16:02:40 diff --git a/.mailmap b/.mailmap index f8fc95d1a0..30d67ba59b 100644 --- a/.mailmap +++ b/.mailmap @@ -130,6 +130,7 @@ John Kirkham John Kirkham John Omotani Johnny Jonathan J. Helmus Jonathan Helmus +Jose Diaz-Gonzalez Joseph Crail Joseph Hunkeler Juan Lasheras jlas @@ -204,6 +205,7 @@ Rachel Rigdon rrigdon <45607889+rrigdon@users.noreply.git Rachel Rigdon rrigdon Ray Donnelly Remi Chateauneu +Riadh Fezzani Riccardo Vianello Richard Frank Richard Hattersley diff --git a/AUTHORS.md b/AUTHORS.md index 10fce92d1f..23a0911835 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -109,6 +109,7 @@ Authors are sorted alphabetically. * John Omotani * Johnny * Jonathan J. Helmus +* Jose Diaz-Gonzalez * Joseph Crail * Joseph Hunkeler * Juan Lasheras @@ -169,6 +170,7 @@ Authors are sorted alphabetically. 
* Rachel Rigdon * Ray Donnelly * Remi Chateauneu +* Riadh Fezzani * Riccardo Vianello * Richard Frank * Richard Hattersley diff --git a/CHANGELOG.md b/CHANGELOG.md index 6507e65c91..ac91b42cbb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,54 @@ [//]: # (current developments) +## 3.26.0 (2023-07-18) + +### Enhancements + +* Add `pip` to `env-doc make` command so function works correctly (`pip` is no longer added by default with the python conda package). (#4633) +* Log extra-meta data to make it easier to verify that the right extra-meta data is burned into packages (also helps to co-relate packages and their build-log). The feature was first introduced in #4303 and is now improved via the logging call. (#4901) +* Implement subcommands as conda plugins. (#4921) + +### Bug fixes + +* Fix handling of unknown binaries with newer `(py)lief` versions. (#4900) +* Disable `LIEF` logging to remove "Unknown format" warning message. (#4850) +* Revert `enable_static` default value in `conda_build.config` to remove "Failed to get_static_lib_exports" warning messages. (#4850) +* Avoid duplicate logging by not propagating the top-level conda-build logger. (#4903) +* Fix git cloning for repositories with submodules containing local relative paths. (#4914) + +### Deprecations + +* Mark executable invocations (e.g., `conda-build`) as pending deprecation. (#4921) +* Mark module based invocations (e.g., `python -m conda_build.cli.main_build`) as pending deprecation. (#4921) + +### Docs + +* Update `pkg-spec` docs to mention `.conda` package format. (#4633) +* Drop unnecessary Jinja package name variables from `variants.rst` docs file. (#4834) + +### Other + +* Drop duplicate `get_summary` call in `conda_build.skeletons.pypi`. (#3998) +* Fix failing `resolved_packages` test due to recent OpenSSL 3.0.8 release to defaults. (#4912) + +### Contributors + +* @beeankha +* @conda-bot +* @dbast +* @jaimergp +* @jakirkham +* @josegonzalez made their first contribution in https://github.com/conda/conda-build/pull/3998 +* @katietz +* @kenodegard +* @rfezzani made their first contribution in https://github.com/conda/conda-build/pull/4850 +* @ryanskeith +* @sven6002 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + ## 3.25.0 (2023-05-22) ### Enhancements diff --git a/news/3998-drop-duplicate-get_summary-call b/news/3998-drop-duplicate-get_summary-call deleted file mode 100644 index 55514dd66c..0000000000 --- a/news/3998-drop-duplicate-get_summary-call +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Drop duplicate `get_summary` call in `conda_build.skeletons.pypi`. (#3998) diff --git a/news/4633-update-conda-package-docs b/news/4633-update-conda-package-docs deleted file mode 100644 index 23a2833090..0000000000 --- a/news/4633-update-conda-package-docs +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Added pip to env-doc make command so function would work correctly (pip is no longer added by default with python conda package). - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Updated pkg-spec docs to mention .conda package format. - -### Other - -* diff --git a/news/4787-fix-leaf b/news/4787-fix-leaf deleted file mode 100644 index 00f7f49efe..0000000000 --- a/news/4787-fix-leaf +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fixed handling of unknown binaries with newer (py)lief versions. 
(#4900) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4834-drop-extra-jinja-pkg-names-from-docs b/news/4834-drop-extra-jinja-pkg-names-from-docs deleted file mode 100644 index dfa93c007e..0000000000 --- a/news/4834-drop-extra-jinja-pkg-names-from-docs +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Package name variables in Jinja are not necessary, dropping this from variants.rst docs file. (#4834) - -### Other - -* diff --git a/news/4850-Fix-unintended-warnings b/news/4850-Fix-unintended-warnings deleted file mode 100644 index f416781542..0000000000 --- a/news/4850-Fix-unintended-warnings +++ /dev/null @@ -1,5 +0,0 @@ -### Bug fixes - -* Disabling LIEF logging to remove "Unknown format" warning message. -* Revert `enable_static` default value in `conda_build.config` to remove - "Failed to get_static_lib_exports" warning messages. diff --git a/news/4901-log-extra-meta-data b/news/4901-log-extra-meta-data deleted file mode 100644 index 5f6325a4c3..0000000000 --- a/news/4901-log-extra-meta-data +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Log extra-meta data to make it easier to verify that the right extra-meta data is burned into packages (also helps to co-relate packages and their build-log). The feature was first introduced in #4303 and is now improved via the logging call. (#4901) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4903-duplicate-logging b/news/4903-duplicate-logging deleted file mode 100644 index 6ce91b269f..0000000000 --- a/news/4903-duplicate-logging +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Avoid duplicate logging by not propagating the top-level conda-build logger. (#4903) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4912-fix-resolved_packages-test-failure b/news/4912-fix-resolved_packages-test-failure deleted file mode 100644 index 1708353057..0000000000 --- a/news/4912-fix-resolved_packages-test-failure +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Fix failing `resolved_packages` test due to recent OpenSSL 3.0.8 release to defaults. (#4912) diff --git a/news/4914-fix-test_relative_git_url_submodule_clone b/news/4914-fix-test_relative_git_url_submodule_clone deleted file mode 100644 index b45398ccdc..0000000000 --- a/news/4914-fix-test_relative_git_url_submodule_clone +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix git cloning for repositories with submodules containing local relative paths. (#4914) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4921-convert-entrypoints-to-plugins b/news/4921-convert-entrypoints-to-plugins deleted file mode 100644 index f2b9a78563..0000000000 --- a/news/4921-convert-entrypoints-to-plugins +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* Implement subcommands as conda plugins. (#4921) - -### Bug fixes - -* - -### Deprecations - -* Mark executable invocations (e.g., `conda-build`) as pending deprecation. (#4921) -* Mark module based invocations (e.g., `python -m conda_build.cli.main_build`) as pending deprecation. 
(#4921) - -### Docs - -* - -### Other - -* From 01ee210a10bf1ca552662353a41b52bd1be90f9f Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Thu, 27 Jul 2023 17:50:49 +0200 Subject: [PATCH 159/366] [3.26.x] Build ePub, PDF and zipped HTML docs as well. (#4946) --- .readthedocs.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 72ad6563db..a452d9dedf 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,3 +4,9 @@ python: version: "3" install: - requirements: docs/requirements.txt + +# Build PDF, ePub and zipped HTML +formats: + - epub + - pdf + - htmlzip From f59249b21724e93557d1140957e565ee2d74e799 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Thu, 27 Jul 2023 17:51:00 +0200 Subject: [PATCH 160/366] Build ePub, PDF and zipped HTML docs as well. (#4945) --- .readthedocs.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 72ad6563db..a452d9dedf 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,3 +4,9 @@ python: version: "3" install: - requirements: docs/requirements.txt + +# Build PDF, ePub and zipped HTML +formats: + - epub + - pdf + - htmlzip From c428538f32cf5675000d3d26a59160ed797bbf9e Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 31 Jul 2023 14:50:58 -0400 Subject: [PATCH 161/366] Update conda-build recipe file (#4943) --- recipe/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipe/meta.yaml b/recipe/meta.yaml index bc307a23a4..9ca9a95a27 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -53,7 +53,7 @@ requirements: - tomli # [py<311] - tqdm run_constrained: - - conda-verify >=3.0.2 + - conda-verify >=3.1.0 test: imports: From 63cc114264cef5fc3ea36a6873bedf69fa9c41b2 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 1 Aug 2023 05:21:54 -0500 Subject: [PATCH 162/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4952)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- RELEASE.md | 101 +++++++++++++++++++++++++++++++++-------------------- rever.xsh | 6 ++++ 2 files changed, 69 insertions(+), 38 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index 4f6199512e..5f0648df52 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -14,12 +14,12 @@ # Release Process -> **Note** +> **Note:** > Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. ## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) -> **Note** +> **Note:** > The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access. @@ -48,7 +48,7 @@ Placeholder for `{{ repo.name }} YY.M.0` release. #### The week before release week - [ ] Create release branch (named `YY.M.x`) -- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/YY.M.x`) +- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`) - [ ] [Complete outstanding PRs][milestone] - [ ] Test release candidates @@ -76,7 +76,7 @@ Placeholder for `{{ repo.name }} YY.M.0` release. 
-> **Note** +> **Note:** > The new release branch should adhere to the naming convention of `YY.M.x`. ## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) @@ -122,7 +122,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Run `rever --activities authors`: - > **Note** + > **Note:** > Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version). ```bash @@ -164,7 +164,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .authors.yml" + (rever) $ git commit -m "Update .authors.yml" ``` - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running: @@ -189,21 +189,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .mailmap" + (rever) $ git commit -m "Update .mailmap" ``` - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap ``` 4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary. - > **Note** + > **Note:** > We've found it useful to name news snippets with the following format: `-`. > > We've also found that we like to include the PR #s inline with the text itself, e.g.: @@ -222,21 +222,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated news" + (rever) $ git commit -m "Update news" ``` - After completing this, you will have at most three commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 5. Run `rever --activities changelog`: - > **Note** + > **Note:** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. ```bash @@ -256,9 +256,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 6. 
Now that we have successfully run the activities separately, we wish to run both together. This will ensure that the contributor list, a side-effect of the authors activity, is included in the changelog activity. @@ -271,11 +271,11 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 ``` 7. Since rever does not include stats on first-time contributors, we will need to add this manually. @@ -286,19 +286,19 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Added first contributions" + (rever) $ git commit -m "Add first-time contributions" ``` - After completing this, you will have at most six commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 - + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Added first contributions + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 + + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Add first-time contributions ``` 8. Push this versioned branch. @@ -326,7 +326,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note** + > **Note:** > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | @@ -339,9 +339,23 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ## 5. Wait for review and approval of release PR. -## 6. Merge release PR and publish release. +## 6. Manually test canary build(s). -## 7. Merge/cherry pick the release branch over to the `main` branch. +### Canary Builds for Manual Testing + +Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. + +> **Note:** +> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. + +## 7. Merge release PR and publish release. 
+ +To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. + +> **Note:** +> Release notes can be drafted and saved ahead of time. + +## 8. Merge/cherry pick the release branch over to the `main` branch.
Internal process @@ -354,19 +368,30 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". -> **Note** +> **Note:** > Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.0` and `main` branches. 5. Review and merge the pull request the same as any code change pull request. -> **Note** +> **Note:** > The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action.
-## 8. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. +## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. + +> **Note:** +> Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): +> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.0_[short hash]` syntax) +> - Add any changes via commits to that new branch +> - Open a new PR and push it against the `main` branch +> +> Make sure to include a comment on the original `autotick-bot` PR that a new pull request has been created, in order to avoid duplicating work! `regro-cf-autotick-bot` will close the auto-created PR once the new PR is merged. +> +> For more information about this process, please read the ["Pushing to regro-cf-autotick-bot branch" section of the conda-forge documentation](https://conda-forge.org/docs/maintainer/updating_pkgs.html#pushing-to-regro-cf-autotick-bot-branch). + -## 9. Hand off to Anaconda's packaging team. +## 10. Hand off to Anaconda's packaging team.
Internal process @@ -377,6 +402,6 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 10. Continue championing and shepherding. +## 11. Continue championing and shepherding. Remember to make all relevant announcements and continue to update the release issue with the latest details as tasks are completed. diff --git a/rever.xsh b/rever.xsh index 644107dfd9..577ecfa980 100644 --- a/rever.xsh +++ b/rever.xsh @@ -26,3 +26,9 @@ $CHANGELOG_CATEGORIES = [ $CHANGELOG_CATEGORY_TITLE_FORMAT = "### {category}\n\n" $CHANGELOG_AUTHORS_TITLE = "Contributors" $CHANGELOG_AUTHORS_FORMAT = "* @{github}\n" + +try: + # allow repository to customize synchronized-from-infa rever config + from rever_overrides import * +except ImportError: + pass From ff714fd8753ba48937ef3fd1401b55ca1e64d744 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 2 Aug 2023 09:04:32 -0500 Subject: [PATCH 163/366] [pre-commit.ci] pre-commit autoupdate (#4954) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/asottile/pyupgrade: v3.9.0 → v3.10.1](https://github.com/asottile/pyupgrade/compare/v3.9.0...v3.10.1) - [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0) * Ignore E721 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- .pre-commit-config.yaml | 4 ++-- conda_build/utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3c6db02111..059ca9c619 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.9.0 + rev: v3.10.1 hooks: # upgrade standard Python codes - id: pyupgrade @@ -69,7 +69,7 @@ repos: - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: # lint Python codes - id: flake8 diff --git a/conda_build/utils.py b/conda_build/utils.py index 60f3cec3d3..aa375790f3 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1289,7 +1289,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype + uniform = lambda e: type(e) == etype # noqa: E721 try: return all(uniform(e) for e in arg) From afe931afa3126c072c7cb9fa55f70d11d4f52e3b Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Fri, 4 Aug 2023 14:52:11 -0400 Subject: [PATCH 164/366] delay imports in command plugin (#4953) * delay imports in command plugin * add news item --- conda_build/__init__.py | 3 +- conda_build/plugin.py | 64 +++++++++++++++++++++++++++----- news/4956-improve-command-plugin | 20 ++++++++++ 3 files changed, 77 insertions(+), 10 deletions(-) create mode 100644 news/4956-improve-command-plugin diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 943084b6f4..91367d0d86 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -12,5 +12,6 @@ "index", "inspect", "metapackage", - "render" "skeleton", + "render", + "skeleton", ] diff --git a/conda_build/plugin.py b/conda_build/plugin.py index 03a3949f44..eddb85fe66 100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -2,15 +2,61 @@ # 
SPDX-License-Identifier: BSD-3-Clause import conda.plugins -from .cli.main_build import execute as build -from .cli.main_convert import execute as convert -from .cli.main_debug import execute as debug -from .cli.main_develop import execute as develop -from .cli.main_index import execute as index -from .cli.main_inspect import execute as inspect -from .cli.main_metapackage import execute as metapackage -from .cli.main_render import execute as render -from .cli.main_skeleton import execute as skeleton + +# lazy-import to avoid nasty import-time side effects when not using conda-build +def build(*args, **kwargs): + from .cli.main_build import execute + + execute(*args, **kwargs) + + +def convert(*args, **kwargs): + from .cli.main_convert import execute + + execute(*args, **kwargs) + + +def debug(*args, **kwargs): + from .cli.main_debug import execute + + execute(*args, **kwargs) + + +def develop(*args, **kwargs): + from .cli.main_develop import execute + + execute(*args, **kwargs) + + +def index(*args, **kwargs): + # deprecated! use conda-index! + from .cli.main_index import execute + + execute(*args, **kwargs) + + +def inspect(*args, **kwargs): + from .cli.main_inspect import execute + + execute(*args, **kwargs) + + +def metapackage(*args, **kwargs): + from .cli.main_metapackage import execute + + execute(*args, **kwargs) + + +def render(*args, **kwargs): + from .cli.main_render import execute + + execute(*args, **kwargs) + + +def skeleton(*args, **kwargs): + from .cli.main_skeleton import execute + + execute(*args, **kwargs) @conda.plugins.hookimpl diff --git a/news/4956-improve-command-plugin b/news/4956-improve-command-plugin new file mode 100644 index 0000000000..0795fab179 --- /dev/null +++ b/news/4956-improve-command-plugin @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding + import-time side effects. 
(#4949) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From f004c5d9a175263ba4ed7bd87bdcd2d507ab6ad2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Aug 2023 11:59:49 -0500 Subject: [PATCH 165/366] [pre-commit.ci] pre-commit autoupdate (#4958) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/Lucas-C/pre-commit-hooks: v1.5.1 → v1.5.3](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.5.1...v1.5.3) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 059ca9c619..ca7205aef0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: check-merge-conflict # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.1 + rev: v1.5.3 hooks: # auto inject license blurb - id: insert-license From 09acdc69b27656a5c87864d2a1fce146721219a1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 07:30:15 -0500 Subject: [PATCH 166/366] [pre-commit.ci] pre-commit autoupdate (#4962) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/Lucas-C/pre-commit-hooks: v1.5.3 → v1.5.4](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.5.3...v1.5.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ca7205aef0..da87844dfb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: check-merge-conflict # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.3 + rev: v1.5.4 hooks: # auto inject license blurb - id: insert-license From 4dc1e4983808e3d3ad24fd363044d8679e85e214 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 16 Aug 2023 15:00:31 -0500 Subject: [PATCH 167/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4963)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 24c7a8b967..ed22cae254 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -27,7 +27,7 @@ jobs: # (default: secrets.GITHUB_TOKEN) token: ${{ secrets.CLA_ACTION_TOKEN }} # [required] - # Label to apply to contributor's PR once CLA is singed + # Label to apply to contributor's PR once CLA is signed label: cla-signed # [required] From c2bfe2b44ad34b06d8930aa4239995367f3eaf12 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Thu, 17 Aug 2023 10:17:36 -0400 Subject: [PATCH 168/366] 3.26.1 (#4970) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Conda Bot Co-authored-by: Ken Odegard Co-authored-by: Jannis Leidel Co-authored-by: Bianca Henderson Co-authored-by: conda-bot <18747875+conda-bot@users.noreply.github.com> fix islist (#4184) --- .authors.yml | 28 
+++++------ .github/workflows/cla.yml | 2 +- .pre-commit-config.yaml | 8 +-- CHANGELOG.md | 18 +++++++ RELEASE.md | 101 ++++++++++++++++++++++++-------------- conda_build/__init__.py | 3 +- conda_build/metadata.py | 55 ++++++++++++--------- conda_build/plugin.py | 64 ++++++++++++++++++++---- conda_build/utils.py | 17 ++++--- recipe/meta.yaml | 2 +- rever.xsh | 6 +++ tests/test_metadata.py | 65 +++++++++++++++++++++++- 12 files changed, 269 insertions(+), 100 deletions(-) diff --git a/.authors.yml b/.authors.yml index 3a4cc66964..2989dbcb6a 100644 --- a/.authors.yml +++ b/.authors.yml @@ -7,7 +7,7 @@ aliases: - Mike Sarahan - Michael Sarahan - num_commits: 4000 + num_commits: 2000 first_commit: 2015-09-04 21:31:08 - name: Jonathan J. Helmus email: jjhelmus@gmail.com @@ -63,7 +63,7 @@ alternate_emails: - mandeep@users.noreply.github.com - mbhutani@continuum.io - num_commits: 86 + num_commits: 43 first_commit: 2017-05-17 23:54:01 github: mandeep - name: Filipe Fernandes @@ -117,7 +117,7 @@ - heather999@users.noreply.github.com aliases: - heather999 - num_commits: 4 + num_commits: 2 first_commit: 2016-04-11 12:02:50 github: heather999 - name: Ryan Grout @@ -571,7 +571,7 @@ alternate_emails: - scastellarin95@gmail.com - scastellarin@anaconda.com - num_commits: 196 + num_commits: 98 first_commit: 2016-09-06 16:58:21 github: soapy1 - name: Bruno Oliveira @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 146 + num_commits: 73 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -873,7 +873,7 @@ alternate_emails: - 5738695+183amir@users.noreply.github.com - amir.mohammadi@idiap.ch - num_commits: 12 + num_commits: 6 first_commit: 2018-02-27 16:37:19 - name: David Li email: li.davidm96@gmail.com @@ -967,7 +967,7 @@ first_commit: 2019-01-26 13:17:33 - name: Rachel Rigdon email: rrigdon@anaconda.com - num_commits: 268 + num_commits: 134 first_commit: 2019-01-24 15:12:09 github: rrigdon aliases: @@ -1118,7 +1118,7 @@ alternate_emails: - becker.mr@gmail.com - beckermr@users.noreply.github.com - num_commits: 38 + num_commits: 19 first_commit: 2019-10-17 23:05:16 github: beckermr - name: Jinzhe Zeng @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 128 + num_commits: 130 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 26 + num_commits: 27 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 44 + num_commits: 48 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 20 + num_commits: 19 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 66 + num_commits: 35 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1268,7 +1268,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 10 + num_commits: 12 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 24c7a8b967..ed22cae254 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -27,7 +27,7 @@ jobs: # (default: secrets.GITHUB_TOKEN) token: ${{ secrets.CLA_ACTION_TOKEN }} # [required] - # Label to apply to contributor's PR once CLA is singed + # Label to apply to contributor's PR once CLA is signed label: cla-signed # [required] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 370d8fb9bd..da87844dfb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: check-merge-conflict # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.1 + rev: v1.5.4 hooks: # auto inject license blurb - id: insert-license @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.9.0 + rev: v3.10.1 hooks: # upgrade standard Python codes - id: pyupgrade @@ -58,7 +58,7 @@ repos: # auto sort Python imports - id: isort - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.7.0 hooks: # auto format Python codes - id: black @@ -69,7 +69,7 @@ repos: - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: # lint Python codes - id: flake8 diff --git a/CHANGELOG.md b/CHANGELOG.md index ac91b42cbb..59f76eb623 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ [//]: # (current developments) +## 3.26.1 (2023-08-17) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding + import-time side effects including unwanted logging configuration. (#4949) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @jezdez +* @kenodegard +* @pre-commit-ci[bot] + + + ## 3.26.0 (2023-07-18) ### Enhancements diff --git a/RELEASE.md b/RELEASE.md index 4f6199512e..5f0648df52 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -14,12 +14,12 @@ # Release Process -> **Note** +> **Note:** > Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. ## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) -> **Note** +> **Note:** > The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access. @@ -48,7 +48,7 @@ Placeholder for `{{ repo.name }} YY.M.0` release. #### The week before release week - [ ] Create release branch (named `YY.M.x`) -- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/YY.M.x`) +- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`) - [ ] [Complete outstanding PRs][milestone] - [ ] Test release candidates @@ -76,7 +76,7 @@ Placeholder for `{{ repo.name }} YY.M.0` release. -> **Note** +> **Note:** > The new release branch should adhere to the naming convention of `YY.M.x`. ## 2. Alert various parties of the upcoming release. 
(do this ~1 week prior to release) @@ -122,7 +122,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Run `rever --activities authors`: - > **Note** + > **Note:** > Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version). ```bash @@ -164,7 +164,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .authors.yml" + (rever) $ git commit -m "Update .authors.yml" ``` - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running: @@ -189,21 +189,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .mailmap" + (rever) $ git commit -m "Update .mailmap" ``` - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap ``` 4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary. - > **Note** + > **Note:** > We've found it useful to name news snippets with the following format: `-`. > > We've also found that we like to include the PR #s inline with the text itself, e.g.: @@ -222,21 +222,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated news" + (rever) $ git commit -m "Update news" ``` - After completing this, you will have at most three commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 5. Run `rever --activities changelog`: - > **Note** + > **Note:** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. ```bash @@ -256,9 +256,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 6. Now that we have successfully run the activities separately, we wish to run both together. 
This will ensure that the contributor list, a side-effect of the authors activity, is included in the changelog activity. @@ -271,11 +271,11 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 ``` 7. Since rever does not include stats on first-time contributors, we will need to add this manually. @@ -286,19 +286,19 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Added first contributions" + (rever) $ git commit -m "Add first-time contributions" ``` - After completing this, you will have at most six commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 - + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Added first contributions + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 + + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Add first-time contributions ``` 8. Push this versioned branch. @@ -326,7 +326,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note** + > **Note:** > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | @@ -339,9 +339,23 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ## 5. Wait for review and approval of release PR. -## 6. Merge release PR and publish release. +## 6. Manually test canary build(s). -## 7. Merge/cherry pick the release branch over to the `main` branch. +### Canary Builds for Manual Testing + +Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. + +> **Note:** +> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. + +## 7. Merge release PR and publish release. + +To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. 
+ +> **Note:** +> Release notes can be drafted and saved ahead of time. + +## 8. Merge/cherry pick the release branch over to the `main` branch.
Internal process @@ -354,19 +368,30 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". -> **Note** +> **Note:** > Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.0` and `main` branches. 5. Review and merge the pull request the same as any code change pull request. -> **Note** +> **Note:** > The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action.
-## 8. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. +## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. + +> **Note:** +> Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): +> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.0_[short hash]` syntax) +> - Add any changes via commits to that new branch +> - Open a new PR and push it against the `main` branch +> +> Make sure to include a comment on the original `autotick-bot` PR that a new pull request has been created, in order to avoid duplicating work! `regro-cf-autotick-bot` will close the auto-created PR once the new PR is merged. +> +> For more information about this process, please read the ["Pushing to regro-cf-autotick-bot branch" section of the conda-forge documentation](https://conda-forge.org/docs/maintainer/updating_pkgs.html#pushing-to-regro-cf-autotick-bot-branch). + -## 9. Hand off to Anaconda's packaging team. +## 10. Hand off to Anaconda's packaging team.
Internal process @@ -377,6 +402,6 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 10. Continue championing and shepherding. +## 11. Continue championing and shepherding. Remember to make all relevant announcements and continue to update the release issue with the latest details as tasks are completed. diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 943084b6f4..91367d0d86 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -12,5 +12,6 @@ "index", "inspect", "metapackage", - "render" "skeleton", + "render", + "skeleton", ] diff --git a/conda_build/metadata.py b/conda_build/metadata.py index d158af6223..33c3230573 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import contextlib import copy import hashlib import json @@ -41,9 +40,37 @@ ) try: - loader = yaml.CLoader -except: - loader = yaml.Loader + Loader = yaml.CLoader +except AttributeError: + Loader = yaml.Loader + + +class StringifyNumbersLoader(Loader): + @classmethod + def remove_implicit_resolver(cls, tag): + if "yaml_implicit_resolvers" not in cls.__dict__: + cls.yaml_implicit_resolvers = { + k: v[:] for k, v in cls.yaml_implicit_resolvers.items() + } + for ch in tuple(cls.yaml_implicit_resolvers): + resolvers = [(t, r) for t, r in cls.yaml_implicit_resolvers[ch] if t != tag] + if resolvers: + cls.yaml_implicit_resolvers[ch] = resolvers + else: + del cls.yaml_implicit_resolvers[ch] + + @classmethod + def remove_constructor(cls, tag): + if "yaml_constructors" not in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + if tag in cls.yaml_constructors: + del cls.yaml_constructors[tag] + + +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:int") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:int") on_win = sys.platform == "win32" @@ -261,9 +288,7 @@ def select_lines(data, namespace, variants_in_place): def yamlize(data): try: - with stringify_numbers(): - loaded_data = yaml.load(data, Loader=loader) - return loaded_data + return yaml.load(data, Loader=StringifyNumbersLoader) except yaml.error.YAMLError as e: if "{{" in data: try: @@ -1056,23 +1081,7 @@ def _hash_dependencies(hashing_dependencies, hash_length): return f"h{hash_.hexdigest()}"[: hash_length + 1] -@contextlib.contextmanager -def stringify_numbers(): - # ensure that numbers are not interpreted as ints or floats. That trips up versions - # with trailing zeros. 
- implicit_resolver_backup = loader.yaml_implicit_resolvers.copy() - for ch in list("0123456789"): - if ch in loader.yaml_implicit_resolvers: - del loader.yaml_implicit_resolvers[ch] - yield - for ch in list("0123456789"): - if ch in implicit_resolver_backup: - loader.yaml_implicit_resolvers[ch] = implicit_resolver_backup[ch] - - class MetaData: - __hash__ = None # declare as non-hashable to avoid its use with memoization - def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] self.config = get_or_merge_config(config, variant=variant) diff --git a/conda_build/plugin.py b/conda_build/plugin.py index 03a3949f44..eddb85fe66 100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -2,15 +2,61 @@ # SPDX-License-Identifier: BSD-3-Clause import conda.plugins -from .cli.main_build import execute as build -from .cli.main_convert import execute as convert -from .cli.main_debug import execute as debug -from .cli.main_develop import execute as develop -from .cli.main_index import execute as index -from .cli.main_inspect import execute as inspect -from .cli.main_metapackage import execute as metapackage -from .cli.main_render import execute as render -from .cli.main_skeleton import execute as skeleton + +# lazy-import to avoid nasty import-time side effects when not using conda-build +def build(*args, **kwargs): + from .cli.main_build import execute + + execute(*args, **kwargs) + + +def convert(*args, **kwargs): + from .cli.main_convert import execute + + execute(*args, **kwargs) + + +def debug(*args, **kwargs): + from .cli.main_debug import execute + + execute(*args, **kwargs) + + +def develop(*args, **kwargs): + from .cli.main_develop import execute + + execute(*args, **kwargs) + + +def index(*args, **kwargs): + # deprecated! use conda-index! 
+ from .cli.main_index import execute + + execute(*args, **kwargs) + + +def inspect(*args, **kwargs): + from .cli.main_inspect import execute + + execute(*args, **kwargs) + + +def metapackage(*args, **kwargs): + from .cli.main_metapackage import execute + + execute(*args, **kwargs) + + +def render(*args, **kwargs): + from .cli.main_render import execute + + execute(*args, **kwargs) + + +def skeleton(*args, **kwargs): + from .cli.main_skeleton import execute + + execute(*args, **kwargs) @conda.plugins.hookimpl diff --git a/conda_build/utils.py b/conda_build/utils.py index 49b95b7ca1..aa375790f3 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -34,6 +34,7 @@ ) from pathlib import Path from threading import Thread +from typing import Iterable import libarchive @@ -941,7 +942,7 @@ def file_info(path): } -def comma_join(items): +def comma_join(items: Iterable[str], conjunction: str = "and") -> str: """ Like ', '.join(items) but with and @@ -954,11 +955,10 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ( - " and ".join(items) - if len(items) <= 2 - else ", ".join(items[:-1]) + ", and " + items[-1] - ) + items = tuple(items) + if len(items) <= 2: + return f"{items[0]} {conjunction} {items[1]}" + return f"{', '.join(items[:-1])}, {conjunction} {items[-1]}" def safe_print_unicode(*args, **kwargs): @@ -1268,7 +1268,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, "__iter__"): + if isinstance(arg, str) or not isinstance(arg, Iterable): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1279,6 +1279,7 @@ def islist(arg, uniform=False, include_dict=True): return True # NOTE: not checking for Falsy arg since arg may be a generator + # WARNING: if uniform != False and arg is a generator then arg will be consumed if uniform is True: arg = iter(arg) @@ -1288,7 +1289,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype + uniform = lambda e: type(e) == etype # noqa: E721 try: return all(uniform(e) for e in arg) diff --git a/recipe/meta.yaml b/recipe/meta.yaml index bc307a23a4..9ca9a95a27 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -53,7 +53,7 @@ requirements: - tomli # [py<311] - tqdm run_constrained: - - conda-verify >=3.0.2 + - conda-verify >=3.1.0 test: imports: diff --git a/rever.xsh b/rever.xsh index 644107dfd9..577ecfa980 100644 --- a/rever.xsh +++ b/rever.xsh @@ -26,3 +26,9 @@ $CHANGELOG_CATEGORIES = [ $CHANGELOG_CATEGORY_TITLE_FORMAT = "### {category}\n\n" $CHANGELOG_AUTHORS_TITLE = "Contributors" $CHANGELOG_AUTHORS_FORMAT = "* @{github}\n" + +try: + # allow repository to customize synchronized-from-infa rever config + from rever_overrides import * +except ImportError: + pass diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 99545c50c9..b5a696ff6f 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -8,7 +8,7 @@ import pytest from conda_build import api -from conda_build.metadata import MetaData, _hash_dependencies, select_lines +from conda_build.metadata import MetaData, _hash_dependencies, select_lines, yamlize from conda_build.utils import DEFAULT_SUBDIRS from .utils import metadata_dir, thisdir @@ -260,3 +260,66 @@ def 
test_config_member_decoupling(testing_metadata): b = testing_metadata.copy() b.config.some_member = "123" assert b.config.some_member != testing_metadata.config.some_member + + +# ensure that numbers are not interpreted as ints or floats, doing so trips up versions +# with trailing zeros +def test_yamlize_zero(): + yml = yamlize( + """ + - 0 + - 0. + - 0.0 + - .0 + """ + ) + + assert yml == ["0", "0.", "0.0", ".0"] + + +def test_yamlize_positive(): + yml = yamlize( + """ + - +1 + - +1. + - +1.2 + - +.2 + """ + ) + + assert yml == ["+1", "+1.", "+1.2", "+.2"] + + +def test_yamlize_negative(): + yml = yamlize( + """ + - -1 + - -1. + - -1.2 + - -.2 + """ + ) + + assert yml == ["-1", "-1.", "-1.2", "-.2"] + + +def test_yamlize_numbers(): + yml = yamlize( + """ + - 1 + - 1.2 + """ + ) + + assert yml == ["1", "1.2"] + + +def test_yamlize_versions(): + yml = yamlize( + """ + - 1.2.3 + - 1.2.3.4 + """ + ) + + assert yml == ["1.2.3", "1.2.3.4"] From bbc1138e95d750a3073aeac766b8e34ee4dfe712 Mon Sep 17 00:00:00 2001 From: Shaun Walbridge <46331011+scdub@users.noreply.github.com> Date: Fri, 18 Aug 2023 03:53:28 -0400 Subject: [PATCH 169/366] Fix the broken link to the jinja2 template page (#4965) Co-authored-by: Shaun Walbridge --- docs/source/resources/define-metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 891dd759ef..48d873dffa 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1749,7 +1749,7 @@ practice means changing the conda-build source code. See the `_. For more information, see the `Jinja2 template -documentation `_ +documentation `_ and :ref:`the list of available environment variables `. From b4f8818387ad9f81037b00acbc0889ddd40399e1 Mon Sep 17 00:00:00 2001 From: Shaun Walbridge <46331011+scdub@users.noreply.github.com> Date: Fri, 18 Aug 2023 03:54:38 -0400 Subject: [PATCH 170/366] Restore subpackage test documentation (#4964) Co-authored-by: Shaun Walbridge --- docs/source/resources/define-metadata.rst | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 48d873dffa..478a442062 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1478,10 +1478,9 @@ explicitly in the script section: script: run_test.py -Test requirements for subpackages are not supported. Instead, -subpackage tests install their runtime requirements---but not the -run requirements for the top-level package---and the test-time -requirements of the top-level package. +Test requirements for subpackages can be specified using the optional +`test/requires` section of subpackage tests. Subpackage tests install +their runtime requirements during the test as well. 
EXAMPLE: In this example, the test for ``subpackage-name`` installs ``some-test-dep`` and ``subpackage-run-req``, but not @@ -1493,16 +1492,15 @@ installs ``some-test-dep`` and ``subpackage-run-req``, but not run: - some-top-level-run-req - test: - requires: - - some-test-dep - outputs: - name: subpackage-name requirements: - subpackage-run-req test: script: run_test.py + requires: + - some-test-dep + Output type From 5376d73f083d2b4217fb7bbeaee5a229cd62e635 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrgen=20Gmach?= Date: Fri, 18 Aug 2023 14:47:20 +0200 Subject: [PATCH 171/366] Fix typo in CONTRIBUTING.md (#4957) You need to use `ENV_NAME` instead of `CONDA_ENV` for setting up conda build. Fixes #4492 --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 675651e3ae..683faf9597 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,7 +50,7 @@ impact the functionality of `conda/conda-build` installed in your base environme ``` bash # create/activate standalone dev env - $ CONDA_ENV=conda-build make setup + $ ENV_NAME=conda-build make setup $ conda activate conda-build # Run all tests on Linux and Mac OS X systems (this can take a long time) From b2cb091e60bf9a879e4330db6ddad19675ee7f87 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:59:49 -0500 Subject: [PATCH 172/366] [pre-commit.ci] pre-commit autoupdate (#4974) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/blacken-docs: 1.15.0 → 1.16.0](https://github.com/asottile/blacken-docs/compare/1.15.0...1.16.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index da87844dfb..59f04c445b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -63,7 +63,7 @@ repos: # auto format Python codes - id: black - repo: https://github.com/asottile/blacken-docs - rev: 1.15.0 + rev: 1.16.0 hooks: # auto format Python codes within docstrings - id: blacken-docs From 7da4ba493eaed46df6bd37a952d2bcdc8cfe1608 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Fri, 25 Aug 2023 10:43:48 -0400 Subject: [PATCH 173/366] Add a tag::noarch label to keep track of noarch-related issues and pull requests (#4986) --- .github/labels.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/labels.yml b/.github/labels.yml index cdd6853502..b072d62896 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -26,3 +26,9 @@ - name: knowledge-medium description: "[deprecated]" color: "888888" + +# Tags +- name: tag::noarch + description: related to noarch builds + color: "86C579" + aliases: [] From f48421bcb175688ce9022210cfa412e167f99626 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Fri, 25 Aug 2023 16:05:30 -0500 Subject: [PATCH 174/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4988)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- HOW_WE_USE_GITHUB.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index aa5a3691c6..dc32be1cf1 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -272,7 +272,7 @@ 
please post details to the [Nucleus forums](https://community.anaconda.cloud/).
 Hi [@username],
 
-Thanks for voicing your concern about the performance of our dependency solver. To fix this, our official recommendation is using the "conda-libmamba-solver" instead of the default "classic" solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
+Thanks for voicing your concern about the performance of the classic dependency solver. To fix this, our official recommendation is using the new default "conda-libmamba-solver" instead of the classic solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
 
 In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
 
From 8231a37e8ba25e14b89c5c38e1b547b8caed13a8 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 29 Aug 2023 14:11:51 -0500 Subject: [PATCH 175/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4989)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- RELEASE.md | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index 5f0648df52..45e605e9eb 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -15,12 +15,12 @@ # Release Process > **Note:** -> Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. +> Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. ## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) > **Note:** -> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. +> The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access. @@ -30,7 +30,7 @@ Use the issue template below to create the release issue. After creating the rel ```markdown ### Summary -Placeholder for `{{ repo.name }} YY.M.0` release. +Placeholder for `{{ repo.name }} YY.M.x` release. | Pilot | | |---|---| @@ -75,9 +75,8 @@ Placeholder for `{{ repo.name }} YY.M.0` release. ``` - > **Note:** -> The new release branch should adhere to the naming convention of `YY.M.x`. +> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. ## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) @@ -117,7 +116,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Create a versioned branch, this is where rever will make its changes: ```bash - (rever) $ git checkout -b changelog-YY.M.0 + (rever) $ git checkout -b changelog-YY.M.[$patch_number] ``` 2. 
Run `rever --activities authors`: @@ -145,7 +144,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - Here's a sample run where we undo the commit made by rever in order to commit the changes to `.authors.yml` separately: ```bash - (rever) $ rever --activities authors --force YY.M.0 + (rever) $ rever --activities authors --force YY.M.[$patch_number] # changes were made to .authors.yml as per the prior bullet (rever) $ git diff --name-only HEAD HEAD~1 @@ -274,8 +273,8 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] ``` 7. Since rever does not include stats on first-time contributors, we will need to add this manually. @@ -296,15 +295,15 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.0 + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Add first-time contributions ``` 8. Push this versioned branch. ```bash - (rever) $ git push -u upstream changelog-YY.M.0 + (rever) $ git push -u upstream changelog-YY.M.[$patch_number] ``` 9. Open the Release PR targing the `YY.M.x` branch. @@ -331,7 +330,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut | Field | Value | |---|---| - | Choose a tag | `YY.M.0` | + | Choose a tag | `YY.M.[$patch_number]` | | Target | `YY.M.x` | | Body | copy/paste blurb from `CHANGELOG.md` | @@ -362,14 +361,14 @@ To publish the release, go to the project's release page (e.g., https://github.c 1. From the main "< > Code" page of the repository, select the drop down menu next to the `main` branch button and then select "View all branches" at the very bottom. -2. Find the applicable `YY.MM.x` branch and click the "New pull request" button. +2. Find the applicable `YY.M.x` branch and click the "New pull request" button. -3. "Base" should point to `main` while "Compare" should point to `YY.MM.x`. +3. "Base" should point to `main` while "Compare" should point to `YY.M.x`. 4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". > **Note:** -> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.0` and `main` branches. +> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches. 5. 
Review and merge the pull request the same as any code change pull request. @@ -378,11 +377,11 @@ To publish the release, go to the project's release page (e.g., https://github.c -## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.0`. +## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`. > **Note:** > Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): -> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.0_[short hash]` syntax) +> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax) > - Add any changes via commits to that new branch > - Open a new PR and push it against the `main` branch > From 63bf8a6c08c1415a2e675639971b14a4dd4a0688 Mon Sep 17 00:00:00 2001 From: Jack Olivieri Date: Wed, 30 Aug 2023 17:32:34 +0200 Subject: [PATCH 176/366] Conform to recipe standards (#4960) Co-authored-by: Jannis Leidel Co-authored-by: Ken Odegard Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- conda_build/skeletons/pypi.py | 4 +++- news/4960-conform-to-recipe-standards | 19 +++++++++++++++++++ tests/test_pypi_skeleton.py | 4 ++-- 3 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 news/4960-conform-to-recipe-standards diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 9693d41933..b1194e6a8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -431,7 +431,9 @@ def skeletonize( if noarch_python: ordered_recipe["build"]["noarch"] = "python" - recipe_script_cmd = ["{{ PYTHON }} -m pip install . -vv"] + recipe_script_cmd = [ + "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation" + ] ordered_recipe["build"]["script"] = " ".join( recipe_script_cmd + setup_options ) diff --git a/news/4960-conform-to-recipe-standards b/news/4960-conform-to-recipe-standards new file mode 100644 index 0000000000..a28b5f6c89 --- /dev/null +++ b/news/4960-conform-to-recipe-standards @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* When templating new recipes from a pypi package, the build script `{{ PYTHON }} -m pip install . -vv` is deprecated in favor of `{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation`. (#4960) + +### Docs + +* + +### Other + +* diff --git a/tests/test_pypi_skeleton.py b/tests/test_pypi_skeleton.py index 6562a50f14..20581ef14d 100644 --- a/tests/test_pypi_skeleton.py +++ b/tests/test_pypi_skeleton.py @@ -58,7 +58,7 @@ def test_print_dict(): }, "build": { "number": 0, - "script": "{{ PYTHON }} -m pip install . -vv", + "script": "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation", }, } recipe_order = ["package", "source", "build", "about"] @@ -74,7 +74,7 @@ def test_print_dict(): build: number: 0 - script: "{{ PYTHON }} -m pip install . -vv" + script: "{{ PYTHON }} -m pip install . 
-vv --no-deps --no-build-isolation" about: home: "https://conda.io" From bf8caf548b951bf0a06320f77e5019bb891ea106 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 5 Sep 2023 15:02:40 -0500 Subject: [PATCH 177/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#4994)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index b2c2e821d8..ebfafa82a2 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -20,7 +20,7 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - id: has_local uses: andstor/file-existence-action@v2.0.0 with: From 7e6b035cbdd14ed50c31a6fc3a6bcc35723fd921 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 14 Sep 2023 20:35:21 +0200 Subject: [PATCH 178/366] [pre-commit.ci] pre-commit autoupdate (#5001) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 59f04c445b..379137b52d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto sort Python imports - id: isort - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.9.1 hooks: # auto format Python codes - id: black From 735e506b84c4fb5afbcab0fa5639c0a0c081ad3a Mon Sep 17 00:00:00 2001 From: Dave Karetnyk Date: Sat, 16 Sep 2023 12:21:09 +0200 Subject: [PATCH 179/366] Update docs regarding base env use (#5004) Co-authored-by: Bianca Henderson Co-authored-by: Jannis Leidel --- docs/source/install-conda-build.rst | 43 +++++++++++++++++++++++++++ news/4995-run-conda-build-in-base-env | 19 ++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 news/4995-run-conda-build-in-base-env diff --git a/docs/source/install-conda-build.rst b/docs/source/install-conda-build.rst index e8cc6c9425..dd52e930ff 100644 --- a/docs/source/install-conda-build.rst +++ b/docs/source/install-conda-build.rst @@ -9,6 +9,42 @@ To enable building conda packages: * update conda and conda-build +.. _-conda-build-wow: + +Way of working +============== + +For proper functioning, it is strongly recommended to install conda-build in +the conda ``base`` environment. Not doing so may lead to problems. + +Explanation +----------- + +With earlier conda / conda-build versions, it was possible to build packages in +your own defined environment, e.g. ``my_build_env``. This was partly driven by +the general conda recommendation not to use the ``base`` env for normal work; +see `Conda Managing Environments`_ for instance. However, conda-build is better +viewed as part of the conda infrastructure, and not as a normal package. Hence, +installing it in the ``base`` env makes more sense. 
More information: +`Must conda-build be installed in the base envt?`_ + +Other considerations +-------------------- + +* An installation or update of conda-build (in fact, of any package) in the ``base`` + environment needs to be run from an account with the proper permissions + (i.e., the same permissions as were used to install conda and the base env in + the first place via the Miniconda or Anaconda installers). For example, on + Windows that might mean an account with administrator privileges. + +* `conda-verfiy`_ is a useful package that can also be added to the base + environment in order to remove some warnings generated when conda-build runs. + +* For critical CI/CD projects, you might want to pin to an explicit (but recent) + version of conda-build, i.e. only update to a newer version of conda-build + and conda once they have been first verified "offline". + + .. _install-conda-build: Installing conda-build @@ -18,6 +54,7 @@ To install conda-build, in your terminal window or an Anaconda Prompt, run: .. code-block:: bash + conda activate base conda install conda-build @@ -31,8 +68,14 @@ To update conda and conda-build, in your terminal window or an Anaconda Prompt, .. code-block:: bash + conda activate base conda update conda conda update conda-build For release notes, see the `conda-build GitHub page `_. + + +.. _`Conda Managing Environments`: https://conda.io/projects/conda/en/latest/user-guide/getting-started.html#managing-environments +.. _`conda-verfiy`: https://github.com/conda/conda-verify +.. _`Must conda-build be installed in the base envt?`: https://github.com/conda/conda-build/issues/4995 diff --git a/news/4995-run-conda-build-in-base-env b/news/4995-run-conda-build-in-base-env new file mode 100644 index 0000000000..41a6fc82d9 --- /dev/null +++ b/news/4995-run-conda-build-in-base-env @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Add explanation that conda-build should be run from the base env. (#4995) + +### Other + +* From 82fc11a474bcb396b20a0ee825e68467838f9d24 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Mon, 18 Sep 2023 21:22:56 +0200 Subject: [PATCH 180/366] Migrate to new version of readthedocs config. (#5006) --- .github/dependabot.yml | 15 +++++++++++++++ .readthedocs.yml | 6 +++++- docs/requirements.txt | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..07210519aa --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/docs/" + schedule: + interval: "weekly" + allow: + # Allow only production updates for Sphinx + - dependency-name: "sphinx" + dependency-type: "production" diff --git a/.readthedocs.yml b/.readthedocs.yml index a452d9dedf..abdbda6254 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,7 +1,11 @@ version: 2 +build: + os: "ubuntu-22.04" + tools: + python: "3.11" + python: - version: "3" install: - requirements: docs/requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt index 14557857f9..ac7b1b5aeb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ linkify-it-py==1.0.1 myst-parser==0.15.2 Pillow==9.3.0 -PyYAML==5.4.1 +PyYAML==6.0.1 requests==2.31.0 ruamel.yaml==0.17.16 Sphinx==4.2.0 From 47909dad7552cc02276fefc53848e79f4f4b3356 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Mon, 18 Sep 2023 21:24:56 +0200 Subject: [PATCH 181/366] Clarify which environment gets activated first (#4942) --- docs/source/resources/define-metadata.rst | 7 ++++--- news/4942-docs-prefix-activation | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) create mode 100644 news/4942-docs-prefix-activation diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 478a442062..d314349b2b 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -992,9 +992,10 @@ words, a Python package would list ``python`` here and an R package would list The PREFIX environment variable points to the host prefix. With respect to activation during builds, both the host and build environments are activated. -The build prefix is activated before the host prefix so that the host prefix -has priority over the build prefix. Executables that don't exist in the host -prefix should be found in the build prefix. +The build prefix is activated *after* the host prefix so that the build prefix, +which always contains native executables for the running platform, has priority +over the host prefix, which is not guaranteed to provide native executables (e.g. +when cross-compiling). As of conda-build 3.1.4, the build and host prefixes are always separate when both are defined, or when ``{{ compiler() }}`` Jinja2 functions are used. The diff --git a/news/4942-docs-prefix-activation b/news/4942-docs-prefix-activation new file mode 100644 index 0000000000..9c9f0ca7f5 --- /dev/null +++ b/news/4942-docs-prefix-activation @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Clarify that the `build` prefix is activated _after_ the `host` prefix. (#4942) + +### Other + +* From 3ba167b78edf419b1c290363d62e934b6e6dc47a Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 19 Sep 2023 10:54:11 -0600 Subject: [PATCH 182/366] Updating documentation to describe ~= package specification (#4930) * Migrating information to tables. * Adding a few line breaks for readability. * Adding in more info. * Adding rest of documentation. * Adding news. * Update news/4553-document-compatibility-release-operator Co-authored-by: Jannis Leidel * Remove raw html and moving some examples to a better section. 
* Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Update docs/source/resources/package-spec.rst Co-authored-by: Katherine Kinnaman * Making suggested edit. --------- Co-authored-by: Jannis Leidel Co-authored-by: Katherine Kinnaman --- docs/source/resources/package-spec.rst | 160 ++++++++++-------- ...53-document-compatibility-release-operator | 3 + 2 files changed, 91 insertions(+), 72 deletions(-) create mode 100644 news/4553-document-compatibility-release-operator diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index d06e180043..a3f0c98ac9 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -62,7 +62,7 @@ conda package is composed of the first 3 values, as in: ``--.tar.bz2`` or ``--.conda``. .. list-table:: - :widths: 15 15 70 + :widths: 15 15 45 * - **Key** - **Type** @@ -76,8 +76,7 @@ conda package is composed of the first 3 values, as in: * - version - string - The package version. May not contain "-". Conda - acknowledges `PEP 440 - `_. + acknowledges `PEP 440 `_. * - build - string @@ -96,15 +95,13 @@ conda package is composed of the first 3 values, as in: * - build_number - integer - - A non-negative integer representing the build number of - the package. + - A non-negative integer representing the build number of the package. - Unlike the build string, the ``build_number`` is inspected by - conda. Conda uses it to sort packages that have otherwise - identical names and versions to determine the latest one. - This is important because new builds that contain bug - fixes for the way a package is built may be added to a - repository. + Unlike the build string, the ``build_number`` is inspected by conda. + + Conda uses it to sort packages that have otherwise identical names and versions to determine the latest one. + + This is important because new builds that contain bug fixes for the way a package is built may be added to a repository. * - depends - list of strings @@ -125,10 +122,12 @@ conda package is composed of the first 3 values, as in: EXAMPLE: ``osx`` - Conda currently does not use this key. Packages for a - specific architecture and platform are usually - distinguished by the repository subdirectory that contains - them---see :ref:`repo-si`. + Conda currently does not use this key. + + Packages for a specific architecture and platform are usually distinguished by the repository subdirectory that contains + them. + + See :ref:`repo-si`. info/files ---------- @@ -284,44 +283,59 @@ parts: * The first part is always the exact name of the package. * The second part refers to the version and may contain special - characters: + characters. See table below. - * \| means OR. +* The third part is always the exact build string. When there are + three parts, the second part must be the exact version. - EXAMPLE: ``1.0|1.2`` matches version 1.0 or 1.2. +.. list-table:: Version Special Characters + :widths: 10, 40, 40 + :header-rows: 1 - * \* matches 0 or more characters in the version string. In - terms of regular expressions, it is the same as ``r'.*'``. + * - Symbol + - Meaning + - Example - EXAMPLE: 1.0|1.4* matches 1.0, 1.4 and 1.4.1b2, but not 1.2. 
+ * - <, >, <=, >= + - Relational operators on versions, - * <, >, <=, >=, ==, and != are relational operators on versions, - which are compared using - `PEP-440 `_. For example, - ``<=1.0`` matches ``0.9``, ``0.9.1``, and ``1.0``, but not ``1.0.1``. - ``==`` and ``!=`` are exact equality. + which are compared using `PEP-440 `_. + - ``<=1.0`` matches 0.9, 0.9.1, and 1.0, but not 1.0.1. - Pre-release versioning is also supported such that ``>1.0b4`` will match - ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. + * - ==, and != + - Exact equality and not equalities. + - ``==0.5.1`` matches 0.5.1 and not anything else while ``!=0.5.1`` matches everything but. - EXAMPLE: <=1.0 matches 0.9, 0.9.1, and 1.0, but not 1.0.1. + * - ~= + - Compatibility Release + - ``~=0.5.3`` is equivalent to ``>=0.5.3, <0.6.0a`` - * , means AND. + * - \| + - OR + - ``1.0|1.2`` matches version 1.0 or 1.2. - EXAMPLE: >=2,<3 matches all packages in the 2 series. 2.0, - 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + * - \* + - Matches 0 or more characters in the version string. - * , has higher precedence than \|, so >=1,<2|>3 means greater - than or equal to 1 AND less than 2 or greater than 3, which - matches 1, 1.3 and 3.0, but not 2.2. + In terms of regular expressions, it is the same as ``r'.*'``. + - ``1.0|1.4*`` matches 1.0, 1.4 and 1.4.1b2, but not 1.2. - Conda parses the version by splitting it into parts separated - by \|. If the part begins with <, >, =, or !, it is parsed as a - relational operator. Otherwise, it is parsed as a version, - possibly containing the "*" operator. + * - , + - AND + - ``>=2,<3`` matches all packages in the 2 series. -* The third part is always the exact build string. When there are - 3 parts, the second part must be the exact version. + 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + +.. hint:: + ``,`` has higher precedence than \|, so >=1,<2|>3 means greater than or equal to 1 AND less than 2 or greater than 3, which matches 1, 1.3 and 3.0, but not 2.2. + +.. note:: + For package match specifications, pre-release versioning is also supported such that ``>1.0b4`` will match ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. + +Conda parses the version by splitting it into parts separated +by \|. If the part begins with <, >, =, or !, it is parsed as a +relational operator. Otherwise, it is parsed as a version, +possibly containing the "*" operator. Remember that the version specification cannot contain spaces, as spaces are used to delimit the package, version, and build @@ -329,40 +343,13 @@ string in the whole match specification. ``python >= 2.7`` is an invalid match specification. However, ``"python >= 2.7"`` (with double or single quotes) is matched as any version of a package named ``python>=2.7``. -When using the command line, put double or single quotes around any package -version specification that contains the space character or any of -the following characters: <, >, \*, or \|. - -EXAMPLE:: - - conda install numpy=1.11 - conda install numpy==1.11 - conda install "numpy>1.11" - conda install "numpy=1.11.1|1.11.3" - conda install "numpy>=1.8,<2" - +Examples of Package Specs +------------------------- -Examples --------- - -The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or -1.11.3. - -The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but -not 2.0. - -The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, -1.11.2, 1.11.18, and so on. - -The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, -and so on. 
- -The build string constraint "numpy=1.11.2=*nomkl*" matches the -NumPy 1.11.2 packages without MKL but not the normal MKL NumPy +The build string constraint "numpy=1.11.2=*nomkl*" matches the NumPy 1.11.2 packages without MKL, but not the normal MKL NumPy 1.11.2 packages. -The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches -NumPy 1.11.1 or 1.11.3 built for Python 3.6 but not any versions +The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches NumPy 1.11.1 or 1.11.3 built for Python 3.6, but not any versions of NumPy built for Python 3.5 or Python 2.7. The following are all valid match specifications for @@ -378,3 +365,32 @@ numpy-1.8.1-py27_0: * numpy >=1.8,<2|1.9 * numpy 1.8.1 py27_0 * numpy=1.8.1=py27_0 + +Command Line Match Spec Examples +-------------------------------- + +When using the command line, put double or single quotes around any package +version specification that contains the space character or any of +the following characters: <, >, \*, or \|. + +.. list-table:: Examples + :widths: 30 60 + :header-rows: 1 + + * - Example + - Meaning + + * - conda install numpy=1.11 + - The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, 1.11.2, 1.11.18, and so on. + + * - conda install numpy==1.11 + - The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, and so on. + + * - conda install "numpy=1.11.1|1.11.3" + - The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or 1.11.3. + + * - conda install "numpy>1.11" + - Any numpy version 1.12.0a or greater. + + * - conda install "numpy>=1.8,<2" + - The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but not 2.0. diff --git a/news/4553-document-compatibility-release-operator b/news/4553-document-compatibility-release-operator new file mode 100644 index 0000000000..f5d3dcde47 --- /dev/null +++ b/news/4553-document-compatibility-release-operator @@ -0,0 +1,3 @@ +### Docs + +* Document `~=` (compatibility release) match spec. 
(#4553) From 027f2b32ac1e1e41d2a58ff4c23461aca4dd3b9c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Sep 2023 20:55:44 +0200 Subject: [PATCH 183/366] [pre-commit.ci] pre-commit autoupdate (#5007) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.10.1 → v3.11.0](https://github.com/asottile/pyupgrade/compare/v3.10.1...v3.11.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Daniel Bast <2790401+dbast@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 379137b52d..2fd1f6400b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 + rev: v3.11.0 hooks: # upgrade standard Python codes - id: pyupgrade From f700df2fcf12323330860c899b047b05b0543436 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Sep 2023 08:55:43 +0200 Subject: [PATCH 184/366] Update Sphinx and fix rendering bugs (#5008) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Daniel Bast <2790401+dbast@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- Makefile | 2 +- docs/requirements.txt | 30 +++++++++++++++--------------- docs/source/conf.py | 4 ++-- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59f76eb623..c23c0d80d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2773,7 +2773,7 @@ https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#host * pyldd: disambiguate java .class files from Mach-O fat files (same magic number) #2328 * fix hash regex for downloaded files in `src_cache` #2330 * fix `zip_keys` becoming a loop dimension when variants passed as object rather than loaded from file #2333 -* fix windows always warning about old compiler activation. Now only warns if {{ compiler() }} is not used. #2333 +* fix windows always warning about old compiler activation. Now only warns if `{{ compiler() }}` is not used. 
#2333 * Add `LD_RUN_PATH` back into Linux variables for now (may remove later, but will have deprecation cycle) #2334 ### Contributors diff --git a/Makefile b/Makefile index 2ad8565e7f..db5bd26292 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ SHELL := /bin/bash -o pipefail -o errexit # ENV_NAME=dev TMPDIR=$HOME make test ENV_NAME ?= conda-build DOC_ENV_NAME ?= conda-build-docs -PYTHON_VERSION ?= 3.8 +PYTHON_VERSION ?= 3.11 TMPDIR := $(shell if test -w $(TMPDIR); then echo $(TMPDIR); else echo ./tmp/ ; fi)conda-build-testing # We want to bypass the shell wrapper function and use the binary directly for conda-run specifically diff --git a/docs/requirements.txt b/docs/requirements.txt index ac7b1b5aeb..b4590377d8 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,20 +1,20 @@ -linkify-it-py==1.0.1 -myst-parser==0.15.2 -Pillow==9.3.0 +linkify-it-py==2.0.2 +myst-parser==2.0.0 +Pillow==10.0.1 PyYAML==6.0.1 requests==2.31.0 -ruamel.yaml==0.17.16 -Sphinx==4.2.0 -sphinx-argparse==0.3.1 +ruamel.yaml==0.17.32 +Sphinx==7.2.6 +sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 -sphinx-rtd-theme==1.0.0 -sphinx-sitemap==2.2.0 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-devhelp==1.0.2 -sphinxcontrib-htmlhelp==2.0.0 +sphinx-rtd-theme==1.3.0 +sphinx-sitemap==2.5.1 +sphinxcontrib-applehelp==1.0.7 +sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-htmlhelp==2.0.4 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-plantuml==0.21 +sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 -sphinxcontrib-qthelp==1.0.3 -sphinxcontrib-serializinghtml==1.1.5 -pylint==2.11.1 +sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-serializinghtml==1.1.9 +pylint==2.17.5 diff --git a/docs/source/conf.py b/docs/source/conf.py index eba0f759da..99a7e5974e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -88,7 +88,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -206,7 +206,7 @@ # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {"https://docs.python.org/": None} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # -- Options for todo extension ---------------------------------------------- From 0a3ecf033685c64593c72b25015e30a1ac4501ec Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 22 Sep 2023 14:01:49 +0200 Subject: [PATCH 185/366] Add emscripten-wasm32 and wasi-wasm32 support (#4813) Co-authored-by: Jannis Leidel --- conda_build/build.py | 1 + conda_build/environ.py | 2 +- conda_build/metadata.py | 5 ++++- conda_build/utils.py | 2 ++ news/4813-wasm-platforms | 19 +++++++++++++++++++ tests/test_api_skeleton.py | 10 ++++++++-- 6 files changed, 35 insertions(+), 4 deletions(-) create mode 100644 news/4813-wasm-platforms diff --git a/conda_build/build.py b/conda_build/build.py index 81fb526011..b64ea641dd 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -2607,6 +2607,7 @@ def build( utils.rm_rf(m.config.info_dir) files1 = utils.prefix_files(prefix=m.config.host_prefix) + os.makedirs(m.config.build_folder, exist_ok=True) with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f: f.write("\n".join(sorted(list(files1)))) f.write("\n") diff --git a/conda_build/environ.py b/conda_build/environ.py index 853da44e02..7ef1b2a33d 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -580,7 +580,7 @@ def get_shlib_ext(host_platform): return ".dll" elif host_platform in ["osx", "darwin"]: return ".dylib" - elif host_platform.startswith("linux"): + elif host_platform.startswith("linux") or host_platform.endswith("-wasm32"): return ".so" elif host_platform == "noarch": # noarch packages should not contain shared libraries, use the system diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 33c3230573..47f3166727 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -124,14 +124,17 @@ def get_selectors(config: Config) -> dict[str, bool]: linux=plat.startswith("linux-"), linux32=bool(plat == "linux-32"), linux64=bool(plat == "linux-64"), + emscripten=plat.startswith("emscripten-"), + wasi=plat.startswith("wasi-"), arm=plat.startswith("linux-arm"), osx=plat.startswith("osx-"), - unix=plat.startswith(("linux-", "osx-")), + unix=plat.startswith(("linux-", "osx-", "emscripten-")), win=plat.startswith("win-"), win32=bool(plat == "win-32"), win64=bool(plat == "win-64"), x86=plat.endswith(("-32", "-64")), x86_64=plat.endswith("-64"), + wasm32=bool(plat.endswith("-wasm32")), os=os, environ=os.environ, nomkl=bool(int(os.environ.get("FEATURE_NOMKL", False))), diff --git a/conda_build/utils.py b/conda_build/utils.py index aa375790f3..989cf0dba4 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -113,6 +113,8 @@ def glob(pathname, recursive=True): mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE DEFAULT_SUBDIRS = { + "emscripten-wasm32", + "wasi-wasm32", "linux-64", "linux-32", "linux-s390x", diff --git a/news/4813-wasm-platforms b/news/4813-wasm-platforms new file mode 100644 index 0000000000..1dd82f5479 --- /dev/null +++ b/news/4813-wasm-platforms @@ -0,0 +1,19 @@ +### Enhancements + +* Handle `emscripten-wasm32` and `wasi-wasm32` platforms. 
(#4813) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 10d9a6973e..514d469c56 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -28,13 +28,19 @@ from conda_build.utils import on_win from conda_build.version import _parse as parse_version -SYMPY_URL = "https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29" +SYMPY_URL = ( + "https://files.pythonhosted.org/packages/7d/23/70fa970c07f0960f7543af982d2554be805e1034b9dcee9cb3082ce80f80/sympy-1.10.tar.gz" + "#sha256=6cf85a5cfe8fff69553e745b05128de6fc8de8f291965c63871c79701dc6efc9" +) PYLINT_VERSION = "2.3.1" PYLINT_HASH_TYPE = "sha256" PYLINT_HASH_VALUE = "723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" +PYLINT_HASH_VALUE_BLAKE2 = ( + "018b538911c0ebc2529f15004f4cb07e3ca562bb9aacea5df89cc25b62e01891" +) PYLINT_FILENAME = f"pylint-{PYLINT_VERSION}.tar.gz" -PYLINT_URL = f"https://pypi.python.org/packages/source/p/pylint/{PYLINT_FILENAME}#{PYLINT_HASH_TYPE}={PYLINT_HASH_VALUE}" +PYLINT_URL = f"https://files.pythonhosted.org/packages/{PYLINT_HASH_VALUE_BLAKE2[:2]}/{PYLINT_HASH_VALUE_BLAKE2[2:4]}/{PYLINT_HASH_VALUE_BLAKE2[4:]}/{PYLINT_FILENAME}" @pytest.fixture From ed0ddcc492da5d2f1fc7209b96107845860349f9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 19:00:53 +0200 Subject: [PATCH 186/366] [pre-commit.ci] pre-commit autoupdate (#5010) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.11.0 → v3.13.0](https://github.com/asottile/pyupgrade/compare/v3.11.0...v3.13.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2fd1f6400b..63608a4d8a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.11.0 + rev: v3.13.0 hooks: # upgrade standard Python codes - id: pyupgrade From ce91aa518f5ed81b6e161a51ec0fd24588ffda95 Mon Sep 17 00:00:00 2001 From: Ryan Date: Mon, 25 Sep 2023 13:09:08 -0600 Subject: [PATCH 187/366] Removing glob2 and replacing with glob(,recursive=True) where used. 
(#5005) Co-authored-by: Jannis Leidel Co-authored-by: Bianca Henderson Co-authored-by: Ken Odegard Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- conda_build/build.py | 9 ++++----- conda_build/cli/main_build.py | 4 ++-- conda_build/os_utils/external.py | 5 ++--- conda_build/os_utils/liefldd.py | 7 +++---- conda_build/utils.py | 22 +++++++--------------- news/4792-remove-glob2 | 3 +++ pyproject.toml | 1 - recipe/meta.yaml | 1 - tests/requirements.txt | 1 - 9 files changed, 21 insertions(+), 32 deletions(-) create mode 100644 news/4792-remove-glob2 diff --git a/conda_build/build.py b/conda_build/build.py index b64ea641dd..fa62a238d3 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -25,7 +25,6 @@ from os.path import dirname, isdir, isfile, islink, join import conda_package_handling.api -import glob2 import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version @@ -1203,11 +1202,11 @@ def get_files_with_prefix(m, replacements, files_in, prefix): for index, replacement in enumerate(replacements): all_matches = have_regex_files( files=[ - f - for f in files + file + for file in files if any( - glob2.fnmatch.fnmatch(f, r) - for r in replacement["glob_patterns"] + fnmatch.fnmatch(file, pattern) + for pattern in replacement["glob_patterns"] ) ], prefix=prefix, diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 999be96663..cba6fec6ff 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -4,6 +4,7 @@ import logging import sys import warnings +from glob import glob from itertools import chain from os.path import abspath, expanduser, expandvars from pathlib import Path @@ -11,7 +12,6 @@ import filelock from conda.auxlib.ish import dals from conda.common.io import dashlist -from glob2 import glob from .. 
import api, build, source, utils from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build @@ -542,7 +542,7 @@ def execute(args): outputs = [] failed_recipes = [] recipes = chain.from_iterable( - glob(abspath(recipe)) if "*" in recipe else [recipe] + glob(abspath(recipe), recursive=True) if "*" in recipe else [recipe] for recipe in args.recipe ) for recipe in recipes: diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 18190aba5d..215f395f00 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -3,10 +3,9 @@ import os import stat import sys +from glob import glob from os.path import expanduser, isfile, join -from glob2 import glob - from conda_build.conda_interface import root_dir @@ -55,7 +54,7 @@ def find_executable(executable, prefix=None, all_matches=False): result = path break if not result and any([f in executable for f in ("*", "?", ".")]): - matches = glob(os.path.join(dir_path, executable)) + matches = glob(os.path.join(dir_path, executable), recursive=True) if matches: if all_matches: all_matches_found.extend(matches) diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index a4739c84d0..2cf6ce92ad 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -11,11 +11,10 @@ import struct import sys import threading +from fnmatch import fnmatch from functools import partial from subprocess import PIPE, Popen -import glob2 - from .external import find_executable # lief cannot handle files it doesn't know about gracefully @@ -146,7 +145,7 @@ def _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath, set_runpat if ( set_runpath and e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH - and glob2.fnmatch.fnmatch(e.runpath, old_matching) + and fnmatch(e.runpath, old_matching) and e.runpath != new_rpath ): e.runpath = new_rpath @@ -154,7 +153,7 @@ def _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath, set_runpat elif ( set_rpath and e.tag == lief.ELF.DYNAMIC_TAGS.RPATH - and glob2.fnmatch.fnmatch(e.rpath, old_matching) + and fnmatch(e.rpath, old_matching) and e.rpath != new_rpath ): e.rpath = new_rpath diff --git a/conda_build/utils.py b/conda_build/utils.py index 989cf0dba4..a62d6700be 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -63,7 +63,8 @@ import urllib.parse as urlparse import urllib.request as urllib -from glob import glob as glob_glob +from contextlib import ExitStack # noqa: F401 +from glob import glob from conda.api import PackageCacheData # noqa @@ -91,15 +92,6 @@ win_path_to_unix, ) - -# stdlib glob is less feature-rich but considerably faster than glob2 -def glob(pathname, recursive=True): - return glob_glob(pathname, recursive=recursive) - - -# NOQA because it is not used in this file. -from contextlib import ExitStack # NOQA - PermissionError = PermissionError # NOQA FileNotFoundError = FileNotFoundError @@ -1037,7 +1029,7 @@ def get_stdlib_dir(prefix, py_ver): lib_dir = os.path.join(prefix, "Lib") else: lib_dir = os.path.join(prefix, "lib") - python_folder = glob(os.path.join(lib_dir, "python?.*")) + python_folder = glob(os.path.join(lib_dir, "python?.*"), recursive=True) python_folder = sorted(filterfalse(islink, python_folder)) if python_folder: lib_dir = os.path.join(lib_dir, python_folder[0]) @@ -1052,7 +1044,7 @@ def get_site_packages(prefix, py_ver): def get_build_folders(croot): # remember, glob is not a regex. 
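The glob2 removal in this patch follows one pattern throughout: `glob2.glob` becomes the standard library's `glob.glob(..., recursive=True)` (the `**` operator only recurses with that flag, available since Python 3.5), and `glob2.fnmatch.fnmatch` becomes `fnmatch.fnmatch`. A minimal sketch of the stdlib equivalents, with made-up paths:

```python
from fnmatch import fnmatch
from glob import glob

# '**' matches across directory levels only when recursive=True
logs = glob("/opt/conda/conda-bld/**/build.log", recursive=True)

# shell-style matching on individual path strings, no regex semantics
matched = [path for path in logs if fnmatch(path, "*/work/*")]
```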
- return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*")) + return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*"), recursive=True) def prepend_bin_path(env, prefix, prepend_prefix=False): @@ -1085,7 +1077,7 @@ def sys_path_prepended(prefix): sys.path.insert(1, os.path.join(prefix, "lib", "site-packages")) else: lib_dir = os.path.join(prefix, "lib") - python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*")) + python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*"), recursive=True) if python_dir: python_dir = python_dir[0] sys.path.insert(1, os.path.join(python_dir, "site-packages")) @@ -1327,7 +1319,7 @@ def expand_globs(path_list, root_dir): files.append(os.path.join(root, folder)) else: # File compared to the globs use / as separator independently of the os - glob_files = glob(path) + glob_files = glob(path, recursive=True) if not glob_files: log = get_logger(__name__) log.error(f"Glob {path} did not match in root_dir {root_dir}") @@ -1457,7 +1449,7 @@ def get_installed_packages(path): Files are assumed to be in 'index.json' format. """ installed = dict() - for filename in glob(os.path.join(path, "conda-meta", "*.json")): + for filename in glob(os.path.join(path, "conda-meta", "*.json"), recursive=True): with open(filename) as file: data = json.load(file) installed[data["name"]] = data diff --git a/news/4792-remove-glob2 b/news/4792-remove-glob2 new file mode 100644 index 0000000000..28be064471 --- /dev/null +++ b/news/4792-remove-glob2 @@ -0,0 +1,3 @@ +### Enhancements + +* Remove `glob2` as a dependency. As of Python 3.5, the '**', operator was available to glob when using `recursive=True`. Builtin glob is also much faster. (#5005) diff --git a/pyproject.toml b/pyproject.toml index 9fb3b5f222..4fff10abac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,6 @@ dependencies = [ "conda-index", "conda-package-handling >=1.3", "filelock", - "glob2 >=0.6", "jinja2", "libarchive-c", "packaging", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 9ca9a95a27..6a3ed0ea27 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -35,7 +35,6 @@ requirements: - conda-index - conda-package-handling >=1.3 - filelock - - glob2 >=0.6 - jinja2 - m2-patch >=2.6 # [win] - packaging diff --git a/tests/requirements.txt b/tests/requirements.txt index 1d23002d61..b0ac07be77 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -11,7 +11,6 @@ contextlib2 cytoolz filelock git -glob2 >=0.6 jinja2 numpy perl From 4c69785c74ff536396e17ad0503c335b38cfdf78 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Tue, 26 Sep 2023 15:43:35 +0200 Subject: [PATCH 188/366] Trigger build. 
(#5012) --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 9c4c9a2f11..cae61abbfd 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,6 @@ $ conda info $ conda install -n base conda-build ``` - ## Building Your Own Packages You can easily build your own packages for `conda`, and upload them to From 27ea32b223ce412be5412e166068aeecf503880d Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 27 Sep 2023 15:45:06 -0400 Subject: [PATCH 189/366] Changelog 3.27.0 (#5013) Co-authored-by: Bianca Henderson Co-authored-by: Jannis Leidel --- .authors.yml | 40 ++++++++++++++----- .mailmap | 6 ++- AUTHORS.md | 4 ++ CHANGELOG.md | 40 +++++++++++++++++++ ...53-document-compatibility-release-operator | 3 -- news/4792-remove-glob2 | 3 -- news/4813-wasm-platforms | 19 --------- news/4942-docs-prefix-activation | 19 --------- news/4956-improve-command-plugin | 20 ---------- news/4960-conform-to-recipe-standards | 19 --------- news/4995-run-conda-build-in-base-env | 19 --------- 11 files changed, 80 insertions(+), 112 deletions(-) delete mode 100644 news/4553-document-compatibility-release-operator delete mode 100644 news/4792-remove-glob2 delete mode 100644 news/4813-wasm-platforms delete mode 100644 news/4942-docs-prefix-activation delete mode 100644 news/4956-improve-command-plugin delete mode 100644 news/4960-conform-to-recipe-standards delete mode 100644 news/4995-run-conda-build-in-base-env diff --git a/.authors.yml b/.authors.yml index 2989dbcb6a..e0a69846c8 100644 --- a/.authors.yml +++ b/.authors.yml @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 130 + num_commits: 133 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 27 + num_commits: 30 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 48 + num_commits: 52 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 19 + num_commits: 20 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 35 + num_commits: 38 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1294,8 +1294,10 @@ first_commit: 2022-04-18 12:03:05 - name: Jürgen Gmach email: juergen.gmach@googlemail.com + alternate_emails: + - juergen.gmach@canonical.com github: jugmac00 - num_commits: 2 + num_commits: 3 first_commit: 2022-05-31 07:52:17 - name: Katherine Kinnaman email: kkinnaman@anaconda.com @@ -1305,7 +1307,7 @@ - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 3 + num_commits: 4 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1322,7 +1324,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 3 + num_commits: 4 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1369,7 +1371,7 @@ aliases: - Ryan github: ryanskeith - num_commits: 3 + num_commits: 5 first_commit: 2023-03-22 03:11:02 - name: Rishabh Singh email: 67859818+rishabh11336@users.noreply.github.com @@ -1393,3 +1395,23 @@ github: josegonzalez num_commits: 1 first_commit: 2023-06-14 16:02:40 +- name: Jack Olivieri + email: boldorider4@gmail.com + github: boldorider4 + num_commits: 1 + first_commit: 2023-08-30 10:32:34 +- name: Wolf Vollprecht + email: w.vollprecht@gmail.com + github: wolfv + num_commits: 1 + first_commit: 2023-09-22 07:01:49 +- name: Dave Karetnyk + email: Dave.Karetnyk@gmail.com + github: DaveKaretnyk + num_commits: 1 + first_commit: 2023-09-16 05:21:09 +- name: Shaun Walbridge + email: 46331011+scdub@users.noreply.github.com + github: scdub + num_commits: 2 + first_commit: 2023-08-18 02:53:28 diff --git a/.mailmap b/.mailmap index 30d67ba59b..2f7457f813 100644 --- a/.mailmap +++ b/.mailmap @@ -73,6 +73,7 @@ Daniel Holth Darren Dale Dave Clements Dave Hirschfeld David Hirschfeld +Dave Karetnyk David Froger David Li Derek Ludwig @@ -107,6 +108,7 @@ Ilan Schnell Ilan Schnell Ilan Schnell Isuru Fernando Isuru Fernando Ivan Kalev ikalev +Jack Olivieri Jacob Walls Jaime Rodríguez-Guerra James Abbott @@ -136,7 +138,7 @@ Joseph Hunkeler Juan Lasheras jlas Julian Rüth Julien Schueller -Jürgen Gmach +Jürgen Gmach Jürgen Gmach Jędrzej Nowak Jedrzej Nowak Kai Tietz Kai Tietz <47363620+katietz@users.noreply.github.com> Kale Franz Kale Franz @@ -230,6 +232,7 @@ Sean Yen seanyen Sergio Oller Serhii Kupriienko <79282962+skupr-anaconda@users.noreply.github.com> Shaun Walbridge +Shaun Walbridge <46331011+scdub@users.noreply.github.com> Siu Kwan Lam Sophia Castellarin sophia Sophia Castellarin sophia @@ -267,6 +270,7 @@ Uwe L. Korn Vlad Frolov Wes Turner Wim Glenn wim glenn +Wolf Vollprecht Wolfgang Ulmer Yann Yoav Ram diff --git a/AUTHORS.md b/AUTHORS.md index 23a0911835..cbfba08e20 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -56,6 +56,7 @@ Authors are sorted alphabetically. * Darren Dale * Dave Clements * Dave Hirschfeld +* Dave Karetnyk * David Froger * David Li * Derek Ludwig @@ -89,6 +90,7 @@ Authors are sorted alphabetically. * Ilan Schnell * Isuru Fernando * Ivan Kalev +* Jack Olivieri * Jacob Walls * Jaime Rodríguez-Guerra * James Abbott @@ -193,6 +195,7 @@ Authors are sorted alphabetically. * Sergio Oller * Serhii Kupriienko * Shaun Walbridge +* Shaun Walbridge * Siu Kwan Lam * Sophia Castellarin * Sophian Guidara @@ -226,6 +229,7 @@ Authors are sorted alphabetically. 
* Vlad Frolov * Wes Turner * Wim Glenn +* Wolf Vollprecht * Wolfgang Ulmer * Yann * Yoav Ram diff --git a/CHANGELOG.md b/CHANGELOG.md index c23c0d80d2..3d14e6556f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,45 @@ [//]: # (current developments) +## 3.27.0 (2023-09-26) + +### Enhancements + +* Remove `glob2` dependency. As of Python 3.5, the '**', operator was available to `glob` when using `recursive=True`. Builtin glob is also much faster. (#5005) +* Handle `emscripten-wasm32` and `wasi-wasm32` platforms. (#4813) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding import-time side effects. (#4949) + +### Deprecations + +* When templating new recipes from a PyPI package, the build script `{{ PYTHON }} -m pip install . -vv` is deprecated in favor of `{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation`. (#4960) + +### Docs + +* Document `~=` (compatibility release) match spec. (#4553) +* Clarify that the `build` prefix is activated _after_ the `host` prefix. (#4942) +* Add explanation that conda-build should be run from the base environment. (#4995) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @DaveKaretnyk made their first contribution in https://github.com/conda/conda-build/pull/5004 +* @boldorider4 made their first contribution in https://github.com/conda/conda-build/pull/4960 +* @jaimergp +* @jezdez +* @jugmac00 +* @kenodegard +* @ryanskeith +* @scdub made their first contribution in https://github.com/conda/conda-build/pull/4965 +* @wolfv made their first contribution in https://github.com/conda/conda-build/pull/4813 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + ## 3.26.1 (2023-08-17) ### Bug fixes diff --git a/news/4553-document-compatibility-release-operator b/news/4553-document-compatibility-release-operator deleted file mode 100644 index f5d3dcde47..0000000000 --- a/news/4553-document-compatibility-release-operator +++ /dev/null @@ -1,3 +0,0 @@ -### Docs - -* Document `~=` (compatibility release) match spec. (#4553) diff --git a/news/4792-remove-glob2 b/news/4792-remove-glob2 deleted file mode 100644 index 28be064471..0000000000 --- a/news/4792-remove-glob2 +++ /dev/null @@ -1,3 +0,0 @@ -### Enhancements - -* Remove `glob2` as a dependency. As of Python 3.5, the '**', operator was available to glob when using `recursive=True`. Builtin glob is also much faster. (#5005) diff --git a/news/4813-wasm-platforms b/news/4813-wasm-platforms deleted file mode 100644 index 1dd82f5479..0000000000 --- a/news/4813-wasm-platforms +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Handle `emscripten-wasm32` and `wasi-wasm32` platforms. (#4813) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4942-docs-prefix-activation b/news/4942-docs-prefix-activation deleted file mode 100644 index 9c9f0ca7f5..0000000000 --- a/news/4942-docs-prefix-activation +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Clarify that the `build` prefix is activated _after_ the `host` prefix. (#4942) - -### Other - -* diff --git a/news/4956-improve-command-plugin b/news/4956-improve-command-plugin deleted file mode 100644 index 0795fab179..0000000000 --- a/news/4956-improve-command-plugin +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Delay imports in conda command plugin until the command is used, avoiding - import-time side effects. 
(#4949) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4960-conform-to-recipe-standards b/news/4960-conform-to-recipe-standards deleted file mode 100644 index a28b5f6c89..0000000000 --- a/news/4960-conform-to-recipe-standards +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* When templating new recipes from a pypi package, the build script `{{ PYTHON }} -m pip install . -vv` is deprecated in favor of `{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation`. (#4960) - -### Docs - -* - -### Other - -* diff --git a/news/4995-run-conda-build-in-base-env b/news/4995-run-conda-build-in-base-env deleted file mode 100644 index 41a6fc82d9..0000000000 --- a/news/4995-run-conda-build-in-base-env +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Add explanation that conda-build should be run from the base env. (#4995) - -### Other - -* From a6279eef0c8b4b79ead885fe5488505474fac79a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 29 Sep 2023 07:55:24 -0400 Subject: [PATCH 190/366] Only continue tests.yml workflow if not canceled (#5018) --- .github/workflows/tests.yml | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 47b744cdd9..9778c04d23 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -145,18 +145,18 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} @@ -250,21 +250,21 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" # windows-2019/powershell ships with GNU tar 1.28 which struggles with Windows paths # window-2019/cmd ships with bsdtar 3.5.2 which doesn't have this problem shell: cmd - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: path: ${{ env.REPLAY_DIR }} @@ -361,18 +361,18 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} @@ -382,7 +382,12 @@ jobs: aggregate: # only aggregate 
test suite if there are code changes needs: [changes, linux, windows, macos] - if: always() && (github.event_name == 'schedule' || needs.changes.outputs.code == 'true') + if: >- + !cancelled() + && ( + github.event_name == 'schedule' + || needs.changes.outputs.code == 'true' + ) runs-on: ubuntu-latest steps: @@ -407,7 +412,7 @@ jobs: analyze: name: Analyze results needs: [linux, windows, macos, aggregate] - if: always() + if: '!cancelled()' runs-on: ubuntu-latest steps: @@ -426,7 +431,7 @@ jobs: # - this is the main repo, and # - we are on the main, feature, or release branch if: >- - always() + !cancelled() && !github.event.repository.fork && ( github.ref_name == 'main' From b47119776b97ebc17f463afd0738d78d373caf4b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 29 Sep 2023 07:55:50 -0400 Subject: [PATCH 191/366] Replace flake8 with ruff and consolidate configs (#5015) --- .coveragerc | 3 -- .codecov.yml => .github/codecov.yml | 0 .pre-commit-config.yaml | 28 +++++++++------ news/5015-ruff | 19 +++++++++++ pyproject.toml | 53 +++++++++++++++++++++++++++-- setup.cfg | 37 -------------------- 6 files changed, 86 insertions(+), 54 deletions(-) delete mode 100644 .coveragerc rename .codecov.yml => .github/codecov.yml (100%) create mode 100644 news/5015-ruff delete mode 100644 setup.cfg diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index ababc663b1..0000000000 --- a/.coveragerc +++ /dev/null @@ -1,3 +0,0 @@ -[run] -parallel=True -omit=conda_build/skeletons/_example_skeleton.py diff --git a/.codecov.yml b/.github/codecov.yml similarity index 100% rename from .codecov.yml rename to .github/codecov.yml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 63608a4d8a..34f2c97018 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,8 +13,6 @@ exclude: | test-skeleton )/ | .*\.(patch|diff) | - versioneer.py | - conda_build/_version.py ) repos: # generic verification and formatting @@ -52,11 +50,6 @@ repos: # upgrade standard Python codes - id: pyupgrade args: [--py38-plus] - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - # auto sort Python imports - - id: isort - repo: https://github.com/psf/black rev: 23.9.1 hooks: @@ -68,8 +61,21 @@ repos: # auto format Python codes within docstrings - id: blacken-docs additional_dependencies: [black] - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.291 + hooks: + - id: ruff + args: [--fix] + - repo: meta + # see https://pre-commit.com/#meta-hooks + hooks: + - id: check-hooks-apply + - id: check-useless-excludes + - repo: local hooks: - # lint Python codes - - id: flake8 + - id: git-diff + name: git diff + entry: git diff --exit-code + language: system + pass_filenames: false + always_run: true diff --git a/news/5015-ruff b/news/5015-ruff new file mode 100644 index 0000000000..a2ae3705d1 --- /dev/null +++ b/news/5015-ruff @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Use Ruff linter in pre-commit configuration. 
(#5015) diff --git a/pyproject.toml b/pyproject.toml index 4fff10abac..93522bca6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,54 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] version-file = "conda_build/__version__.py" [tool.black] -target-version = ['py38', 'py39', 'py310'] +target-version = ['py38', 'py39', 'py310', 'py311'] -[tool.isort] -profile = "black" +[tool.coverage.run] +# store relative paths in coverage information +relative_files = true + +[tool.coverage.report] +show_missing = true +sort = "Miss" +skip_covered = true +omit = ["conda_build/skeletons/_example_skeleton.py"] + +[tool.ruff] +line-length = 180 +# E, W = pycodestyle errors and warnings +# F = pyflakes +# I = isort +# D = pydocstyle +select = ["E", "W", "F", "I", "D1"] +# E402 module level import not at top of file +# E722 do not use bare 'except' +# E731 do not assign a lambda expression, use a def +ignore = ["E402", "E722", "E731"] +# Use PEP 257-style docstrings. +pydocstyle = {convention = "pep257"} + +[tool.pytest.ini_options] +minversion = 3.0 +testpaths = ["tests"] +norecursedirs = ["tests/test-recipes/*"] +addopts = [ + "--color=yes", + # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) + "--cov-append", + "--cov-branch", + "--cov-report=term-missing", + "--cov-report=xml", + "--durations=16", + "--junitxml=junit.xml", + "--splitting-algorithm=least_duration", + "--store-durations", + "--strict-markers", + "--tb=native", + "-vv", +] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", +] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 867ecb1f15..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,37 +0,0 @@ -[flake8] -# leave 180 line length for historical/incremental pre-commit purposes -# rely on black/darker to enforce 88 standard -max-line-length = 180 -# E122: Continuation line missing indentation or outdented -# E123: Closing bracket does not match indentation of opening bracket's line -# E126: Continuation line over-indented for hanging indent -# E127: Continuation line over-indented for visual indent -# E128: Continuation line under-indented for visual indent -# E203: Whitespace before ':' [required by black/darker] -# E722: Do not use bare except, specify exception instead -# E731: Do not assign a lambda expression, use a def -# W503: Line break occurred before a binary operator -# W504: Line break occurred after a binary operator -# W605: Invalid escape sequence 'x' -ignore = E122,E123,E126,E127,E128,E203,E731,E722,W503,W504,W605 -exclude = build,conda_build/_version.py,tests,recipe,.git,versioneer.py,conda,relative,benchmarks,.asv,docs,rever - -[tool:pytest] -norecursedirs= tests/test-recipes .* *.egg* build dist recipe -addopts = - --junitxml=junit.xml - --ignore setup.py - --ignore run_test.py - --cov-report term-missing - --tb native - --strict - --strict-markers - --durations=16 -log_level = DEBUG -env = - PYTHONHASHSEED=0 -markers = - serial: execute test serially (to avoid race conditions) - slow: execute the slow tests if active - sanity: execute the sanity tests - no_default_testing_config: used internally to disable monkeypatching for testing_config From bb0ca2a1f7e9372b4ccbbdbf4fe68ecefc790cc9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 29 Sep 2023 07:56:18 -0400 Subject: [PATCH 192/366] 
Update deprecations (#5014) --- conda_build/conda_interface.py | 17 +++++----- conda_build/config.py | 62 +++++++++++----------------------- 2 files changed, 29 insertions(+), 50 deletions(-) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 93996332ca..92d5ba0678 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -4,7 +4,6 @@ import configparser # noqa: F401 import os -import warnings from functools import partial from importlib import import_module # noqa: F401 @@ -78,6 +77,8 @@ from conda.models.channel import get_conda_build_local_url # noqa: F401 from conda.models.dist import Dist, IndexRecord # noqa: F401 +from .deprecations import deprecated + # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python @@ -104,21 +105,19 @@ class CrossPlatformStLink: def __call__(self, path: str | os.PathLike) -> int: return self.st_nlink(path) - @classmethod - def st_nlink(cls, path: str | os.PathLike) -> int: - warnings.warn( - "`conda_build.conda_interface.CrossPlatformStLink` is pending deprecation and will be removed in a " - "future release. Please use `os.stat().st_nlink` instead.", - PendingDeprecationWarning, - ) + @staticmethod + @deprecated("3.24.0", "4.0.0", addendum="Use `os.stat().st_nlink` instead.") + def st_nlink(path: str | os.PathLike) -> int: return os.stat(path).st_nlink +@deprecated("3.28.0", "4.0.0") class SignatureError(Exception): # TODO: What is this? 🤔 pass +@deprecated("3.28.0", "4.0.0") def which_package(path): """ Given the path (of a (presumably) conda installed file) iterate over @@ -137,6 +136,7 @@ def which_package(path): yield dist +@deprecated("3.28.0", "4.0.0") def which_prefix(path): """ Given the path (to a (presumably) conda installed file) return the @@ -159,6 +159,7 @@ def which_prefix(path): return prefix +@deprecated("3.28.0", "4.0.0") def get_installed_version(prefix, pkgs): """ Primarily used by conda-forge, but may be useful in general for checking when diff --git a/conda_build/config.py b/conda_build/config.py index 377d0de9eb..e1bba06518 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -3,7 +3,6 @@ """ Module to store conda build settings. """ - import copy import math import os @@ -11,7 +10,6 @@ import shutil import sys import time -import warnings from collections import namedtuple from os.path import abspath, expanduser, expandvars, join @@ -24,6 +22,7 @@ subdir, url_path, ) +from .deprecations import deprecated from .utils import get_build_folders, get_conda_operation_locks, get_logger, rm_rf from .variants import get_default_variant @@ -58,28 +57,20 @@ def set_invocation_time(): zstd_compression_level_default = 19 +@deprecated("3.25.0", "4.0.0") def python2_fs_encode(strin): - warnings.warn( - "`conda_build.config.python2_fs_encode` is pending deprecation and will be removed in a future release.", - PendingDeprecationWarning, - ) return strin +@deprecated( + "3.25.0", + "4.0.0", + addendum=( + "Use `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` " + "instead." + ), +) def _ensure_dir(path: os.PathLike): - """Try to ensure a directory exists - - Args: - path (os.PathLike): Path to directory - """ - # this can fail in parallel operation, depending on timing. Just try to make the dir, - # but don't bail if fail. - warnings.warn( - "`conda_build.config._ensure_dir` is pending deprecation and will be removed " - "in a future release. 
Please use `pathlib.Path.mkdir(exist_ok=True)` or " - "`os.makedirs(exist_ok=True)` instead", - PendingDeprecationWarning, - ) os.makedirs(path, exist_ok=True) @@ -262,19 +253,6 @@ def _get_default_settings(): ] -def print_function_deprecation_warning(func): - def func_wrapper(*args, **kw): - log = get_logger(__name__) - log.warn( - "WARNING: attribute {} is deprecated and will be removed in conda-build 4.0. " - "Please update your code - file issues on the conda-build issue tracker " - "if you need help.".format(func.__name__) - ) - return func(*args, **kw) - - return func_wrapper - - class Config: __file__ = __path__ = __file__ __package__ = __package__ @@ -516,56 +494,56 @@ def build_folder(self): # back compat for conda-build-all - expects CONDA_* vars to be attributes of the config object @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_LUA(self): return self.variant.get("lua", get_default_variant(self)["lua"]) @CONDA_LUA.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_LUA(self, value): self.variant["lua"] = value @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PY(self): value = self.variant.get("python", get_default_variant(self)["python"]) return int("".join(value.split("."))) @CONDA_PY.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PY(self, value): value = str(value) self.variant["python"] = ".".join((value[0], value[1:])) @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_NPY(self): value = self.variant.get("numpy", get_default_variant(self)["numpy"]) return int("".join(value.split("."))) @CONDA_NPY.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_NPY(self, value): value = str(value) self.variant["numpy"] = ".".join((value[0], value[1:])) @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PERL(self): return self.variant.get("perl", get_default_variant(self)["perl"]) @CONDA_PERL.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PERL(self, value): self.variant["perl"] = value @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_R(self): return self.variant.get("r_base", get_default_variant(self)["r_base"]) @CONDA_R.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_R(self, value): self.variant["r_base"] = value From 7d3b0c545c7111bb96e9d6fff577b7aac940f750 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 3 Oct 2023 06:05:54 -0400 Subject: [PATCH 193/366] Fix pre-commit exlusions (#5023) --- .pre-commit-config.yaml | 2 +- conda_build/conda_interface.py | 15 +++++++-------- conda_build/metadata.py | 16 ++++++++++------ conda_build/utils.py | 8 ++++---- pyproject.toml | 15 +++++++-------- 5 files changed, 29 insertions(+), 27 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 34f2c97018..20688e9f6c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ exclude: | test-recipes | test-skeleton )/ | - .*\.(patch|diff) | + .*\.(patch|diff) ) repos: # generic verification and formatting diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 92d5ba0678..25ecc9cef9 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -11,9 +11,8 @@ from conda.auxlib.packaging import ( # noqa: 
F401 _get_version_from_git_tag as get_version_from_git_tag, ) -from conda.base.context import context, determine_target_prefix +from conda.base.context import context, determine_target_prefix, reset_context from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 -from conda.base.context import reset_context from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 from conda.exceptions import ( # noqa: F401 CondaError, @@ -24,15 +23,12 @@ PaddingError, UnsatisfiableError, ) -from conda.exports import ArgumentParser # noqa: F401 -from conda.exports import CondaSession # noqa: F401 -from conda.exports import EntityEncoder # noqa: F401 -from conda.exports import VersionOrder # noqa: F401 -from conda.exports import _toposort # noqa: F401 -from conda.exports import get_index # noqa: F401 from conda.exports import ( # noqa: F401 + ArgumentParser, # noqa: F401 Channel, Completer, + CondaSession, # noqa: F401 + EntityEncoder, # noqa: F401 FileMode, InstalledPackages, MatchSpec, @@ -43,12 +39,15 @@ TemporaryDirectory, TmpDownload, Unsatisfiable, + VersionOrder, # noqa: F401 + _toposort, # noqa: F401 add_parser_channels, add_parser_prefix, display_actions, download, execute_actions, execute_plan, + get_index, # noqa: F401 handle_proxy_407, hashsum_file, human_bytes, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 47f3166727..0681bcf90c 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1752,18 +1752,20 @@ def has_prefix_files(self): def ignore_prefix_files(self): ret = self.get_value("build/ignore_prefix_files", False) - if type(ret) not in (list, bool): + if not isinstance(ret, (list, bool)): raise RuntimeError( "build/ignore_prefix_files should be boolean or a list of paths " "(optionally globs)" ) if sys.platform == "win32": - if type(ret) is list and any("\\" in i for i in ret): + if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/ignore_prefix_files paths must use / " "as the path delimiter on Windows" ) - return expand_globs(ret, self.config.host_prefix) if type(ret) is list else ret + return ( + expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret + ) def always_include_files(self): files = ensure_list(self.get_value("build/always_include_files", [])) @@ -1782,18 +1784,20 @@ def ignore_verify_codes(self): def binary_relocation(self): ret = self.get_value("build/binary_relocation", True) - if type(ret) not in (list, bool): + if not isinstance(ret, (list, bool)): raise RuntimeError( "build/binary_relocation should be boolean or a list of paths " "(optionally globs)" ) if sys.platform == "win32": - if type(ret) is list and any("\\" in i for i in ret): + if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/binary_relocation paths must use / " "as the path delimiter on Windows" ) - return expand_globs(ret, self.config.host_prefix) if type(ret) is list else ret + return ( + expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret + ) def include_recipe(self): return self.get_value("build/include_recipe", True) diff --git a/conda_build/utils.py b/conda_build/utils.py index a62d6700be..af5678247e 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -73,15 +73,15 @@ from conda_build.exceptions import BuildLockError # noqa from conda_build.os_utils import external # noqa -from .conda_interface import Dist # noqa -from .conda_interface import StringIO # noqa -from .conda_interface import 
cc_conda_build # noqa -from .conda_interface import context # noqa from .conda_interface import ( # noqa CondaHTTPError, + Dist, # noqa MatchSpec, + StringIO, # noqa TemporaryDirectory, VersionOrder, + cc_conda_build, # noqa + context, # noqa download, get_conda_channel, hashsum_file, diff --git a/pyproject.toml b/pyproject.toml index 93522bca6a..39cd67a674 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,8 +99,7 @@ line-length = 180 # E, W = pycodestyle errors and warnings # F = pyflakes # I = isort -# D = pydocstyle -select = ["E", "W", "F", "I", "D1"] +select = ["E", "W", "F", "I"] # E402 module level import not at top of file # E722 do not use bare 'except' # E731 do not assign a lambda expression, use a def @@ -121,15 +120,15 @@ addopts = [ "--cov-report=xml", "--durations=16", "--junitxml=junit.xml", - "--splitting-algorithm=least_duration", - "--store-durations", + # "--splitting-algorithm=least_duration", # not available yet + # "--store-durations", # not available yet "--strict-markers", "--tb=native", "-vv", ] markers = [ - "serial: execute test serially (to avoid race conditions)", - "slow: execute the slow tests if active", - "sanity: execute the sanity tests", - "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", ] From 91298622e66afcc578f8e4db38bffc3aaece4ef4 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 3 Oct 2023 14:32:31 -0400 Subject: [PATCH 194/366] Add sort check for requirements.txt files (#5022) --- .pre-commit-config.yaml | 4 ++++ tests/requirements-linux.txt | 2 -- tests/requirements-macos.txt | 2 -- tests/requirements-windows.txt | 2 -- tests/requirements.txt | 4 +--- 5 files changed, 5 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 20688e9f6c..ae9caec20c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,6 +35,10 @@ repos: ) # catch git merge/rebase problems - id: check-merge-conflict + # sort requirements files + - id: file-contents-sorter + files: ^tests/requirements.*\.txt + args: [--unique] # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks rev: v1.5.4 diff --git a/tests/requirements-linux.txt b/tests/requirements-linux.txt index cb3ae7e805..149ce09bad 100644 --- a/tests/requirements-linux.txt +++ b/tests/requirements-linux.txt @@ -1,5 +1,3 @@ -# conda-build supplemental test dependencies -# run as 'conda install -c defaults --file tests/requirements-linux.txt' patch patchelf shellcheck diff --git a/tests/requirements-macos.txt b/tests/requirements-macos.txt index dbe8e3dd81..133b191333 100644 --- a/tests/requirements-macos.txt +++ b/tests/requirements-macos.txt @@ -1,4 +1,2 @@ -# conda-build supplemental test dependencies -# run as 'conda install -c defaults --file tests/requirements-macos.txt' patch shellcheck diff --git a/tests/requirements-windows.txt b/tests/requirements-windows.txt index 8d40d2482b..d08b4cac29 100644 --- a/tests/requirements-windows.txt +++ b/tests/requirements-windows.txt @@ -1,4 +1,2 @@ -# conda-build supplemental test dependencies -# run as 'conda install -c defaults --file tests/requirements-windows.txt' m2-git m2-patch diff --git a/tests/requirements.txt b/tests/requirements.txt index b0ac07be77..02d34d6787 100644 --- 
a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,9 +1,8 @@ -# conda-build test dependencies -# run as 'conda install -c defaults --file tests/requirements.txt' anaconda-client beautifulsoup4 chardet conda >=4.13 +conda-forge::allure-pytest conda-index conda-package-handling conda-verify @@ -27,7 +26,6 @@ pytest-replay pytest-rerunfailures pytest-xdist python-libarchive-c -conda-forge::allure-pytest pytz requests ripgrep From c4abce5c119296d7289352ab395d735731f550d1 Mon Sep 17 00:00:00 2001 From: Duncan Macleod Date: Wed, 4 Oct 2023 15:54:49 +0100 Subject: [PATCH 195/366] Print package file name in get_hash_input (#5021) * inspect_pkg: use entire file name in get_hash_input rather than trying to strip off the suffix --------- Co-authored-by: Bianca Henderson --- conda_build/inspect_pkg.py | 2 +- news/5021-get_hash_input | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 news/5021-get_hash_input diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 6e5e9a4980..e38c5aa9e7 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -372,7 +372,7 @@ def inspect_objects(packages, prefix=sys.prefix, groupby="package"): def get_hash_input(packages): hash_inputs = {} for pkg in ensure_list(packages): - pkgname = os.path.basename(pkg)[:-8] + pkgname = os.path.basename(pkg) hash_inputs[pkgname] = {} hash_input = package_has_file(pkg, "info/hash_input.json") if hash_input: diff --git a/news/5021-get_hash_input b/news/5021-get_hash_input new file mode 100644 index 0000000000..f92b78db55 --- /dev/null +++ b/news/5021-get_hash_input @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Print package file name in `get_hash_input`. (#5021) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 0844e2348dcf7e5997d3fa75b0b4ed6327cf892b Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 4 Oct 2023 18:09:31 -0500 Subject: [PATCH 196/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5025)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- RELEASE.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/RELEASE.md b/RELEASE.md index 45e605e9eb..d45614facc 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -392,6 +392,9 @@ To publish the release, go to the project's release page (e.g., https://github.c ## 10. Hand off to Anaconda's packaging team. +> **Note:** +> This step should NOT be done past Thursday morning EST; please start the process on a Monday, Tuesday, or Wednesday instead in order to avoid any potential debugging sessions over evenings or weekends. +
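Looking back at the `get_hash_input` fix a few hunks above: the old `os.path.basename(pkg)[:-8]` slice assumed every package file ends with the eight-character `.tar.bz2` suffix, so any other extension (such as the newer `.conda` format) lost part of its name in the report. A quick illustration with an invented file name:

```python
import os

pkg = "/tmp/pkgs/ripgrep-13.0.0-h2f0025b_2.conda"

os.path.basename(pkg)[:-8]  # 'ripgrep-13.0.0-h2f0025b' -- silently drops '_2.conda'
os.path.basename(pkg)       # 'ripgrep-13.0.0-h2f0025b_2.conda' -- what the fix reports instead
```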
Internal process From 34ab134cc4138f4a8ef1df625b611d7212a932ad Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 11 Oct 2023 12:54:12 -0400 Subject: [PATCH 197/366] Deprecate `conda.models.dist.IndexRecord` (#5032) --- conda_build/conda_interface.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 25ecc9cef9..dba4e4b1a7 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -74,10 +74,19 @@ win_path_to_unix, ) from conda.models.channel import get_conda_build_local_url # noqa: F401 -from conda.models.dist import Dist, IndexRecord # noqa: F401 +from conda.models.dist import Dist # noqa: F401 +from conda.models.records import PackageRecord from .deprecations import deprecated +deprecated.constant( + "3.28.0", + "4.0.0", + "IndexRecord", + PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) + # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python From e8c935aa3ec3bba6ce4af0ea4575972b3bb9abab Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 11 Oct 2023 11:55:32 -0500 Subject: [PATCH 198/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5034)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/ISSUE_TEMPLATE/1_feature.yml | 2 +- .github/workflows/cla.yml | 4 ++-- .github/workflows/stale.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index 0064a1c53d..f24cf7fdad 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -41,7 +41,7 @@ body: id: what attributes: label: What should happen? - description: What should be the user experience with the feature? Describe from a user perpective what they would do and see. + description: What should be the user experience with the feature? Describe from a user perspective what they would do and see. 
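For the `IndexRecord` change above, the old name stays importable from `conda_build.conda_interface` but is registered through `deprecated.constant`, so touching it warns while resolving to the replacement class. A caller-side sketch (the exact warning text is not reproduced here):

```python
from conda.models.records import PackageRecord
from conda_build.conda_interface import IndexRecord  # emits a deprecation warning on access

# the alias is literally the replacement class, so existing isinstance()/type checks keep working
assert IndexRecord is PackageRecord
```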
- type: textarea id: context attributes: diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index ed22cae254..b823a45165 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.7.0 + uses: conda/actions/check-cla@v23.10.0 with: # [required] # A token with ability to comment, label, and modify the commit status @@ -31,6 +31,6 @@ jobs: label: cla-signed # [required] - # Token for opening singee PR in the provided `cla_repo` + # Token for opening signee PR in the provided `cla_repo` # (`pull_request: write` for fine-grained PAT; `repo` and `workflow` for classic PAT) cla_token: ${{ secrets.CLA_FORK_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 1e9e46e754..371b874431 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,7 +34,7 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v23.7.0 + - uses: conda/actions/read-yaml@v23.10.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From d9bc44d9d8f5c08034f584f329d0a91d2e208fa9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 11 Oct 2023 12:56:49 -0400 Subject: [PATCH 199/366] Deprecate `conda_build.environ.clean_pkg_cache` (#5031) --- conda_build/build.py | 13 ------------- conda_build/environ.py | 2 ++ news/5031-post-conda-5708-cleanup | 19 +++++++++++++++++++ 3 files changed, 21 insertions(+), 13 deletions(-) create mode 100644 news/5031-post-conda-5708-cleanup diff --git a/conda_build/build.py b/conda_build/build.py index fa62a238d3..1d66cf114f 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -75,7 +75,6 @@ env_path_backup_var_exists, get_conda_channel, get_rc_urls, - pkgs_dirs, prefix_placeholder, reset_context, root_dir, @@ -3420,18 +3419,6 @@ def test( # folder destination _extract_test_files_from_package(metadata) - # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it. - # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed - # I think we can remove this call to clean_pkg_cache(). - in_pkg_cache = ( - not hasattr(recipedir_or_package_or_metadata, "config") - and os.path.isfile(recipedir_or_package_or_metadata) - and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) - and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs[0] - ) - if not in_pkg_cache: - environ.clean_pkg_cache(metadata.dist(), metadata.config) - copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir diff --git a/conda_build/environ.py b/conda_build/environ.py index 7ef1b2a33d..5afcf93c4d 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -41,6 +41,7 @@ reset_context, root_dir, ) +from .deprecations import deprecated # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. 
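Patches 192, 197, and 199 all route their deprecations through the same `conda_build.deprecations.deprecated` helper, whose two version arguments are the release that starts warning and the release slated to remove the symbol. A condensed sketch of the decorator forms used in these hunks (bodies elided):

```python
from conda_build.deprecations import deprecated

@deprecated("3.28.0", "4.0.0")  # warn from 3.28.0, slated for removal in 4.0.0
def clean_pkg_cache(dist, config):
    ...

@deprecated("3.24.0", "4.0.0", addendum="Use `os.stat().st_nlink` instead.")
def st_nlink(path):  # the addendum points callers at the replacement
    ...

class Config:
    @property
    @deprecated("3.0.28", "4.0.0")  # also stacks under @property for the legacy CONDA_* attributes
    def CONDA_LUA(self):
        ...
```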
@@ -1214,6 +1215,7 @@ def remove_existing_packages(dirs, fns, config): utils.rm_rf(entry) +@deprecated("3.28.0", "4.0.0") def clean_pkg_cache(dist, config): locks = [] diff --git a/news/5031-post-conda-5708-cleanup b/news/5031-post-conda-5708-cleanup new file mode 100644 index 0000000000..f698066c97 --- /dev/null +++ b/news/5031-post-conda-5708-cleanup @@ -0,0 +1,19 @@ +### Enhancements + +* Remove unnecessary cache clearing from `conda_buidl.build.test`. (#5031) + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.environ.clean_pkg_cache` as pending deprecation. (#5031) + +### Docs + +* + +### Other + +* From 79f062f1f3a9901f3af0a87644ce61e3f3c4bd8a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 12 Oct 2023 11:47:07 -0400 Subject: [PATCH 200/366] Replace `_generate_tmp_tree` with `tmp_path` and inline path creation (#5036) --- tests/test_utils.py | 192 ++++++++++++++++++++------------------------ 1 file changed, 86 insertions(+), 106 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 7423bf6931..b5536cdf6d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import contextlib import os import subprocess import sys @@ -9,6 +8,7 @@ import filelock import pytest +from pytest import MonkeyPatch import conda_build.utils as utils from conda_build.exceptions import BuildLockError @@ -383,132 +383,112 @@ def test_get_lock(testing_workdir): assert lock1.lock_file == lock1_unnormalized.lock_file -@contextlib.contextmanager -def _generate_tmp_tree(): - # dirA - # |\- dirB - # | |\- fileA - # | \-- fileB - # \-- dirC - # |\- fileA - # \-- fileB - import shutil - import tempfile - - try: - tmp = os.path.realpath(os.path.normpath(tempfile.mkdtemp())) - - dA = os.path.join(tmp, "dirA") - dB = os.path.join(dA, "dirB") - dC = os.path.join(dA, "dirC") - for d in (dA, dB, dC): - os.mkdir(d) - - f1 = os.path.join(dB, "fileA") - f2 = os.path.join(dB, "fileB") - f3 = os.path.join(dC, "fileA") - f4 = os.path.join(dC, "fileB") - for f in (f1, f2, f3, f4): - Path(f).touch() - - yield tmp, (dA, dB, dC), (f1, f2, f3, f4) - finally: - shutil.rmtree(tmp) - - -def test_rec_glob(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - assert sorted(utils.rec_glob(tmp, "fileA")) == [f1, f3] - assert sorted(utils.rec_glob(tmp, ("fileA", "fileB"), ignores="dirB")) == [ - f3, - f4, - ] - assert sorted(utils.rec_glob(tmp, "fileB", ignores=("dirC",))) == [f2] +def test_rec_glob(tmp_path: Path): + (dirA := tmp_path / "dirA").mkdir() + (dirB := tmp_path / "dirB").mkdir() + (path1 := dirA / "fileA").touch() + (path2 := dirA / "fileB").touch() + (path3 := dirB / "fileA").touch() + (path4 := dirB / "fileB").touch() -def test_find_recipe(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") - f6 = os.path.join(dA, "meta.yml") - f7 = os.path.join(dB, "conda.yaml") - f8 = os.path.join(dC, "conda.yml") + assert {str(path1), str(path3)} == set(utils.rec_glob(tmp_path, "fileA")) + assert {str(path3), str(path4)} == set( + utils.rec_glob( + tmp_path, + ("fileA", "fileB"), + ignores="dirA", + ) + ) + assert {str(path2)} == set(utils.rec_glob(tmp_path, "fileB", ignores=["dirB"])) - # check that each of these are valid recipes - for f in (f5, f6, f7, f8): - Path(f).touch() - assert utils.find_recipe(tmp) == f - os.remove(f) +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def 
test_find_recipe(tmp_path: Path, file: str): + # check that each of these are valid recipes + for path in ( + tmp_path / file, + tmp_path / "dirA" / file, + tmp_path / "dirA" / "dirB" / file, + tmp_path / "dirA" / "dirC" / file, + ): + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + assert path.samefile(utils.find_recipe(tmp_path)) + path.unlink() -def test_find_recipe_relative(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(dA, "meta.yaml") - Path(f5).touch() - # check that even when given a relative recipe path we still return - # the absolute path - saved = os.getcwd() - os.chdir(tmp) - try: - assert utils.find_recipe("dirA") == f5 - finally: - os.chdir(saved) +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def test_find_recipe_relative(tmp_path: Path, monkeypatch: MonkeyPatch, file: str): + (dirA := tmp_path / "dirA").mkdir() + (path := dirA / file).touch() + # check that even when given a relative recipe path we still return + # the absolute path + monkeypatch.chdir(tmp_path) + assert path.samefile(utils.find_recipe("dirA")) -def test_find_recipe_no_meta(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - # no meta files in tmp - with pytest.raises(IOError): - utils.find_recipe(tmp) +def test_find_recipe_no_meta(tmp_path: Path): + # no recipe in tmp_path + with pytest.raises(IOError): + utils.find_recipe(tmp_path) -def test_find_recipe_file(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") - Path(f5).touch() - # file provided is valid meta - assert utils.find_recipe(f5) == f5 +def test_find_recipe_file(tmp_path: Path): + # provided recipe is valid + (path := tmp_path / "meta.yaml").touch() + assert path.samefile(utils.find_recipe(path)) -def test_find_recipe_file_bad(): - with _generate_tmp_tree() as (tmp, _, (f1, f2, f3, f4)): - # file provided is not valid meta - with pytest.raises(IOError): - utils.find_recipe(f1) +def test_find_recipe_file_bad(tmp_path: Path): + # missing recipe is invalid + path = tmp_path / "not_a_recipe" + with pytest.raises(IOError): + utils.find_recipe(path) + # provided recipe is invalid + path.touch() + with pytest.raises(IOError): + utils.find_recipe(path) -def test_find_recipe_multipe_base(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(tmp, "meta.yaml") - f6 = os.path.join(dB, "meta.yaml") - f7 = os.path.join(dC, "conda.yaml") - for f in (f5, f6, f7): - Path(f).touch() - # multiple meta files, use the one in base level - assert utils.find_recipe(tmp) == f5 +@pytest.mark.parametrize("file", ["meta.yaml", "meta.yml", "conda.yaml", "conda.yml"]) +def test_find_recipe_multipe_base(tmp_path: Path, file: str): + (dirA := tmp_path / "dirA").mkdir() + (dirB := dirA / "dirB").mkdir() + (dirC := dirA / "dirC").mkdir() + (path1 := tmp_path / file).touch() + (dirA / file).touch() + (dirB / file).touch() + (dirC / file).touch() -def test_find_recipe_multipe_bad(): - with _generate_tmp_tree() as (tmp, (dA, dB, dC), (f1, f2, f3, f4)): - f5 = os.path.join(dB, "meta.yaml") - f6 = os.path.join(dC, "conda.yaml") - for f in (f5, f6): - Path(f).touch() + # multiple recipe, use the one at the top level + assert path1.samefile(utils.find_recipe(tmp_path)) - # nothing in base - with pytest.raises(IOError): - utils.find_recipe(tmp) - f7 = os.path.join(tmp, "meta.yaml") - f8 = os.path.join(tmp, "conda.yaml") - for f in (f7, f8): - Path(f).touch() 
+@pytest.mark.parametrize("stem", ["meta", "conda"]) +def test_find_recipe_multipe_bad(tmp_path: Path, stem: str): + (dirA := tmp_path / "dirA").mkdir() + (dirB := dirA / "dirB").mkdir() + (dirC := dirA / "dirC").mkdir() - # too many in base - with pytest.raises(IOError): - utils.find_recipe(tmp) + # create multiple nested recipes at the same depth + (dirB / f"{stem}.yml").touch() + (dirC / f"{stem}.yaml").touch() + + # too many equal priority recipes found + with pytest.raises(IOError): + utils.find_recipe(tmp_path) + + # create multiple recipes at the top level + (tmp_path / f"{stem}.yml").touch() + (tmp_path / f"{stem}.yaml").touch() + + # too many recipes in the top level + with pytest.raises(IOError): + utils.find_recipe(tmp_path) class IsCondaPkgTestData(NamedTuple): From ed7f5e32d289cecadd14de735ff0ba39208d33d1 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 12 Oct 2023 16:41:39 -0400 Subject: [PATCH 201/366] Add test for `conda_build.metadata.get_selectors` (#5024) --- conda_build/metadata.py | 7 +-- tests/test_metadata.py | 102 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 105 insertions(+), 4 deletions(-) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 0681bcf90c..1160c1e9fe 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -147,15 +147,16 @@ def get_selectors(config: Config) -> dict[str, bool]: py = py[0] # go from "3.6 *_cython" -> "36" # or from "3.6.9" -> "36" - py = int("".join(py.split(" ")[0].split(".")[:2])) + py_major, py_minor, *_ = py.split(" ")[0].split(".") + py = int(f"{py_major}{py_minor}") d["build_platform"] = config.build_subdir d.update( dict( py=py, - py3k=bool(30 <= py < 40), - py2k=bool(20 <= py < 30), + py3k=bool(py_major == "3"), + py2k=bool(py_major == "2"), py26=bool(py == 26), py27=bool(py == 27), py33=bool(py == 33), diff --git a/tests/test_metadata.py b/tests/test_metadata.py index b5a696ff6f..9c2ab7bd30 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -4,11 +4,21 @@ import os import subprocess +import sys import pytest +from conda.base.context import context +from pytest import MonkeyPatch from conda_build import api -from conda_build.metadata import MetaData, _hash_dependencies, select_lines, yamlize +from conda_build.config import Config +from conda_build.metadata import ( + MetaData, + _hash_dependencies, + get_selectors, + select_lines, + yamlize, +) from conda_build.utils import DEFAULT_SUBDIRS from .utils import metadata_dir, thisdir @@ -323,3 +333,93 @@ def test_yamlize_versions(): ) assert yml == ["1.2.3", "1.2.3.4"] + + +OS_ARCH = ( + "aarch64", + "arm", + "arm64", + "armv6l", + "armv7l", + "emscripten", + "linux", + "linux32", + "linux64", + "osx", + "ppc64", + "ppc64le", + "riscv64", + "s390x", + "unix", + "wasi", + "wasm32", + "win", + "win32", + "win64", + "x86", + "x86_64", +) + + +@pytest.mark.parametrize( + ( + "subdir", # defined in conda.base.constants.KNOWN_SUBDIRS + "expected", # OS_ARCH keys expected to be True + ), + [ + ("emscripten-wasm32", {"unix", "emscripten", "wasm32"}), + ("wasi-wasm32", {"wasi", "wasm32"}), + ("freebsd-64", {"x86", "x86_64"}), + ("linux-32", {"unix", "linux", "linux32", "x86"}), + ("linux-64", {"unix", "linux", "linux64", "x86", "x86_64"}), + ("linux-aarch64", {"unix", "linux", "aarch64"}), + ("linux-armv6l", {"unix", "linux", "arm", "armv6l"}), + ("linux-armv7l", {"unix", "linux", "arm", "armv7l"}), + ("linux-ppc64", {"unix", "linux", "ppc64"}), + ("linux-ppc64le", {"unix", "linux", "ppc64le"}), + ("linux-riscv64", {"unix", 
"linux", "riscv64"}), + ("linux-s390x", {"unix", "linux", "s390x"}), + ("osx-64", {"unix", "osx", "x86", "x86_64"}), + ("osx-arm64", {"unix", "osx", "arm64"}), + ("win-32", {"win", "win32", "x86"}), + ("win-64", {"win", "win64", "x86", "x86_64"}), + ("win-arm64", {"win", "arm64"}), + ("zos-z", {}), + ], +) +@pytest.mark.parametrize("nomkl", [0, 1]) +def test_get_selectors( + monkeypatch: MonkeyPatch, + subdir: str, + expected: set[str], + nomkl: int, +): + monkeypatch.setenv("FEATURE_NOMKL", str(nomkl)) + + config = Config(host_subdir=subdir) + assert get_selectors(config) == { + # defaults + "build_platform": context.subdir, + "lua": "5", # see conda_build.variants.DEFAULT_VARIANTS["lua"] + "luajit": False, # lua[0] == 2 + "np": 122, # see conda_build.variants.DEFAULT_VARIANTS["numpy"] + "os": os, + "pl": "5.26.2", # see conda_build.variants.DEFAULT_VARIANTS["perl"] + "py": int(f"{sys.version_info.major}{sys.version_info.minor}"), + "py26": sys.version_info.major == 2 and sys.version_info.minor == 6, + "py27": sys.version_info.major == 2 and sys.version_info.minor == 7, + "py2k": sys.version_info.major == 2, + "py33": sys.version_info.major == 3 and sys.version_info.minor == 3, + "py34": sys.version_info.major == 3 and sys.version_info.minor == 4, + "py35": sys.version_info.major == 3 and sys.version_info.minor == 5, + "py36": sys.version_info.major == 3 and sys.version_info.minor == 6, + "py3k": sys.version_info.major == 3, + "nomkl": bool(nomkl), + # default OS/arch values + **{key: False for key in OS_ARCH}, + # environment variables + "environ": os.environ, + **os.environ, + # override with True values + **{key: True for key in expected}, + } From d480c32597d7cb986ec89dbc8152d474b627e404 Mon Sep 17 00:00:00 2001 From: Isuru Fernando Date: Mon, 16 Oct 2023 09:59:38 -0500 Subject: [PATCH 202/366] Use `conda.base.constants.KNOWN_SUBDIRS` for setting up selectors (#5009) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- conda_build/metadata.py | 33 ++++++++++++++--------- conda_build/utils.py | 21 ++------------- docs/source/resources/define-metadata.rst | 11 +++++--- news/5009-use-conda-known-subdirs | 24 +++++++++++++++++ tests/test_metadata.py | 7 +++-- 5 files changed, 60 insertions(+), 36 deletions(-) create mode 100644 news/5009-use-conda-known-subdirs diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 1160c1e9fe..d2d87912bf 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -21,6 +21,7 @@ from conda_build.features import feature_list from conda_build.license_family import ensure_valid_license_family from conda_build.utils import ( + DEFAULT_SUBDIRS, HashableDict, ensure_list, expand_globs, @@ -29,7 +30,7 @@ insert_variant_versions, ) -from .conda_interface import MatchSpec, envs_dirs, md5_file, non_x86_linux_machines +from .conda_interface import MatchSpec, envs_dirs, md5_file try: import yaml @@ -121,25 +122,36 @@ def get_selectors(config: Config) -> dict[str, bool]: # Remember to update the docs of any of this changes plat = config.host_subdir d = dict( - linux=plat.startswith("linux-"), linux32=bool(plat == "linux-32"), linux64=bool(plat == "linux-64"), - emscripten=plat.startswith("emscripten-"), - wasi=plat.startswith("wasi-"), arm=plat.startswith("linux-arm"), - osx=plat.startswith("osx-"), unix=plat.startswith(("linux-", "osx-", "emscripten-")), - win=plat.startswith("win-"), win32=bool(plat == "win-32"), win64=bool(plat == "win-64"), - x86=plat.endswith(("-32", 
"-64")), - x86_64=plat.endswith("-64"), - wasm32=bool(plat.endswith("-wasm32")), os=os, environ=os.environ, nomkl=bool(int(os.environ.get("FEATURE_NOMKL", False))), ) + # Add the current platform to the list of subdirs to enable conda-build + # to bootstrap new platforms without a new conda release. + subdirs = list(DEFAULT_SUBDIRS) + [plat] + + # filter out noarch and other weird subdirs + subdirs = [subdir for subdir in subdirs if "-" in subdir] + + subdir_oses = {subdir.split("-")[0] for subdir in subdirs} + subdir_archs = {subdir.split("-")[1] for subdir in subdirs} + + for subdir_os in subdir_oses: + d[subdir_os] = plat.startswith(f"{subdir_os}-") + + for arch in subdir_archs: + arch_full = ARCH_MAP.get(arch, arch) + d[arch_full] = plat.endswith(f"-{arch}") + if arch == "32": + d["x86"] = plat.endswith(("-32", "-64")) + defaults = variants.get_default_variant(config) py = config.variant.get("python", defaults["python"]) # there are times when python comes in as a tuple @@ -183,9 +195,6 @@ def get_selectors(config: Config) -> dict[str, bool]: d["lua"] = lua d["luajit"] = bool(lua[0] == "2") - for machine in non_x86_linux_machines: - d[machine] = bool(plat.endswith("-%s" % machine)) - for feature, value in feature_list: d[feature] = value d.update(os.environ) diff --git a/conda_build/utils.py b/conda_build/utils.py index af5678247e..06a5c79c6d 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -67,6 +67,7 @@ from glob import glob from conda.api import PackageCacheData # noqa +from conda.base.constants import KNOWN_SUBDIRS # NOQA because it is not used in this file. from conda_build.conda_interface import rm_rf as _rm_rf # noqa @@ -104,25 +105,7 @@ mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE -DEFAULT_SUBDIRS = { - "emscripten-wasm32", - "wasi-wasm32", - "linux-64", - "linux-32", - "linux-s390x", - "linux-ppc64", - "linux-ppc64le", - "linux-armv6l", - "linux-armv7l", - "linux-aarch64", - "win-64", - "win-32", - "win-arm64", - "osx-64", - "osx-arm64", - "zos-z", - "noarch", -} +DEFAULT_SUBDIRS = set(KNOWN_SUBDIRS) RUN_EXPORTS_TYPES = { "weak", diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index d314349b2b..c9e1ddd32b 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1928,10 +1928,10 @@ variables are booleans. * - osx - True if the platform is macOS. * - arm64 - - True if the platform is macOS and the Python architecture - is arm64. + - True if the platform is either macOS or Windows and the + Python architecture is arm64. * - unix - - True if the platform is either macOS or Linux. + - True if the platform is either macOS or Linux or emscripten. * - win - True if the platform is Windows. * - win32 @@ -1965,6 +1965,11 @@ The use of the Python version selectors, `py27`, `py34`, etc. is discouraged in favor of the more general comparison operators. Additional selectors in this series will not be added to conda-build. +Note that for each subdir with OS and architecture that `conda` supports, +two preprocessing selectors are created for the OS and the architecture separately +except when the architecture is not a valid python expression (`*-32` and `*-64` +in particular). 
+ Because the selector is any valid Python expression, complicated logic is possible: diff --git a/news/5009-use-conda-known-subdirs b/news/5009-use-conda-known-subdirs new file mode 100644 index 0000000000..a9423202f4 --- /dev/null +++ b/news/5009-use-conda-known-subdirs @@ -0,0 +1,24 @@ +### Enhancements + +* Use subdirs known to conda for selector definitions. (#5009) + This allows conda_build to support new architectures with just + a new version of conda. For new OSes, there are more information + needed for conda_build to work properly, including whether the + new OS is a UNIX-like platform, the shared library prefix, and + the binary archive format for the platform. + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 9c2ab7bd30..37319f0de4 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -342,6 +342,7 @@ def test_yamlize_versions(): "armv6l", "armv7l", "emscripten", + "freebsd", "linux", "linux32", "linux64", @@ -358,6 +359,8 @@ def test_yamlize_versions(): "win64", "x86", "x86_64", + "z", + "zos", ) @@ -369,7 +372,7 @@ def test_yamlize_versions(): [ ("emscripten-wasm32", {"unix", "emscripten", "wasm32"}), ("wasi-wasm32", {"wasi", "wasm32"}), - ("freebsd-64", {"x86", "x86_64"}), + ("freebsd-64", {"freebsd", "x86", "x86_64"}), ("linux-32", {"unix", "linux", "linux32", "x86"}), ("linux-64", {"unix", "linux", "linux64", "x86", "x86_64"}), ("linux-aarch64", {"unix", "linux", "aarch64"}), @@ -384,7 +387,7 @@ def test_yamlize_versions(): ("win-32", {"win", "win32", "x86"}), ("win-64", {"win", "win64", "x86", "x86_64"}), ("win-arm64", {"win", "arm64"}), - ("zos-z", {}), + ("zos-z", {"zos", "z"}), ], ) @pytest.mark.parametrize("nomkl", [0, 1]) From f7cf445d57d448117bbfbb18f97a511897c397bc Mon Sep 17 00:00:00 2001 From: jaimergp Date: Wed, 18 Oct 2023 18:06:45 +0200 Subject: [PATCH 203/366] Fallback to solved record filename to find the downloaded tarball in `get_upstream_pins` (#5037) Fallback to input package filename if not found in the index; it should be equivalent anyway. --- conda_build/render.py | 2 +- news/5037-conda-libmamba-solver-pins | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 news/5037-conda-libmamba-solver-pins diff --git a/conda_build/render.py b/conda_build/render.py index 881898dc9d..fa428e07f6 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -384,7 +384,7 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files with utils.LoggingContext(): pfe.execute() for pkg_dir in pkgs_dirs: - _loc = os.path.join(pkg_dir, index[pkg].fn) + _loc = os.path.join(pkg_dir, index.get(pkg, pkg).fn) if os.path.isfile(_loc): pkg_loc = _loc break diff --git a/news/5037-conda-libmamba-solver-pins b/news/5037-conda-libmamba-solver-pins new file mode 100644 index 0000000000..d4044fac0f --- /dev/null +++ b/news/5037-conda-libmamba-solver-pins @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fallback to solved record filename to find the downloaded tarball in `get_upstream_pins`. 
(#4991 via #5037) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 7c9e766c29a5493a7db7278b3fedcc406bc49753 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 19 Oct 2023 12:31:19 -0400 Subject: [PATCH 204/366] Simplify codefile detection (#5040) * Deprecate is_string * Deprecate is_codefile * Deprecate codefile_type * Add codefile_class unittest --- conda_build/inspect_pkg.py | 18 ++--- conda_build/os_utils/ldd.py | 11 +-- conda_build/os_utils/liefldd.py | 116 ++++++++++++++++++++------------ conda_build/os_utils/pyldd.py | 81 +++++++++++++--------- conda_build/post.py | 57 +++++++++------- conda_build/utils.py | 3 +- news/5040-codefile | 21 ++++++ tests/data/ldd/clear.elf | Bin 0 -> 10168 bytes tests/data/ldd/clear.exe | Bin 0 -> 9839 bytes tests/data/ldd/clear.macho | Bin 0 -> 68448 bytes tests/data/ldd/jansi.dll | Bin 0 -> 20480 bytes tests/data/ldd/uuid.pyd | Bin 0 -> 12800 bytes tests/os_utils/test_codefile.py | 40 +++++++++++ 13 files changed, 230 insertions(+), 117 deletions(-) create mode 100644 news/5040-codefile create mode 100755 tests/data/ldd/clear.elf create mode 100644 tests/data/ldd/clear.exe create mode 100755 tests/data/ldd/clear.macho create mode 100755 tests/data/ldd/jansi.dll create mode 100644 tests/data/ldd/uuid.pyd create mode 100644 tests/os_utils/test_codefile.py diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index e38c5aa9e7..cbb60d4f25 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -24,7 +24,7 @@ get_package_obj_files, get_untracked_obj_files, ) -from conda_build.os_utils.liefldd import codefile_type +from conda_build.os_utils.liefldd import codefile_class, machofile from conda_build.os_utils.macho import get_rpaths, human_filetype from conda_build.utils import ( comma_join, @@ -354,14 +354,16 @@ def inspect_objects(packages, prefix=sys.prefix, groupby="package"): info = [] for f in obj_files: - f_info = {} path = join(prefix, f) - filetype = codefile_type(path) - if filetype == "machofile": - f_info["filetype"] = human_filetype(path, None) - f_info["rpath"] = ":".join(get_rpaths(path)) - f_info["filename"] = f - info.append(f_info) + codefile = codefile_class(path) + if codefile == machofile: + info.append( + { + "filetype": human_filetype(path, None), + "rpath": ":".join(get_rpaths(path)), + "filename": f, + } + ) output_string += print_object_info(info, groupby) if hasattr(output_string, "decode"): diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 77daf4ab10..32eea125a2 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -8,12 +8,7 @@ from conda_build.conda_interface import linked_data, untracked from conda_build.os_utils.macho import otool -from conda_build.os_utils.pyldd import ( - codefile_class, - inspect_linkages, - is_codefile, - machofile, -) +from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") @@ -118,7 +113,7 @@ def get_package_obj_files(dist, prefix): files = get_package_files(dist, prefix) for f in files: path = join(prefix, f) - if is_codefile(path): + if codefile_class(path): res.append(f) return res @@ -130,7 +125,7 @@ def get_untracked_obj_files(prefix): files = untracked(prefix) for f in files: path = join(prefix, f) - if is_codefile(path): + if codefile_class(path): res.append(f) return res diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 
2cf6ce92ad..26a768a4f6 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -1,9 +1,6 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -try: - from collections.abc import Hashable -except ImportError: - from collections.abc import Hashable +from __future__ import annotations import hashlib import json @@ -11,34 +8,34 @@ import struct import sys import threading +from collections.abc import Hashable from fnmatch import fnmatch from functools import partial +from pathlib import Path from subprocess import PIPE, Popen +from ..deprecations import deprecated from .external import find_executable # lief cannot handle files it doesn't know about gracefully # TODO :: Remove all use of pyldd # Currently we verify the output of each against the other -from .pyldd import codefile_type as codefile_type_pyldd +from .pyldd import DLLfile, EXEfile, elffile, machofile +from .pyldd import codefile_type as _codefile_type from .pyldd import inspect_linkages as inspect_linkages_pyldd -codefile_type = codefile_type_pyldd -have_lief = False try: import lief lief.logging.disable() have_lief = True -except: - pass +except ImportError: + have_lief = False +@deprecated("3.28.0", "4.0.0", addendum="Use `isinstance(value, str)` instead.") def is_string(s): - try: - return isinstance(s, basestring) - except NameError: - return isinstance(s, str) + return isinstance(s, str) # Some functions can operate on either file names @@ -46,17 +43,16 @@ def is_string(s): # these are to be avoided, or if not avoided they # should be passed a binary when possible as that # will prevent having to parse it multiple times. -def ensure_binary(file): - if not is_string(file): +def ensure_binary(file: str | os.PathLike | Path | lief.Binary) -> lief.Binary | None: + if isinstance(file, lief.Binary): return file - else: - try: - if not os.path.exists(file): - return [] - return lief.parse(file) - except: - print(f"WARNING: liefldd: failed to ensure_binary({file})") - return None + elif not Path(file).exists(): + return None + try: + return lief.parse(str(file)) + except BaseException: + print(f"WARNING: liefldd: failed to ensure_binary({file})") + return None def nm(filename): @@ -77,25 +73,57 @@ def nm(filename): print("No symbols found") -def codefile_type_liefldd(file, skip_symlinks=True): - binary = ensure_binary(file) - result = None - if binary: - if binary.format == lief.EXE_FORMATS.PE: - if lief.PE.DLL_CHARACTERISTICS: - if binary.header.characteristics & lief.PE.HEADER_CHARACTERISTICS.DLL: - result = "DLLfile" - else: - result = "EXEfile" +if have_lief: + + def codefile_class( + path: str | os.PathLike | Path, + skip_symlinks: bool = False, + ) -> type[DLLfile | EXEfile | machofile | elffile] | None: + # same signature as conda.os_utils.pyldd.codefile_class + if not (binary := ensure_binary(path)): + return None + elif ( + binary.format == lief.EXE_FORMATS.PE + and lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list + ): + return DLLfile + elif binary.format == lief.EXE_FORMATS.PE: + return EXEfile elif binary.format == lief.EXE_FORMATS.MACHO: - result = "machofile" + return machofile elif binary.format == lief.EXE_FORMATS.ELF: - result = "elffile" - return result - + return elffile + else: + return None -if have_lief: - codefile_type = codefile_type_liefldd +else: + from .pyldd import codefile_class + + +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", +) +def codefile_type_liefldd(*args, 
**kwargs) -> str | None: + codefile = codefile_class(*args, **kwargs) + return codefile.__name__ if codefile else None + + +deprecated.constant( + "3.28.0", + "4.0.0", + "codefile_type_pyldd", + _codefile_type, + addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", +) +deprecated.constant( + "3.28.0", + "4.0.0", + "codefile_type", + _codefile_type, + addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", +) def _trim_sysroot(sysroot): @@ -111,7 +139,9 @@ def get_libraries(file): if binary.format == lief.EXE_FORMATS.PE: result = binary.libraries else: - result = [lib if is_string(lib) else lib.name for lib in binary.libraries] + result = [ + lib if isinstance(lib, str) else lib.name for lib in binary.libraries + ] # LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that. binary_name = None if binary.format == lief.EXE_FORMATS.MACHO: @@ -505,7 +535,7 @@ def inspect_linkages_lief( while tmp_filename: if ( not parent_exe_dirname - and codefile_type(tmp_filename) == "EXEfile" + and codefile_class(tmp_filename) == EXEfile ): parent_exe_dirname = os.path.dirname(tmp_filename) tmp_filename = parents_by_filename[tmp_filename] @@ -595,7 +625,7 @@ def get_linkages( result_pyldd = [] debug = False if not have_lief or debug: - if codefile_type(filename) not in ("DLLfile", "EXEfile"): + if codefile_class(filename) not in (DLLfile, EXEfile): result_pyldd = inspect_linkages_pyldd( filename, resolve_filenames=resolve_filenames, @@ -607,7 +637,7 @@ def get_linkages( return result_pyldd else: print( - f"WARNING: failed to get_linkages, codefile_type('{filename}')={codefile_type(filename)}" + f"WARNING: failed to get_linkages, codefile_class('{filename}')={codefile_class(filename)}" ) return {} result_lief = inspect_linkages_lief( diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 1e1cd4e4cc..42b89711ae 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import glob import logging @@ -7,9 +9,12 @@ import re import struct import sys +from pathlib import Path from conda_build.utils import ensure_list, get_logger +from ..deprecations import deprecated + logging.basicConfig(level=logging.INFO) @@ -1028,46 +1033,60 @@ def codefile(file, arch="any", initial_rpaths_transitive=[]): return inscrutablefile(file, list(initial_rpaths_transitive)) -def codefile_class(filename, skip_symlinks=False): - if os.path.islink(filename): - if skip_symlinks: - return None - else: - filename = os.path.realpath(filename) - if os.path.isdir(filename): +def codefile_class( + path: str | os.PathLike | Path, + skip_symlinks: bool = False, +) -> type[DLLfile | EXEfile | machofile | elffile] | None: + # same signature as conda.os_utils.liefldd.codefile_class + path = Path(path) + if skip_symlinks and path.is_symlink(): return None - if filename.endswith((".dll", ".pyd")): + path = path.resolve() + + def _get_magic_bit(path: Path) -> bytes: + with path.open("rb") as handle: + bit = handle.read(4) + return struct.unpack(BIG_ENDIAN + "L", bit)[0] + + if path.is_dir(): + return None + elif path.suffix.lower() in (".dll", ".pyd"): return DLLfile - if filename.endswith(".exe"): + elif path.suffix.lower() == ".exe": return EXEfile - # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. 
- if filename.endswith(".class"): + elif path.suffix.lower() == ".class": + # Java .class files share 0xCAFEBABE with Mach-O FAT_MAGIC. return None - if not os.path.exists(filename) or os.path.getsize(filename) < 4: + elif not path.exists() or path.stat().st_size < 4: + return None + elif (magic := _get_magic_bit(path)) == ELF_HDR: + return elffile + elif magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): + return machofile + else: return None - with open(filename, "rb") as file: - (magic,) = struct.unpack(BIG_ENDIAN + "L", file.read(4)) - file.seek(0) - if magic in (FAT_MAGIC, MH_MAGIC, MH_CIGAM, MH_CIGAM_64): - return machofile - elif magic == ELF_HDR: - return elffile - return None -def is_codefile(filename, skip_symlinks=True): - klass = codefile_class(filename, skip_symlinks=skip_symlinks) - if not klass: - return False - return True +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", +) +def is_codefile(path: str | os.PathLike | Path, skip_symlinks: bool = True) -> bool: + return bool(codefile_class(path, skip_symlinks=skip_symlinks)) -def codefile_type(filename, skip_symlinks=True): - "Returns None, 'machofile' or 'elffile'" - klass = codefile_class(filename, skip_symlinks=skip_symlinks) - if not klass: - return None - return klass.__name__ +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", +) +def codefile_type( + path: str | os.PathLike | Path, + skip_symlinks: bool = True, +) -> str | None: + codefile = codefile_class(path, skip_symlinks=skip_symlinks) + return codefile.__name__ if codefile else None def _trim_sysroot(sysroot): diff --git a/conda_build/post.py b/conda_build/post.py index 290779385d..05af50b24f 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -53,18 +53,24 @@ have_lief, set_rpath, ) -from conda_build.os_utils.pyldd import codefile_type +from conda_build.os_utils.pyldd import ( + DLLfile, + EXEfile, + codefile_class, + elffile, + machofile, +) filetypes_for_platform = { - "win": ("DLLfile", "EXEfile"), - "osx": ["machofile"], - "linux": ["elffile"], + "win": (DLLfile, EXEfile), + "osx": (machofile,), + "linux": (elffile,), } def fix_shebang(f, prefix, build_python, osx_is_app=False): path = join(prefix, f) - if codefile_type(path): + if codefile_class(path): return elif islink(path): return @@ -405,7 +411,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ".. seems to be linking to a compiler runtime, replacing build prefix with " "host prefix and" ) - if not codefile_type(link): + if not codefile_class(link): sys.exit( "Error: Compiler runtime library in build prefix not found in host prefix %s" % link @@ -841,7 +847,7 @@ def _collect_needed_dsos( sysroots = list(sysroots_files.keys())[0] for f in files: path = join(run_prefix, f) - if not codefile_type(path): + if not codefile_class(path): continue build_prefix = build_prefix.replace(os.sep, "/") run_prefix = run_prefix.replace(os.sep, "/") @@ -901,10 +907,9 @@ def _map_file_to_package( for subdir2, _, filez in os.walk(prefix): for file in filez: fp = join(subdir2, file) - dynamic_lib = ( - any(fnmatch(fp, ext) for ext in ("*.so*", "*.dylib*", "*.dll")) - and codefile_type(fp, skip_symlinks=False) is not None - ) + dynamic_lib = any( + fnmatch(fp, ext) for ext in ("*.so*", "*.dylib*", "*.dll") + ) and codefile_class(fp, skip_symlinks=False) static_lib = any(fnmatch(fp, ext) for ext in ("*.a", "*.lib")) # Looking at all the files is very slow. 
if not dynamic_lib and not static_lib: @@ -947,7 +952,7 @@ def _map_file_to_package( ) } all_lib_exports[prefix][rp_po] = exports - # Check codefile_type to filter out linker scripts. + # Check codefile_class to filter out linker scripts. if dynamic_lib: contains_dsos[prefix_owners[prefix][rp_po][0]] = True elif static_lib: @@ -1217,8 +1222,8 @@ def _show_linking_messages( ) for f in files: path = join(run_prefix, f) - filetype = codefile_type(path) - if not filetype or filetype not in filetypes_for_platform[subdir.split("-")[0]]: + codefile = codefile_class(path) + if codefile not in filetypes_for_platform[subdir.split("-")[0]]: continue warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, "/")) err_prelude = " ERROR ({},{})".format(pkg_name, f.replace(os.sep, "/")) @@ -1316,15 +1321,15 @@ def check_overlinking_impl( files_to_inspect = [] filesu = [] - for f in files: - path = join(run_prefix, f) - filetype = codefile_type(path) - if filetype and filetype in filetypes_for_platform[subdir.split("-")[0]]: - files_to_inspect.append(f) - filesu.append(f.replace("\\", "/")) + for file in files: + path = join(run_prefix, file) + codefile = codefile_class(path) + if codefile in filetypes_for_platform[subdir.split("-")[0]]: + files_to_inspect.append(file) + filesu.append(file.replace("\\", "/")) if not files_to_inspect: - return dict() + return {} sysroot_substitution = "$SYSROOT" build_prefix_substitution = "$PATH" @@ -1633,18 +1638,18 @@ def post_process_shared_lib(m, f, files, host_prefix=None): if not host_prefix: host_prefix = m.config.host_prefix path = join(host_prefix, f) - codefile_t = codefile_type(path) - if not codefile_t or path.endswith(".debug"): + codefile = codefile_class(path) + if not codefile or path.endswith(".debug"): return rpaths = m.get_value("build/rpaths", ["lib"]) - if codefile_t == "elffile": + if codefile == elffile: mk_relative_linux( f, host_prefix, rpaths=rpaths, method=m.get_value("build/rpaths_patcher", None), ) - elif codefile_t == "machofile": + elif codefile == machofile: if m.config.host_platform != "osx": log = utils.get_logger(__name__) log.warn( @@ -1734,7 +1739,7 @@ def check_symlinks(files, prefix, croot): # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS # If condition exists, then copy the file rather than symlink it. - if not dirname(link_path) == dirname(real_link_path) and codefile_type(f): + if not dirname(link_path) == dirname(real_link_path) and codefile_class(f): os.remove(path) utils.copy_into(real_link_path, path) elif real_link_path.startswith(real_build_prefix): diff --git a/conda_build/utils.py b/conda_build/utils.py index 06a5c79c6d..f8606ffe9d 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -97,9 +97,10 @@ FileNotFoundError = FileNotFoundError on_win = sys.platform == "win32" +on_mac = sys.platform == "darwin" +on_linux = sys.platform == "linux" codec = getpreferredencoding() or "utf-8" -on_win = sys.platform == "win32" root_script_dir = os.path.join(root_dir, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ diff --git a/news/5040-codefile b/news/5040-codefile new file mode 100644 index 0000000000..c4f85ca7cf --- /dev/null +++ b/news/5040-codefile @@ -0,0 +1,21 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.os_utils.pyldd.is_string` as pending deprecation. Use `isinstance(value, str)` instead. 
(#5040) +* Mark `conda_build.os_utils.pyldd.is_codefile` as pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) +* Mark `conda_build.os_utils.pyldd.codefile_type` as pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) + +### Docs + +* + +### Other + +* diff --git a/tests/data/ldd/clear.elf b/tests/data/ldd/clear.elf new file mode 100755 index 0000000000000000000000000000000000000000..52013aa3ee2ca775bba0418b389cfc1f392825de GIT binary patch literal 10168 zcmeHNZ){xEmA~_zlb9p~C$XJ`g!l~ssib6Lhcs~@i05%&k_w7z3`By}csw(DWPNr;lUHDRkQ_p~peXB+d!#q0^ahn4(|YAUQ|=$dgmRmzZ!>Ig z#M?xK?hgA=2&Q|qyywpW;<5*Ci4toL>3k)NM6RVl$%)n2){WK}$v@;I#R zNp!63iLL2LB>M-}L?Y>E_lEU>Oe)YI`%U&qHuSZJy_U$#9IqrF%^yU5Icq%T|_<5?#!|7N2^`HwiIV#e8szF{vj7pZGb3x~;@=T-} z;7V1&W6%BMv0*)q-6{af7*tdZcM1B}?B^Ndzt)}^p{Y&ljK=Kd5)q0YVdT5lb{2&S z+v^J9)R*Jm{f`A7pZs@ycz}L7y#!W|FV_y1!Knd@g*5sNlxMOtQ3kJqq0;dtb34n( zUspzcHSn3*T~Y@BDeTYG?yY6y?=FL1E@P)q#{X}X!Pl3uPwUD;E=>?ni(whVr>4F0Dw_Zc>iHGCKog&(w4#Q%v2zMru3E}khI~fs1(V?nLJQLm- zPiGRTq^IE|QcI1O}4AiNZj{fmUz4H`?1LU;`Qu?CeV?l1^th+Py<$oLH*g5uKhvNRYRXiln=! z3MNHoq9>kAiOwg}2`7%aIGKi|XBtjHCY6R&oLW&8pWa9!2?W84?x0q}UHy@COh5t- zccz}C!)ybwQB|7punXP4E!Z4h7iichZh5eETWH(Y)&>G}IEw9Gr2jDjG?@+I^ZpF4 zL{I(C=b@&C5%Osr(x*i9DHQ1pvyczu_T#uR#h|SA>ibkZs#U!)RX@J6iD2Qwp{-Or zUerzF`dy_`!}W8%O2hSg>QW8Y&u~k_`QAVc*K0W6D+q7YaJmC=vNfFUES$D#INyn= zu0_N3`^|O@*AMh|4cG5o-5O4@=hUa+`hGj0;rhOs)o}eDcTmIWj>hSThU<5yqZ-cl zN75P7aK2v>eoVvlJ6T@C=}yY&gog9G3Dv!$;p$bvQ#q^Qd{3l`f`-%mmeU0dw-^K+ z*KogvpH=hLz$B!*EvJHp>v6uI;dFQ9G`aiNBa^1cJuk%Y?|KUVh9N`NR~f_oMD=)Eqeg>h7o*!B~vFfc%%ttdZp++iD5hZ9+cxsBnEE`|dNS z^S>W*EpgB3^QIUj9&L>YyEf-P5pr>7bv>C+q|9Z=T>ZC@d-`V~_wi_LuJM(Sd){0z zdd^%tS}?0u3-g}Sbrqrk?>}vI+kH=9`)R8y_@2n(Rj#f@(hOpD7kv58Vi8j=+gdR0 zIc;J9zPgz1^n~;cw71*{mi!d_ekE8dzs|y<8`8X$Hx=JHi{Ga z*jjkWmkW-ETpKpdAx7T>ZlLc4vt~4!4Y}>gcEMb}+CMDqQ6H#{!jd1{$ZgOV-UPjs z$bUc_&Y8A*ewE1m5o3`TV*BrrD|}-6-xZ4oo<|l)S#?jzSKGu2yGGJ1UXjHw$tUsZ z)97d3414ng_?T!HIj^sv_XzQE9-skxr?+Ux>GEo_P9X)BI=t z-apYa^g86R4-W>91>Fu$*O=0!{!H%vl{b$s%@ghr+HUW#H^JstK7{`?ChZ+!)1`T~ zJGuJ}Z;pQiUwJOGKI8|VMW#6^FlYSGd-iO|rFKn>qq031EY##O&m-Q3?Pd{k(t+%| zQSgFn6fl1I+FY$vP}9ie$ATkQqs=3eyWjL;L%vZzL}6*pKOyJQk!!_Q zsV%bYpKrUXVBZh>rMAo2L~O^5EaIBAH`y2$8uM?W4SS1yoZ=j`M6T~6#7)Iua`!Lf zc%rYvkh_F=I(#_fQapHXIJ;Nm9)_;FO1M|en$?>S8>|bg-nzgKLsD#TCP=jQLWDzkUDzwyRqE5CPh!CdmGc(|)*zWk#X6Vj)>;VNP$ z*!E;`17QMnOOWdno}DOeaA%42G`{EHix*3=EEhZ~*DmRkox-f~4T#GijitCK_l9b$ zdx}5JFWQG_@9Ia~H(-7c?ms5SgVyc0!T;W@9i{lu9`H|?t2hn@X}&&dqrD@`u%5lW zXi&Ta zr+#i?ti41x)|t}7nMhasPV3f;^%%V&rIMMy@vd!c_qIMj@@;oMGC(xi6OW{+pwiq2 zrSzS1Eo)7iMdB7Eyu?{fcOqjYl6WnQ#H>`Obqm#!+#{Zd_we4TEcrgSM!nCG$iTEn zEM+BAj^(6xSx(CG9I!IcbgHMP1FuTza(V9^(xJ7n_|CPCvy1(sSHVOw(qqw^SE8fe ziD$g3TQk&I%5#Npxp`L~P`aTJ7^2&1SrN)+&)n(9tC}WvRM2G^!{tbVl_hlHt7AJRnVp6d#Y7b46O7m zpI_%;OMyL(w(GrGDR(={1Hd*b%mTHMUuLE*!tlIC%tPj_FfS~9)k?l z-c%y{dytO;?;%v_ZW}TLwRsSE;!3f&2R@^2%y+2vaLtyX>fy!B`xfnA7(7sQ@P^5KOeHHu7<^#q-UvOyl@T^DX ze)z$ev+tcgd1`@Zn%6eXkP6wo1wOPR&eVTOjg*1yp|OzuwH%sz_`2Xw`)2Q- z6+BRJ&}@Fecoe38J8c_!WW#~ZMU2nmD09E>9Wp`?M$nq(-N(U0^?Kx&AzzPi%%aSF zXuAHUrr}xpDz@%7sU!Cd&Hn1iEgybx_Vk%k?_mf%zu$-60QB^@lZh|V7ZLa(0{_n= zu+a7#e5XoM+GZ$4RDpT=c0-AI`qo2dAMfqL- zIrTf=ijsQXV+IV{tGRrwn4*GJA$k7hGd4%^{H>=&)vM!EyoED~5`UB7y}U-kQ%_Ts z^8DVe_^pc4xkHJ+9W9Zp$X{=JCB`e~NuIwY9aHV|HzaPKsWKz5eS7`mxZ^nW_!D!b zy`tikIdM_(dsIE!`M;*t0_k5V<=<^TuRllBJpH_SpH;qZk@UC9YWl1y|0}s8s-1Gm zN7Va#sJZ!0>r43c_BLx{;Eq6pwXVLQv3_I2I_pcV@tCzW;z)c=L;Y<+3O6Mq(Nr=P zvAUws+pLC$z`8)a5P?iLzP~$>4iUij{CK)g1aP~I2bzN0);N(a5$H 
zN@jNTN{U|uyc+(=L1)6KOUHX6q@YTDJr3E&4-Ww+K7gn96GkANibb4=2*kU?ooW2m z5$=vbmpQ2sj-=C(T~dR~FdXSkMBzdTwxo@YOhyEvsovgr(kU0c&yLf&r!~dvrqQs$ z;l8B+-j_TO4_<%FFGhjZHtV+;HaNUDD8TZ#=?CvyjNFO>?NzKlVA$Yrk5T~L#VGN9 z#`^}_@uLpc))edW{)hKzFYz6+RA1H#@nsY!2CUEfV~f&nR~sMgpX3U+&->FIz(|Dk zdA}V~8y@ek5b;uJ{O$rudl&2Teziw!&|8%r`_FPrsh;*i=6OFnq4aH~PyUk(`>$Z} zC<@e7)}Pv+)q%kKGq=zB-2aHu4=RJa-{;kVr~6N5N~!)1U=$NRpZNF3O7+p7&lP3f z^Oeg)JCSw$>3vp8wMa7ctePsx!V@66epc<*S=IiV?3mKC6lL-x3c5a@?_1OXTG}4Z zTh{+urO)wiSNmSO(yu1tS}L{oE!2?+>refEPOVfb-6BfcA1cwmP-}aN7nJ_RlJk;E?3f41`vCHjKTO$!xByiO!HJ^z{iXB5a5x4%^BFSVpZ pizHJ|ZkK_7L4jPXXFyTb-&%rmcXY~a>!R(JgH-(A@bxopWl#5j$j)IB+FSh=6pxzT2}8?u*-X z;;3nB^=z8Mvg%T(Q2PUD5$YdA5EV-EgF1xP3Lho74gCPrs4CT2pcWx$Lqu_(cV_Qy z*FY!~sYUWgbMN~+^UlmWA3OWr&i0#TSRG@m9(i$*vBQv*qWt&lKjSDqf9;X;+0V{> z?vlff{^u?k7)j|;zK|O(s2M4)X0tg%8q%b~XjV#PrM~NTNSRzh^ENd#US>8O+r(Jn zjs|wr38IxT3E~ZQx$``y^CH#=@I_=QYc?}Hb3o-7Jo!z@#f_lMAcMSM3!`z3ix$CA zeV$CEiY-M{o@eX;O~^Q7WokUi*k2(2+thqJW3y*z#A|3{2FiCg;D>ypu~Yw+G{|Cu z-h^tX;0_WmA=BKPk8DX%mY?+&gyC)~_aReTmNAogl*OXnys+bAjDjTlRmheUWxl9) zOxR5jA44X)&l9L6Za1Xs#GIl<8YJ!gX`K8^iT3zh=V3$st+v~uJ&Z#y#6k157>9BW z@)~4IB0CFQRzJoJ$i>&&<>{u;&tZxOv1Xh6v;KMi@v>pj$1M7Rnl2ZgYF{8cV@W&Z z(yM!pCB5#oe5WwN=tGi=J1;MPsN%` zf1p}7FC>7;YO$?#khI0N3r*?dvf_V*ue7+>*2v3iSlHLFXs?Dh*z9S({qyCWjAr0@ z#XfncWO1E0@p19aT{|i>Uiyt(`t{H0JSqPbPnSITl6#^DkF8wNqw;k4E=)S6^2>Oj zur!7Wb)WJ>cQ#&S(UcQu)GMCbG<80Vyn#c@}mT1bPmBx<7PWn!!!od zhqi;A+DYxq2!jC_%oX1NpaioB%oMmaS9}Y^mqe%cQrUF)Msl~b5?0uoEh$3NO&4-f zmry>;)oo*P>HDjBMcwC~c$Ml-&72}JIq#l031!N)4xUWTx+mv|F81^{LAoa%qG>4h zTr1E)fuaK4D$oXj`UMIJiZ?3iO61_QYe1lOf&TUyt#;7|RK5$NI}8Pb!UOZQJ;&T@`>0CO zJJk7fOCw)|d=%@k`Cq7p%@Z{@k1WfjSWKQe7^P;XTK;|uMCs5tcQqe~LUS9TeLq?}NW%?uj42hd#|MR60hxXz~a;Ubqvydo8(-(QJ&!C0EaR zj4iC;<{MGj)kgR|a(QZFJ8%=>QvyrCZ{u~o@T9;(J)!EJyu{Zp>gR&4B++wdz-HKidIeh0@Y{uN)36wf$};X=50#%Zi9o~avMa|A0! z%NSXB7|IdMf0y%RTz>|}gS5s7KFS(9E%(;%@m-bM}^%H32C#ad?X)LVkN5JlTX5Ej^ zl4Iz60%aJ>SI}1F@Py>dLeux)9t-@m4T}$vsmEXC<{@Z88{Cy&qo{>v0 zVBqh`7*y#DmVLVAP3Usv_2R-%O#d2}w6~Sx#kLJx^7hJ)zq7bVZ{FiaXtK)t0I_pt z5D%5(q~k3)b@2m$2F`?)cn`J_JGFH+w@()L*R!Pi`mfgG-Rb_teED)-TE`1veEKvQ z%Q$4BlErpPZYSe2DD@x5qPXuqTJAy3+GIlG@AHLD*a(_dpOUoNjKgWskOWKrAePhe z3@p#UXJrO5`hMLr9KY4$@AM|pX{K!5tK6Usr*uOr^rlr^*ZAML>Tos%t14FxE%^`F ze7pTz*xcK@L2BQ8-OekdkhjYlu-9g6x%{&|Kb7^Q_ZEHQi23Nd!gBh|%m4!LK4ki) z6NB7^+yH$L@>j^s&`&_Vh`a{+Dadz`FN2=HfU$2QhoH|ueh;}D`u&g(AV;Aef_xOY z5BeZg#WDjf11Td3krg_49;bzt{J0w4hl{qHLMvr&D(ybksmd5*2EMg zKBg)v?jBMG&P-s~6f`Xh#8^9UMonc&-@z1whf2I)_}Ffyq%!%@f~Ms8GCacKYC4^x z&-ARp5#K+&FO^kNS@fG#)8H<0P*r4Jo3ZO1qQA!-N%Y0o3(icn{)#$^NR@qLy5YJ_{lP$mN74UT za_}n{yf(+}jyd=l@B09r?C{}(?H*_ByZ9nlN!?eEWzy1Kt)QoJ*`8Lv*Vig(*?2CI z$`1Fm?i{$*6KR!n1IwhQb6KsYb-$*!UVT+#H~rTLnf+%RWkD?1L5i6wjrqbH5rdP_WrJKf3(ZGoNwY^$eO4DyotEz9w?#p!a}{$%I+&iMLHPsFGBJ)uCb%hMGe8uCPf316}^;Y+CNxhE2T zGOzw1@{Q750Q8$wb(v`on4$UmM{`cZX$H@<*e? 
zw%Ku%bX3lgtMK`JIeq91(6ODy6#*{x__aPo0=SU3F=8pq&Y;T9|I0tMy;ZiW|)pqDgO;OURq0SKf;w6{W z(!qeDA^=n~8spJiHMZh0q$p4*hE!@nrNCw&sf=i9Lg94-ijRkZX8DrfQm{+HLqYP` zQc`Kik3|%3PwQ=StC`!)M99}Gm1NfgC8(#vl;HXC5RPOt0B06 zOCnQ2T0-!ul|81d*zwR@05g6zODw^;U@9T9Ks@*ss%2(qBJdV37}cq;{W-9#h;22^ S@m3qMu9>+7L|5W3$G-tkOT@1L literal 0 HcmV?d00001 diff --git a/tests/data/ldd/clear.macho b/tests/data/ldd/clear.macho new file mode 100755 index 0000000000000000000000000000000000000000..8de24d5608beff97f78360325b58e7641533f648 GIT binary patch literal 68448 zcmeI5e{>Yp702JqCLu)BB>YGKF#`k*HYBM0F0>nBYEWw;MA%d7nQS%#*|OQ)?9O5$ zhK?09GvpYg?pZBSa)>Ouu(#Hkl08e_DU^ zocj)U-}k=v?w$8OZ|3n|-fN$n_-=AM%xtZ@etDk&(>6aTFs_F{`qkRCK^UJ!`%y*lWg#0fn zsu~O4Ez8<9!>JbWqd0rXjhR$JUW>%nQCz+-8-W z7u8f$HQ-gFP4zXQfEv~#|1n>?Ib4WD|9K5S`vu89)T&zR)xCWU&iS<``k2rt@{@BR z-&s_(CffHkoW0&e!pv7`3$*96UhCf8zVqXnIoDoYcEucXH_F7&GZ?!qp_^pHQK|n&11hJ!cEEg7+C2=9@f>g=aKT4mRd-54n(|9{q zxcy3XiX<-U*J3*7p;BP<{TIigk>Y^Arnt5#Pz#^RNEppy#eEBU8t$ET!-`Fn72c|i z&rvP~M$Z-!W(6Z{vdoq*fzG5ps}x`hYFO z{HcI#9BVN0y8|I_trk&@nG_Wn$c#bw83m*G>NLdB!Ow*WFaajO1egF5U;<2l2`~XB zzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XB zzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XB zzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XB zzyz286JP>N;ASuvosdue)Ww@Y5r>5|$`xkQ^H++BYaVlD2g zY!^3*E-^t=<|(cNr_lBh`g25lF8ODs2+#aN5hi`imDBdIE4}TAYjE39*O0b%T*KOq z8#yrch}59;^i<_lN)1PazzwMJxwuYrt%4fYlCB-M&n7-bMCxcc+w^Jjy!4b;;S(BT(lnwls?S!Cx> zEalGGFyayDo@d3r&I$?ZmRq}T6Kg2%{$k;&mfc;X3nt5+V=mEk|2UCJ{zHiIqEES4 zOeB8`e46`4)TbBh1jD{uop?TtIWR$ZS`d@|KuN1I6!pMFQ(mfQke=T#Vz+GOO(33l zZYh?*-{}Q1 z+Fe**3ieg%-Pdx=c{292Ww+SjhCYfM_aawZIy0pJKDE=jsJ$8O32|o1GiW3J)V7Mo z%4eq1Sln;gUXbOQUM(kP6%@+Gyo3~=<&{e|`^UJuYSI$3296e8k|b@GQ{%HX?so6! zUU~)QRf_q}ZpA#;(44DM#3W^;xvmoA_2C&C@yxh5GnJkhgs*(wGi4^~!2>(c27?VES>9lwB?$WsGd^z{0`R1BOW6tH^PHX9`cpA*{E|iKj zr|>PA@8aM#d0>1l?z3HsYh))G@nhs%ihBoh?p|5~Cc9gvB1ckFE*dv+r0fxLo}n{s zx?@CN`(B%Fx9Mh^4%>94O;5DxOq=c)Zp&}e?Ka(P(_x#gwCRa9ooUk@xwiZ^-EPy( zmR^%F2tffyu@3AzUE7_7c<^+wg^l|7* zE&UH@oHukYa_+m(2sQLkXuKnaJ`7!E>9?TgS^5oVx;&U-4?xo$;G|!NUS{dnpzpTy ztI+E#{Sx$pmVN;m-?b6@Ec7l*C!h~odMES;mVN@7E==|XG;c&ymCgO3sG2dqQCuJO zF4ATwbO_ECZ=mqF)m6GWEmo( z!VH^{p(qoo6w0hniA6O2d%hEJcR~uVFFBm2`~XB zzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1egF5U;<2l2`~XB zzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)AjFaajO1io1UgO&^9uLL}P zi&;Ns)wx!CzEw}O>X}wuZq-#*U2W9?t8TRF6;|DB)qAb{_gQ|6Rd2KEx2^tTR^4gU z!s<`bN;v#vJJ(Er2`~XBzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)Aj zFaajO1egF5U;<2l2`~XBzyz286JP>NfC(@GCcp%k025#WOn?b60Vco%m;e)C0!)Aj zFaajO1pXfhWQ`To!%D6bS#H%Ai>SI5sTbEpGMlsAs;WlyS}hU@i}9MfE3vrF; zjOvlz<;5j5=srButLsMion{N7k}b^=vgm${K_oqVozZ|t8qe5BUM#Aq*0fuazU~kC z^Z){kBB^S99RO**n69aHnwNOz$lum?vGYSIE?;wxl(JVYq&be2gqVqXc9}Po{1n*v zQYJ~`?^5KNb?-gwR7*E{?r4j+G>qTlmZIQ)AY z{#u8BpTqyH!*6r=TOIzB4u6;F_pbj{NB=>Gf7Ic>?}$I`@Vgy;8lGhSLkCK>>Gbj> zK^r>&mJS;T%YdB&8w5KSb{_0}814FC*bvxI*f3ZQjNXCauo1A4unS-=7`-cLuq;?M zY!vLPdLB}XFZ$AON}3kZ~Sa=g(P(e8gKDnNeIk zdCH}eCQrJIfT4VHcu)d6)d5LcMkODHZIa$JcU+#2G30L-GVPvGYVbP=Mh#g)N~<0s z8Lx}cNk6`ThGY4&OFCa3l|TE*<>L!?oG$6Us%_fS4ZTBb;=)fK41ShBU#{GF&|g$g z|L0A!uKd9T$3LFcdEUHHr=HomxZ<%rzx;G*XZCA&xh8tHOyx0Bk9a}b^9CzT| zJ?m%uub=zJ%NjDLrM7IEG&SolwL=cc+w(Sk^4~n;!^KUHs&%KzPQNy9{8ihk#ymcH z-4^T`zICw-Mm6p35VWBZbuen9kCS#V)!Z#8vyDD>>Z3z|N=7tR^ApXd7nNRX_!LWp z4hGU0m)u@{`^Uc8G}-Dg-x&GVwm0TxjO+KtDDoG*F^7VWzVzmbM;iaNa>gI#d^GT) z?`G$3e(%E<_H=wNZ*}22SuZ2YmzH<$-am5vtKp=qN5Qi382qD1=3YZ}?;SId}aWV;@kzta&BqNiVIP(TW z>p2@6u+6@vrL}d{ZdYt~d-}(9skJJ$NUT<*fmoc-}wPdwD;>uq@>8Dlb(L# 
zi8rsE^)GvkQoWh&ZAZU)^OC*md7ZyEh&rH@?A^%W?%ewsK3=8I+jV%GUKjA%AMWfW znrB);vzM{1o5!(tUv;iCU{0_K=LPvij8$Od6Jpd+iQ2*2gLza@JD)K-{lH_?Y$rNU z;uCS2t_7$-GOFCZ7-^Ony9z9KtY&OE5gcUfZuD6{XmN%#h-U|5#i(lCj9tVB&hbYr zaIAE)&?;4BK*iX*PW&K|Y3<|;mdDB1rpkC%NDVP|*Hlc@qLib248`))Vy04nu?NRt zViO9fX(fu~sby@mvR?r5HDawC4inY~-P+1{BHoEXu9HDDNC~S@EKeex+vkd_tAH7Ep7>_jE?kbdqRhMI(H1wuZ(x%HJ zC6_H`Odhc>MI~wPOWND1TL$4X-kSZA_PnG$C2OOpTTWp_(q;p^P=HV8;50A5-Mcwk z?RkLnK@;P-D$d|NRDz;`LEeOsT#R1}jNfEq{2(t4E@?wYxJ>P?DoImaOQoU5oIxpd z)L1LkZtV#vHTUEq#>D7PjfM{7tWAY2W$kBzhCB6@a%^W>OtYj#TNaCl1a07hsC5*DQ0exwWodBGd}HL zs;u(dMU{wCv#@F{w<`Hv+Ux!{^4&l0oK<Wo{XuHh^v-_swbf+g3|6=w`}3hkoKnql1dGM9>#E%mVPLPx`(&%$ZMr8 zjoi)0XzmW84Ica~0gonR7lT$NLl+oKw&pU48caN{YJ-D=at|aJRV06vM{8g_TtA1j z!HDy72?9xhPrIhdr)^&9(_Rs&Kyahs;JSYs`D4wfPkX^fnb038gsxp?|M6Pm$aha9 z3CBaz4`7TR`8c}tBmZtOM6$qR$MZ9Rrf6V|G=fphfxJk>PV6`%xtrSpA=A;5^1H+0 zNUjm%B3?w(KuC;qfY)7wn7u!Z$c7d0V_?)c1`yLzJLcI*aDEKH+n!F9%>rR@223Z* zPm!UZ#Xl#%jJO7BPBv&FyF`lcs`lILv*`)W;{=^eb^HXD zwbFEM02wII>1n_v5)Sx5xDrni8#F9mrn3ZkeJ)kzfJSIB{187zX`(p9ameb2IHien zJkO$FS`It8cT>hLYmfUlNNPtR9n!jE)yqg1Va~=ZwaJb9EmP0duSCINEqa|921FT z0;gL{B-`1MmI)-n<@^4rJ%wjSvI(TCOeCD)XA|$~OSpIoOeE*ok?s&kr6y8s1`=1D zxY3!4Pz2KJsPr`rW*~{dEbUbZq=P0BT}scQ@;_DZHSMw>!BA&GdR!oV-GXF0E7F$* z(#;km80svX+6B_}X(abg=oO2=!jS0%N1(Q)Q8QTOdhFYCxF+h;@O#)` zDwTnh>#_X;X?_|>mkit9+!f}f+%3RoWP!VPp9H7VVw4IDmV@!`nJ|9i>JqC60SH zayFBR^*zoP1jbDqgNMaDfS$(5uyPR&GWC6$*DVKXThe!#>sNrU z=-%lhM5{e1uBPp2RmG(O<7SQ_0wBEsbMGD$7{(4YcTa)9=*q#s0P)DhI6R9>wJHar z_DmRe3Jhls2F23T^5_#7AHcF=oyLC4KNF8D1jex(3_7?@%VUDT_<0TndCzGvp2r(* z*ke}?hVK8ly81VPacd5SZb5-@4m&a{Ulsd(wV*yo4wvB{`kF;(JqPLs;O~^146S&j zmM{K=(`vuAvn4;Y~KrDye&8ZqexoW575Kly;R|DHB#~u z;aa;n3hq}NZrFZ107~yUwa>1i!F7!dD0YDvfed+WxsnmN7fQFk%o`ap#RP-ftTJr| zf+^Fr=s~89RB@S>3zWVLnY6M?0RV|t&E(*h8Q}J};3NDRrmwL?z^x?oVf(Z+Tquj) z?^bepPZ9dC{XO(Jd%HISZrDz6To>O#!>Ws{{E^4Up%vQ!#eDQ)0MX!ur2Y7_AJ7p; z7unBXifmNhobo0nN>%xqQqW zvdrB=bAr?KGjh5)gVO|)lMU29ZJ}o6MDLcnEprAZlA;iE_pUG(rVEJCe@&0N5&vkt zxJH!9Biv4r}tmzarX9`vf%z*z)ckN z_NC#J47g$Yk1)e+){n;NY?foMH2}=8*Y`wd@|50B_F9fapNu}tsDOEBoIu0*h9>Q?GIDM{oyFGSolK;M=|~3YXE>h{FL5H4%`390Jpz} zU1;^2J^^=!fV&|Lw?6}J*sfrP+ph|ZbFyC%05k0OzF|MU-~POm1VKg#nJqS=W8$^+ zRHpsNw$Gknj%@o#dM?)x#KIaL4j?<3H}rlUj=WWn^Fe({^D<)Wfdk#$r5A6*JY>D1 zWH-L+CNGp8dBoBE-Xp10GXD>cN{{S!y!InZ^ZQM?e(SJ*iM{|2;guqj_EvxMrG>YD zk_hZmi>3Dt!5jmaeq)O?BD<a~&mUcOT0D7r0nYs|x z1eCpbVx7u7m z^lp--s^~3cEw$!JBX$q*MB6C?zgfojh4qpKay9#p9|qbR6%uYfT!3M_gLsbU-Jwx@ zZn1xZ&^7x>PQ~6+MLpw_oV^#L`|K+*IKjq7_t}@C#SN+1O9&Eddx1PbZ}+FlzU^Ra z*!~?1@Ne?R>mfqT{#4nm7>5QfyO1>SWpsvCd=@|~StQ_Qqn#=n#Kc3yO46Q4m3@1mmKzWKDtJm;*hjDhb9$D1#1gWx(iCx;?n!w1w%(qNVmxaPslYt_i6j- zGJnl$;r~sgxF0uYZhTn4#Np$^@C4JDA<##U(161y>MFfnqu0H9{aL;Ks$PFvuOHOw zXY~4*Ucax`MffmD&pf?es@Jk!U#HiDdi_GBb6A&KB-R2GYTHJxa z28Q2)VD(LY?`nTjYojN)N^bR9`>Xw4FTSIR#{%c~dNS~QjR9|Ciw^|xta8__wbHZn zGw4~yIXz21gI--zLqk(zP;Ofll)Y}Od3A8rhJcq@#shx0Pfkzh@>=Fs`+c6E)a3VF z-P9Ox>-Nf?m)pGlfUnM&w$kVL{xi?}y_jq92HSidFSiAWV4)A)V!jTF#yIrY;0^>E zye%#6)!tyezo{YEvY{p5#TP(&V7H*Ju^Fb6+>IWYQ20aRFmTQUjl-53z$S)IGNMQ*BF8*CCLZ)|O_c3DfEze$$OA#Won z*Lgib5F!$?w>}Vb%Yk6CA4?+-5f2})3;5-%3BPZ(bUFkvo{P{T`M|&=9#YkKxFe}5 z3FsQElF@J{el@hNH$*!K^2Fjji@K8uB@s);JC#M(g`$b@qHt7I;?YoK(OOhWq^8tCvXX2tNOl_9gC1g0H6DwsRkpUMAyr`!rJK`KWBn{1?&+mYM@&^? 
zeT?~AJS}@4yFE~H`!wI@UY&V&`_~q8G92kL#s)sMWMN#1D4~SX9tn2{H2mUc2WS)hydoPbT2jc0 zs>k)`73Z_!ybD>LEsxoddghJGXXC2#tMaB4vMF6Cf$?n0l5uQGwWB|;yoi-|p#&zd za^RO&PdPMc6yr0B*bH?7n*sP4fS&>Q8P!uq=YUr~KKkE=QoNe6ImGv!Qf5CkF=IT> zQOF!i9Ly0fK2$VX*gt;LxaPdlajdj!JSzpQ(&`KPr%$u7!nm!!U{ik8kt)V0$IF{q z#HI!=WK)4V^(&K(Iu1=7E$+Wy)AX6+Sn-k)RvZ_!((tp3*=)6h&F(@SxPZ-GQp9Ha zXB;g*Gv_IBf3>1ECi^;+cvE*> zESiW#6koJIsq$T!U;$f;f^Eph1NhZxw-LwKd|OK=c3ecCL`PAgtCHQ_N*upo-kw(< zNhW%&6vZ67HxC^3@dnz`tShfR7VitG4N4*r>QR8b*1}c5Txl7p%Zhn5_I2^Fs^~P= zvHL9?nvyCOQ`gMcW?E^B+9hFAN5Hi*uRffXUI#-w33Vdmu49D+Er$}SHy)26M1QfM zSy47ym$MGHMRO(A<@=)DF~*uL5}Q)ASw;h4HA1UhkSmF)X7=L(zY^-ox4luJjgY_HK8hffpDMVPC)Srwb)a(fD-QuBbX{WIAf3J(qJ#!TBE(( zZe8AiPNkpDIuIej*bhmgXPnM2@FhG-N3y3!i8sfU1RXUWv-#CXTR5&JLy^`foqmj6 zOk;8^wmI2fpNw`A120Y#9DC(7Qy1!2lW}q}{r?YGcb$_Vx+o3>h% zLnud42B(2mIbdgi1`54Pbf7pm4p{t@Ldlv614+ZekVWdwyZn9mBBjoI-ltm9cBo4|_L1^90eC2S%qg&Hqplh|Z7 zg_W_XY#J+P)7cCx_98Zm&1M%fC!2%+(s2oMF@9)sw_ele3pPdNSg4Dw32g}lk!l6a zR4d2SOU{wb?n$kqbP?OJNB5_>D&4o)@P)WVtsXz z+aFx)YpicNXCbo)G&Qy~$=={%WZbxa1sAVst*`ffSenQX&r=?88Cfi{%)bf^BY2Kk zx0LfP=9iY>hm%Jn>qH|k_7z!Aim@yq=X5mBqz^@gt~~l7J0?13NGHZbBQwny3#xh2 z^L*s|P}6C^`Jw0J7-J-3rZZzi{0sTv7%7?g*H{p9Q?IdL=cZ3%!9HV>doHIzI#ilT zbXM{@CQ&l;=JP{6FCR3;&%6Vip9nnf9h3Sp^ZT)I4K5aG?pSbm{yY|hb3Luo?x1Jp z!5}j4V3(er278fyQU>(V^S9H0@fq&mwa#C3KSQ6(K6?J1G~jgLTLtb_PP})b`;|Ni zcOA6pp4C2Cw5{*X=p9<$6x@UG4w~xxGRBUfO?R<7Q4XL@_pv#+gS8V))K8<(INi$z za4)+LZMvV8<92(%b#0{|DtZz|;S0{{V%zKaP9Z zJVECWYX5vOz6tfqdiwgN4 zKVm+Q<)PPnn(=&$<_+qpr_)i zU6@5#52%G%gejmLW!)?W&^N~uOch&#vW(Xq_z7RK73b&S%SDX!;hzF{l_fAPgF+|o zC-Acix=doO3+*`CDs-fvcH_@Gga9XcnK%nD-^Sq+pcMnQlU1`yaCNeIX$lotoGRFS zb_F1|0wamK5A?U9j-s!kPC`23OcI3Ac4B>Etxk*&aGCo#Bx%BlKCP@D+6aSEl+Qca z4d@XCb0wLwS7K&EmIN`@0h|ypiEc=z-Or_qL5efcB3Kn~If{7cMO%eDm$OCqcLV=d z2?$e_LNc;?g6k!Y*7eX=7}SIXJGiXG zOX#B$d^g&D%i;8*1eSgRR#VU<{sV&46+gJ?j*Ml(dYrr7C^-@E-d2YSaEPC zO&U9obV|6ik7*~+Ecr6ok#tJ)XSWVQIsODnDdLET7v!^1_*5fmiXHU$Q4<%5Q@@An z%Lk}V=zyM4tUE6qpFDt0V2*r>cCC3B)=1W?&SR~7!&e$0E4`ZB7>o~9?*)S z(2U`q=Bi0MUSQjH4kSm>DAB-d4b3T z7~Y{%k|HcX{zq}ey7wm7cG;GP5(%ZRBeK;w&=-j&R#YV8(aRH^y-Hswv9K?UFNBF$ zx4N)1)^~X*(O0=;afK7#mBQUhLT$sRc6?@8QL(tPs$ykHk<+3ffi-))FEBlL4cMJVBm zZi#JH;uX$h*xgBAvQ|`dhaw53V&(EhY1)~{Y5Eq39PY~(WyrdGQF_rpU%trD=gMYx kv)eWRky>994)l<%_v%?&Gh@L|=Ce_&5;S_ly zjBP_0G(3l~e8N!-$l|C_a8YvDMzCr^2DrSgQJhN|^Frn!z%3-)1~?PY!`P^5kccQ7 z8T$~x_#(zSsQw9LJb5yMBpWnIvqHKnqy#BA4mW`vFWX9ITg#LBHf@Y*P_P zAh0=jC^lKe1Fy?8HdZhL4Ng3W6(gUriifduSzKdO`v`){r^no21=&+3Cd2>;YD^Ft zR*2^&JXyt~@(Ga`7%4^+D^(xa&nh0qR%KQHtWbVngT3G$K6To|%jOF=GRC`}~AT`Sy>}`DNS&wh{uunhAGGGELP1hE z)}OXKJ~Y^%fO+W@FUdgrN%C4*atAmKEuoO*#vST7@+*oKq5XVv9zQfx9J(;1DCd(o z=ZE>BvEox-%T-0lX8&=iHubtmT1u~LQ*WE3!u>QoREwEUp_dcpj1R!_owQ18k&hBQ zMf0N-{yjRRRu@7@jVV7M*^BvshwNO#S{OhO&wJ%_uCdI!P_iSPEJY{Le|)5jBiIpQ>S!otH;50Zd>RF7?Vke42z>GABQB4nf1lu}H|8Yp!`%ypc^&LPy36)$wa^boATv^2+PP^=-4FfYw1B{%7rw7D?L zrnC%_k7$y~nS<+07!T(}iNUK_7f<>=k?(quTIFF%)GZhfPs4q_`-Trs<+nP9E%HVA zXS{q4lO}1-{5^u;l}F^~c}k4Q1gF6~_33vN1u^%uYZKz6YfP~|KZ_FVkYYU!M6o`j zR!5)4pwc|!)?;SxA=-c!d6PO_bKnDrS!Oea8(p7qfKu?tGUWTO7lQmv)OZ;w!Vj2# zh7scj2~VYwapwD}E}Mr544BC5;B(loIO zNpd95M3$%Ma?#TZKp*htaR$-E%SB%>h!C8Q$(F$(i&n83TE#r=YxR})q+w>s(&86Vaa^O8i1jxm6_K6F>U={g$8v$ zIDq=81(@Ibp!xg2hbJNQWqt)$_*6k&<~0q;fi3JQWGnMyEX@e?B7v9lmN;q1Wxim- z%&!=gN?hr)yyaU(JD@Omrc&Dj_L(LwP)teoZ#3B%nS3li6Z114LmkWIK8>7LnVy)D z`35TcOsYYW!Ddj8JllcIL(^yI`ZD;iMUc?E%-?F28JW)jnK+##pLrOR%vQW~qCURJ z_6Y4g#v@4(tj9XfRqw9MO@c5~*K%8J11{rtIJKlK6-ZT?%nV_pg2 z{;}`TT(dL{?=+XwLjOZ7gPAXYO^-lHX17+EiFkbm=)~*0pcYGmHiuxrv#tI&n8Qpy zY3bhwn0m`(dE}pgWZuEvfDv8V|67pk=Q4V^DQvhPg#Cv`sS7I&|JL 
zw@3v1p}7uklRA8!^}`3?34F$jatd4Q(EiqsG8U>ecOqMpeSE;Y69_i(W);jX-d;-| zEJ@$~Sy+xjwGE}o4(}}oE#GY@;Riol%I6*hn$Hi;-sT;g{Ud&$#NM*vm{cm?G26Rh zNGz0B732p@|=PD*(@jwk2fba50S0Am1ZuW1x9Ka*e^PAw6L zdsy9HI{CpT?VjJEx7$yr5f}NSw(@@*lCYR3A3Cz^0C1N1S4?DhKG=G;VgFG&3)n%X zs$yLWL0tLomFNMEQRb| z43-*8b(`h~um1q;B!l(E02A|YYNG&PNp+G)YPf)3@rrm#oB0?+pEeGL>;x?jgY~}z z$d>&Ld2|KYgOtPAh^CgIVi&g7uUw-YHCmJPF(r(d0!m<72U$o&(k0Hq`mA2A=8 zr}cBh{B2-Zq1C4A5}m}cFmV^unU3;9zu@>;M|t#sqq7KS&#%i*R?6lSWKyFABW4i@ z<@kU(4$SlB2q#^W8s$dJA3+tPhK759(U>izv$KTniPW>j_?}X?NQ}QLU2EpSf-Q!o z?qOxgH<5ieMjeoB2mr@kLk-+z}y|AdMc;Jwc@eE8DG1rxO1x5^FJ zHh$D1AIsn{nY~-nnWtGU<>j9>q+Y$~mB$+1WO-ism~>5p>?vpswB=nm;mtk92S&Y% zkF^Ab?~qH1dAYF|+b}@En@y%%i!{sf9l+H1S#R#B*99|U>rYwFt0zn9A(g4`KV{t0 z37-((*LSsgXl=UpR2^28!;|F9N8VDDU9C7KQX4nfhA*?%dD$c*gE9AMjJ$(I$IdB= zS8k5Woj8KEZp;i`RFvJ2bYiYSqblJV+g^jQNn1WQBoBd(5d7(ugCEj->u_lBHXYh@ zSfIo6I^Rn=%+cXjb@*8wenN*Eb=agsrw*^xp%Ia#HyfA+Z@?*(%DX!KwM*K6qPK6; z;a(kf=y0_TZ`I+y=rE*1O*>gTd)-BZT`^In3@BNI1{~_cm zG<|6u_AS;F_V^RNWHKD|bqi5J^h<(I3ik@Wgg+#x44r6e6a_RerZQf^592Nq)0W+j z#{=CS`iw#`+GYc%Yr7}W_T(jPMWA~ioJLz(Zv#myw2b}{N`dpr@pUfJ=ruaL%=910 zpUdp%gf;C=?LEg!Uu}M;?~YGD`drc7y{qmg7aDKf(6zzemGA|swn{xcelgUyAt1(L z(uR1S)Dw$367fh_@>NvVy4|kjwN+I$Zuf@GG4X*7ZH-;Ya3r|F-y5uUZ&1e+m!e&| zJsKl=s9?Wiw>Nfp>2CHY?jh7WTYGPm+K%EVjb(q5W_LQ-FF@NqNyDu&r`-XUh3U@K zUWF!x_^MaEWlm=x)QxdRoj!ic8rR^{ps|5?_~|s9P;osyjn<;_t(#6$`8H0YwW)kP z(`hPSd>XAo<=Zlyrt)nAEp38-`aLw|b{e^=)$LoiZVh&!8|vM#+7D3**jpM@eFZn^ zeN$;4RrM8T(>gT1(rGk}j{~ik>}=8QtBw2mGC`};y?qd7_1IRD3*(@bPSIRMy9gTH z8|i#*qS2aoSMDUOnrL)qPQSMp_vpnHki{-j)|E)8?H=uS0ptFhelOUA2KAoY?gTgL zpq)SvbeifD(XUqK&(C9p?m|{5=EvKRKX8MY@=Ppml_mw3*j#G?vr03WH8_J=-TBN~ zId}XlzH1O3wgV1yD`V9+i;qS@>r33CM&AUZ!_!}a3;mX#+YI6OE?YQynwMgVUM6G z$Ha0Fe}jKgr`G=|8&nrzv!sBPARZ+hGgt{?Qc_tQpOn?(|6kUvW{k(v%xc=t^2v4~ zjb%P7tjv#3!_^od?rt;2YF3=aiDIVu?40tM=v45%r4Z~w?y7vi39dtZ1D=g|(#05O zKz&y#i}wXtTVG2QYu@BGwPhK#YsG_{-^L++S+o z>?qMh(bq0WtHoIFJ>h6DwmAU`^3l-KB!qmFON_l~Xjmg8q?l+ZVcIV1s;E$1D_}lf zqc0H`0^v|N;Op^6gAqYwYzgQIDd>~>;({+64aIyxs1aj*zDPI$?YFUBp*JA_Tidv% z^RAAT*5(SgQ|oOtgG5-81hJPbpTd0tTgiM0p~n{rN6_e_(3CP?AQlaUyOW~ei~2<| zw%O+wyEoy!K$Ao)5i6rfh;9mtv1l)X%$R!`M=UCA!5otOT@jr4Y|JNY5dujZ3#T<) z3wy+e^)?=M^d=me!%;^-lpOdap3aYsNQI-qp>^cn^43U^Mt@v_Zx|T$ci)$Y=8eT3 zNXAzuqX8)#i#A6kv5&D=sby_gl#>2PXB29&$)(MmQ7xpw<}CpsPVB4I&f_-|Gsf{f zo4K6Z1*tJ9is)&Z7z+s4%)V&uKuE(;9BB;_`|I46gkFh>t8v(ExgZUSfC`U9P<)$d+VLwUh&q{hFxleF;hE?LdGU66kxdV6j}vLT&Kcy+|3JGPob!HVIwH?ruReyu7n{&0Wo2SEZIam#;_v zSCk|7a>qk2D2nk5fU&mIdQSUyIXB%iqGy-1aSkum&gHlc`hyDDpt4(+*J@MmOYKdy zOverzeoQAbkNzF8?LEFHE@A)kt74yi1P`5OF$V10c-+9p0Zll&d4T5ucHkKWPOul} zuO{GhC-p9#oxtg1#8rat4&d~^F%RNlAH@F#0Dc{h9XP>0+;|oPe+ckD@RR~S3;225 zx_<~b!5`qM0e%{A2EI2vzzYBy@Wg>P0dB$b6mWurc%B5l8*o3K!@vn1!Se#}qkv8v zydMKja3h{)fD@GPJPVxQ_wj54o(6mj=eOO!30C0z#shZ)uEVnhIKdR2oxnc_`0{m( zy##z5a3OB7PXi~2`vP_m_y)kA;w+knGYXxfd-2$T#{m!FSqL0=3hFtz6gZui>CC$g z@gzv+)--T}bf&xroL~S?5A^@@MkvEV`DUpoF zVx4g2U;o2?~PX3c5nhk7xDiG6k-x z3Rg{6t=~}gTb@{KV!0oRLRn`bDN0qCq*5P)$D;!<^ z+JGZiT^n@!-TrF9RdZDZ6`|_o)q&;JjvA*>;c!>FY8|yzU0seESI`-%4myMWTn6Bq@GtdAUBB$|fgM8C)uFZ*3{Z=vJ5O=XOJ- z?KY&FP**-pqnSwu`M%x;Gpc*0}mbEh<-py_2miWen)IUy zya^Ui{fkfIo35YyAicN)V>7bk*McTO!<2p(I4ba0#;R0%)rgo$?XRTrM&VB+t0&X# zG~s`Dy{eqk^hwBhA=9l&kOy(}nIhSZeoNrGoW`+lpzoDxG%J`x#VYZw$o;qCRR+1M z+i-$o6@0uKz6pqM8DdEzb~VWgR*SzX-8&~@MON-Y=C!;JIqH?6fyTWRvSo-x9sZHm z1V6O+P_|K=jeMh;mN`CTMG`R#LW<%Ugse8EZIF$jo Date: Thu, 26 Oct 2023 01:33:34 +1100 Subject: [PATCH 205/366] support stdlib() jinja function (#4999) * support stdlib() jinja function * Fix typos * Fix formatting * Fix func signature * re-add compiler to 
jinja_context-namespace * add test for stdlib jinja * Fix tests * Need platform and arch as well. Due to the order in which cbc.yaml is parsed. * add documentation for stdlib jinja-function * minor edits after review * flesh out explanation of how `stdlib` is intended to work * reST nits * avoid "resp." abbreviation, reformulate sentence & reflow paragraph --------- Co-authored-by: Isuru Fernando Co-authored-by: jakirkham --- conda_build/jinja_context.py | 45 +++++++++---- docs/source/resources/compiler-tools.rst | 65 +++++++++++++++++++ .../_stdlib_jinja2/conda_build_config.yaml | 8 +++ .../metadata/_stdlib_jinja2/meta.yaml | 9 +++ tests/test_metadata.py | 27 ++++++++ 5 files changed, 140 insertions(+), 14 deletions(-) create mode 100644 tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml create mode 100644 tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 61219be134..9d507e43a6 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -494,34 +494,42 @@ def native_compiler(language, config): return compiler -def compiler(language, config, permit_undefined_jinja=False): - """Support configuration of compilers. This is somewhat platform specific. +def _target(language, config, permit_undefined_jinja=False, component="compiler"): + """Support configuration of compilers/stdlib. This is somewhat platform specific. - Native compilers never list their host - it is always implied. Generally, they are + Native compilers/stdlib never list their host - it is always implied. Generally, they are metapackages, pointing at a package that does specify the host. These in turn may be metapackages, pointing at a package where the host is the same as the target (both being the native architecture). """ - compiler = native_compiler(language, config) + if component == "compiler": + package_prefix = native_compiler(language, config) + else: + package_prefix = language + version = None if config.variant: target_platform = config.variant.get("target_platform", config.subdir) - language_compiler_key = f"{language}_compiler" - # fall back to native if language-compiler is not explicitly set in variant - compiler = config.variant.get(language_compiler_key, compiler) - version = config.variant.get(language_compiler_key + "_version") + language_key = f"{language}_{component}" + # fall back to native if language-key is not explicitly set in variant + package_prefix = config.variant.get(language_key, package_prefix) + version = config.variant.get(language_key + "_version") else: target_platform = config.subdir - # support cross compilers. A cross-compiler package will have a name such as + # support cross components. 
A cross package will have a name such as # gcc_target # gcc_linux-cos6-64 - compiler = "_".join([compiler, target_platform]) + package = f"{package_prefix}_{target_platform}" if version: - compiler = " ".join((compiler, version)) - compiler = ensure_valid_spec(compiler, warn=False) - return compiler + package = f"{package} {version}" + package = ensure_valid_spec(package, warn=False) + return package + + +# ensure we have compiler in namespace +compiler = partial(_target, component="compiler") def ccache(method, config, permit_undefined_jinja=False): @@ -788,7 +796,16 @@ def context_processor( skip_build_id=skip_build_id, ), compiler=partial( - compiler, config=config, permit_undefined_jinja=permit_undefined_jinja + _target, + config=config, + permit_undefined_jinja=permit_undefined_jinja, + component="compiler", + ), + stdlib=partial( + _target, + config=config, + permit_undefined_jinja=permit_undefined_jinja, + component="stdlib", ), cdt=partial(cdt, config=config, permit_undefined_jinja=permit_undefined_jinja), ccache=partial( diff --git a/docs/source/resources/compiler-tools.rst b/docs/source/resources/compiler-tools.rst index d206d1c947..d4832b5a0c 100644 --- a/docs/source/resources/compiler-tools.rst +++ b/docs/source/resources/compiler-tools.rst @@ -394,6 +394,71 @@ not available. You'd need to create a metapackage ``m2w64-gcc_win-64`` to point at the ``m2w64-gcc`` package, which does exist on the msys2 channel on `repo.anaconda.com `_. +Expressing the relation between compiler and its standard library +================================================================= + +For most languages, certainly for "c" and for "cxx", compiling any given +program *may* create a run-time dependence on symbols from the respective +standard library. For example, the standard library for C on linux is generally +``glibc``, and a core component of your operating system. Conda is not able to +change or supersede this library (it would be too risky to try to). A similar +situation exists on MacOS and on Windows. + +Compiler packages usually have two ways to deal with this dependence: + +* assume the package must be there (like ``glibc`` on linux). +* always add a run-time requirement on the respective stdlib (e.g. ``libcxx`` + on MacOS). + +However, even if we assume the package must be there, the information about the +``glibc`` version is still a highly relevant piece of information, which is +also why it is reflected in the ``__glibc`` +`virtual package `_. + +For example, newer packages may decide over time to increase the lowest version +of ``glibc`` that they support. We therefore need a way to express this +dependence in a way that conda will be able to understand, so that (in +conjunction with the ``__glibc`` virtual package) the environment resolver will +not consider those packages on machines whose ``glibc`` version is too old. + +The way to do this is to use the Jinja2 function ``{{ stdlib('c') }}``, which +matches ``{{ compiler('c') }}`` in as many ways as possible. Let's start again +with the ``conda_build_config.yaml``:: + + c_stdlib: + - sysroot # [linux] + - macosx_deployment_target # [osx] + c_stdlib_version: + - 2.17 # [linux] + - 10.13 # [osx] + +In the recipe we would then use:: + + requirements: + build: + - {{ compiler('c') }} + - {{ stdlib('c') }} + +This would then express that the resulting package requires ``sysroot ==2.17`` +(corresponds to ``glibc``) on linux and ``macosx_deployment_target ==10.13`` on +MacOS in the build environment, respectively. 
How this translates into a +run-time dependence can be defined in the metadata of the respective conda +(meta-)package which represents the standard library (i.e. those defined under +``c_stdlib`` above). + +In this example, ``sysroot 2.17`` would generate a run-export on +``__glibc >=2.17`` and ``macosx_deployment_target 10.13`` would similarly +generate ``__osx >=10.13``. This way, we enable packages to define their own +expectations about the standard library in a unified way, and without +implicitly depending on some global assumption about what the lower version +on a given platform must be. + +In principle, this facility would make it possible to also express the +dependence on separate stdlib implementations (like ``musl`` instead of +``glibc``), or to remove the need to assume that a C++ compiler always needs to +add a run-export on the C++ stdlib -- it could then be left up to packages +themselves whether they need ``{{ stdlib('cxx') }}`` or not. + Anaconda compilers implicitly add RPATH pointing to the conda environment ========================================================================= diff --git a/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml b/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml new file mode 100644 index 0000000000..a6ac88cd33 --- /dev/null +++ b/tests/test-recipes/metadata/_stdlib_jinja2/conda_build_config.yaml @@ -0,0 +1,8 @@ +c_stdlib: # [unix] + - sysroot # [linux] + - macosx_deployment_target # [osx] +c_stdlib_version: # [unix] + - 2.12 # [linux64] + - 2.17 # [aarch64 or ppc64le] + - 10.13 # [osx and x86_64] + - 11.0 # [osx and arm64] diff --git a/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml b/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml new file mode 100644 index 0000000000..c655aac2ca --- /dev/null +++ b/tests/test-recipes/metadata/_stdlib_jinja2/meta.yaml @@ -0,0 +1,9 @@ +package: + name: stdlib-test + version: 1.0 + +requirements: + host: + - {{ stdlib('c') }} + # - {{ stdlib('cxx') }} + # - {{ stdlib('fortran') }} diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 37319f0de4..e122b45b4b 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -223,6 +223,33 @@ def test_compiler_metadata_cross_compiler(): ) +@pytest.mark.parametrize( + "platform,arch,stdlibs", + [ + ("linux", "64", {"sysroot_linux-64 2.12.*"}), + ("linux", "aarch64", {"sysroot_linux-aarch64 2.17.*"}), + ("osx", "64", {"macosx_deployment_target_osx-64 10.13.*"}), + ("osx", "arm64", {"macosx_deployment_target_osx-arm64 11.0.*"}), + ], +) +def test_native_stdlib_metadata( + platform: str, arch: str, stdlibs: set[str], testing_config +): + testing_config.platform = platform + metadata = api.render( + os.path.join(metadata_dir, "_stdlib_jinja2"), + config=testing_config, + variants={"target_platform": f"{platform}-{arch}"}, + platform=platform, + arch=arch, + permit_unsatisfiable_variants=True, + finalize=False, + bypass_env_check=True, + python="3.11", # irrelevant + )[0][0] + assert stdlibs <= set(metadata.meta["requirements"]["host"]) + + def test_hash_build_id(testing_metadata): testing_metadata.config.variant["zlib"] = "1.2" testing_metadata.meta["requirements"]["host"] = ["zlib"] From 45749d723c6593d9ab44c7bbe2b8c7e4200b6b6b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 1 Nov 2023 18:52:23 -0400 Subject: [PATCH 206/366] Deprecate `conda_build.utils.relative` (#5042) --- conda_build/post.py | 13 +------------ conda_build/utils.py | 7 +++++++ news/5042-deprecate-relative | 19 
+++++++++++++++++++ 3 files changed, 27 insertions(+), 12 deletions(-) create mode 100644 news/5042-deprecate-relative diff --git a/conda_build/post.py b/conda_build/post.py index 05af50b24f..bef71e31af 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -624,18 +624,7 @@ def mk_relative_linux(f, prefix, rpaths=("lib",), method=None): for rpath in rpaths: if rpath != "": if not rpath.startswith("/"): - # IMHO utils.relative shouldn't exist, but I am too paranoid to remove - # it, so instead, make sure that what I think it should be replaced by - # gives the same result and assert if not. Yeah, I am a chicken. - rel_ours = normpath(utils.relative(f, rpath)) - rel_stdlib = normpath(relpath(rpath, dirname(f))) - if not rel_ours == rel_stdlib: - raise ValueError( - "utils.relative {} and relpath {} disagree for {}, {}".format( - rel_ours, rel_stdlib, f, rpath - ) - ) - rpath = "$ORIGIN/" + rel_stdlib + rpath = "$ORIGIN/" + normpath(relpath(rpath, dirname(f))) if rpath not in new: new.append(rpath) rpath = ":".join(new) diff --git a/conda_build/utils.py b/conda_build/utils.py index f8606ffe9d..4f68a7f79e 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -38,6 +38,8 @@ import libarchive +from .deprecations import deprecated + try: from json.decoder import JSONDecodeError except ImportError: @@ -789,6 +791,11 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): return locks +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `os.path.relpath` or `pathlib.Path.relative_to` instead.", +) def relative(f, d="lib"): assert not f.startswith("/"), f assert not d.startswith("/"), d diff --git a/news/5042-deprecate-relative b/news/5042-deprecate-relative new file mode 100644 index 0000000000..2061f242b2 --- /dev/null +++ b/news/5042-deprecate-relative @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.utils.relative` as pending deprecation. Use `os.path.relpath` or `pathlib.Path.relative_to` instead. 
(#5042) + +### Docs + +* + +### Other + +* From 558999bdb76a6243e4244c98d5a99097aecb46a2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 17:55:25 -0500 Subject: [PATCH 207/366] [pre-commit.ci] pre-commit autoupdate (#5030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) - [github.com/asottile/pyupgrade: v3.13.0 → v3.15.0](https://github.com/asottile/pyupgrade/compare/v3.13.0...v3.15.0) - [github.com/psf/black: 23.9.1 → 23.10.1](https://github.com/psf/black/compare/23.9.1...23.10.1) - [github.com/astral-sh/ruff-pre-commit: v0.0.291 → v0.1.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.291...v0.1.3) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ae9caec20c..d021cb1b20 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ exclude: | repos: # generic verification and formatting - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: # standard end of line/end of file cleanup - id: mixed-line-ending @@ -49,13 +49,13 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.13.0 + rev: v3.15.0 hooks: # upgrade standard Python codes - id: pyupgrade args: [--py38-plus] - repo: https://github.com/psf/black - rev: 23.9.1 + rev: 23.10.1 hooks: # auto format Python codes - id: black @@ -66,7 +66,7 @@ repos: - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.291 + rev: v0.1.3 hooks: - id: ruff args: [--fix] From f7dd4ed84fd996cb8d9f113a9817b878e9dd773e Mon Sep 17 00:00:00 2001 From: danpetry Date: Thu, 2 Nov 2023 13:57:57 -0500 Subject: [PATCH 208/366] Avoid clobbering variants from higher priority cbc.yaml (#5039) * Add record of previously checked config entries, to avoid clobbering by lower priority entries. * Add test for clobbering. --------- Co-authored-by: Ken Odegard Co-authored-by: Jean-Christophe Morin <38703886+JeanChristopheMorinPerso@users.noreply.github.com> --- conda_build/variants.py | 7 ++++- news/5039-dont-clobber-multiple-config | 19 ++++++++++++ tests/test_variants.py | 43 ++++++++++++++++++++++++++ 3 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 news/5039-dont-clobber-multiple-config diff --git a/conda_build/variants.py b/conda_build/variants.py index d7c6841238..319ace7fea 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -679,9 +679,13 @@ def filter_combined_spec_to_used_keys(combined_spec, specs): # TODO: act here? combined_spec = explode_variants(combined_spec) + # seen_keys makes sure that a setting from a lower-priority spec doesn't clobber + # the same setting that has been redefined in a higher-priority spec. + seen_keys = set() + # The specs are checked from high to low priority order. for source, source_specs in reversed(specs.items()): for k, vs in source_specs.items(): - if k not in extend_keys: + if k not in extend_keys and k not in seen_keys: # when filtering ends up killing off all variants, we just ignore that. 
Generally, # this arises when a later variant config overrides, rather than selects a # subspace of earlier configs @@ -689,6 +693,7 @@ def filter_combined_spec_to_used_keys(combined_spec, specs): filter_by_key_value(combined_spec, k, vs, source_name=source) or combined_spec ) + seen_keys.add(k) return combined_spec diff --git a/news/5039-dont-clobber-multiple-config b/news/5039-dont-clobber-multiple-config new file mode 100644 index 0000000000..630868093d --- /dev/null +++ b/news/5039-dont-clobber-multiple-config @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Avoid clobbering of variants in high priority cbc.yaml entries when they aren't present in lower priority cbc.yamls. (#5039) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_variants.py b/tests/test_variants.py index 3e7ba621a5..819f39d793 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -16,6 +16,7 @@ from conda_build.variants import ( combine_specs, dict_of_lists_to_list_of_dicts, + filter_combined_spec_to_used_keys, get_package_variants, validate_spec, ) @@ -657,3 +658,45 @@ def test_variant_subkeys_retained(): m.final = False outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False) get_all_replacements(outputs[0][1].config.variant) + + +@pytest.mark.parametrize( + "internal_defaults, low_prio_config, high_prio_config, expected", + [ + pytest.param( + {"pkg_1": "1.0"}, + {"pkg_1": "1.1"}, + {"pkg_1": ["1.1", "1.2"], "pkg_2": ["1.1"]}, + [{"pkg_1": "1.1", "pkg_2": "1.1"}, {"pkg_1": "1.2", "pkg_2": "1.1"}], + id="basic", + ), + pytest.param( + {"pkg_1": "1.0"}, + {"pkg_1": "1.1"}, + { + "pkg_1": ["1.1", "1.2"], + "pkg_2": ["1.1", "1.2"], + "zip_keys": [["pkg_1", "pkg_2"]], + }, + [ + {"pkg_1": "1.1", "pkg_2": "1.1", "zip_keys": [["pkg_1", "pkg_2"]]}, + {"pkg_1": "1.2", "pkg_2": "1.2", "zip_keys": [["pkg_1", "pkg_2"]]}, + ], + id="zip_keys", + ), + ], +) +def test_zip_key_filtering( + internal_defaults, low_prio_config, high_prio_config, expected +): + combined_spec = { + **low_prio_config, + **high_prio_config, + } + specs = { + "internal_defaults": internal_defaults, + "low_prio_config": low_prio_config, + "high_prio_config": high_prio_config, + } + + assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected From 64e3332b766c78536884189c9c58efd1777dfd19 Mon Sep 17 00:00:00 2001 From: Travis Hathaway Date: Fri, 3 Nov 2023 14:11:32 +0100 Subject: [PATCH 209/366] Fix bug regarding missing `anaconda` executable (#5050) * making the check for missing anaconda executable a little broader * adding news file --- conda_build/build.py | 2 +- news/5050-missing-anaconda-client-bugfix | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 news/5050-missing-anaconda-client-bugfix diff --git a/conda_build/build.py b/conda_build/build.py index 1d66cf114f..4268aaef65 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -4099,7 +4099,7 @@ def handle_anaconda_upload(paths, config): print(no_upload_message) return - if anaconda is None: + if not anaconda: print(no_upload_message) sys.exit( "Error: cannot locate anaconda command (required for upload)\n" diff --git a/news/5050-missing-anaconda-client-bugfix b/news/5050-missing-anaconda-client-bugfix new file mode 100644 index 0000000000..99df06709d --- /dev/null +++ b/news/5050-missing-anaconda-client-bugfix @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fixes the check for a missing anaconda-client so a useful error message is shown 
+ +### Deprecations + +* + +### Docs + +* + +### Other + +* From 923b59aa791e6b97691037d460be479317923104 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 3 Nov 2023 11:20:42 -0400 Subject: [PATCH 210/366] Allow non-first-child table cells to wrap (#5056) --- docs/source/_static/css/custom.css | 4 ++++ docs/source/resources/package-spec.rst | 24 +++++++++--------------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css index 95805e211b..f78cbde400 100644 --- a/docs/source/_static/css/custom.css +++ b/docs/source/_static/css/custom.css @@ -81,3 +81,7 @@ h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend { /*color of nav at top when the window is narrow*/ background: #43B02A; } + +.wy-table-responsive table td:not(:first-child), .wy-table-responsive table th:not(:first-child) { + white-space: normal; +} diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index a3f0c98ac9..0bcd3f929b 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -289,7 +289,7 @@ parts: three parts, the second part must be the exact version. .. list-table:: Version Special Characters - :widths: 10, 40, 40 + :widths: 10 40 40 :header-rows: 1 * - Symbol @@ -297,9 +297,7 @@ parts: - Example * - <, >, <=, >= - - Relational operators on versions, - - which are compared using `PEP-440 `_. + - Relational operators on versions, which are compared using `PEP-440 `_. - ``<=1.0`` matches 0.9, 0.9.1, and 1.0, but not 1.0.1. * - ==, and != @@ -315,16 +313,12 @@ parts: - ``1.0|1.2`` matches version 1.0 or 1.2. * - \* - - Matches 0 or more characters in the version string. - - In terms of regular expressions, it is the same as ``r'.*'``. + - Matches 0 or more characters in the version string. In terms of regular expressions, it is the same as ``r'.*'``. - ``1.0|1.4*`` matches 1.0, 1.4 and 1.4.1b2, but not 1.2. * - , - AND - - ``>=2,<3`` matches all packages in the 2 series. - - 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + - ``>=2,<3`` matches all packages in the 2 series. 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not. .. hint:: ``,`` has higher precedence than \|, so >=1,<2|>3 means greater than or equal to 1 AND less than 2 or greater than 3, which matches 1, 1.3 and 3.0, but not 2.2. @@ -380,17 +374,17 @@ the following characters: <, >, \*, or \|. * - Example - Meaning - * - conda install numpy=1.11 + * - ``conda install numpy=1.11`` - The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, 1.11.2, 1.11.18, and so on. - * - conda install numpy==1.11 + * - ``conda install numpy==1.11`` - The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, and so on. - * - conda install "numpy=1.11.1|1.11.3" + * - ``conda install "numpy=1.11.1|1.11.3"`` - The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or 1.11.3. - * - conda install "numpy>1.11" + * - ``conda install "numpy>1.11"`` - Any numpy version 1.12.0a or greater. - * - conda install "numpy>=1.8,<2" + * - ``conda install "numpy>=1.8,<2"`` - The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but not 2.0. 
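The constraint behaviour documented in the table above can be checked against conda itself. The snippet below is a minimal sketch, assuming conda's internal ``VersionSpec`` class in ``conda.models.version`` and its ``match()`` method accept the spec strings shown in the table (these are conda internals rather than a documented public API)::

    # Illustrative sketch only; assumes conda.models.version.VersionSpec
    # exists and exposes match() for single version strings.
    from conda.models.version import VersionSpec

    and_spec = VersionSpec(">=1.8,<2")      # AND: the 1.x series from 1.8 onward
    or_spec = VersionSpec("1.11.1|1.11.3")  # OR: either of two exact versions

    assert and_spec.match("1.9")        # inside the range
    assert not and_spec.match("2.0")    # excluded by <2
    assert or_spec.match("1.11.3")
    assert not or_spec.match("1.11.2")
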
From decbcf9dd6250dca251e195790b8fb81ec0c5fe9 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 3 Nov 2023 12:34:29 -0400 Subject: [PATCH 211/366] Use `conda_build.metadata.MetaData`'s `get_section` & `get_value` (#5055) * Use `MetaData`'s `get_section`, `get_value`, `name`, `version`, `build_number`, and `build_id` instead of manually fetching from internals. * Deprecate `MetaData.name(fail_ok)` and make it conditional on `MetaData.final` instead. --- conda_build/api.py | 18 +- conda_build/build.py | 46 +++-- conda_build/create_test.py | 4 +- conda_build/environ.py | 17 +- conda_build/metadata.py | 165 +++++++++--------- conda_build/post.py | 44 +++-- conda_build/render.py | 122 +++++++------ conda_build/skeletons/cran.py | 29 +-- .../_empty_host_avoids_merge/meta.yaml | 1 + .../metadata/_no_merge_build_host/meta.yaml | 1 + .../split-packages/_order/meta.yaml | 2 + .../variants/27_requirements_host/meta.yaml | 1 + 12 files changed, 226 insertions(+), 224 deletions(-) diff --git a/conda_build/api.py b/conda_build/api.py index 2d5fa7ee7d..727240aece 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -8,6 +8,7 @@ Design philosophy: put variability into config. Make each function here accept kwargs, but only use those kwargs in config. Config must change to support new features elsewhere. """ +from __future__ import annotations import sys as _sys @@ -76,8 +77,8 @@ def render( raise # remove outputs section from output objects for simplicity - if not om.path and om.meta.get("outputs"): - om.parent_outputs = om.meta["outputs"] + if not om.path and (outputs := om.get_section("outputs")): + om.parent_outputs = outputs del om.meta["outputs"] output_metas[ @@ -571,7 +572,7 @@ def debug( test=False, output_id=None, config=None, - verbose=True, + verbose: bool = True, link_source_method="auto", **kwargs, ): @@ -587,6 +588,8 @@ def debug( from conda_build.build import test as run_test from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win + from .metadata import MetaData + is_package = False default_config = get_or_merge_config(config, **kwargs) args = {"set_build_id": False} @@ -622,15 +625,13 @@ def debug( config.channel_urls = get_channel_urls(kwargs) - metadata_tuples = [] + metadata_tuples: list[tuple[MetaData, bool, bool]] = [] best_link_source_method = "skip" if isinstance(recipe_or_package_path_or_metadata_tuples, str): if path_is_build_dir: for metadata_conda_debug in metadatas_conda_debug: best_link_source_method = "symlink" - from conda_build.metadata import MetaData - metadata = MetaData(metadata_conda_debug, config, {}) metadata_tuples.append((metadata, False, True)) else: @@ -681,10 +682,7 @@ def debug( "local", "src", "conda", - "{}-{}".format( - metadata.get_value("package/name"), - metadata.get_value("package/version"), - ), + f"{metadata.name()}-{metadata.version()}", ) link_target = os.path.dirname(metadata.meta_path) try: diff --git a/conda_build/build.py b/conda_build/build.py index 4268aaef65..134730138a 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -151,7 +151,7 @@ def log_stats(stats_dict, descriptor): ) -def create_post_scripts(m): +def create_post_scripts(m: MetaData): """ Create scripts to run after build step """ @@ -162,12 +162,9 @@ def create_post_scripts(m): is_output = "package:" not in m.get_recipe_text() scriptname = tp if is_output: - if m.meta.get("build", {}).get(tp, ""): - scriptname = m.meta["build"][tp] - else: - scriptname = m.name() + "-" + tp + scriptname = m.get_value(f"build/{tp}", f"{m.name()}-{tp}") 
scriptname += ext - dst_name = "." + m.name() + "-" + tp + ext + dst_name = f".{m.name()}-{tp}{ext}" src = join(m.path, scriptname) if isfile(src): dst_dir = join( @@ -1456,12 +1453,12 @@ def write_about_json(m): json.dump(d, fo, indent=2, sort_keys=True) -def write_info_json(m): +def write_info_json(m: MetaData): info_index = m.info_index() if m.pin_depends: # Wtih 'strict' depends, we will have pinned run deps during rendering if m.pin_depends == "strict": - runtime_deps = m.meta.get("requirements", {}).get("run", []) + runtime_deps = m.get_value("requirements/run", []) info_index["depends"] = runtime_deps else: runtime_deps = environ.get_pinned_deps(m, "run") @@ -1508,8 +1505,8 @@ def get_entry_point_script_names(entry_point_scripts): return scripts -def write_run_exports(m): - run_exports = m.meta.get("build", {}).get("run_exports", {}) +def write_run_exports(m: MetaData): + run_exports = m.get_value("build/run_exports", {}) if run_exports: with open(os.path.join(m.config.info_dir, "run_exports.json"), "w") as f: if not hasattr(run_exports, "keys"): @@ -1747,8 +1744,8 @@ def create_info_files_json_v1(m, info_dir, prefix, files, files_with_prefix): return checksums -def post_process_files(m, initial_prefix_files): - package_name = m.get_value("package/name") +def post_process_files(m: MetaData, initial_prefix_files): + package_name = m.name() host_prefix = m.config.host_prefix missing = [] for f in initial_prefix_files: @@ -1778,7 +1775,7 @@ def post_process_files(m, initial_prefix_files): ) post_process( package_name, - m.get_value("package/version"), + m.version(), sorted(current_prefix_files - initial_prefix_files), prefix=host_prefix, config=m.config, @@ -1839,7 +1836,7 @@ def post_process_files(m, initial_prefix_files): return new_files -def bundle_conda(output, metadata, env, stats, **kw): +def bundle_conda(output, metadata: MetaData, env, stats, **kw): log = utils.get_logger(__name__) log.info("Packaging %s", metadata.dist()) get_all_replacements(metadata.config) @@ -1911,7 +1908,7 @@ def bundle_conda(output, metadata, env, stats, **kw): env_output["TOP_PKG_NAME"] = env["PKG_NAME"] env_output["TOP_PKG_VERSION"] = env["PKG_VERSION"] env_output["PKG_VERSION"] = metadata.version() - env_output["PKG_NAME"] = metadata.get_value("package/name") + env_output["PKG_NAME"] = metadata.name() env_output["RECIPE_DIR"] = metadata.path env_output["MSYS2_PATH_TYPE"] = "inherit" env_output["CHERE_INVOKING"] = "1" @@ -2129,7 +2126,7 @@ def bundle_conda(output, metadata, env, stats, **kw): return final_outputs -def bundle_wheel(output, metadata, env, stats): +def bundle_wheel(output, metadata: MetaData, env, stats): ext = ".bat" if utils.on_win else ".sh" with TemporaryDirectory() as tmpdir, utils.tmp_chdir(metadata.config.work_dir): dest_file = os.path.join(metadata.config.work_dir, "wheel_output" + ext) @@ -2145,7 +2142,7 @@ def bundle_wheel(output, metadata, env, stats): env["TOP_PKG_NAME"] = env["PKG_NAME"] env["TOP_PKG_VERSION"] = env["PKG_VERSION"] env["PKG_VERSION"] = metadata.version() - env["PKG_NAME"] = metadata.get_value("package/name") + env["PKG_NAME"] = metadata.name() interpreter_and_args = guess_interpreter(dest_file) bundle_stats = {} @@ -2317,7 +2314,7 @@ def _write_activation_text(script_path, m): fh.write(data) -def create_build_envs(m, notest): +def create_build_envs(m: MetaData, notest): build_ms_deps = m.ms_depends("build") build_ms_deps = [utils.ensure_valid_spec(spec) for spec in build_ms_deps] host_ms_deps = m.ms_depends("host") @@ -2371,11 +2368,12 @@ def 
create_build_envs(m, notest): try: if not notest: utils.insert_variant_versions( - m.meta.get("requirements", {}), m.config.variant, "run" + m.get_section("requirements"), m.config.variant, "run" ) - test_run_ms_deps = utils.ensure_list( - m.get_value("test/requires", []) - ) + utils.ensure_list(m.get_value("requirements/run", [])) + test_run_ms_deps = [ + *utils.ensure_list(m.get_value("test/requires", [])), + *utils.ensure_list(m.get_value("requirements/run", [])), + ] # make sure test deps are available before taking time to create build env environ.get_install_actions( m.config.test_prefix, @@ -2424,7 +2422,7 @@ def create_build_envs(m, notest): def build( - m, + m: MetaData, stats, post=None, need_source_download=True, @@ -2516,7 +2514,7 @@ def build( ) specs = [ms.spec for ms in m.ms_depends("build")] - if any(out.get("type") == "wheel" for out in m.meta.get("outputs", [])): + if any(out.get("type") == "wheel" for out in m.get_section("outputs")): specs.extend(["pip", "wheel"]) # TODO :: This is broken. It does not respect build/script for example and also if you need git diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 45cb20ebfe..35511ef503 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -47,7 +47,7 @@ def _get_output_script_name( src_name = dst_name if m.is_output: src_name = "no-file" - for out in m.meta.get("outputs", []): + for out in m.get_section("outputs"): if m.name() == out.get("name"): out_test_script = out.get("test", {}).get("script", "no-file") if os.path.splitext(out_test_script)[1].lower() == ext: @@ -103,7 +103,7 @@ def _create_test_files( name = "" # the way this works is that each output needs to explicitly define a test script to run # They do not automatically pick up run_test.*, but can be pointed at that explicitly. - for out in m.meta.get("outputs", []): + for out in m.get_section("outputs"): if m.name() == out.get("name"): out_test_script = out.get("test", {}).get("script", "no-file") if out_test_script.endswith(ext): diff --git a/conda_build/environ.py b/conda_build/environ.py index 5afcf93c4d..9e128ad511 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -42,6 +42,7 @@ root_dir, ) from .deprecations import deprecated +from .metadata import MetaData # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. 
@@ -388,7 +389,7 @@ def python_vars(metadata, prefix, escape_backslash): } build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if "python" in deps or metadata.name(fail_ok=True) == "python": + if "python" in deps or metadata.name() == "python": python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: @@ -417,7 +418,7 @@ def perl_vars(metadata, prefix, escape_backslash): } build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if "perl" in deps or metadata.name(fail_ok=True) == "perl": + if "perl" in deps or metadata.name() == "perl": perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: @@ -464,10 +465,7 @@ def r_vars(metadata, prefix, escape_backslash): build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if ( - any(r_pkg in deps for r_pkg in R_PACKAGES) - or metadata.name(fail_ok=True) in R_PACKAGES - ): + if any(r_pkg in deps for r_pkg in R_PACKAGES) or metadata.name() in R_PACKAGES: r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir) # set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages r_user = join(prefix, "Libs", "R") @@ -484,7 +482,7 @@ def r_vars(metadata, prefix, escape_backslash): return vars_ -def meta_vars(meta, skip_build_id=False): +def meta_vars(meta: MetaData, skip_build_id=False): d = {} for var_name in ensure_list(meta.get_value("build/script_env", [])): if "=" in var_name: @@ -545,12 +543,11 @@ def meta_vars(meta, skip_build_id=False): ): d.update(get_hg_build_info(hg_dir)) - # use `get_value` to prevent early exit while name is still unresolved during rendering - d["PKG_NAME"] = meta.get_value("package/name") + d["PKG_NAME"] = meta.name() d["PKG_VERSION"] = meta.version() d["PKG_BUILDNUM"] = str(meta.build_number()) if meta.final and not skip_build_id: - d["PKG_BUILD_STRING"] = str(meta.build_id()) + d["PKG_BUILD_STRING"] = meta.build_id() d["PKG_HASH"] = meta.hash_dependencies() else: d["PKG_BUILD_STRING"] = "placeholder" diff --git a/conda_build/metadata.py b/conda_build/metadata.py index d2d87912bf..7ad51c7880 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -13,10 +13,11 @@ from collections import OrderedDict from functools import lru_cache from os.path import isfile, join +from typing import Literal from bs4 import UnicodeDammit -from conda_build import environ, exceptions, utils, variants +from conda_build import exceptions, utils, variants from conda_build.config import Config, get_or_merge_config from conda_build.features import feature_list from conda_build.license_family import ensure_valid_license_family @@ -31,6 +32,7 @@ ) from .conda_interface import MatchSpec, envs_dirs, md5_file +from .deprecations import deprecated try: import yaml @@ -409,7 +411,7 @@ def ensure_matching_hashes(output_metadata): for _, m in output_metadata.values(): for _, om in output_metadata.values(): if m != om: - run_exports = om.meta.get("build", {}).get("run_exports", []) + run_exports = om.get_value("build/run_exports", []) if hasattr(run_exports, "keys"): run_exports_list = [] for export_type in utils.RUN_EXPORTS_TYPES: @@ -550,7 +552,7 @@ def parse(data, config, path=None): "provides_features": dict, "force_use_keys": list, "force_ignore_keys": list, - "merge_build_host": bool, + 
"merge_build_host": None, "pre-link": str, "post-link": str, "pre-unlink": str, @@ -1123,33 +1125,28 @@ def __init__(self, path, config=None, variant=None): # establish whether this recipe should squish build and host together @property - def is_cross(self): - return bool(self.get_depends_top_and_out("host")) or "host" in self.meta.get( - "requirements", {} + def is_cross(self) -> bool: + return bool( + self.get_depends_top_and_out("host") + or "host" in self.get_section("requirements") ) @property - def final(self): - return self.get_value("extra/final") + def final(self) -> bool: + return bool(self.get_value("extra/final")) @final.setter - def final(self, boolean): - extra = self.meta.get("extra", {}) - extra["final"] = boolean - self.meta["extra"] = extra + def final(self, value: bool) -> None: + self.meta.setdefault("extra", {})["final"] = bool(value) @property - def disable_pip(self): - return self.config.disable_pip or ( - "build" in self.meta and "disable_pip" in self.meta["build"] - ) + def disable_pip(self) -> bool: + return bool(self.config.disable_pip or self.get_value("build/disable_pip")) @disable_pip.setter - def disable_pip(self, value): - self.config.disable_pip = value - build = self.meta.get("build", {}) - build["disable_pip"] = value - self.meta["build"] = build + def disable_pip(self, value: bool) -> None: + self.config.disable_pip = bool(value) + self.meta.setdefault("build", {})["disable_pip"] = bool(value) def append_metadata_sections( self, sections_file_or_dict, merge, raise_on_clobber=False @@ -1175,10 +1172,9 @@ def append_metadata_sections( ) @property - def is_output(self): - self_name = self.name(fail_ok=True) - parent_name = self.meta.get("extra", {}).get("parent_recipe", {}).get("name") - return bool(parent_name) and parent_name != self_name + def is_output(self) -> str: + parent_name = self.get_value("extra/parent_recipe", {}).get("name") + return parent_name and parent_name != self.name() def parse_again( self, @@ -1245,17 +1241,16 @@ def parse_again( dependencies = _get_dependencies_from_environment(self.config.bootstrap) self.append_metadata_sections(dependencies, merge=True) - if "error_overlinking" in self.meta.get("build", {}): + if "error_overlinking" in self.get_section("build"): self.config.error_overlinking = self.meta["build"]["error_overlinking"] - if "error_overdepending" in self.meta.get("build", {}): + if "error_overdepending" in self.get_section("build"): self.config.error_overdepending = self.meta["build"]["error_overdepending"] self.validate_features() self.ensure_no_pip_requirements() def ensure_no_pip_requirements(self): - keys = "requirements/build", "requirements/run", "test/requires" - for key in keys: + for key in ("requirements/build", "requirements/run", "test/requires"): if any(hasattr(item, "keys") for item in (self.get_value(key) or [])): raise ValueError( "Dictionaries are not supported as values in requirements sections" @@ -1265,15 +1260,13 @@ def ensure_no_pip_requirements(self): def append_requirements(self): """For dynamic determination of build or run reqs, based on configuration""" - reqs = self.meta.get("requirements", {}) - run_reqs = reqs.get("run", []) + run_reqs = self.meta.setdefault("requirements", {}).setdefault("run", []) if ( - bool(self.get_value("build/osx_is_app", False)) + self.get_value("build/osx_is_app", False) and self.config.platform == "osx" + and "python.app" not in run_reqs ): - if "python.app" not in run_reqs: - run_reqs.append("python.app") - self.meta["requirements"] = reqs + 
run_reqs.append("python.app") def parse_until_resolved( self, allow_no_other_outputs=False, bypass_env_check=False @@ -1436,26 +1429,28 @@ def check_field(key, section): check_field(key_or_dict, section) return True - def name(self, fail_ok=False): - res = self.meta.get("package", {}).get("name", "") - if not res and not fail_ok: + @deprecated.argument("3.28.0", "4.0.0", "fail_ok") + def name(self) -> str: + name = self.get_value("package/name", "") + if not name and self.final: sys.exit("Error: package/name missing in: %r" % self.meta_path) - res = str(res) - if res != res.lower(): - sys.exit("Error: package/name must be lowercase, got: %r" % res) - check_bad_chrs(res, "package/name") - return res - - def version(self): - res = str(self.get_value("package/version")) - if res is None: + name = str(name) + if name != name.lower(): + sys.exit("Error: package/name must be lowercase, got: %r" % name) + check_bad_chrs(name, "package/name") + return name + + def version(self) -> str: + version = self.get_value("package/version", "") + if not version and self.final: sys.exit("Error: package/version missing in: %r" % self.meta_path) - check_bad_chrs(res, "package/version") - if self.final and res.startswith("."): + version = str(version) + check_bad_chrs(version, "package/version") + if self.final and version.startswith("."): raise ValueError( - "Fully-rendered version can't start with period - got %s", res + "Fully-rendered version can't start with period - got %s", version ) - return res + return version def build_number(self): number = self.get_value("build/number") @@ -1809,8 +1804,8 @@ def binary_relocation(self): expand_globs(ret, self.config.host_prefix) if isinstance(ret, list) else ret ) - def include_recipe(self): - return self.get_value("build/include_recipe", True) + def include_recipe(self) -> bool: + return bool(self.get_value("build/include_recipe", True)) def binary_has_prefix_files(self): ret = ensure_list(self.get_value("build/binary_has_prefix_files", [])) @@ -1826,8 +1821,8 @@ def binary_has_prefix_files(self): ) return expand_globs(ret, self.config.host_prefix) - def skip(self): - return self.get_value("build/skip", False) + def skip(self) -> bool: + return bool(self.get_value("build/skip", False)) def _get_contents( self, @@ -1891,8 +1886,10 @@ def _get_contents( loader = FilteredLoader(jinja2.ChoiceLoader(loaders), config=self.config) env = jinja2.Environment(loader=loader, undefined=undefined_type) + from .environ import get_dict + env.globals.update(get_selectors(self.config)) - env.globals.update(environ.get_dict(m=self, skip_build_id=skip_build_id)) + env.globals.update(get_dict(m=self, skip_build_id=skip_build_id)) env.globals.update({"CONDA_BUILD_STATE": "RENDER"}) env.globals.update( context_processor( @@ -1964,9 +1961,11 @@ def __repr__(self): @property def meta_path(self): - meta_path = self._meta_path or self.meta.get("extra", {}).get( - "parent_recipe", {} - ).get("path", "") + meta_path = ( + self._meta_path + # get the parent recipe path if this is a subpackage + or self.get_value("extra/parent_recipe", {}).get("path", "") + ) if meta_path and os.path.basename(meta_path) != self._meta_name: meta_path = os.path.join(meta_path, self._meta_name) return meta_path @@ -2014,7 +2013,7 @@ def uses_jinja(self): return len(matches) > 0 @property - def uses_vcs_in_meta(self): + def uses_vcs_in_meta(self) -> Literal["git" | "svn" | "mercurial"] | None: """returns name of vcs used if recipe contains metadata associated with version control systems. 
If this metadata is present, a download/copy will be forced in parse_or_try_download. """ @@ -2026,7 +2025,7 @@ def uses_vcs_in_meta(self): meta_text = UnicodeDammit(f.read()).unicode_markup for _vcs in vcs_types: matches = re.findall(rf"{_vcs.upper()}_[^\.\s\'\"]+", meta_text) - if len(matches) > 0 and _vcs != self.meta["package"]["name"]: + if len(matches) > 0 and _vcs != self.get_value("package/name"): if _vcs == "hg": _vcs = "mercurial" vcs = _vcs @@ -2034,7 +2033,7 @@ def uses_vcs_in_meta(self): return vcs @property - def uses_vcs_in_build(self): + def uses_vcs_in_build(self) -> Literal["git" | "svn" | "mercurial"] | None: # TODO :: Re-work this. Is it even useful? We can declare any vcs in our build deps. build_script = "bld.bat" if on_win else "build.sh" build_script = os.path.join(self.path, build_script) @@ -2053,7 +2052,7 @@ def uses_vcs_in_build(self): build_script, flags=re.IGNORECASE, ) - if len(matches) > 0 and vcs != self.meta["package"]["name"]: + if len(matches) > 0 and vcs != self.get_value("package/name"): if vcs == "hg": vcs = "mercurial" return vcs @@ -2155,15 +2154,14 @@ def extract_single_output_text( return output @property - def numpy_xx(self): + def numpy_xx(self) -> bool: """This is legacy syntax that we need to support for a while. numpy x.x means "pin run as build" for numpy. It was special-cased to only numpy.""" text = self.extract_requirements_text() - uses_xx = bool(numpy_xx_re.search(text)) - return uses_xx + return bool(numpy_xx_re.search(text)) @property - def uses_numpy_pin_compatible_without_xx(self): + def uses_numpy_pin_compatible_without_xx(self) -> tuple[bool, bool]: text = self.extract_requirements_text() compatible_search = numpy_compatible_re.search(text) max_pin_search = None @@ -2225,24 +2223,20 @@ def noarch(self): return self.get_value("build/noarch") @noarch.setter - def noarch(self, value): - build = self.meta.get("build", {}) - build["noarch"] = value - self.meta["build"] = build + def noarch(self, value: str | None) -> None: + self.meta.setdefault("build", {})["noarch"] = value if not self.noarch_python and not value: self.config.reset_platform() elif value: self.config.host_platform = "noarch" @property - def noarch_python(self): - return self.get_value("build/noarch_python") + def noarch_python(self) -> bool: + return bool(self.get_value("build/noarch_python")) @noarch_python.setter - def noarch_python(self, value): - build = self.meta.get("build", {}) - build["noarch_python"] = value - self.meta["build"] = build + def noarch_python(self, value: bool) -> None: + self.meta.setdefault("build", {})["noarch_python"] = value if not self.noarch and not value: self.config.reset_platform() elif value: @@ -2574,7 +2568,7 @@ def get_output_metadata_set( ) output_d["requirements"] = output_d.get("requirements", {}) output_d["requirements"]["build"] = build_reqs - m.meta["requirements"] = m.meta.get("requirements", {}) + m.meta["requirements"] = m.get_section("requirements") m.meta["requirements"]["build"] = build_reqs non_conda_packages.append((output_d, m)) else: @@ -2889,18 +2883,19 @@ def clean(self): self.config.clean() @property - def activate_build_script(self): - b = self.meta.get("build", {}) or {} - should_activate = b.get("activate_in_script") is not False - return bool(self.config.activate and should_activate) + def activate_build_script(self) -> bool: + return bool( + self.config.activate + and self.get_value("build/activate_in_script") is not False + ) @property - def build_is_host(self): + def build_is_host(self) -> bool: 
manual_overrides = ( - self.meta.get("build", {}).get("merge_build_host") is True + self.get_value("build/merge_build_host") is True or self.config.build_is_host ) - manually_disabled = self.meta.get("build", {}).get("merge_build_host") is False + manually_disabled = self.get_value("build/merge_build_host") is False return manual_overrides or ( self.config.subdirs_same and not manually_disabled diff --git a/conda_build/post.py b/conda_build/post.py index bef71e31af..7be43cbe21 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -61,6 +61,8 @@ machofile, ) +from .metadata import MetaData + filetypes_for_platform = { "win": (DLLfile, EXEfile), "osx": (machofile,), @@ -1583,33 +1585,27 @@ def check_overlinking_impl( return dict() -def check_overlinking(m, files, host_prefix=None): - if not host_prefix: - host_prefix = m.config.host_prefix - - overlinking_ignore_patterns = m.meta.get("build", {}).get( - "overlinking_ignore_patterns" - ) - if overlinking_ignore_patterns: - files = [ - f - for f in files - if not any([fnmatch(f, p) for p in overlinking_ignore_patterns]) - ] +def check_overlinking(m: MetaData, files, host_prefix=None): + patterns = m.get_value("build/overlinking_ignore_patterns", []) + files = [ + file + for file in files + if not any([fnmatch(file, pattern) for pattern in patterns]) + ] return check_overlinking_impl( - m.get_value("package/name"), - m.get_value("package/version"), - m.get_value("build/string"), - m.get_value("build/number"), + m.name(), + m.version(), + m.build_id(), + m.build_number(), m.config.target_subdir, m.get_value("build/ignore_run_exports"), - [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("run", [])], - [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("build", [])], - [req.split(" ")[0] for req in m.meta.get("requirements", {}).get("host", [])], - host_prefix, + [req.split(" ")[0] for req in m.get_value("requirements/run", [])], + [req.split(" ")[0] for req in m.get_value("requirements/build", [])], + [req.split(" ")[0] for req in m.get_value("requirements/host", [])], + host_prefix or m.config.host_prefix, m.config.build_prefix, - m.meta.get("build", {}).get("missing_dso_whitelist", []), - m.meta.get("build", {}).get("runpath_whitelist", []), + m.get_value("build/missing_dso_whitelist", []), + m.get_value("build/runpath_whitelist", []), m.config.error_overlinking, m.config.error_overdepending, m.config.verbose, @@ -1617,7 +1613,7 @@ def check_overlinking(m, files, host_prefix=None): files, m.config.bldpkgs_dir, m.config.output_folder, - list(m.config.channel_urls) + ["local"], + [*m.config.channel_urls, "local"], m.config.enable_static, m.config.variant, ) diff --git a/conda_build/render.py b/conda_build/render.py index fa428e07f6..c0f1d8be73 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -13,7 +13,15 @@ import tempfile from collections import OrderedDict, defaultdict from functools import lru_cache -from os.path import abspath, isdir, isfile +from os.path import ( + abspath, + dirname, + isabs, + isdir, + isfile, + join, + normpath, +) from pathlib import Path import yaml @@ -67,15 +75,17 @@ def bldpkg_path(m): # the default case will switch over to conda_v2 at some point if pkg_type == "conda": - path = os.path.join( + path = join( m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V1}" ) elif pkg_type == "conda_v2": - path = os.path.join( + path = join( m.config.output_folder, subdir, f"{m.dist()}{CONDA_PACKAGE_EXTENSION_V2}" ) else: - path = f"{m.type} file for 
{m.name()} in: {os.path.join(m.config.output_folder, subdir)}" + path = ( + f"{m.type} file for {m.name()} in: {join(m.config.output_folder, subdir)}" + ) return path @@ -118,7 +128,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant): def get_env_dependencies( - m, + m: MetaData, env, variant, exclude_pattern=None, @@ -178,7 +188,7 @@ def get_env_dependencies( return ( utils.ensure_list( (specs + subpackages + pass_through_deps) - or m.meta.get("requirements", {}).get(env, []) + or m.get_value(f"requirements/{env}", []) ), actions, unsat, @@ -278,19 +288,19 @@ def find_pkg_dir_or_file_in_pkgs_dirs( @lru_cache(maxsize=None) def _read_specs_from_package(pkg_loc, pkg_dist): specs = {} - if pkg_loc and os.path.isdir(pkg_loc): - downstream_file = os.path.join(pkg_loc, "info/run_exports") - if os.path.isfile(downstream_file): + if pkg_loc and isdir(pkg_loc): + downstream_file = join(pkg_loc, "info/run_exports") + if isfile(downstream_file): with open(downstream_file) as f: specs = {"weak": [spec.rstrip() for spec in f.readlines()]} # a later attempt: record more info in the yaml file, to support "strong" run exports - elif os.path.isfile(downstream_file + ".yaml"): + elif isfile(downstream_file + ".yaml"): with open(downstream_file + ".yaml") as f: specs = yaml.safe_load(f) - elif os.path.isfile(downstream_file + ".json"): + elif isfile(downstream_file + ".json"): with open(downstream_file + ".json") as f: specs = json.load(f) - if not specs and pkg_loc and os.path.isfile(pkg_loc): + if not specs and pkg_loc and isfile(pkg_loc): # switching to json for consistency in conda-build 4 specs_yaml = utils.package_has_file(pkg_loc, "info/run_exports.yaml") specs_json = utils.package_has_file(pkg_loc, "info/run_exports.json") @@ -384,8 +394,8 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files with utils.LoggingContext(): pfe.execute() for pkg_dir in pkgs_dirs: - _loc = os.path.join(pkg_dir, index.get(pkg, pkg).fn) - if os.path.isfile(_loc): + _loc = join(pkg_dir, index.get(pkg, pkg).fn) + if isfile(_loc): pkg_loc = _loc break pkg_files[pkg] = pkg_loc, pkg_dist @@ -393,11 +403,10 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files return pkg_files -def get_upstream_pins(m, actions, env): +def get_upstream_pins(m: MetaData, actions, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. Return these additional specs.""" - - env_specs = m.meta.get("requirements", {}).get(env, []) + env_specs = m.get_value(f"requirements/{env}", []) explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] linked_packages = actions.get("LINK", []) linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs] @@ -427,7 +436,12 @@ def get_upstream_pins(m, actions, env): return additional_specs -def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern): +def _read_upstream_pin_files( + m: MetaData, + env, + permit_unsatisfiable_variants, + exclude_pattern, +): deps, actions, unsat = get_env_dependencies( m, env, @@ -439,16 +453,16 @@ def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_patt # vc feature activation to work correctly in the host env. 
extra_run_specs = get_upstream_pins(m, actions, env) return ( - list(set(deps)) or m.meta.get("requirements", {}).get(env, []), + list(set(deps)) or m.get_value(f"requirements/{env}", []), unsat, extra_run_specs, ) -def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern): +def add_upstream_pins(m: MetaData, permit_unsatisfiable_variants, exclude_pattern): """Applies run_exports from any build deps to host and run sections""" # if we have host deps, they're more important than the build deps. - requirements = m.meta.get("requirements", {}) + requirements = m.get_section("requirements") build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files( m, "build", permit_unsatisfiable_variants, exclude_pattern ) @@ -464,7 +478,7 @@ def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern): if not host_reqs: matching_output = [ - out for out in m.meta.get("outputs", []) if out.get("name") == m.name() + out for out in m.get_section("outputs") if out.get("name") == m.name() ] if matching_output: requirements = utils.expand_reqs( @@ -580,7 +594,11 @@ def _simplify_to_exact_constraints(metadata): metadata.meta["requirements"] = requirements -def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False): +def finalize_metadata( + m: MetaData, + parent_metadata=None, + permit_unsatisfiable_variants=False, +): """Fully render a recipe. Fill in versions for build/host dependencies.""" if not parent_metadata: parent_metadata = m @@ -605,7 +623,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal ) ) - parent_recipe = m.meta.get("extra", {}).get("parent_recipe", {}) + parent_recipe = m.get_value("extra/parent_recipe", {}) # extract the topmost section where variables are defined, and put it on top of the # requirements for a particular output @@ -625,13 +643,9 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal requirements = utils.expand_reqs(output.get("requirements", {})) m.meta["requirements"] = requirements - if m.meta.get("requirements"): - utils.insert_variant_versions( - m.meta["requirements"], m.config.variant, "build" - ) - utils.insert_variant_versions( - m.meta["requirements"], m.config.variant, "host" - ) + if requirements := m.get_section("requirements"): + utils.insert_variant_versions(requirements, m.config.variant, "build") + utils.insert_variant_versions(requirements, m.config.variant, "host") m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name())) build_unsat, host_unsat = add_upstream_pins( @@ -639,7 +653,7 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal ) # getting this AFTER add_upstream_pins is important, because that function adds deps # to the metadata. - requirements = m.meta.get("requirements", {}) + requirements = m.get_section("requirements") # here's where we pin run dependencies to their build time versions. This happens based # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package @@ -700,34 +714,26 @@ def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=Fal utils.ensure_valid_spec(spec, warn=True) for spec in versioned_test_deps ] m.meta["test"]["requires"] = versioned_test_deps - extra = m.meta.get("extra", {}) + extra = m.get_section("extra") extra["copy_test_source_files"] = m.config.copy_test_source_files m.meta["extra"] = extra # if source/path is relative, then the output package makes no sense at all. 
The next # best thing is to hard-code the absolute path. This probably won't exist on any # system other than the original build machine, but at least it will work there. - if m.meta.get("source"): - if "path" in m.meta["source"]: - source_path = m.meta["source"]["path"] - os.path.expanduser(source_path) - if not os.path.isabs(source_path): - m.meta["source"]["path"] = os.path.normpath( - os.path.join(m.path, source_path) - ) - elif "git_url" in m.meta["source"] and not ( - # absolute paths are not relative paths - os.path.isabs(m.meta["source"]["git_url"]) - or - # real urls are not relative paths - ":" in m.meta["source"]["git_url"] - ): - m.meta["source"]["git_url"] = os.path.normpath( - os.path.join(m.path, m.meta["source"]["git_url"]) - ) - - if not m.meta.get("build"): - m.meta["build"] = {} + if source_path := m.get_value("source/path"): + if not isabs(source_path): + m.meta["source"]["path"] = normpath(join(m.path, source_path)) + elif ( + (git_url := m.get_value("source/git_url")) + # absolute paths are not relative paths + and not isabs(git_url) + # real urls are not relative paths + and ":" not in git_url + ): + m.meta["source"]["git_url"] = normpath(join(m.path, git_url)) + + m.meta.setdefault("build", {}) _simplify_to_exact_constraints(m) @@ -953,7 +959,7 @@ def render_recipe( t.close() need_cleanup = True elif arg.endswith(".yaml"): - recipe_dir = os.path.dirname(arg) + recipe_dir = dirname(arg) need_cleanup = False else: print("Ignoring non-recipe: %s" % arg) @@ -987,9 +993,9 @@ def render_recipe( if m.final: if not hasattr(m.config, "variants") or not m.config.variant: m.config.ignore_system_variants = True - if os.path.isfile(os.path.join(m.path, "conda_build_config.yaml")): + if isfile(join(m.path, "conda_build_config.yaml")): m.config.variant_config_files = [ - os.path.join(m.path, "conda_build_config.yaml") + join(m.path, "conda_build_config.yaml") ] m.config.variants = get_package_variants(m, variants=variants) m.config.variant = m.config.variants[0] @@ -1076,7 +1082,7 @@ def output_yaml(metadata, filename=None, suppress_outputs=False): if filename: if any(sep in filename for sep in ("\\", "/")): try: - os.makedirs(os.path.dirname(filename)) + os.makedirs(dirname(filename)) except OSError: pass with open(filename, "w") as f: diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index e3b22ef7d2..cd093e6d9e 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -3,7 +3,7 @@ """ Tools for converting Cran packages to conda recipes. 
""" - +from __future__ import annotations import argparse import copy @@ -28,6 +28,7 @@ realpath, relpath, ) +from typing import Literal import requests import yaml @@ -40,13 +41,15 @@ from conda.common.io import dashlist -from conda_build import metadata, source +from conda_build import source from conda_build.conda_interface import TemporaryDirectory, cc_conda_build from conda_build.config import get_or_merge_config from conda_build.license_family import allowed_license_families, guess_license_family from conda_build.utils import ensure_list, rm_rf from conda_build.variants import DEFAULT_VARIANTS, get_package_variants +from ..metadata import MetaData + SOURCE_META = """\ {archive_keys} {git_url_key} {git_url} @@ -736,7 +739,9 @@ def strip_end(string, end): return string -def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version=None): +def package_to_inputs_dict( + output_dir, output_suffix, git_tag, package: str, version=None +): """ Converts `package` (*) into a tuple of: @@ -802,9 +807,10 @@ def package_to_inputs_dict(output_dir, output_suffix, git_tag, package, version= location = existing_location = existing_recipe_dir( output_dir, output_suffix, package, version ) + m: MetaData | None if existing_location: try: - m = metadata.MetaData(existing_location) + m = MetaData(existing_location) except: # Happens when the folder exists but contains no recipe. m = None @@ -868,7 +874,7 @@ def skeletonize( r_interp="r-base", use_binaries_ver=None, use_noarch_generic=False, - use_when_no_binary="src", + use_when_no_binary: Literal["error" | "src" | "old" | "old-src"] = "src", use_rtools_win=False, config=None, variant_config_files=None, @@ -884,6 +890,9 @@ def skeletonize( ): print(f"ERROR: --use_when_no_binary={use_when_no_binary} not yet implemented") sys.exit(1) + + m: MetaData + output_dir = realpath(output_dir) config = get_or_merge_config(config, variant_config_files=variant_config_files) @@ -970,9 +979,7 @@ def skeletonize( elif is_github_url or is_tarfile: rm_rf(config.work_dir) - m = metadata.MetaData.fromdict( - {"source": {"git_url": location}}, config=config - ) + m = MetaData.fromdict({"source": {"git_url": location}}, config=config) source.git_source( m.get_section("source"), m.config.git_cache, m.config.work_dir ) @@ -1088,7 +1095,7 @@ def skeletonize( m, "extra/recipe-maintainers", add_maintainer ) if m.version() == d["conda_version"]: - build_number = int(m.get_value("build/number", 0)) + build_number = m.build_number() build_number += 1 if update_policy == "merge-incr-build-num" else 0 if add_maintainer: new_maintainer = "{indent}{add_maintainer}".format( @@ -1695,8 +1702,8 @@ def skeletonize( ) -def version_compare(recipe_dir, newest_conda_version): - m = metadata.MetaData(recipe_dir) +def version_compare(recipe_dir: str, newest_conda_version): + m = MetaData(recipe_dir) local_version = m.version() package = basename(recipe_dir) diff --git a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml index cbe6ac859b..4bc665ad7d 100644 --- a/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml +++ b/tests/test-recipes/metadata/_empty_host_avoids_merge/meta.yaml @@ -1,5 +1,6 @@ package: name: pkg + version: 0.0.1 # build: # merge_build_host: False diff --git a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml index d4f463886f..8aae740991 100644 --- a/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml 
+++ b/tests/test-recipes/metadata/_no_merge_build_host/meta.yaml @@ -1,5 +1,6 @@ package: name: pkg + version: 0.0.1 build: merge_build_host: False diff --git a/tests/test-recipes/split-packages/_order/meta.yaml b/tests/test-recipes/split-packages/_order/meta.yaml index df0c0db7b2..0db9f6bbce 100644 --- a/tests/test-recipes/split-packages/_order/meta.yaml +++ b/tests/test-recipes/split-packages/_order/meta.yaml @@ -1,5 +1,7 @@ package: name: toplevel-ab + version: 0.0.1 + outputs: - name: a version: 1 diff --git a/tests/test-recipes/variants/27_requirements_host/meta.yaml b/tests/test-recipes/variants/27_requirements_host/meta.yaml index 0c4a833fa8..0ab071e56b 100644 --- a/tests/test-recipes/variants/27_requirements_host/meta.yaml +++ b/tests/test-recipes/variants/27_requirements_host/meta.yaml @@ -1,5 +1,6 @@ package: name: cfastpm + version: 0.0.1 requirements: host: From 90aee03547d0a0524adc840749805d7e2af2559f Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 3 Nov 2023 14:43:31 -0400 Subject: [PATCH 212/366] Test minimum conda supported (#5054) --- .github/workflows/tests.yml | 13 +++++++++++-- tests/test_deprecations.py | 3 ++- tests/test_metadata.py | 18 ++++++++++++------ 3 files changed, 25 insertions(+), 9 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9778c04d23..174eb4971e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,10 +69,18 @@ jobs: fail-fast: false matrix: # test all lower versions (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.8', '3.9', '3.10'] + python-version: ['3.9', '3.10'] conda-version: [release] test-type: [serial, parallel] include: + # minimum Python/conda combo + - python-version: '3.8' + conda-version: 22.11.0 + test-type: serial + - python-version: '3.8' + conda-version: 22.11.0 + test-type: parallel + # maximum Python/conda combo - python-version: '3.11' conda-version: canary test-type: serial @@ -81,6 +89,7 @@ jobs: test-type: parallel env: CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} + CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }} REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} REPLAY_DIR: ${{ github.workspace }}/pytest-replay ALLURE_DIR: ${{ github.workspace }}/allure-results @@ -115,7 +124,7 @@ jobs: conda install -q -y -c defaults \ --file ./tests/requirements.txt \ --file ./tests/requirements-linux.txt \ - ${{ env.CONDA_CHANNEL_LABEL }}::conda + ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }} pip install -e . 
- name: Show info diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 3df998fe1f..d0f97370fb 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -5,7 +5,8 @@ import sys import pytest -from conda.deprecations import DeprecatedError, DeprecationHandler + +from conda_build.deprecations import DeprecatedError, DeprecationHandler @pytest.fixture(scope="module") diff --git a/tests/test_metadata.py b/tests/test_metadata.py index e122b45b4b..0fd89a22c3 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -7,7 +7,9 @@ import sys import pytest +from conda import __version__ as conda_version from conda.base.context import context +from packaging.version import Version from pytest import MonkeyPatch from conda_build import api @@ -362,25 +364,20 @@ def test_yamlize_versions(): assert yml == ["1.2.3", "1.2.3.4"] -OS_ARCH = ( +OS_ARCH: tuple[str, ...] = ( "aarch64", "arm", "arm64", "armv6l", "armv7l", - "emscripten", - "freebsd", "linux", "linux32", "linux64", "osx", "ppc64", "ppc64le", - "riscv64", "s390x", "unix", - "wasi", - "wasm32", "win", "win32", "win64", @@ -390,6 +387,15 @@ def test_yamlize_versions(): "zos", ) +if Version(conda_version) >= Version("23.3"): + OS_ARCH = (*OS_ARCH, "riscv64") + +if Version(conda_version) >= Version("23.7"): + OS_ARCH = (*OS_ARCH, "freebsd") + +if Version(conda_version) >= Version("23.9"): + OS_ARCH = (*OS_ARCH, "emscripten", "wasi", "wasm32") + @pytest.mark.parametrize( ( From c71c4abee1c85f5a36733c461f224941ab3ebbd1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:31:49 -0600 Subject: [PATCH 213/366] [pre-commit.ci] pre-commit autoupdate (#5057) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.3 → v0.1.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.3...v0.1.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d021cb1b20..ce528cfa47 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -66,7 +66,7 @@ repos: - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.3 + rev: v0.1.4 hooks: - id: ruff args: [--fix] From d1b96db963fc0e519cc4b724f3001a28ba03d93d Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 9 Nov 2023 12:21:50 -0500 Subject: [PATCH 214/366] Refactor which_package & which_prefix (#5041) * Refactor which_package & which_prefix * Additional conda.models.dist.Dist -> conda.models.records.PrefixRecord replacements --- conda_build/conda_interface.py | 53 +++--- conda_build/inspect_pkg.py | 231 ++++++++++++++------------ conda_build/os_utils/ldd.py | 144 ++++++++-------- conda_build/os_utils/pyldd.py | 18 +- conda_build/post.py | 271 ++++++++++++++++++------------- conda_build/utils.py | 17 +- news/5041-refactor-which_package | 32 ++++ tests/cli/test_main_inspect.py | 2 +- tests/test_api_build.py | 1 - 9 files changed, 433 insertions(+), 336 deletions(-) create mode 100644 news/5041-refactor-which_package diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index dba4e4b1a7..833a4339f6 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -6,6 +6,8 @@ import os from functools import 
partial from importlib import import_module # noqa: F401 +from pathlib import Path +from typing import Iterable from conda import __version__ as CONDA_VERSION # noqa: F401 from conda.auxlib.packaging import ( # noqa: F401 @@ -53,7 +55,6 @@ human_bytes, input, install_actions, - is_linked, lchmod, linked, linked_data, @@ -75,7 +76,7 @@ ) from conda.models.channel import get_conda_build_local_url # noqa: F401 from conda.models.dist import Dist # noqa: F401 -from conda.models.records import PackageRecord +from conda.models.records import PackageRecord, PrefixRecord from .deprecations import deprecated @@ -125,46 +126,36 @@ class SignatureError(Exception): pass -@deprecated("3.28.0", "4.0.0") -def which_package(path): - """ - Given the path (of a (presumably) conda installed file) iterate over - the conda packages the file came from. Usually the iteration yields - only one package. - """ - from os.path import abspath, join +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.inspect_pkg.which_package` instead.", +) +def which_package(path: str | os.PathLike | Path) -> Iterable[PrefixRecord]: + from .inspect_pkg import which_package - path = abspath(path) - prefix = which_prefix(path) - if prefix is None: - raise RuntimeError("could not determine conda prefix from: %s" % path) - for dist in linked(prefix): - meta = is_linked(prefix, dist) - if any(abspath(join(prefix, f)) == path for f in meta["files"]): - yield dist + return which_package(path, which_prefix(path)) @deprecated("3.28.0", "4.0.0") -def which_prefix(path): +def which_prefix(path: str | os.PathLike | Path) -> Path: """ Given the path (to a (presumably) conda installed file) return the environment prefix in which the file in located """ - from os.path import abspath, dirname, isdir, join + from conda.gateways.disk.test import is_conda_environment - prefix = abspath(path) - iteration = 0 - while iteration < 20: - if isdir(join(prefix, "conda-meta")): - # we found it, so let's return it - break - if prefix == dirname(prefix): + prefix = Path(path) + for _ in range(20): + if is_conda_environment(prefix): + return prefix + elif prefix == (parent := prefix.parent): # we cannot chop off any more directories, so we didn't find it - prefix = None break - prefix = dirname(prefix) - iteration += 1 - return prefix + else: + prefix = parent + + raise RuntimeError("could not determine conda prefix from: %s" % path) @deprecated("3.28.0", "4.0.0") diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index cbb60d4f25..e8b94978c1 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import json import os import re @@ -9,13 +11,19 @@ from functools import lru_cache from itertools import groupby from operator import itemgetter -from os.path import abspath, basename, dirname, exists, join, normcase +from os.path import abspath, basename, dirname, exists, join +from pathlib import Path +from typing import Iterable, Literal + +from conda.core.prefix_data import PrefixData +from conda.models.dist import Dist +from conda.models.records import PrefixRecord +from conda.resolve import MatchSpec from conda_build.conda_interface import ( display_actions, get_index, install_actions, - is_linked, linked_data, specs_from_args, ) @@ -34,32 +42,47 @@ rm_rf, ) +from .deprecations import deprecated +from .utils import on_mac, on_win + +@deprecated("3.28.0", "4.0.0") @lru_cache(maxsize=None) 
-def dist_files(prefix, dist): - meta = is_linked(prefix, dist) - return set(meta["files"]) if meta else set() +def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: + if (prec := PrefixData(prefix).get(dist.name, None)) is None: + return set() + elif MatchSpec(dist).match(prec): + return set(prec["files"]) + else: + return set() -def which_package(in_prefix_path, prefix, avoid_canonical_channel_name=False): +@deprecated.argument("3.28.0", "4.0.0", "avoid_canonical_channel_name") +def which_package( + path: str | os.PathLike | Path, + prefix: str | os.PathLike | Path, +) -> Iterable[PrefixRecord]: """ - given the path of a conda installed file iterate over + Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. """ - norm_ipp = normcase(in_prefix_path.replace(os.sep, "/")) - from conda_build.utils import linked_data_no_multichannels + prefix = Path(prefix) + # historically, path was relative to prefix just to be safe we append to prefix + # (pathlib correctly handles this even if path is absolute) + path = prefix / path - if avoid_canonical_channel_name: - fn = linked_data_no_multichannels - else: - fn = linked_data - for dist in fn(prefix): - # dfiles = set(dist.get('files', [])) - dfiles = dist_files(prefix, dist) - # TODO :: This is completely wrong when the env is on a case-sensitive FS! - if any(norm_ipp == normcase(w) for w in dfiles): - yield dist + def samefile(path1: Path, path2: Path) -> bool: + try: + return path1.samefile(path2) + except FileNotFoundError: + # FileNotFoundError: path doesn't exist + return path1 == path2 + + for prec in PrefixData(str(prefix)).iter_records(): + for file in prec["files"]: + if samefile(prefix / file, path): + yield prec def print_object_info(info, key): @@ -106,25 +129,37 @@ def check_install( return None -def print_linkages(depmap, show_files=False): - # Print system and not found last - dist_depmap = {} - for k, v in depmap.items(): - if hasattr(k, "dist_name"): - k = k.dist_name - dist_depmap[k] = v +def print_linkages( + depmap: dict[ + PrefixRecord | Literal["not found" | "system" | "untracked"], + list[tuple[str, str, str]], + ], + show_files: bool = False, +) -> str: + # print system, not found, and untracked last + sort_order = { + # PrefixRecord: (0, PrefixRecord.name), + "system": (1, "system"), + "not found": (2, "not found"), + "untracked": (3, "untracked"), + # str: (4, str), + } - depmap = dist_depmap - k = sorted(set(depmap.keys()) - {"system", "not found"}) - all_deps = k if "not found" not in depmap.keys() else k + ["system", "not found"] output_string = "" - for dep in all_deps: - output_string += "%s:\n" % dep + for prec, links in sorted( + depmap.items(), + key=( + lambda key: (0, key[0].name) + if isinstance(key[0], PrefixRecord) + else sort_order.get(key[0], (4, key[0])) + ), + ): + output_string += "%s:\n" % prec if show_files: - for lib, path, binary in sorted(depmap[dep]): + for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" else: - for lib, path in sorted(set(map(itemgetter(0, 1), depmap[dep]))): + for lib, path in sorted(set(map(itemgetter(0, 1), links))): output_string += f" {lib} ({path})\n" output_string += "\n" return output_string @@ -214,10 +249,9 @@ def test_installable(channel="defaults"): return success -def _installed(prefix): - installed = linked_data(prefix) - installed = {rec["name"]: dist for dist, rec in installed.items()} - return installed 
+@deprecated("3.28.0", "4.0.0") +def _installed(prefix: str | os.PathLike | Path) -> dict[str, Dist]: + return {dist.name: dist for dist in linked_data(str(prefix))} def _underlined_text(text): @@ -225,79 +259,66 @@ def _underlined_text(text): def inspect_linkages( - packages, - prefix=sys.prefix, - untracked=False, - all_packages=False, - show_files=False, - groupby="package", + packages: Iterable[str | _untracked_package], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package" | "dependency"] = "package", sysroot="", ): - pkgmap = {} - - installed = _installed(prefix) - if not packages and not untracked and not all_packages: - raise ValueError( - "At least one package or --untracked or --all must be provided" - ) + sys.exit("At least one package or --untracked or --all must be provided") + elif on_win: + sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") + + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} if all_packages: packages = sorted(installed.keys()) - + packages = ensure_list(packages) if untracked: packages.append(untracked_package) - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - sys.exit(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - if not sys.platform.startswith(("linux", "darwin")): - sys.exit( - "Error: conda inspect linkages is only implemented in Linux and OS X" - ) - - if dist == untracked_package: + pkgmap: dict[str | _untracked_package, dict[str, list]] = {} + for name in packages: + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + sys.exit(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + linkages = get_linkages(obj_files, prefix, sysroot) - depmap = defaultdict(list) - pkgmap[pkg] = depmap - depmap["not found"] = [] - depmap["system"] = [] - for binary in linkages: - for lib, path in linkages[binary]: + pkgmap[name] = depmap = defaultdict(list) + for binary, paths in linkages.items(): + for lib, path in paths: path = ( replace_path(binary, path, prefix) if path not in {"", "not found"} else path ) - if path.startswith(prefix): - in_prefix_path = re.sub("^" + prefix + "/", "", path) - deps = list(which_package(in_prefix_path, prefix)) - if len(deps) > 1: - deps_str = [str(dep) for dep in deps] + try: + relative = str(Path(path).relative_to(prefix)) + except ValueError: + # ValueError: path is not relative to prefix + relative = None + if relative: + precs = list(which_package(relative, prefix)) + if len(precs) > 1: get_logger(__name__).warn( - "Warning: %s comes from multiple " "packages: %s", + "Warning: %s comes from multiple packages: %s", path, - comma_join(deps_str), + comma_join(map(str, precs)), ) - if not deps: + elif not precs: if exists(path): - depmap["untracked"].append( - (lib, path.split(prefix + "/", 1)[-1], binary) - ) + depmap["untracked"].append((lib, relative, binary)) else: - depmap["not found"].append( - (lib, path.split(prefix + "/", 1)[-1], binary) - ) - for d in deps: - depmap[d].append((lib, path.split(prefix + "/", 1)[-1], binary)) + depmap["not found"].append((lib, relative, binary)) + for prec in precs: + depmap[prec].append((lib, relative, binary)) elif path == "not 
found": depmap["not found"].append((lib, path, binary)) else: @@ -330,27 +351,27 @@ def inspect_linkages( return output_string -def inspect_objects(packages, prefix=sys.prefix, groupby="package"): - installed = _installed(prefix) - - output_string = "" - for pkg in ensure_list(packages): - if pkg == untracked_package: - dist = untracked_package - elif pkg not in installed: - raise ValueError(f"Package {pkg} is not installed in {prefix}") - else: - dist = installed[pkg] - - output_string += _underlined_text(pkg) +def inspect_objects( + packages: Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + groupby: str = "package", +): + if not on_mac: + sys.exit("Error: conda inspect objects is only implemented in OS X") - if not sys.platform.startswith("darwin"): - sys.exit("Error: conda inspect objects is only implemented in OS X") + prefix = Path(prefix) + installed = {prec.name: prec for prec in PrefixData(str(prefix)).iter_records()} - if dist == untracked_package: + output_string = "" + for name in ensure_list(packages): + if name == untracked_package: obj_files = get_untracked_obj_files(prefix) + elif name not in installed: + raise ValueError(f"Package {name} is not installed in {prefix}") else: - obj_files = get_package_obj_files(dist, prefix) + obj_files = get_package_obj_files(installed[name], prefix) + + output_string += _underlined_text(name) info = [] for f in obj_files: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 32eea125a2..f3597d065a 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -1,15 +1,24 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os import re import subprocess -import sys from functools import lru_cache -from os.path import basename, join +from os.path import basename +from pathlib import Path +from typing import Iterable + +from conda.models.records import PrefixRecord -from conda_build.conda_interface import linked_data, untracked +from conda_build.conda_interface import untracked from conda_build.os_utils.macho import otool from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile +from ..deprecations import deprecated +from ..utils import on_linux, on_mac + LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") @@ -38,94 +47,85 @@ def ldd(path): return res -def get_linkages(obj_files, prefix, sysroot): - return _get_linkages(tuple(obj_files), prefix, sysroot) +def get_linkages( + obj_files: Iterable[str], + prefix: str | os.PathLike | Path, + sysroot, +) -> dict[str, list[tuple[str, str]]]: + return _get_linkages(tuple(obj_files), Path(prefix), sysroot) @lru_cache(maxsize=None) -def _get_linkages(obj_files, prefix, sysroot): - res = {} - - for f in obj_files: - path = join(prefix, f) - # ldd quite often fails on foreign architectures. - ldd_failed = False +def _get_linkages( + obj_files: tuple[str], + prefix: Path, + sysroot, +) -> dict[str, list[tuple[str, str]]]: + linkages = {} + for file in obj_files: # Detect the filetype to emulate what the system-native tool does. 
- klass = codefile_class(path) - if klass == machofile: + path = prefix / file + if codefile_class(path) == machofile: resolve_filenames = False recurse = False else: resolve_filenames = True recurse = True - try: - if sys.platform.startswith("linux"): - res[f] = ldd(path) - elif sys.platform.startswith("darwin"): - links = otool(path) - res[f] = [(basename(line["name"]), line["name"]) for line in links] - except: - ldd_failed = True - finally: - res_py = inspect_linkages( + ldd_emulate = [ + (basename(link), link) + for link in inspect_linkages( path, resolve_filenames=resolve_filenames, sysroot=sysroot, recurse=recurse, ) - res_py = [(basename(lp), lp) for lp in res_py] - if ldd_failed: - res[f] = res_py - else: - if set(res[f]) != set(res_py): - print( - "WARNING: pyldd disagrees with ldd/otool. This will not cause any" - ) - print("WARNING: problems for this build, but please file a bug at:") - print("WARNING: https://github.com/conda/conda-build") - print(f"WARNING: and (if possible) attach file {path}") - print( - "WARNING: \nldd/otool gives:\n{}\npyldd gives:\n{}\n".format( - "\n".join(str(e) for e in res[f]), - "\n".join(str(e) for e in res_py), - ) - ) - print(f"Diffs\n{set(res[f]) - set(res_py)}") - print(f"Diffs\n{set(res_py) - set(res[f])}") - return res - + ] + try: + if on_linux: + ldd_computed = ldd(path) + elif on_mac: + ldd_computed = [ + (basename(link["name"]), link["name"]) for link in otool(path) + ] + except: + # ldd quite often fails on foreign architectures, fallback to + ldd_computed = ldd_emulate + + if set(ldd_computed) != set(ldd_emulate): + print("WARNING: pyldd disagrees with ldd/otool. This will not cause any") + print("WARNING: problems for this build, but please file a bug at:") + print("WARNING: https://github.com/conda/conda-build") + print(f"WARNING: and (if possible) attach file {path}") + print("WARNING:") + print(" ldd/otool gives:") + print(" " + "\n ".join(map(str, ldd_computed))) + print(" pyldd gives:") + print(" " + "\n ".join(map(str, ldd_emulate))) + print(f"Diffs\n{set(ldd_computed) - set(ldd_emulate)}") + print(f"Diffs\n{set(ldd_emulate) - set(ldd_computed)}") + + linkages[file] = ldd_computed + return linkages + + +@deprecated("3.28.0", "4.0.0") @lru_cache(maxsize=None) -def get_package_files(dist, prefix): - files = [] - if hasattr(dist, "get"): - files = dist.get("files") - else: - data = linked_data(prefix).get(dist) - if data: - files = data.get("files", []) - return files +def get_package_files( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> list[str]: + return prec["files"] @lru_cache(maxsize=None) -def get_package_obj_files(dist, prefix): - res = [] - files = get_package_files(dist, prefix) - for f in files: - path = join(prefix, f) - if codefile_class(path): - res.append(f) - - return res +def get_package_obj_files( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> list[str]: + return [file for file in prec["files"] if codefile_class(Path(prefix, file))] @lru_cache(maxsize=None) -def get_untracked_obj_files(prefix): - res = [] - files = untracked(prefix) - for f in files: - path = join(prefix, f) - if codefile_class(path): - res.append(f) - - return res +def get_untracked_obj_files(prefix: str | os.PathLike | Path) -> list[str]: + return [ + file for file in untracked(str(prefix)) if codefile_class(Path(prefix, file)) + ] diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 42b89711ae..90679409c9 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ 
-365,6 +365,7 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) +@deprecated("3.28.0", "4.0.0") def mach_o_change(path, arch, what, value): """ Replace a given name (what) in any LC_LOAD_DYLIB command found in @@ -1139,6 +1140,7 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): return cf.uniqueness_key(), orig_names, resolved_names +@deprecated("3.28.0", "4.0.0") def inspect_rpaths( filename, resolve_dirnames=True, use_os_varnames=True, sysroot="", arch="native" ): @@ -1170,6 +1172,7 @@ def inspect_rpaths( return cf.rpaths_nontransitive +@deprecated("3.28.0", "4.0.0") def get_runpaths(filename, arch="native"): if not os.path.exists(filename): return [] @@ -1257,16 +1260,16 @@ def otool(*args): return 1 +@deprecated("3.28.0", "4.0.0") def otool_sys(*args): import subprocess - result = subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") - return result + return subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") +@deprecated("3.28.0", "4.0.0") def ldd_sys(*args): - result = [] - return result + return [] def ldd(*args): @@ -1297,12 +1300,11 @@ def main(argv): elif re.match(r".*otool(?:$|\.exe|\.py)", progname): return otool(*argv[2 - idx :]) elif os.path.isfile(progname): - klass = codefile_class(progname) - if not klass: + if not (codefile := codefile_class(progname)): return 1 - elif klass == elffile: + elif codefile == elffile: return ldd(*argv[1 - idx :]) - elif klass == machofile: + elif codefile == machofile: return otool("-L", *argv[1 - idx :]) return 1 diff --git a/conda_build/post.py b/conda_build/post.py index 7be43cbe21..93c761d3d0 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1,5 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json import locale import os import re @@ -27,12 +30,12 @@ sep, splitext, ) +from pathlib import Path from subprocess import CalledProcessError, call, check_output +from typing import Iterable, Literal -try: - from os import readlink -except ImportError: - readlink = False +from conda.core.prefix_data import PrefixData +from conda.models.records import PrefixRecord from conda_build import utils from conda_build.conda_interface import ( @@ -44,7 +47,6 @@ from conda_build.exceptions import OverDependingError, OverLinkingError, RunPathError from conda_build.inspect_pkg import which_package from conda_build.os_utils import external, macho -from conda_build.os_utils.ldd import get_package_files, get_package_obj_files from conda_build.os_utils.liefldd import ( get_exports_memoized, get_linkages_memoized, @@ -61,6 +63,7 @@ machofile, ) +from .deprecations import deprecated from .metadata import MetaData filetypes_for_platform = { @@ -648,33 +651,88 @@ def assert_relative_osx(path, host_prefix, build_prefix): ) +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead.", +) def determine_package_nature( - pkg, prefix, subdir, bldpkgs_dir, output_folder, channel_urls -): - run_exports = None - lib_prefix = pkg.name.startswith("lib") - codefiles = get_package_obj_files(pkg, prefix) - # get_package_obj_files already filters by extension and I'm not sure we need two. - dsos = [ - f for f in codefiles for ext in (".dylib", ".so", ".dll", ".pyd") if ext in f - ] - # TODO :: Is this package not in a channel somewhere at this point? 
It would be good not to be special - # casing like this. Clearly we aren't able to get run_exports for starters and that's not good - if not isinstance(pkg, FakeDist): - # we don't care about the actual run_exports value, just whether or not run_exports are present. - json_file = os.path.join(prefix, "conda-meta", pkg.dist_name + ".json") - import json - - assert os.path.isfile(json_file), f"conda-meta :: Not a file: {json_file}" - json_info = json.loads(open(json_file).read()) - epd = json_info["extracted_package_dir"] - run_exports_json = os.path.join(epd, "info", "run_exports.json") - if os.path.isfile(run_exports_json): - run_exports = json.loads(open(run_exports_json).read()) - return (dsos, run_exports, lib_prefix) - - -def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls): + prec: PrefixRecord, + prefix: str | os.PathLike | Path, + subdir, + bldpkgs_dir, + output_folder, + channel_urls, +) -> tuple[set[str], tuple[str, ...], bool]: + return ( + get_dsos(prec, prefix), + get_run_exports(prec, prefix), + prec.name.startswith("lib"), + ) + + +def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: + return { + file + for file in prec["files"] + if codefile_class(Path(prefix, file)) + # codefile_class already filters by extension/binary type, do we need this second filter? + for ext in (".dylib", ".so", ".dll", ".pyd") + if ext in file + } + + +def get_run_exports( + prec: PrefixRecord, + prefix: str | os.PathLike | Path, +) -> tuple[str, ...]: + json_file = Path( + prefix, + "conda-meta", + f"{prec.name}-{prec.version}-{prec.build}.json", + ) + try: + json_info = json.loads(json_file.read_text()) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + # raise CondaBuildException(f"Not a file: {json_file}") + # is this a "fake" PrefixRecord? + # i.e. this is the package being built and hasn't been "installed" to disk? + return () + + run_exports_json = Path( + json_info["extracted_package_dir"], + "info", + "run_exports.json", + ) + try: + return tuple(json.loads(run_exports_json.read_text())) + except (FileNotFoundError, IsADirectoryError): + # FileNotFoundError: path doesn't exist + # IsADirectoryError: path is a directory + return () + + +@deprecated.argument("3.28.0", "4.0.0", "subdir") +@deprecated.argument("3.28.0", "4.0.0", "bldpkgs_dirs") +@deprecated.argument("3.28.0", "4.0.0", "output_folder") +@deprecated.argument("3.28.0", "4.0.0", "channel_urls") +def library_nature( + prec: PrefixRecord, prefix: str | os.PathLike | Path +) -> Literal[ + "interpreter (Python)" + | "interpreter (R)" + | "run-exports library" + | "dso library" + | "plugin library (Python,R)" + | "plugin library (Python)" + | "plugin library (R)" + | "interpreted library (Python,R)" + | "interpreted library (Python)" + | "interpreted library (R)" + | "non-library" +]: """ Result :: "non-library", "interpreted library (Python|R|Python,R)", @@ -685,55 +743,53 @@ def library_nature(pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_url "interpreter (Python)" .. in that order, i.e. if have both dsos and run_exports, it's a run_exports_library. 
""" - dsos, run_exports, _ = determine_package_nature( - pkg, prefix, subdir, bldpkgs_dirs, output_folder, channel_urls - ) - if pkg.name == "python": + if prec.name == "python": return "interpreter (Python)" - elif pkg.name == "r-base": + elif prec.name == "r-base": return "interpreter (R)" - if run_exports: + elif get_run_exports(prec, prefix): return "run-exports library" - elif len(dsos): + elif dsos := get_dsos(prec, prefix): # If all DSOs are under site-packages or R/lib/ - python_dsos = [dso for dso in dsos if "site-packages" in dso] - r_dsos = [dso for dso in dsos if "lib/R/library" in dso] - dsos_without_plugins = [dso for dso in dsos if dso not in r_dsos + python_dsos] - if len(dsos_without_plugins): + python_dsos = {dso for dso in dsos if "site-packages" in dso} + r_dsos = {dso for dso in dsos if "lib/R/library" in dso} + if dsos - python_dsos - r_dsos: return "dso library" - else: - if python_dsos and r_dsos: - return "plugin library (Python,R)" - elif python_dsos: - return "plugin library (Python)" - elif r_dsos: - return "plugin library (R)" + elif python_dsos and r_dsos: + return "plugin library (Python,R)" + elif python_dsos: + return "plugin library (Python)" + elif r_dsos: + return "plugin library (R)" else: - files = get_package_files(pkg, prefix) - python_files = [f for f in files if "site-packages" in f] - r_files = [f for f in files if "lib/R/library" in f] + python_files = {file for file in prec["files"] if "site-packages" in file} + r_files = {file for file in prec["files"] if "lib/R/library" in file} if python_files and r_files: return "interpreted library (Python,R)" elif python_files: return "interpreted library (Python)" elif r_files: return "interpreted library (R)" - return "non-library" -def dists_from_names(names, prefix): - results = [] +@deprecated( + "3.28.0", + "4.0.0", + addendum="Query `conda.core.prefix_data.PrefixData` instead.", +) +def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): from conda_build.utils import linked_data_no_multichannels - pkgs = linked_data_no_multichannels(prefix) - for name in names: - for pkg in pkgs: - if pkg.quad[0] == name: - results.append(pkg) - return results + names = utils.ensure_list(names) + return [prec for prec in linked_data_no_multichannels(prefix) if prec.name in names] +@deprecated( + "3.28.0", + "4.0.0", + addendum="Use `conda.models.records.PrefixRecord` instead.", +) class FakeDist: def __init__(self, name, version, build_number, build_str, channel, files): self.name = name @@ -922,9 +978,7 @@ def _map_file_to_package( if not len(owners): if any(rp == normpath(w) for w in files): owners.append(pkg_vendored_dist) - new_pkgs = list( - which_package(rp, prefix, avoid_canonical_channel_name=True) - ) + new_pkgs = list(which_package(rp, prefix)) # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners # and not any([fnmatch(new_pkg.name, i) for i in ignore_for_statics])] @@ -964,25 +1018,20 @@ def _map_file_to_package( return prefix_owners, contains_dsos, contains_static_libs, all_lib_exports +@deprecated( + "3.28.0", "4.0.0", addendum="Use `conda.models.records.PrefixRecord` instead." 
+) def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, files): - pkg_vendoring_name = pkg_name - pkg_vendoring_version = str(pkg_version) - pkg_vendoring_build_str = build_str - pkg_vendoring_build_number = build_number - pkg_vendoring_key = "-".join( - [pkg_vendoring_name, pkg_vendoring_version, pkg_vendoring_build_str] - ) - return ( FakeDist( - pkg_vendoring_name, - pkg_vendoring_version, - pkg_vendoring_build_number, - pkg_vendoring_build_str, + pkg_name, + str(pkg_version), + build_number, + build_str, channel, files, ), - pkg_vendoring_key, + f"{pkg_name}-{pkg_version}-{build_str}", ) @@ -1121,20 +1170,18 @@ def _lookup_in_prefix_packages( in_prefix_dso = normpath(needed_dso) n_dso_p = "Needed DSO {}".format(in_prefix_dso.replace("\\", "/")) and_also = " (and also in this package)" if in_prefix_dso in files else "" - pkgs = list( - which_package(in_prefix_dso, run_prefix, avoid_canonical_channel_name=True) - ) - in_pkgs_in_run_reqs = [pkg for pkg in pkgs if pkg.quad[0] in requirements_run] + precs = list(which_package(in_prefix_dso, run_prefix)) + precs_in_reqs = [prec for prec in precs if prec.name in requirements_run] # TODO :: metadata build/inherit_child_run_exports (for vc, mro-base-impl). - for pkg in in_pkgs_in_run_reqs: - if pkg in lib_packages: - lib_packages_used.add(pkg) + for prec in precs_in_reqs: + if prec in lib_packages: + lib_packages_used.add(prec) in_whitelist = any([fnmatch(in_prefix_dso, w) for w in whitelist]) - if len(in_pkgs_in_run_reqs) == 1: + if len(precs_in_reqs) == 1: _print_msg( errors, "{}: {} found in {}{}".format( - info_prelude, n_dso_p, in_pkgs_in_run_reqs[0], and_also + info_prelude, n_dso_p, precs_in_reqs[0], and_also ), verbose=verbose, ) @@ -1144,11 +1191,11 @@ def _lookup_in_prefix_packages( f"{info_prelude}: {n_dso_p} found in the whitelist", verbose=verbose, ) - elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0: + elif len(precs_in_reqs) == 0 and len(precs) > 0: _print_msg( errors, "{}: {} found in {}{}".format( - msg_prelude, n_dso_p, [p.quad[0] for p in pkgs], and_also + msg_prelude, n_dso_p, [prec.name for prec in precs], and_also ), verbose=verbose, ) @@ -1156,15 +1203,15 @@ def _lookup_in_prefix_packages( errors, "{}: .. but {} not in reqs/run, (i.e. 
it is overlinking)" " (likely) or a missing dependency (less likely)".format( - msg_prelude, [p.quad[0] for p in pkgs] + msg_prelude, [prec.name for prec in precs] ), verbose=verbose, ) - elif len(in_pkgs_in_run_reqs) > 1: + elif len(precs_in_reqs) > 1: _print_msg( errors, "{}: {} found in multiple packages in run/reqs: {}{}".format( - warn_prelude, in_prefix_dso, in_pkgs_in_run_reqs, and_also + warn_prelude, in_prefix_dso, precs_in_reqs, and_also ), verbose=verbose, ) @@ -1283,11 +1330,11 @@ def _show_linking_messages( def check_overlinking_impl( - pkg_name, - pkg_version, - build_str, - build_number, - subdir, + pkg_name: str, + pkg_version: str, + build_str: str, + build_number: int, + subdir: str, ignore_run_exports, requirements_run, requirements_build, @@ -1326,30 +1373,32 @@ def check_overlinking_impl( build_prefix_substitution = "$PATH" # Used to detect overlinking (finally) requirements_run = [req.split(" ")[0] for req in requirements_run] - packages = dists_from_names(requirements_run, run_prefix) + pd = PrefixData(run_prefix) + precs = [prec for req in requirements_run if (prec := pd.get(req, None))] local_channel = ( dirname(bldpkgs_dirs).replace("\\", "/") if utils.on_win else dirname(bldpkgs_dirs)[1:] ) - pkg_vendored_dist, pkg_vendoring_key = _get_fake_pkg_dist( - pkg_name, pkg_version, build_str, build_number, local_channel, files + pkg_vendored_dist = PrefixRecord( + name=pkg_name, + version=str(pkg_version), + build=build_str, + build_number=build_number, + channel=local_channel, + files=files, ) - packages.append(pkg_vendored_dist) + pkg_vendoring_key = f"{pkg_name}-{pkg_version}-{build_str}" + precs.append(pkg_vendored_dist) ignore_list = utils.ensure_list(ignore_run_exports) if subdir.startswith("linux"): ignore_list.append("libgcc-ng") - package_nature = { - package: library_nature( - package, run_prefix, subdir, bldpkgs_dirs, output_folder, channel_urls - ) - for package in packages - } + package_nature = {prec: library_nature(prec, run_prefix) for prec in precs} lib_packages = { - package - for package in packages - if package.quad[0] not in ignore_list and [package] != "non-library" + prec + for prec, nature in package_nature.items() + if prec.name not in ignore_list and nature != "non-library" } lib_packages_used = {pkg_vendored_dist} @@ -1712,14 +1761,12 @@ def post_build(m, files, build_python, host_prefix=None, is_already_linked=False def check_symlinks(files, prefix, croot): - if readlink is False: - return # Not on Unix system msgs = [] real_build_prefix = realpath(prefix) for f in files: path = join(real_build_prefix, f) if islink(path): - link_path = readlink(path) + link_path = os.readlink(path) real_link_path = realpath(path) # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS diff --git a/conda_build/utils.py b/conda_build/utils.py index 4f68a7f79e..9f41400990 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import fnmatch import hashlib @@ -70,6 +72,9 @@ from conda.api import PackageCacheData # noqa from conda.base.constants import KNOWN_SUBDIRS +from conda.core.prefix_data import PrefixData +from conda.models.dist import Dist +from conda.models.records import PrefixRecord # NOQA because it is not used in this file. 
from conda_build.conda_interface import rm_rf as _rm_rf # noqa @@ -1274,7 +1279,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype # noqa: E721 + uniform = lambda e: type(e) == etype # noqa: E731 try: return all(uniform(e) for e in arg) @@ -2161,17 +2166,17 @@ def download_channeldata(channel_url): return data -def linked_data_no_multichannels(prefix): +def linked_data_no_multichannels( + prefix: str | os.PathLike | Path, +) -> dict[Dist, PrefixRecord]: """ Return a dictionary of the linked packages in prefix, with correct channels, hopefully. cc @kalefranz. """ - from conda.core.prefix_data import PrefixData - from conda.models.dist import Dist - + prefix = Path(prefix) return { Dist.from_string(prec.fn, channel_override=prec.channel.name): prec - for prec in PrefixData(prefix)._prefix_records.values() + for prec in PrefixData(str(prefix)).iter_records() } diff --git a/news/5041-refactor-which_package b/news/5041-refactor-which_package new file mode 100644 index 0000000000..0b060e3e51 --- /dev/null +++ b/news/5041-refactor-which_package @@ -0,0 +1,32 @@ +### Enhancements + +* Consolidate `which_package` implementations and replacing `conda.models.dist.Dist` usage in favor of `conda.models.records.PrefixRecords`. (#5041) + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.inspect_pkg.dist_files` as pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)` as pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg._installed` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.ldd.get_package_files` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.mach_o_change` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.inspect_rpath` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.get_runpaths` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.otool_sys` as pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.ldd_sys` as pending deprecation. (#5041) +* Mark `conda_build.post.determine_package_nature` as pending deprecation. Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead. (#5041) +* Mark `conda_build.post.library_nature(subdir, bldpkgs_dirs, output_folder, channel_urls)` as pending deprecation. (#5041) +* Mark `conda_build.post.dist_from_names` as pending deprecation. Query `conda.core.prefix_data.PrefixData` instead. (#5041) +* Mark `conda_build.post.FakeDist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.post._get_fake_pkg_dist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. 
(#5041) + +### Docs + +* + +### Other + +* diff --git a/tests/cli/test_main_inspect.py b/tests/cli/test_main_inspect.py index 94ede317dc..b8931b5220 100644 --- a/tests/cli/test_main_inspect.py +++ b/tests/cli/test_main_inspect.py @@ -22,7 +22,7 @@ def test_inspect_installable(testing_workdir): def test_inspect_linkages(testing_workdir, capfd): # get a package that has known object output args = ["linkages", "python"] - if sys.platform == "win32": + if on_win: with pytest.raises(SystemExit) as exc: main_inspect.execute(args) assert "conda inspect linkages is only implemented in Linux and OS X" in exc diff --git a/tests/test_api_build.py b/tests/test_api_build.py index e0c786dcc8..c3059f066d 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -1691,7 +1691,6 @@ def test_provides_features_metadata(testing_config): # using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition # https://github.com/conda/conda-build/issues/4708 @pytest.mark.serial -@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_overlinking_detection(testing_config, variants_conda_build_sysroot): testing_config.activate = True testing_config.error_overlinking = True From 785ba7d43f18fb0869a82af56f26bb468667d755 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 19:17:28 +0100 Subject: [PATCH 215/366] [pre-commit.ci] pre-commit autoupdate (#5070) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ce528cfa47..3c4ed05246 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -55,7 +55,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.11.0 hooks: # auto format Python codes - id: black @@ -66,7 +66,7 @@ repos: - id: blacken-docs additional_dependencies: [black] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.4 + rev: v0.1.5 hooks: - id: ruff args: [--fix] From b2b7ab3fee51a97a707d39ec905f58521ad327d7 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Fri, 17 Nov 2023 17:15:54 +0100 Subject: [PATCH 216/366] Fix testing config monkeypatch for concurrent test flakiness (#5068) * Fix testing config monkeypatch for concurrent test flakiness The default_testing_config monkeypatching fixture was added in gh-4653 but did not consider "from .config import get_or_merge_config" cases in which get_or_merge_config is already bound and thus not patched. * main_build: Construct config via get_or_merge_config Helps tests with default_testing_config monkeypatch. * Tests: Don't preset values for get_or_merge_config Otherwise default values set for testing_config before, e.g., environment variable-dependent config can be set in the tests. Example: tests/cli/test_main_build.py::test_conda_py_no_period failed since it sets CONDA_PY=36 but testing_config already had set it before. 
--------- Signed-off-by: Marcel Bargull Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- conda_build/cli/main_build.py | 8 ++++++-- conda_build/config.py | 10 ++++++++-- tests/cli/test_main_metapackage.py | 13 ++++--------- tests/cli/test_main_render.py | 12 ++++++------ tests/conftest.py | 15 ++++++++++----- 5 files changed, 34 insertions(+), 24 deletions(-) diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index cba6fec6ff..73da5bdec6 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -15,7 +15,11 @@ from .. import api, build, source, utils from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build -from ..config import Config, get_channel_urls, zstd_compression_level_default +from ..config import ( + get_channel_urls, + get_or_merge_config, + zstd_compression_level_default, +) from ..deprecations import deprecated from ..utils import LoggingContext from .actions import KeyValueAction @@ -514,7 +518,7 @@ def check_action(recipe, config): def execute(args): _parser, args = parse_args(args) - config = Config(**args.__dict__) + config = get_or_merge_config(None, **args.__dict__) build.check_external() # change globals in build module, see comment there as well diff --git a/conda_build/config.py b/conda_build/config.py index e1bba06518..09f3cbcb67 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -913,8 +913,9 @@ def __exit__(self, e_type, e_value, traceback): self.clean(remove_folders=False) -def get_or_merge_config(config, variant=None, **kwargs): - """Always returns a new object - never changes the config that might be passed in.""" +def _get_or_merge_config(config, variant=None, **kwargs): + # This function should only ever be called via get_or_merge_config. + # It only exists for us to monkeypatch a default config when running tests. 
if not config: config = Config(variant=variant) else: @@ -928,6 +929,11 @@ def get_or_merge_config(config, variant=None, **kwargs): return config +def get_or_merge_config(config, variant=None, **kwargs): + """Always returns a new object - never changes the config that might be passed in.""" + return _get_or_merge_config(config, variant=variant, **kwargs) + + def get_channel_urls(args): channel_urls = args.get("channel") or args.get("channels") or () final_channel_urls = [] diff --git a/tests/cli/test_main_metapackage.py b/tests/cli/test_main_metapackage.py index 19312ae539..44ec145264 100644 --- a/tests/cli/test_main_metapackage.py +++ b/tests/cli/test_main_metapackage.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause import json import os -import sys from glob import glob from conda_build.cli import main_metapackage @@ -15,8 +14,7 @@ def test_metapackage(testing_config, testing_workdir): main_metapackage.execute(args) test_path = glob( os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_config.host_subdir, "metapackage_test-1.0-0.tar.bz2", ) @@ -38,8 +36,7 @@ def test_metapackage_build_number(testing_config, testing_workdir): main_metapackage.execute(args) test_path = glob( os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_config.host_subdir, "metapackage_test_build_number-1.0-1.tar.bz2", ) @@ -61,8 +58,7 @@ def test_metapackage_build_string(testing_config, testing_workdir): main_metapackage.execute(args) test_path = glob( os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_config.host_subdir, "metapackage_test_build_string-1.0-frank*.tar.bz2", ) @@ -88,8 +84,7 @@ def test_metapackage_metadata(testing_config, testing_workdir): test_path = glob( os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_config.host_subdir, "metapackage_testing_metadata-1.0-0.tar.bz2", ) diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 7f385118cc..10ed9f803e 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -66,13 +66,14 @@ def test_render_without_channel_fails(tmp_path): ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" -def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): +def test_render_output_build_path( + testing_workdir, testing_config, testing_metadata, capfd, caplog +): api.output_yaml(testing_metadata, "meta.yaml") args = ["--output", testing_workdir] main_render.execute(args) test_path = os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_metadata.config.host_subdir, "test_render_output_build_path-1.0-1.tar.bz2", ) @@ -82,15 +83,14 @@ def test_render_output_build_path(testing_workdir, testing_metadata, capfd, capl def test_render_output_build_path_and_file( - testing_workdir, testing_metadata, capfd, caplog + testing_workdir, testing_config, testing_metadata, capfd, caplog ): api.output_yaml(testing_metadata, "meta.yaml") rendered_filename = "out.yaml" args = ["--output", "--file", rendered_filename, testing_workdir] main_render.execute(args) test_path = os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_metadata.config.host_subdir, "test_render_output_build_path_and_file-1.0-1.tar.bz2", ) diff --git a/tests/conftest.py b/tests/conftest.py index 3aca5b4bc7..9da98ee1d4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,6 +14,7 @@ import conda_build.config from conda_build.config import ( 
Config, + _get_or_merge_config, _src_cache_root_default, conda_pkg_format_default, enable_static_default, @@ -21,7 +22,6 @@ error_overlinking_default, exit_on_verify_error_default, filename_hashing_default, - get_or_merge_config, ignore_verify_codes_default, no_rewrite_stdout_env_default, noarch_python_build_age_default, @@ -81,13 +81,12 @@ def testing_config(testing_workdir): def boolify(v): return v == "true" - result = Config( + testing_config_kwargs = dict( croot=testing_workdir, anaconda_upload=False, verbose=True, activate=False, debug=False, - variant=None, test_run_post=False, # These bits ensure that default values are used instead of any # present in ~/.condarc @@ -102,6 +101,8 @@ def boolify(v): exit_on_verify_error=exit_on_verify_error_default, conda_pkg_format=conda_pkg_format_default, ) + result = Config(variant=None, **testing_config_kwargs) + result._testing_config_kwargs = testing_config_kwargs assert result.no_rewrite_stdout_env is False assert result._src_cache_root is None assert result.src_cache_root == testing_workdir @@ -121,11 +122,15 @@ def default_testing_config(testing_config, monkeypatch, request): return def get_or_merge_testing_config(config, variant=None, **kwargs): - return get_or_merge_config(config or testing_config, variant, **kwargs) + merged_kwargs = {} + if not config: + merged_kwargs.update(testing_config._testing_config_kwargs) + merged_kwargs.update(kwargs) + return _get_or_merge_config(config, variant, **merged_kwargs) monkeypatch.setattr( conda_build.config, - "get_or_merge_config", + "_get_or_merge_config", get_or_merge_testing_config, ) From 05cc56a7ea2a0fa6acbe882208ea35dad8add8f9 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Fri, 17 Nov 2023 19:06:16 +0100 Subject: [PATCH 217/366] CI adjustments for conda-libmamba-solver as default (#5059) Co-authored-by: Ken Odegard --- news/5059-ci-conda-libmamba-solver | 19 +++++++++ tests/cli/test_main_build.py | 64 +++++++++++++++--------------- tests/conftest.py | 27 +++++-------- tests/test_api_build.py | 4 ++ 4 files changed, 66 insertions(+), 48 deletions(-) create mode 100644 news/5059-ci-conda-libmamba-solver diff --git a/news/5059-ci-conda-libmamba-solver b/news/5059-ci-conda-libmamba-solver new file mode 100644 index 0000000000..daf2d919bf --- /dev/null +++ b/news/5059-ci-conda-libmamba-solver @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Mark Unicode tests as incompatible with `libmamba`. 
(#5059) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 3f91d42d8c..e1ccd90d8c 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -3,6 +3,7 @@ import os import re import sys +from pathlib import Path import pytest @@ -196,41 +197,40 @@ def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_confi main_build.execute(args) -def test_build_output_folder(testing_workdir, testing_metadata, capfd): +def test_build_output_folder(testing_workdir: str, testing_metadata): api.output_yaml(testing_metadata, "meta.yaml") - with TemporaryDirectory() as tmp: - out = os.path.join(tmp, "out") - args = [ - testing_workdir, - "--no-build-id", - "--croot", - tmp, - "--no-activate", - "--no-anaconda-upload", - "--output-folder", - out, - ] - output = main_build.execute(args)[0] - assert os.path.isfile( - os.path.join( - out, testing_metadata.config.host_subdir, os.path.basename(output) - ) - ) + out = Path(testing_workdir, "out") + out.mkdir(parents=True) -def test_build_source(testing_workdir): - with TemporaryDirectory() as tmp: - args = [ - os.path.join(metadata_dir, "_pyyaml_find_header"), - "--source", - "--no-build-id", - "--croot", - tmp, - "--no-activate", - "--no-anaconda-upload", - ] - main_build.execute(args) - assert os.path.isfile(os.path.join(tmp, "work", "setup.py")) + args = [ + testing_workdir, + "--no-build-id", + "--croot", + testing_workdir, + "--no-activate", + "--no-anaconda-upload", + "--output-folder", + str(out), + ] + output = main_build.execute(args)[0] + assert ( + out / testing_metadata.config.host_subdir / os.path.basename(output) + ).is_file() + + +def test_build_source(testing_workdir: str): + args = [ + os.path.join(metadata_dir, "_pyyaml_find_header"), + "--source", + "--no-build-id", + "--croot", + testing_workdir, + "--no-activate", + "--no-anaconda-upload", + ] + main_build.execute(args) + assert Path(testing_workdir, "work", "setup.py").is_file() @pytest.mark.serial diff --git a/tests/conftest.py b/tests/conftest.py index 9da98ee1d4..9bb2c27616 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ import pytest from conda.common.compat import on_mac, on_win +from pytest import MonkeyPatch import conda_build.config from conda_build.config import ( @@ -32,31 +33,25 @@ @pytest.fixture(scope="function") -def testing_workdir(tmpdir, request): +def testing_workdir(monkeypatch: MonkeyPatch, tmp_path: Path) -> Iterator[str]: """Create a workdir in a safe temporary folder; cd into dir above before test, cd out after :param tmpdir: py.test fixture, will be injected :param request: py.test fixture-related, will be injected (see pytest docs) """ + saved_path = Path.cwd() + monkeypatch.chdir(tmp_path) - saved_path = os.getcwd() - - tmpdir.chdir() # temporary folder for profiling output, if any - tmpdir.mkdir("prof") - - def return_to_saved_path(): - if os.path.isdir(os.path.join(saved_path, "prof")): - profdir = tmpdir.join("prof") - files = profdir.listdir("*.prof") if profdir.isdir() else [] - - for f in files: - copy_into(str(f), os.path.join(saved_path, "prof", f.basename)) - os.chdir(saved_path) + prof = tmp_path / "prof" + prof.mkdir(parents=True) - request.addfinalizer(return_to_saved_path) + yield str(tmp_path) - return str(tmpdir) + # if the original CWD has a prof folder, copy any new prof files into it + if (saved_path / "prof").is_dir() and prof.is_dir(): + for file in prof.glob("*.prof"): + copy_into(str(file), str(saved_path / "prof" / file.name)) 
@pytest.fixture(scope="function") diff --git a/tests/test_api_build.py b/tests/test_api_build.py index c3059f066d..7c379237a3 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -31,6 +31,7 @@ CondaError, LinkError, cc_conda_build, + context, reset_context, url_path, ) @@ -123,6 +124,9 @@ def test_recipe_builds( # ``source_setup_py_data_subdir`` reproduces the problem. if recipe.name == "source_setup_py_data_subdir": pytest.xfail("Issue related to #3754 on conda-build.") + elif recipe.name == "unicode_all_over" and context.solver == "libmamba": + pytest.xfail("Unicode package names not supported in libmamba.") + # These variables are defined solely for testing purposes, # so they can be checked within build scripts testing_config.activate = True From eb5ecc0368c0cdfd26356648402721e2d333389e Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Fri, 17 Nov 2023 21:18:37 +0100 Subject: [PATCH 218/366] Fix conda_index.index verbose DEBUG/INFO logging (#5066) Signed-off-by: Marcel Bargull Co-authored-by: Ken Odegard --- conda_build/index.py | 28 +++++++++++++++------------- news/5066-fix-conda_index-log-spam | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+), 13 deletions(-) create mode 100644 news/5066-fix-conda_index-log-spam diff --git a/conda_build/index.py b/conda_build/index.py index 690673f0c9..c80fd12a31 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -312,19 +312,21 @@ def _delegated_update_index( dir_path = parent_path subdirs = [dirname] - return _update_index( - dir_path, - check_md5=check_md5, - channel_name=channel_name, - patch_generator=patch_generator, - threads=threads, - verbose=verbose, - progress=progress, - subdirs=subdirs, - warn=warn, - current_index_versions=current_index_versions, - debug=debug, - ) + log_level = logging.DEBUG if debug else logging.INFO if verbose else logging.WARNING + with utils.LoggingContext(log_level): + return _update_index( + dir_path, + check_md5=check_md5, + channel_name=channel_name, + patch_generator=patch_generator, + threads=threads, + verbose=verbose, + progress=progress, + subdirs=subdirs, + warn=warn, + current_index_versions=current_index_versions, + debug=debug, + ) # Everything below is deprecated to maintain API/feature compatibility. diff --git a/news/5066-fix-conda_index-log-spam b/news/5066-fix-conda_index-log-spam new file mode 100644 index 0000000000..aceb93cc2a --- /dev/null +++ b/news/5066-fix-conda_index-log-spam @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix conda_index.index verbose DEBUG/INFO message logging. 
(#5066) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 3f1cecbf16de931ee3ba2fe1229b23b613c27d73 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Sat, 18 Nov 2023 08:09:40 -0500 Subject: [PATCH 219/366] Replace `black` with `ruff-format` (#5052) --- .pre-commit-config.yaml | 15 +--- conda_build/_load_setup_py_data.py | 6 +- conda_build/api.py | 13 ++-- conda_build/build.py | 26 ++++--- conda_build/cli/main_build.py | 2 +- conda_build/cli/main_develop.py | 2 +- conda_build/cli/main_render.py | 2 +- conda_build/config.py | 4 +- conda_build/exceptions.py | 3 +- conda_build/index.py | 3 +- conda_build/jinja_context.py | 14 +--- conda_build/metadata.py | 5 +- conda_build/noarch_python.py | 6 +- conda_build/os_utils/liefldd.py | 56 +++++++------- conda_build/os_utils/pyldd.py | 10 +-- conda_build/plugin.py | 10 ++- conda_build/post.py | 9 ++- conda_build/skeletons/cran.py | 10 +-- conda_build/skeletons/rpm.py | 5 +- conda_build/tarcheck.py | 5 +- conda_build/utils.py | 5 +- conda_build/variants.py | 3 +- news/5052-ruff-format | 19 +++++ pyproject.toml | 7 +- tests/conftest.py | 3 +- tests/test_api_build.py | 113 ++++++++++------------------- tests/test_api_render.py | 5 +- tests/test_utils.py | 4 +- 28 files changed, 163 insertions(+), 202 deletions(-) create mode 100644 news/5052-ruff-format diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3c4ed05246..3afdd26a6d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -48,28 +48,19 @@ repos: files: \.py$ args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 - hooks: - # upgrade standard Python codes - - id: pyupgrade - args: [--py38-plus] - - repo: https://github.com/psf/black - rev: 23.11.0 - hooks: - # auto format Python codes - - id: black - repo: https://github.com/asottile/blacken-docs rev: 1.16.0 hooks: # auto format Python codes within docstrings - id: blacken-docs - additional_dependencies: [black] - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.1.5 hooks: + # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff args: [--fix] + # compatible replacement for black + - id: ruff-format - repo: meta # see https://pre-commit.com/#meta-hooks hooks: diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py index 71b96dbaa7..efeb14c69d 100644 --- a/conda_build/_load_setup_py_data.py +++ b/conda_build/_load_setup_py_data.py @@ -140,18 +140,18 @@ def setup(**kw): parser.add_argument("setup_file", help="path or filename of setup.py file") parser.add_argument( "--from-recipe-dir", - help=("look for setup.py file in recipe " "dir (as opposed to work dir)"), + help="look for setup.py file in recipe dir (as opposed to work dir)", default=False, action="store_true", ) parser.add_argument( "--recipe-dir", - help=("(optional) path to recipe dir, where " "setup.py should be found"), + help="(optional) path to recipe dir, where setup.py should be found", ) parser.add_argument( "--permit-undefined-jinja", - help=("look for setup.py file in recipe " "dir (as opposed to work dir)"), + help="look for setup.py file in recipe dir (as opposed to work dir)", default=False, action="store_true", ) diff --git a/conda_build/api.py b/conda_build/api.py index 727240aece..e310e11b6b 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -415,7 +415,7 @@ def convert( ) elif package_file.endswith(".whl"): raise RuntimeError( - "Conversion from wheel packages is not " "implemented yet, stay tuned." + "Conversion from wheel packages is not implemented yet, stay tuned." ) else: raise RuntimeError("cannot convert: %s" % package_file) @@ -656,17 +656,18 @@ def debug( ] if len(matched_outputs) > 1: raise ValueError( - "Specified --output-id matches more than one output ({}). Please refine your output id so that only " - "a single output is found.".format(matched_outputs) + f"Specified --output-id matches more than one output ({matched_outputs}). " + "Please refine your output id so that only a single output is found." ) elif not matched_outputs: raise ValueError( - f"Specified --output-id did not match any outputs. Available outputs are: {outputs} Please check it and try again" + f"Specified --output-id did not match any outputs. Available outputs are: {outputs} " + "Please check it and try again" ) if len(matched_outputs) > 1 and not path_is_build_dir: raise ValueError( - "More than one output found for this recipe ({}). Please use the --output-id argument to filter down " - "to a single output.".format(outputs) + f"More than one output found for this recipe ({outputs}). " + "Please use the --output-id argument to filter down to a single output." ) else: matched_outputs = outputs diff --git a/conda_build/build.py b/conda_build/build.py index 134730138a..47600ffff4 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -498,7 +498,12 @@ def regex_files_py( match_records[file] = {"type": type, "submatches": []} # else: # if match_records[file]['absolute_offset'] != absolute_offset: - # print("Dropping match.pos() of {}, neq {}".format(absolute_offset, match_records[file]['absolute_offset'])) + # print( + # "Dropping match.pos() of {}, neq {}".format( + # absolute_offset, + # match_records[file]['absolute_offset'], + # ) + # ) g_index = len(match.groups()) if g_index == 0: # Complete match. 
@@ -636,8 +641,9 @@ def have_regex_files( return match_records import copy - match_records_rg, match_records_re = copy.deepcopy(match_records), copy.deepcopy( - match_records + match_records_rg, match_records_re = ( + copy.deepcopy(match_records), + copy.deepcopy(match_records), ) if not isinstance(regex_re, (bytes, bytearray)): regex_re = regex_re.encode("utf-8") @@ -2254,7 +2260,9 @@ def _write_sh_activation_text(file_handle, m): if value: if not done_necessary_env: # file_handle.write( - # 'export CCACHE_SLOPPINESS="pch_defines,time_macros${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n') + # 'export CCACHE_SLOPPINESS="pch_defines,time_macros' + # '${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n' + # ) # file_handle.write('export CCACHE_CPP2=true\n') done_necessary_env = True if method == "symlinks": @@ -2263,16 +2271,12 @@ def _write_sh_activation_text(file_handle, m): file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") file_handle.write('if [ -n "$CC" ]; then\n') file_handle.write( - " [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n".format( - ccache=ccache - ) + f" [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n" ) file_handle.write("fi\n") file_handle.write('if [ -n "$CXX" ]; then\n') file_handle.write( - " [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n".format( - ccache=ccache - ) + f" [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n" ) file_handle.write("fi\n") file_handle.write("popd\n") @@ -4084,7 +4088,7 @@ def handle_anaconda_upload(paths, config): prompter = "$ " if not upload or anaconda is None: no_upload_message = ( - "# If you want to upload package(s) to anaconda.org later, type:\n" "\n" + "# If you want to upload package(s) to anaconda.org later, type:\n\n" ) no_upload_message += ( "\n" diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 73da5bdec6..25bca5a6fd 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -441,7 +441,7 @@ def parse_args(args): ) p.add_argument( "--stats-file", - help=("File path to save build statistics to. Stats are " "in JSON format"), + help="File path to save build statistics to. Stats are in JSON format", ) p.add_argument( "--extra-deps", diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index a7a202e5ff..46c8384826 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -19,7 +19,7 @@ def parse_args(args): Install a Python package in 'development mode'. -This works by creating a conda.pth file in site-packages.""" +This works by creating a conda.pth file in site-packages.""", # TODO: Use setup.py to determine any entry-points to install. 
) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index a563e87c1b..4647d43996 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -66,7 +66,7 @@ def get_render_parser(): p.add_argument( "--output", action="store_true", - help="Output the conda package filename which would have been " "created", + help="Output the conda package filename which would have been created", ) p.add_argument( "--python", diff --git a/conda_build/config.py b/conda_build/config.py index 09f3cbcb67..8c598fdee0 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -868,11 +868,11 @@ def clean(self, remove_folders=True): rm_rf(os.path.join(self.build_folder, "prefix_files")) else: print( - "\nLeaving build/test directories:" "\n Work:\n", + "\nLeaving build/test directories:\n Work:\n", self.work_dir, "\n Test:\n", self.test_dir, - "\nLeaving build/test environments:" "\n Test:\nsource activate ", + "\nLeaving build/test environments:\n Test:\nsource activate ", self.test_prefix, "\n Build:\nsource activate ", self.build_prefix, diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index d8ed36ff06..857141fb4f 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -88,8 +88,7 @@ def __init__( self.packages.append(pkg) if not self.packages: raise RuntimeError( - "failed to parse packages from exception:" - " {}".format(str(conda_exception)) + f"failed to parse packages from exception: {conda_exception}" ) def __str__(self): diff --git a/conda_build/index.py b/conda_build/index.py index c80fd12a31..edf0bdedba 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -362,7 +362,8 @@ def update_index( if dirname in utils.DEFAULT_SUBDIRS: if warn: log.warn( - "The update_index function has changed to index all subdirs at once. You're pointing it at a single subdir. " + "The update_index function has changed to index all subdirs at once. " + "You're pointing it at a single subdir. " "Please update your code to point it at the channel root, rather than a subdir." 
) return update_index( diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 9d507e43a6..eaadc3a100 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -97,18 +97,8 @@ def __init__( __call__ ) = ( __getitem__ - ) = ( - __lt__ - ) = ( - __le__ - ) = ( - __gt__ - ) = ( - __ge__ - ) = ( - __complex__ - ) = __pow__ = __rpow__ = lambda self, *args, **kwargs: self._return_undefined( - self._undefined_name + ) = __lt__ = __le__ = __gt__ = __ge__ = __complex__ = __pow__ = __rpow__ = ( + lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) ) # Accessing an attribute of an Undefined variable diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 7ad51c7880..71021a1d4f 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -321,7 +321,7 @@ def ensure_valid_fields(meta): pin_depends = meta.get("build", {}).get("pin_depends", "") if pin_depends and pin_depends not in ("", "record", "strict"): raise RuntimeError( - "build/pin_depends must be 'record' or 'strict' - " "not '%s'" % pin_depends + f"build/pin_depends must be 'record' or 'strict' - not '{pin_depends}'" ) @@ -1470,7 +1470,8 @@ def get_depends_top_and_out(self, typ): meta_requirements = ensure_list(self.get_value("requirements/" + typ, []))[:] req_names = {req.split()[0] for req in meta_requirements if req} extra_reqs = [] - # this is for the edge case of requirements for top-level being also partially defined in a similarly named output + # this is for the edge case of requirements for top-level being + # partially defined in a similarly named output if not self.is_output: matching_output = [ out diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index 380367d43d..30efb3d45d 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -142,11 +142,7 @@ def transform(m, files, prefix): """\ @echo off "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" - """.replace( - "\n", "\r\n" - ).encode( - "utf-8" - ) + """.replace("\n", "\r\n").encode("utf-8") ) d = populate_files(m, files, prefix) diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 26a768a4f6..9e5c9836bb 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -588,8 +588,14 @@ def inspect_linkages_lief( """ if binary.format == lief.EXE_FORMATS.PE: import random - path_fixed = os.path.dirname(path_fixed) + os.sep + \ - ''.join(random.choice((str.upper, str.lower))(c) for c in os.path.basename(path_fixed)) + path_fixed = ( + os.path.dirname(path_fixed) + + os.sep + + ''.join( + random.choice((str.upper, str.lower))(c) + for c in os.path.basename(path_fixed) + ) + ) if random.getrandbits(1): path_fixed = path_fixed.replace(os.sep + 'lib' + os.sep, os.sep + 'Lib' + os.sep) else: @@ -650,16 +656,11 @@ def get_linkages( ) if debug and result_pyldd and set(result_lief) != set(result_pyldd): print( - "WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)".format( - filename, - resolve_filenames, - recurse, - sysroot, - envroot, - arch, - result_lief, - result_pyldd, - ) + f"WARNING: Disagreement in get_linkages({filename=}, " + f"{resolve_filenames=}, {recurse=}, {sysroot=}, {envroot=}, {arch=}):\n" + f" lief: {result_lief}\n" + f"pyldd: {result_pyldd}\n" + " (using lief)" ) return result_lief @@ -689,7 +690,7 @@ def is_archive(file): def get_static_lib_exports(file): - # file = 
'/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a' + # file = '/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a' # noqa: E501 # References: # https://github.com/bminor/binutils-gdb/tree/master/bfd/archive.c # https://en.wikipedia.org/wiki/Ar_(Unix) @@ -737,7 +738,8 @@ def _parse_ar_hdr(content, index): typ = "NORMAL" if b"/" in name: name = name[: name.find(b"/")] - # if debug_static_archives: print("index={}, name={}, ending={}, size={}, type={}".format(index, name, ending, size, typ)) + # if debug_static_archives: + # print(f"index={index}, name={name}, ending={ending}, size={size}, type={typ}") index += header_sz + name_len return index, name, name_len, size, typ @@ -813,9 +815,7 @@ def _parse_ar_hdr(content, index): (size_string_table,) = struct.unpack( "<" + toc_integers_fmt, content[ - index - + toc_integers_sz - + (nsymbols * ranlib_struct_sz) : index + index + toc_integers_sz + (nsymbols * ranlib_struct_sz) : index + 4 + 4 + (nsymbols * ranlib_struct_sz) @@ -827,8 +827,7 @@ def _parse_ar_hdr(content, index): ran_off, ran_strx = struct.unpack( "<" + ranlib_struct_field_fmt + ranlib_struct_field_fmt, content[ - ranlib_index - + (i * ranlib_struct_sz) : ranlib_index + ranlib_index + (i * ranlib_struct_sz) : ranlib_index + ((i + 1) * ranlib_struct_sz) ], ) @@ -845,8 +844,7 @@ def _parse_ar_hdr(content, index): ) ) string_table = content[ - ranlib_index - + (nsymbols * ranlib_struct_sz) : ranlib_index + ranlib_index + (nsymbols * ranlib_struct_sz) : ranlib_index + (nsymbols * ranlib_struct_sz) + size_string_table ] @@ -958,7 +956,7 @@ def get_static_lib_exports_dumpbin(filename): > 020 00000000 UNDEF notype () External | malloc > vs > 004 00000010 SECT1 notype () External | _ZN3gnu11autosprintfC1EPKcz - """ + """ # noqa: E501 dumpbin_exe = find_executable("dumpbin") if not dumpbin_exe: """ @@ -1077,19 +1075,15 @@ def get_exports(filename, arch="native", enable_static=False): print(f"errors: {error_count} (-{len(diff1)}, +{len(diff2)})") if debug_static_archives: print( - "WARNING :: Disagreement regarding static lib exports in {} between nm (nsyms={}) and lielfldd (nsyms={}):".format( - filename, len(exports), len(exports2) - ) + "WARNING :: Disagreement regarding static lib exports in " + f"{filename} between nm (nsyms={len(exports)}) and " + "lielfldd (nsyms={len(exports2)}):" ) print( - "** nm.diff(liefldd) [MISSING SYMBOLS] **\n{}".format( - "\n".join(diff1) - ) + "\n".join(("** nm.diff(liefldd) [MISSING SYMBOLS] **", *diff1)) ) print( - "** liefldd.diff(nm) [ EXTRA SYMBOLS] **\n{}".format( - "\n".join(diff2) - ) + "\n".join(("** liefldd.diff(nm) [ EXTRA SYMBOLS] **", *diff2)) ) if not result: diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 90679409c9..427622dbb5 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -709,13 +709,9 @@ def __init__(self, file): get_logger(__name__).warning(f"file.tell()={loc} != ehsize={self.ehsize}") def __str__(self): - return "bitness {}, endian {}, version {}, type {}, machine {}, entry {}".format( # noqa - self.bitness, - self.endian, - self.version, - self.type, - hex(self.machine), - hex(self.entry), + return ( + f"bitness {self.bitness}, endian {self.endian}, version {self.version}, " + f"type {self.type}, machine {hex(self.machine)}, entry {hex(self.entry)}" ) diff --git a/conda_build/plugin.py b/conda_build/plugin.py index eddb85fe66..6ca5c34cc1 
100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -78,12 +78,18 @@ def conda_subcommands(): ) yield conda.plugins.CondaSubcommand( name="develop", - summary="Install a Python package in 'development mode'. Similar to `pip install --editable`.", + summary=( + "Install a Python package in 'development mode'. " + "Similar to `pip install --editable`." + ), action=develop, ) yield conda.plugins.CondaSubcommand( name="index", - summary="Update package index metadata files. Pending deprecation, use https://github.com/conda/conda-index instead.", + summary=( + "Update package index metadata files. Pending deprecation, " + "use https://github.com/conda/conda-index instead." + ), action=index, ) yield conda.plugins.CondaSubcommand( diff --git a/conda_build/post.py b/conda_build/post.py index 93c761d3d0..76fe82ae96 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -522,7 +522,14 @@ def check_binary(binary, expected=None): print("trying {}".format(binary)) # import pdb; pdb.set_trace() try: - txt = check_output([sys.executable, '-c', 'from ctypes import cdll; cdll.LoadLibrary("' + binary + '")'], timeout=2) + txt = check_output( + [ + sys.executable, + '-c', + 'from ctypes import cdll; cdll.LoadLibrary("' + binary + '")' + ], + timeout=2, + ) # mydll = cdll.LoadLibrary(binary) except Exception as e: print(e) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index cd093e6d9e..d942013d65 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -201,7 +201,7 @@ popd fi fi -""" +""" # noqa: E501 CRAN_BUILD_SH_BINARY = """\ #!/bin/bash @@ -392,7 +392,7 @@ def add_parser(repos): "--use-noarch-generic", action="store_true", dest="use_noarch_generic", - help=("Mark packages that do not need compilation as `noarch: generic`"), + help="Mark packages that do not need compilation as `noarch: generic`", ) cran.add_argument( "--use-rtools-win", @@ -1306,10 +1306,8 @@ def skeletonize( ) if not is_github_url: available_details["archive_keys"] = ( - "{url_key}{sel}" - " {cranurl}\n" - " {hash_entry}{sel}".format(**available_details) - ) + "{url_key}{sel} {cranurl}\n {hash_entry}{sel}" + ).format(**available_details) # Extract the DESCRIPTION data from the source if cran_package is None: diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index 409e3aad4b..76f2e5ea86 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -473,8 +473,9 @@ def remap_license(rpm_license): } l_rpm_license = rpm_license.lower() if l_rpm_license in mapping: - license, family = mapping[l_rpm_license], guess_license_family( - mapping[l_rpm_license] + license, family = ( + mapping[l_rpm_license], + guess_license_family(mapping[l_rpm_license]), ) else: license, family = rpm_license, guess_license_family(rpm_license) diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 7884066014..3fc363986e 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -90,9 +90,8 @@ def correct_subdir(self): self.config.host_subdir, "noarch", self.config.target_subdir, - ], ( - "Inconsistent subdir in package - index.json expecting {}," - " got {}".format(self.config.host_subdir, info["subdir"]) + ], "Inconsistent subdir in package - index.json expecting {}, got {}".format( + self.config.host_subdir, info["subdir"] ) diff --git a/conda_build/utils.py b/conda_build/utils.py index 9f41400990..9fdfc2ad37 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -170,7 +170,6 @@ def directory_size_slow(path): def 
directory_size(path): - """ """ try: if on_win: command = 'dir /s "{}"' # Windows path can have spaces @@ -723,9 +722,7 @@ def merge_tree( existing = [f for f in new_files if isfile(f)] if existing and not clobber: - raise OSError( - "Can't merge {} into {}: file exists: " "{}".format(src, dst, existing[0]) - ) + raise OSError(f"Can't merge {src} into {dst}: file exists: {existing[0]}") locks = [] if locking: diff --git a/conda_build/variants.py b/conda_build/variants.py index 319ace7fea..7e9dfc7ff0 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -164,7 +164,8 @@ def validate_spec(src, spec): " zip_key entry {} in group {} is a duplicate, keys can only occur " "in one group".format(k, zg) # include error if key has already been seen, otherwise add to unique keys - if k in unique else unique.add(k) + if k in unique + else unique.add(k) for zg in zip_keys for k in zg ) diff --git a/news/5052-ruff-format b/news/5052-ruff-format new file mode 100644 index 0000000000..ae88f823e9 --- /dev/null +++ b/news/5052-ruff-format @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Replace `black` with `ruff format` in pre-commit. (#5052) diff --git a/pyproject.toml b/pyproject.toml index 39cd67a674..6125bce2ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,9 +81,6 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] [tool.hatch.build.hooks.vcs] version-file = "conda_build/__version__.py" -[tool.black] -target-version = ['py38', 'py39', 'py310', 'py311'] - [tool.coverage.run] # store relative paths in coverage information relative_files = true @@ -95,7 +92,7 @@ skip_covered = true omit = ["conda_build/skeletons/_example_skeleton.py"] [tool.ruff] -line-length = 180 +pycodestyle = {max-line-length = 120} # E, W = pycodestyle errors and warnings # F = pyflakes # I = isort @@ -104,7 +101,7 @@ select = ["E", "W", "F", "I"] # E722 do not use bare 'except' # E731 do not assign a lambda expression, use a def ignore = ["E402", "E722", "E731"] -# Use PEP 257-style docstrings. +target-version = "py38" pydocstyle = {convention = "pep257"} [tool.pytest.ini_options] diff --git a/tests/conftest.py b/tests/conftest.py index 9bb2c27616..a0e683da8b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -67,7 +67,8 @@ def testing_homedir() -> Iterator[Path]: os.chdir(saved) except OSError: pytest.xfail( - f"failed to create temporary directory () in {'%HOME%' if on_win else '${HOME}'} (tmpfs inappropriate for xattrs)" + f"failed to create temporary directory () in {'%HOME%' if on_win else '${HOME}'} " + "(tmpfs inappropriate for xattrs)" ) diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 7c379237a3..0a61a414c3 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -402,9 +402,7 @@ def dummy_executable(folder, exename): echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 - """.format( - exename - ) + """.format(exename) ) if sys.platform != "win32": import stat @@ -738,77 +736,41 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As # things stand, my _b_env folder for this test contains more than 80 characters. 
- requirements = ( - "requirements", - OrderedDict( - [ - ( - "build", - [ - "git # [False]", - "m2-git # [win]", - "m2-filesystem # [win]", - ], - ) - ] - ), - ) recipe_dir = os.path.join(testing_workdir, "recipe") if not os.path.exists(recipe_dir): os.makedirs(recipe_dir) filename = os.path.join(testing_workdir, "recipe", "meta.yaml") - data = OrderedDict( - [ - ( - "package", - OrderedDict( - [ - ("name", "relative_submodules"), - ("version", "{{ GIT_DESCRIBE_TAG }}"), - ] - ), - ), - ("source", OrderedDict([("git_url", toplevel), ("git_tag", str(tag))])), - requirements, - ( - "build", - OrderedDict( - [ - ( - "script", - [ - "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > " - "%PREFIX%\\summaries.txt # [win]", - "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > " - "$PREFIX/summaries.txt # [not win]", - ], - ) - ] - ), - ), - ( - "test", - OrderedDict( - [ - ( - "commands", - [ - "echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]".format( - tag, tag - ), - "fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]", - "echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]".format( - tag, tag - ), - "diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]", - ], - ) - ] - ), - ), - ] - ) + data = { + "package": { + "name": "relative_submodules", + "version": "{{ GIT_DESCRIBE_TAG }}", + }, + "source": {"git_url": toplevel, "git_tag": str(tag)}, + "requirements": { + "build": [ + "git # [False]", + "m2-git # [win]", + "m2-filesystem # [win]", + ], + }, + "build": { + "script": [ + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > " + "%PREFIX%\\summaries.txt # [win]", + "git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > " + "$PREFIX/summaries.txt # [not win]", + ], + }, + "test": { + "commands": [ + f"echo absolute{tag}relative{tag} > %PREFIX%\\expected_summaries.txt # [win]", + "fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]", + f"echo absolute{tag}relative{tag} > $PREFIX/expected_summaries.txt # [not win]", + "diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]", + ], + }, + } with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) @@ -1431,7 +1393,7 @@ def test_recursion_layers(testing_config): @pytest.mark.sanity @pytest.mark.skipif( sys.platform != "win32", - reason=("spaces break openssl prefix " "replacement on *nix"), + reason="spaces break openssl prefix replacement on *nix", ) def test_croot_with_spaces(testing_metadata, testing_workdir): testing_metadata.config.croot = os.path.join(testing_workdir, "space path") @@ -1618,9 +1580,10 @@ def test_copy_test_source_files(testing_config): found = True break if found: - assert ( - copy - ), "'info/test/test_files_folder/text.txt' found in tar.bz2 but not copying test source files" + assert copy, ( + "'info/test/test_files_folder/text.txt' found in tar.bz2 " + "but not copying test source files" + ) if copy: api.test(outputs[0]) else: @@ -1628,8 +1591,8 @@ def test_copy_test_source_files(testing_config): api.test(outputs[0]) else: assert not copy, ( - "'info/test/test_files_folder/text.txt' not found in tar.bz2 but copying test source files. File list: %r" - % files + "'info/test/test_files_folder/text.txt' not found in tar.bz2 " + f"but copying test source files. 
File list: {files!r}" ) diff --git a/tests/test_api_render.py b/tests/test_api_render.py index a68f69135e..1451fbbbe0 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -99,8 +99,9 @@ def test_get_output_file_path_jinja2(testing_config): assert build_path == os.path.join( testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-" - "py{}{}_0_g262d444.tar.bz2".format(python, _hash), + "conda-build-test-source-git-jinja2-1.20.2-py{}{}_0_g262d444.tar.bz2".format( + python, _hash + ), ) diff --git a/tests/test_utils.py b/tests/test_utils.py index b5536cdf6d..baa5bf5a34 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -293,9 +293,7 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): root: level: DEBUG handlers: [console] -""".format( - __name__ - ) +""".format(__name__) ) cc_conda_build = mocker.patch.object(utils, "cc_conda_build") cc_conda_build.get.return_value = test_file From b28d2856b44add94bc46895b06b6675519fa5b14 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 20 Nov 2023 16:57:32 +0100 Subject: [PATCH 220/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5077)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/lock.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index a1cf48bdfd..7fd6b91347 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -18,7 +18,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v4 + - uses: dessant/lock-threads@v5 with: # Number of days of inactivity before a closed issue is locked issue-inactive-days: 365 @@ -42,5 +42,5 @@ jobs: # Reason for locking a pull request, value must be one of resolved, off-topic, too heated, spam or '' pr-lock-reason: resolved - # Limit locking to only issues or pull requests, value must be one of issues, prs or '' - process-only: '' + # Limit locking to issues, pull requests or discussions, value must be a comma separated list of issues, prs, discussions or '' + process-only: issues, prs From 9eaa9fcc3daaa74bc766c817d2b6d95a12a91e6d Mon Sep 17 00:00:00 2001 From: Travis Hathaway Date: Mon, 20 Nov 2023 17:03:53 +0100 Subject: [PATCH 221/366] Update conda skeleton documentation to add link to Grayskull (#5027) Adding a little admonition to let users know about Grayskull for building PyPI packages. --- docs/source/user-guide/tutorials/build-pkgs-skeleton.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst index 5953107423..90c8d8bfe1 100644 --- a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst +++ b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst @@ -7,6 +7,11 @@ Building conda packages with conda skeleton :local: :depth: 1 +.. tip:: + We recommend `Grayskull `_, a newer alternative to conda-skeleton, + to generate conda recipes for PyPI packages. Please check out their project page on GitHub + for more information. 
+ Overview ======== From 67cc72efcd6155a0d4c5fec1f966bef45d88f48c Mon Sep 17 00:00:00 2001 From: jakirkham Date: Mon, 20 Nov 2023 08:07:04 -0800 Subject: [PATCH 222/366] [DOC] Clarify `PREFIX` usage in environment variables (#5065) * Clarify `LIBRARY_*`, `SCRIPT`, & `LD_RUN_PATH` use `PREFIX` Saying `` now implies `BUILD_PREFIX`, which isn't the case. AFAIK these have always been constructed based on `PREFIX`, which is the host environment. Guessing this is just leftover verbiage that needs a refresh. * Add `BUILD_PREFIX` to environment variables --- .../user-guide/environment-variables.rst | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/docs/source/user-guide/environment-variables.rst b/docs/source/user-guide/environment-variables.rst index 98f99fe15d..d925040c17 100644 --- a/docs/source/user-guide/environment-variables.rst +++ b/docs/source/user-guide/environment-variables.rst @@ -53,6 +53,8 @@ inherited from the shell environment in which you invoke environment variable and defaults to the architecture the interpreter running conda was compiled with. + * - BUILD_PREFIX + - Build prefix where command line tools are installed. * - CMAKE_GENERATOR - The CMake generator string for the current build environment. On Linux systems, this is always @@ -92,7 +94,7 @@ inherited from the shell environment in which you invoke - Inherited from your shell environment and augmented with ``$PREFIX/bin``. * - PREFIX - - Build prefix to which the build script should install. + - Host prefix to which the build script should install. * - PKG_BUILDNUM - Build number of the package being built. * - PKG_NAME @@ -110,11 +112,11 @@ inherited from the shell environment in which you invoke is installed only in the host prefix when it is listed as a host requirement. * - PY3K - - ``1`` when Python 3 is installed in the build prefix, + - ``1`` when Python 3 is installed in the host prefix, otherwise ``0``. * - R - - Path to the R executable in the build prefix. R is only - installed in the build prefix when it is listed as a build + - Path to the R executable in the host prefix. R is only + installed in the host prefix when it is listed as a build requirement. * - RECIPE_DIR - Directory of the recipe. @@ -132,7 +134,7 @@ inherited from the shell environment in which you invoke Unix-style packages on Windows, which are usually statically linked to executables, are built in a special ``Library`` -directory under the build prefix. The environment variables +directory under the host prefix. The environment variables listed in the following table are defined only on Windows. .. list-table:: @@ -142,15 +144,15 @@ listed in the following table are defined only on Windows. - Same as PREFIX, but as a Unix-style path, such as ``/cygdrive/c/path/to/prefix``. * - LIBRARY_BIN - - ``\Library\bin``. + - ``%PREFIX%\Library\bin``. * - LIBRARY_INC - - ``\Library\include``. + - ``%PREFIX%\Library\include``. * - LIBRARY_LIB - - ``\Library\lib``. + - ``%PREFIX%\Library\lib``. * - LIBRARY_PREFIX - - ``\Library``. + - ``%PREFIX%\Library``. * - SCRIPTS - - ``\Scripts``. + - ``%PREFIX%\Scripts``. * - VS_MAJOR - The major version number of the Visual Studio version activated within the build, such as ``9``. @@ -196,7 +198,7 @@ defined only on Linux. :widths: 20 80 * - LD_RUN_PATH - - ``/lib``. + - ``$PREFIX/lib``. .. 
_git-env: From bc46f28546d7db47177d10ab083337bf4f50f4c2 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Mon, 20 Nov 2023 17:34:04 +0100 Subject: [PATCH 223/366] Tests: Fix get_or_merge_testing_config for cli tests (#5076) * Tests: Fix get_or_merge_testing_config cli tests cli.main_build.execute (et al.) call the function with **args.__dict__ with args having default values (e.g., croot=None) via parse_args. Signed-off-by: Marcel Bargull * Fix some concurrency issues in tests Signed-off-by: Marcel Bargull * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Add comment for test concurrency issue workaround Signed-off-by: Marcel Bargull --------- Signed-off-by: Marcel Bargull Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- tests/cli/test_main_build.py | 6 ++--- tests/conftest.py | 14 ++++++++--- .../metadata/entry_points/meta.yaml | 4 +++ tests/test_api_build.py | 25 +++++++++++-------- tests/test_api_build_conda_v2.py | 13 +++++++--- tests/test_api_convert.py | 7 ++++-- 6 files changed, 45 insertions(+), 24 deletions(-) diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index e1ccd90d8c..59a080eace 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause import os import re -import sys from pathlib import Path import pytest @@ -104,8 +103,7 @@ def test_build_output_build_path( args = ["--output", testing_workdir] main_build.execute(args) test_path = os.path.join( - sys.prefix, - "conda-bld", + testing_config.croot, testing_config.host_subdir, "test_build_output_build_path-1.0-1.tar.bz2", ) @@ -125,7 +123,7 @@ def test_build_output_build_path_multiple_recipes( main_build.execute(args) test_path = lambda pkg: os.path.join( - sys.prefix, "conda-bld", testing_config.host_subdir, pkg + testing_config.croot, testing_config.host_subdir, pkg ) test_paths = [ test_path("test_build_output_build_path_multiple_recipes-1.0-1.tar.bz2"), diff --git a/tests/conftest.py b/tests/conftest.py index a0e683da8b..f347317d90 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -118,11 +118,17 @@ def default_testing_config(testing_config, monkeypatch, request): return def get_or_merge_testing_config(config, variant=None, **kwargs): - merged_kwargs = {} if not config: - merged_kwargs.update(testing_config._testing_config_kwargs) - merged_kwargs.update(kwargs) - return _get_or_merge_config(config, variant, **merged_kwargs) + # If no existing config, override kwargs that are None with testing config defaults. + # (E.g., "croot" is None if called via "(..., *args.__dict__)" in cli.main_build.) + kwargs.update( + { + key: value + for key, value in testing_config._testing_config_kwargs.items() + if kwargs.get(key) is None + } + ) + return _get_or_merge_config(config, variant, **kwargs) monkeypatch.setattr( conda_build.config, diff --git a/tests/test-recipes/metadata/entry_points/meta.yaml b/tests/test-recipes/metadata/entry_points/meta.yaml index 217fcaa30e..ac07a8cb4b 100644 --- a/tests/test-recipes/metadata/entry_points/meta.yaml +++ b/tests/test-recipes/metadata/entry_points/meta.yaml @@ -20,3 +20,7 @@ requirements: - setuptools run: - python + +# Ensure we get different build strings for concurrently tested packages. 
+extra: + dummy: '{{ pytest_name is defined }}' diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 0a61a414c3..ff3e431ff3 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -1655,14 +1655,17 @@ def test_provides_features_metadata(testing_config): assert index["provides_features"] == {"test2": "also_ok"} -# using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition -# https://github.com/conda/conda-build/issues/4708 -@pytest.mark.serial -def test_overlinking_detection(testing_config, variants_conda_build_sysroot): +def test_overlinking_detection( + testing_config, testing_workdir, variants_conda_build_sysroot +): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, "_overlinking_detection") + recipe = os.path.join(testing_workdir, "recipe") + copy_into( + os.path.join(metadata_dir, "_overlinking_detection"), + recipe, + ) dest_sh = os.path.join(recipe, "build.sh") dest_bat = os.path.join(recipe, "bld.bat") copy_into( @@ -1684,17 +1687,17 @@ def test_overlinking_detection(testing_config, variants_conda_build_sysroot): rm_rf(dest_bat) -# using different MACOSX_DEPLOYMENT_TARGET in parallel causes some SDK race condition -# https://github.com/conda/conda-build/issues/4708 -@pytest.mark.serial -@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_overlinking_detection_ignore_patterns( - testing_config, variants_conda_build_sysroot + testing_config, testing_workdir, variants_conda_build_sysroot ): testing_config.activate = True testing_config.error_overlinking = True testing_config.verify = False - recipe = os.path.join(metadata_dir, "_overlinking_detection_ignore_patterns") + recipe = os.path.join(testing_workdir, "recipe") + copy_into( + os.path.join(metadata_dir, "_overlinking_detection_ignore_patterns"), + recipe, + ) dest_sh = os.path.join(recipe, "build.sh") dest_bat = os.path.join(recipe, "bld.bat") copy_into( diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 25e7915848..4c0c09b9ac 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -10,7 +10,9 @@ @pytest.mark.parametrize("pkg_format,pkg_ext", [(None, ".tar.bz2"), ("2", ".conda")]) -def test_conda_pkg_format(pkg_format, pkg_ext, testing_config, monkeypatch, capfd): +def test_conda_pkg_format( + pkg_format, pkg_ext, testing_config, monkeypatch, capfd, request +): """Conda package format "2" builds .conda packages.""" # Build the "entry_points" recipe, which contains a test pass for package. @@ -23,10 +25,15 @@ def test_conda_pkg_format(pkg_format, pkg_ext, testing_config, monkeypatch, capf monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") - (output_file,) = api.get_output_file_paths(recipe, config=testing_config) + # Recipe "entry_points" is used in other test -> add test-specific variant + # (change build hash) to avoid clashes in package cache from other tests. 
+ variants = {"pytest_name": [request.node.name]} + (output_file,) = api.get_output_file_paths( + recipe, config=testing_config, variants=variants + ) assert output_file.endswith(pkg_ext) - api.build(recipe, config=testing_config) + api.build(recipe, config=testing_config, variants=variants) assert os.path.exists(output_file) out, err = capfd.readouterr() diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index 8eacad1475..bc17db6ffe 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -194,9 +194,12 @@ def test_convert_platform_to_others(base_platform, package): @pytest.mark.skipif( on_win, reason="we create the pkg to be converted in *nix; don't run on win." ) -def test_convert_from_unix_to_win_creates_entry_points(testing_config): +def test_convert_from_unix_to_win_creates_entry_points(testing_config, request): recipe_dir = os.path.join(metadata_dir, "entry_points") - fn = api.build(recipe_dir, config=testing_config)[0] + # Recipe "entry_points" is used in other test -> add test-specific variant + # (change build hash) to avoid clashes in package cache from other tests. + variants = {"pytest_name": [request.node.name]} + fn = api.build(recipe_dir, config=testing_config, variants=variants)[0] for platform in ["win-64", "win-32"]: api.convert(fn, platforms=[platform], force=True) converted_fn = os.path.join(platform, os.path.basename(fn)) From aefdab62c40476c0f213a9f9eea80cda5259b08e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Nov 2023 19:05:10 +0100 Subject: [PATCH 224/366] [pre-commit.ci] pre-commit autoupdate (#5079) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3afdd26a6d..aa5a565768 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.5 + rev: v0.1.6 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From f77849b2461532ebb4d9a22b3624a71eb7240885 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Sat, 25 Nov 2023 16:58:00 +0100 Subject: [PATCH 225/366] add menuinst JSON file validation (#4405) * add menuinst JSON file validation * use RECIPE_DIR * add deps * oopsie typo * use pip for now * add on windows too * make sure test can fail * revert, it fails as expected! * why timeout=120 on windows? * pre-commit * take from canary channel * --no-deps * fix import error * fix tests * pre-commit * require menuinst v2 stable * fix backslashes * use the libmamba solver in env setup * exit early * use jsonschema instead of pydantic * remove solver=libmamba bits * pre-commit * do not deduplicate log warnings * make them docstrings * no clear needed here * no print needed here either * try with libmamba again * windows uses pwsh, not batch * exit on error early by switching to cmd on this step * get latest miniconda to avoid pre 23.11 bugs with history * revert * install menuinst from url on windows * pre-commit * rename to underscore * use copy? 
* debug * gotta use CALL in 'conda' for CMD * fix comment syntax * add docstring * lowercase match --- .github/workflows/tests.yml | 26 ++++---- conda_build/post.py | 60 +++++++++++++++++++ pyproject.toml | 2 + recipe/meta.yaml | 2 + tests/requirements-linux.txt | 2 + tests/requirements-macos.txt | 2 + tests/requirements.txt | 1 + .../metadata/_menu_json_validation/menu.json | 23 +++++++ .../metadata/_menu_json_validation/meta.yaml | 10 ++++ tests/test_post.py | 57 ++++++++++++++++++ 10 files changed, 175 insertions(+), 10 deletions(-) create mode 100644 tests/test-recipes/metadata/_menu_json_validation/menu.json create mode 100644 tests/test-recipes/metadata/_menu_json_validation/meta.yaml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 174eb4971e..5fcaf26458 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -64,7 +64,7 @@ jobs: runs-on: ubuntu-latest defaults: run: - shell: bash -l {0} + shell: bash -el {0} strategy: fail-fast: false matrix: @@ -125,7 +125,7 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-linux.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }} - pip install -e . + pip install -e . --no-deps - name: Show info run: | @@ -224,13 +224,19 @@ jobs: run-post: false # skip post cleanup - name: Setup environment + shell: cmd /C CALL {0} run: | - choco install visualstudio2017-workload-vctools - conda install -q -y -c defaults ` - --file .\tests\requirements.txt ` - --file .\tests\requirements-windows.txt ` - ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install -e . + @echo on + CALL choco install visualstudio2017-workload-vctools || exit 1 + CALL conda install -q -y -c defaults ^ + --file .\tests\requirements.txt ^ + --file .\tests\requirements-windows.txt ^ + ${{ env.CONDA_CHANNEL_LABEL }}::conda || exit 1 + :: TEMPORARY + if "${{ matrix.python-version }}" == "3.8" CALL conda install "https://anaconda.org/conda-forge/menuinst/2.0.0/download/win-64/menuinst-2.0.0-py38hd3f51b4_1.conda" || exit 1 + if "${{ matrix.python-version }}" == "3.11" CALL conda install "https://anaconda.org/conda-forge/menuinst/2.0.0/download/win-64/menuinst-2.0.0-py311h12c1d0e_1.conda" || exit 1 + :: /TEMPORARY + CALL pip install -e . --no-deps || exit 1 - name: Show info run: | @@ -288,7 +294,7 @@ jobs: runs-on: macos-11 defaults: run: - shell: bash -l {0} + shell: bash -el {0} strategy: fail-fast: false matrix: @@ -341,7 +347,7 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-macos.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install -e . + pip install -e . --no-deps - name: Show info run: | diff --git a/conda_build/post.py b/conda_build/post.py index 76fe82ae96..018c1f7e58 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1732,6 +1732,65 @@ def fix_permissions(files, prefix): log.warn(str(e)) +def check_menuinst_json(files, prefix) -> None: + """ + Check that Menu/*.json files are valid menuinst v2 JSON documents, + as defined by the CEP-11 schema. This JSON schema is part of the `menuinst` + package. + + Validation can fail if the menu/*.json file is not valid JSON, or if it doesn't + comply with the menuinst schema. + + We validate at build-time so we don't have to validate at install-time, saving + `conda` a few dependencies. 
+ """ + json_files = fnmatch_filter(files, "[Mm][Ee][Nn][Uu][/\\]*.[Jj][Ss][Oo][Nn]") + if not json_files: + return + + print("Validating Menu/*.json files") + log = utils.get_logger(__name__, dedupe=False) + try: + import jsonschema + from menuinst.utils import data_path + except ModuleNotFoundError as exc: + log.warning( + "Found 'Menu/*.json' files but couldn't validate: %s", + ", ".join(json_files), + exc_info=exc, + ) + return + + try: + schema_path = data_path("menuinst.schema.json") + with open(schema_path) as f: + schema = json.load(f) + ValidatorClass = jsonschema.validators.validator_for(schema) + validator = ValidatorClass(schema) + except (jsonschema.SchemaError, json.JSONDecodeError, OSError) as exc: + log.warning("'%s' is not a valid menuinst schema", schema_path, exc_info=exc) + return + + for json_file in json_files: + try: + with open(join(prefix, json_file)) as f: + text = f.read() + if "$schema" not in text: + log.warning( + "menuinst v1 JSON document '%s' won't be validated.", json_file + ) + continue + validator.validate(json.loads(text)) + except (jsonschema.ValidationError, json.JSONDecodeError, OSError) as exc: + log.warning( + "'%s' is not a valid menuinst JSON document!", + json_file, + exc_info=exc, + ) + else: + log.info("'%s' is a valid menuinst JSON document", json_file) + + def post_build(m, files, build_python, host_prefix=None, is_already_linked=False): print("number of files:", len(files)) @@ -1765,6 +1824,7 @@ def post_build(m, files, build_python, host_prefix=None, is_already_linked=False ): post_process_shared_lib(m, f, prefix_files, host_prefix) check_overlinking(m, files, host_prefix) + check_menuinst_json(files, host_prefix) def check_symlinks(files, prefix, croot): diff --git a/pyproject.toml b/pyproject.toml index 6125bce2ee..edf6f493b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,8 @@ dependencies = [ "six", "tomli ; python_version<'3.11'", "tqdm", + "jsonschema >=4.19", + "menuinst >=2" ] dynamic = ["version"] diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 6a3ed0ea27..c2451656da 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -51,6 +51,8 @@ requirements: - six - tomli # [py<311] - tqdm + - menuinst >=2 + - jsonschema >=4.19 run_constrained: - conda-verify >=3.1.0 diff --git a/tests/requirements-linux.txt b/tests/requirements-linux.txt index 149ce09bad..b1785e2c4f 100644 --- a/tests/requirements-linux.txt +++ b/tests/requirements-linux.txt @@ -1,3 +1,5 @@ +# TEMP +conda-forge::menuinst >=2 patch patchelf shellcheck diff --git a/tests/requirements-macos.txt b/tests/requirements-macos.txt index 133b191333..caa4235c84 100644 --- a/tests/requirements-macos.txt +++ b/tests/requirements-macos.txt @@ -1,2 +1,4 @@ +# TEMP +conda-forge::menuinst >=2 patch shellcheck diff --git a/tests/requirements.txt b/tests/requirements.txt index 02d34d6787..3e230a6c24 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -11,6 +11,7 @@ cytoolz filelock git jinja2 +jsonschema numpy perl pip diff --git a/tests/test-recipes/metadata/_menu_json_validation/menu.json b/tests/test-recipes/metadata/_menu_json_validation/menu.json new file mode 100644 index 0000000000..eeed9e756f --- /dev/null +++ b/tests/test-recipes/metadata/_menu_json_validation/menu.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "$id": "https://schemas.conda.io/menuinst-1.schema.json", + "menu_name": "Example 1", + "menu_items": [ + { + "name": "Example", + "description": "This will install to Windows and Linux with default 
options. MacOS has a custom option.", + "command": [ + "{{ PYTHON }}", + "-c", + "import sys; print(sys.executable)" + ], + "platforms": { + "win": {}, + "linux": {}, + "osx": { + "CFBundleName": "My Example" + } + } + } + ] +} \ No newline at end of file diff --git a/tests/test-recipes/metadata/_menu_json_validation/meta.yaml b/tests/test-recipes/metadata/_menu_json_validation/meta.yaml new file mode 100644 index 0000000000..ac23805ec9 --- /dev/null +++ b/tests/test-recipes/metadata/_menu_json_validation/meta.yaml @@ -0,0 +1,10 @@ +package: + name: menu_json_validation + version: "1.0" + +build: + script: + - mkdir -p "${PREFIX}/Menu" # [unix] + - cp "${RECIPE_DIR}/menu.json" "${PREFIX}/Menu/menu_json_validation.json" # [unix] + - md "%PREFIX%\\Menu" # [win] + - copy /y "%RECIPE_DIR%\\menu.json" "%PREFIX%\\Menu\\menu_json_validation.json" # [win] diff --git a/tests/test_post.py b/tests/test_post.py index 3fa808fad5..c15fffaf2a 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -1,8 +1,11 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import json +import logging import os import shutil import sys +from pathlib import Path import pytest @@ -91,3 +94,57 @@ def test_pypi_installer_metadata(testing_config): get_site_packages("", "3.9") ) assert "conda" == (package_has_file(pkg, expected_installer, refresh_mode="forced")) + + +def test_menuinst_validation_ok(testing_config, caplog, tmp_path): + "1st check - validation passes with recipe as is" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + + with caplog.at_level(logging.INFO): + pkg = api.build(str(recipe_tmp), config=testing_config, notest=True)[0] + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text + assert "not a valid menuinst JSON file" not in captured_text + assert "is a valid menuinst JSON document" in captured_text + assert package_has_file(pkg, "Menu/menu_json_validation.json") + + +def test_menuinst_validation_fails_bad_schema(testing_config, caplog, tmp_path): + "2nd check - valid JSON but invalid content fails validation" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + menu_json = recipe_tmp / "menu.json" + menu_json_contents = menu_json.read_text() + + bad_data = json.loads(menu_json_contents) + bad_data["menu_items"][0]["osx"] = ["bad", "schema"] + menu_json.write_text(json.dumps(bad_data, indent=2)) + with caplog.at_level(logging.WARNING): + api.build(str(recipe_tmp), config=testing_config, notest=True) + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text + assert "not a valid menuinst JSON document" in captured_text + assert "ValidationError" in captured_text + + +def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path): + "3rd check - non-parsable JSON fails validation" + recipe = Path(metadata_dir, "_menu_json_validation") + recipe_tmp = tmp_path / "_menu_json_validation" + shutil.copytree(recipe, recipe_tmp) + menu_json = recipe_tmp / "menu.json" + menu_json_contents = menu_json.read_text() + menu_json.write_text(menu_json_contents + "Make this an invalid JSON") + + with caplog.at_level(logging.WARNING): + api.build(str(recipe_tmp), config=testing_config, notest=True) + + captured_text = caplog.text + assert "Found 'Menu/*.json' files but couldn't 
validate:" not in captured_text + assert "not a valid menuinst JSON document" in captured_text + assert "JSONDecodeError" in captured_text From dce97d77ef4732cc1d36cbaafb0cc7545bcb105b Mon Sep 17 00:00:00 2001 From: Travis Hathaway Date: Tue, 28 Nov 2023 18:06:59 +0100 Subject: [PATCH 226/366] Add conda-sphinx-theme to conda-build main (#5067) * switch over to new conda-sphinx-theme * adding news snippet --- docs/requirements.txt | 3 +- docs/source/_static/css/custom.css | 87 ------------------- docs/source/concepts/package-naming-conv.rst | 56 ++++++------ docs/source/concepts/recipe.rst | 4 - docs/source/conf.py | 50 +++++++++-- docs/source/index.rst | 2 - docs/source/resources/define-metadata.rst | 5 -- docs/source/resources/package-spec.rst | 4 - docs/source/resources/tutorial-template.rst | 4 - .../user-guide/environment-variables.rst | 4 - docs/source/user-guide/index.rst | 12 ++- .../recipes/build-without-recipe.rst | 5 -- .../tutorials/build-pkgs-skeleton.rst | 5 -- .../user-guide/tutorials/build-pkgs.rst | 5 -- .../user-guide/tutorials/build-r-pkgs.rst | 4 - .../tutorials/building-conda-packages.rst | 4 - news/5067-use-conda-sphinx-theme | 20 +++++ 17 files changed, 102 insertions(+), 172 deletions(-) delete mode 100644 docs/source/_static/css/custom.css create mode 100644 news/5067-use-conda-sphinx-theme diff --git a/docs/requirements.txt b/docs/requirements.txt index b4590377d8..b6f1b46b9c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,4 @@ +conda-sphinx-theme==0.1.1 linkify-it-py==2.0.2 myst-parser==2.0.0 Pillow==10.0.1 @@ -7,8 +8,8 @@ ruamel.yaml==0.17.32 Sphinx==7.2.6 sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 -sphinx-rtd-theme==1.3.0 sphinx-sitemap==2.5.1 +sphinx_design==0.5.0 sphinxcontrib-applehelp==1.0.7 sphinxcontrib-devhelp==1.0.5 sphinxcontrib-htmlhelp==2.0.4 diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css deleted file mode 100644 index f78cbde400..0000000000 --- a/docs/source/_static/css/custom.css +++ /dev/null @@ -1,87 +0,0 @@ -@import url("theme.css"); - -.wy-nav-content { - padding: 1.618em 3.236em; - height: 100%; - max-width: 1500px; - /* max-width: 800px; */ - margin: auto; - background-color: #ffffff; -} - -.wy-side-nav-search { - /*background color of the top search bar*/ - background-color: #43B02A; -} - -.wy-nav-side { -/* This relates to the entire color of the sidebar */ - background-color:#EEEEEE; -} - -.wy-menu a:hover { - /*background color of text upon hovering*/ - background: #c9c9c9 -} - -.wy-menu-vertical li.on a:hover,.wy-menu-vertical li.current>a:hover { - /*background color of text upon hovering an open list*/ - background: #c9c9c9 -} - -.wy-menu-vertical { - /* text color of expanded menu items in the sidebar */ - color:#414042; -} - -.section h1 { - /*header 1 text color */ - color: #047704; - } - -.rst-content .toctree-wrapper p.caption, h2, h3, h4, h5, h6, legend { - /*text color of rst content and subheads*/ - color: #414042; -} - -.wy-menu-vertical a { - /* Text color of toc */ - color: #025C02; -} - -.wy-nav-content-wrap { - /* background color of wrap around main content*/ - background-color: white; -} - -.section-title { - /*text color of section titles*/ - color:#078E07; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] pre, .rst-content .linenodiv pre { - /*color of code blocks*/ - background-color: #EEEEEE -} - -h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend { - /*font formats*/ - font-family: "Proxima 
Nova","Helvetica","Arial",sans-serif; -} -.wy-menu-vertical li.toctree-l1.current>a { - /*text color of toctree*/ - color: #025C02; -} - -.toctree-l1:hover { - background-color: #EEEEEE; -} - -.wy-nav-top { - /*color of nav at top when the window is narrow*/ - background: #43B02A; -} - -.wy-table-responsive table td:not(:first-child), .wy-table-responsive table th:not(:first-child) { - white-space: normal; -} diff --git a/docs/source/concepts/package-naming-conv.rst b/docs/source/concepts/package-naming-conv.rst index ef083430a9..5d3f20f538 100644 --- a/docs/source/concepts/package-naming-conv.rst +++ b/docs/source/concepts/package-naming-conv.rst @@ -3,33 +3,35 @@ Package naming conventions ========================== To facilitate communication and documentation, conda observes the -package naming conventions listed below. - -**Package name** - The name of a package, without any reference to a particular - version. Conda package names are normalized and they may contain - only lowercase alpha characters, numeric digits, underscores, - hyphens, or dots. In usage documentation, these are referred to - by ``package_name``. - -**Package version** - A version number or string, often similar to ``X.Y`` or - ``X.Y.Z``, but it may take other forms as well. - -**Build string** - An arbitrary string that identifies a particular build of a - package for conda. It may contain suggestive mnemonics, but - these are subject to change, and you should not rely on it or try - to parse it for any specific information. - -**Canonical name** - The package name, version, and build string joined together by - hyphens: name-version-buildstring. In usage documentation, these - are referred to by ``canonical_name``. - -**Filename** - Conda package filenames are canonical names, plus the suffix - ``.tar.bz2`` or ``.conda``. +package naming conventions listed below: + +.. glossary:: + + Package name + The name of a package, without any reference to a particular + version. Conda package names are normalized and they may contain + only lowercase alpha characters, numeric digits, underscores, + hyphens, or dots. In usage documentation, these are referred to + by ``package_name``. + + Package version + A version number or string, often similar to ``X.Y`` or + ``X.Y.Z``, but it may take other forms as well. + + Build string + An arbitrary string that identifies a particular build of a + package for conda. It may contain suggestive mnemonics, but + these are subject to change, and you should not rely on it or try + to parse it for any specific information. + + Canonical name + The package name, version, and build string joined together by + hyphens: name-version-buildstring. In usage documentation, these + are referred to by ``canonical_name``. + + Filename + Conda package filenames are canonical names, plus the suffix + ``.tar.bz2`` or ``.conda``. The following figure compares a canonical name to a filename: diff --git a/docs/source/concepts/recipe.rst b/docs/source/concepts/recipe.rst index 39006e9f0c..170f74f2ca 100644 --- a/docs/source/concepts/recipe.rst +++ b/docs/source/concepts/recipe.rst @@ -2,10 +2,6 @@ Conda-build recipes =================== -.. contents:: - :local: - :depth: 2 - To enable building `conda packages`_, :ref:`install and update conda and conda-build `. 
diff --git a/docs/source/conf.py b/docs/source/conf.py index 99a7e5974e..0aaacec6f0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -50,6 +50,7 @@ "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx_sitemap", + "sphinx_design", ] myst_heading_anchors = 3 @@ -104,23 +105,60 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme = "conda_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -# html_theme_options = {} +html_theme_options = { + # The maximum depth of the table of contents tree. Set this to -1 to allow + # unlimited depth. + "navigation_depth": -1, + "show_prev_next": False, + # Navbar icon links + "navbar_start": ["navbar-logo"], + "use_edit_page_button": True, + "icon_links": [ + { + "name": "GitHub", + "url": "https://github.com/conda/conda-build", + "icon": "fa-brands fa-square-github", + "type": "fontawesome", + }, + { + "name": "Element", + "url": "https://matrix.to/#/#conda-build:matrix.org", + "icon": "_static/element_logo.svg", + "type": "local", + }, + { + "name": "Discourse", + "url": "https://conda.discourse.group/", + "icon": "fa-brands fa-discourse", + "type": "fontawesome", + }, + ], +} + +html_context = { + "github_user": "conda", + "github_repo": "conda-build", + "github_version": "main", + "doc_path": "docs/source", +} html_short_title = "conda-build" -html_show_sourcelink = False -html_favicon = "conda-logo.png" +# html_show_sourcelink = False html_extra_path = ["robots.txt"] # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +# html_static_path = ["_static"] + +# Custom CSS rules +# html_style = "css/custom.css" # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -212,5 +250,3 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True - -html_style = "css/custom.css" diff --git a/docs/source/index.rst b/docs/source/index.rst index f544933cce..12e4d9fdcc 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,5 +1,3 @@ -.. _index: - Conda-build documentation ========================= diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index c9e1ddd32b..63cbb3eb86 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -4,11 +4,6 @@ Defining metadata (meta.yaml) ============================= -.. contents:: - :local: - :depth: 1 - - All the metadata in the conda-build recipe is specified in the ``meta.yaml`` file. See the example below: diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index 0bcd3f929b..90c09289aa 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -2,10 +2,6 @@ Conda package specification =========================== -.. contents:: - :local: - :depth: 1 - A conda package is an archive file that contains: * Metadata under the ``info/`` directory. 
diff --git a/docs/source/resources/tutorial-template.rst b/docs/source/resources/tutorial-template.rst index e8b63add27..69d9784538 100644 --- a/docs/source/resources/tutorial-template.rst +++ b/docs/source/resources/tutorial-template.rst @@ -2,10 +2,6 @@ Tutorial template ================= -.. contents:: - :local: - :depth: 1 - .. _documentation@anaconda.com: documentation@anaconda.com *This document describes the steps for creating* diff --git a/docs/source/user-guide/environment-variables.rst b/docs/source/user-guide/environment-variables.rst index d925040c17..f0d002cd27 100644 --- a/docs/source/user-guide/environment-variables.rst +++ b/docs/source/user-guide/environment-variables.rst @@ -4,10 +4,6 @@ Environment variables ===================== -.. contents:: - :local: - :depth: 1 - .. _build-state: Dynamic behavior based on state of build process diff --git a/docs/source/user-guide/index.rst b/docs/source/user-guide/index.rst index 86874e581d..09a374aca6 100644 --- a/docs/source/user-guide/index.rst +++ b/docs/source/user-guide/index.rst @@ -17,26 +17,30 @@ variables and wheel files. wheel-files -**Tutorials** +Tutorials +......... The :doc:`tutorials <../user-guide/tutorials/index>` will guide you through how to build conda packages — whether you're creating a package with compilers, using conda skeleton, creating from scratch, or building R packages using skeleton CRAN. -**Recipes** +Recipes +....... Conda-build uses :doc:`recipes <../user-guide/recipes/index>` to create conda packages. We have guides on debugging conda recipes, sample recipes for you to use, and information on how to build a package without a recipe. -**Environment variables** +Environment variables +..................... Use our :doc:`environment variables ` guide to understand which environment variables are available, set, and inherited, and how they affect different processes. -**Wheel files** +Wheel files +........... The user guide includes information about :doc:`wheel files ` and how to build conda diff --git a/docs/source/user-guide/recipes/build-without-recipe.rst b/docs/source/user-guide/recipes/build-without-recipe.rst index a3bfaeebf7..51c465db4a 100644 --- a/docs/source/user-guide/recipes/build-without-recipe.rst +++ b/docs/source/user-guide/recipes/build-without-recipe.rst @@ -2,11 +2,6 @@ Building a package without a recipe (bdist_conda) ================================================= -.. contents:: - :local: - :depth: 2 - - You can use conda-build to build packages for Python to install rather than conda by using ``setup.py bdist_conda``. This is a quick way to build packages without using a recipe, but it has diff --git a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst index 90c8d8bfe1..4d60acc3c9 100644 --- a/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst +++ b/docs/source/user-guide/tutorials/build-pkgs-skeleton.rst @@ -2,11 +2,6 @@ Building conda packages with conda skeleton =========================================== - -.. contents:: - :local: - :depth: 1 - .. tip:: We recommend `Grayskull `_, a newer alternative to conda-skeleton, to generate conda recipes for PyPI packages. 
Please check out their project page on GitHub diff --git a/docs/source/user-guide/tutorials/build-pkgs.rst b/docs/source/user-guide/tutorials/build-pkgs.rst index d10a5411bd..d3d97423d1 100644 --- a/docs/source/user-guide/tutorials/build-pkgs.rst +++ b/docs/source/user-guide/tutorials/build-pkgs.rst @@ -2,11 +2,6 @@ Building conda packages from scratch ==================================== - -.. contents:: - :local: - :depth: 1 - Overview ======== diff --git a/docs/source/user-guide/tutorials/build-r-pkgs.rst b/docs/source/user-guide/tutorials/build-r-pkgs.rst index 7649b1dae2..3837fc736d 100644 --- a/docs/source/user-guide/tutorials/build-r-pkgs.rst +++ b/docs/source/user-guide/tutorials/build-r-pkgs.rst @@ -2,10 +2,6 @@ Building R packages with skeleton CRAN ====================================== -.. contents:: - :local: - :depth: 1 - Overview ======== diff --git a/docs/source/user-guide/tutorials/building-conda-packages.rst b/docs/source/user-guide/tutorials/building-conda-packages.rst index 31ffe7d8ac..0ec1b71708 100644 --- a/docs/source/user-guide/tutorials/building-conda-packages.rst +++ b/docs/source/user-guide/tutorials/building-conda-packages.rst @@ -2,10 +2,6 @@ Building conda packages ======================= -.. contents:: - :local: - :depth: 1 - Overview ======== diff --git a/news/5067-use-conda-sphinx-theme b/news/5067-use-conda-sphinx-theme new file mode 100644 index 0000000000..da1257358b --- /dev/null +++ b/news/5067-use-conda-sphinx-theme @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Adds the conda-sphinx-theme to conda-build documentation +* Updates some pages to remove redundant TOC + +### Other + +* From 6db1d261a4f044aca98b87e7e9915315e2d5f98d Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 29 Nov 2023 16:12:03 +0100 Subject: [PATCH 227/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5086)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/ISSUE_TEMPLATE/0_bug.yml | 2 +- .github/ISSUE_TEMPLATE/1_feature.yml | 2 +- .github/ISSUE_TEMPLATE/epic.yml | 2 +- CODE_OF_CONDUCT.md | 2 +- HOW_WE_USE_GITHUB.md | 10 +++++----- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index bb4a6020c1..cfccd360ed 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -9,7 +9,7 @@ body: value: | Because processing new bug reports is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible. - > **Note** + > [!NOTE] > Bug reports that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary. diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index f24cf7fdad..a1e739821d 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -9,7 +9,7 @@ body: value: | Because processing new feature requests is time-consuming, we would like to ask you to fill out the following form to the best of your ability and as completely as possible. 
- > **Note** + > [!NOTE] > Feature requests that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if your feature request has already been submitted. If you find something related, please upvote that issue and provide additional details as necessary. diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index 3f757e004c..f9c412b177 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -11,7 +11,7 @@ body: If you are attempting to report a bug, propose a new feature, or some other code change please use one of the other forms available. - > **Note** + > [!NOTE] > Epics that are incomplete or missing information may be closed as inactionable. Since there are already a lot of open issues, please also take a moment to search existing ones to see if a similar epic has already been opened. If you find something related, please upvote that issue and provide additional details as necessary. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 1c3434a0ef..663464fe82 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,6 +1,6 @@ # Conda Organization Code of Conduct -> **Note** +> [!NOTE] > Below is the short version of our CoC, see the long version [here](https://github.com/conda-incubator/governance/blob/main/CODE_OF_CONDUCT.md). # The Short Version diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index dc32be1cf1..d0a4f4266f 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -43,12 +43,12 @@ This document seeks to outline how we as a community use GitHub Issues to track - [Spikes](#spikes) - [Working on Issues](#working-on-issues) -> **Note** +> [!NOTE] > This document is written in the style of an FAQ. For easier navigation, use [GitHub's table of contents feature][docs-toc]. ## What is "Issue Sorting"? -> **Note** +> [!NOTE] > "Issue sorting" is similar to that of "triaging", but we've chosen to use different terminology because "triaging" is a word related to very weighty topics (_e.g._, injuries and war) and we would like to be sensitive to those connotations. Additionally, we are taking a more "fuzzy" approach to sorting (_e.g._, severities may not be assigned, etc.). "Issue Sorting" refers to the process of assessing the priority of incoming issues. Below is a high-level diagram of the flow of issues: @@ -213,7 +213,7 @@ Below are some boilerplate responses for the most commonly-seen issues to be sor This is a duplicate of [link to primary issue]; please feel free to continue the discussion there. -> **Warning** +> [!WARNING] > Apply the https://github.com/conda/infrastructure/labels/duplicate label to the issue being closed and https://github.com/conda/infrastructure/labels/duplicate%3A%3Aprimary to the original issue.
@@ -244,7 +244,7 @@ If you are still encountering this issue please reopen in the where `conda` installer/package issues are addressed. -> **Warning** +> [!WARNING] > Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out. @@ -260,7 +260,7 @@ if you continue to experience the problems described here, please post details to the [Nucleus forums](https://community.anaconda.cloud/). -> **Warning** +> [!WARNING] > Apply the https://github.com/conda/infrastructure/labels/off-topic label to these issues before closing them out. From 6b1cab907326f0af0c57c5803ead45492d21e11b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 29 Nov 2023 17:06:27 -0500 Subject: [PATCH 228/366] test_keep_old_work: Update flaky Windows permissions test (#5087) --- conda_build/config.py | 9 +++++--- conda_build/inspect_pkg.py | 9 +------- conda_build/utils.py | 9 ++++++++ tests/test_config.py | 42 ++++++++++++++++++++------------------ 4 files changed, 38 insertions(+), 31 deletions(-) diff --git a/conda_build/config.py b/conda_build/config.py index 8c598fdee0..6756e16d58 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -3,6 +3,8 @@ """ Module to store conda build settings. """ +from __future__ import annotations + import copy import math import os @@ -12,6 +14,7 @@ import time from collections import namedtuple from os.path import abspath, expanduser, expandvars, join +from pathlib import Path from .conda_interface import ( binstar_upload, @@ -458,7 +461,7 @@ def src_cache_root(self, value): self._src_cache_root = value @property - def croot(self): + def croot(self) -> str: """This is where source caches and work folders live""" if not self._croot: _bld_root_env = os.getenv("CONDA_BLD_PATH") @@ -474,9 +477,9 @@ def croot(self): return self._croot @croot.setter - def croot(self, croot): + def croot(self, croot: str | os.PathLike | Path) -> None: """Set croot - if None is passed, then the default value will be used""" - self._croot = croot + self._croot = str(croot) if croot else None @property def output_folder(self): diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index e8b94978c1..a189931940 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -43,7 +43,7 @@ ) from .deprecations import deprecated -from .utils import on_mac, on_win +from .utils import on_mac, on_win, samefile @deprecated("3.28.0", "4.0.0") @@ -72,13 +72,6 @@ def which_package( # (pathlib correctly handles this even if path is absolute) path = prefix / path - def samefile(path1: Path, path2: Path) -> bool: - try: - return path1.samefile(path2) - except FileNotFoundError: - # FileNotFoundError: path doesn't exist - return path1 == path2 - for prec in PrefixData(str(prefix)).iter_records(): for file in prec["files"]: if samefile(prefix / file, path): diff --git a/conda_build/utils.py b/conda_build/utils.py index 9fdfc2ad37..509803351c 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -2220,3 +2220,12 @@ def is_conda_pkg(pkg_path: str) -> bool: return path.is_file() and ( any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) ) + + +def samefile(path1: Path, path2: Path) -> bool: + try: + return path1.samefile(path2) + except (FileNotFoundError, PermissionError): + # FileNotFoundError: path doesn't exist + # PermissionError: don't have permissions to read path + return path1 == path2 diff --git a/tests/test_config.py b/tests/test_config.py index 528e4a5122..7c46ca0693 100644 --- a/tests/test_config.py +++ 
b/tests/test_config.py @@ -2,22 +2,22 @@ # SPDX-License-Identifier: BSD-3-Clause import os import sys +from pathlib import Path import pytest -from conda_build.conda_interface import TemporaryDirectory from conda_build.config import Config, get_or_merge_config -from conda_build.utils import on_win +from conda_build.utils import on_win, samefile @pytest.fixture -def config(): +def config() -> Config: """a tiny bit of a fixture to save us from manually creating a new Config each test""" return Config() @pytest.fixture -def build_id(): +def build_id() -> str: """Small support fixture for setting build id's in multiple builds which may need them""" return "test123" @@ -34,23 +34,25 @@ def test_set_build_id(config, build_id): assert config.host_prefix == long_prefix -def test_keep_old_work(config, build_id): +def test_keep_old_work(config: Config, build_id: str, tmp_path: Path): config.keep_old_work = True - with TemporaryDirectory() as temp_dir: - config.croot = temp_dir - config.build_id = build_id - work_path = os.path.join(temp_dir, build_id, "work") - os.makedirs(work_path) - # assert False - assert len(os.listdir(config.work_dir)) == 0 - with open(os.path.join(work_path, "a_touched_file.magic"), "w") as _: - # Touch a random file so the "work_dir" is not empty - pass - assert len(os.listdir(config.work_dir)) > 0 - config.compute_build_id("a_new_name", reset=True) - assert config.work_dir != work_path - assert not os.path.exists(work_path) - assert len(os.listdir(config.work_dir)) > 0 + config.croot = tmp_path + config.build_id = build_id + + # empty working directory + orig_dir = Path(config.work_dir) + assert not len(os.listdir(config.work_dir)) + + # touch a file so working directory is not empty + (orig_dir / "a_touched_file.magic").touch() + assert len(os.listdir(config.work_dir)) + + config.compute_build_id("a_new_name", reset=True) + + # working directory should still exist and have the touched file + assert not samefile(orig_dir, config.work_dir) + assert not orig_dir.exists() + assert len(os.listdir(config.work_dir)) @pytest.mark.skipif(on_win, reason="Windows uses only the short prefix") From 328f7e1f9939125efcc934eb5f2c44343b303208 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Nov 2023 09:02:37 -0500 Subject: [PATCH 229/366] Officially adopt CEP-8 and an expedited CEP-9 (#5080) * Switch from 4.0.0 to 24.1.0 * Update comment in __version__ * Update deprecations from conda --- conda_build/__version__.py | 6 +- conda_build/api.py | 2 +- conda_build/cli/main_build.py | 2 +- conda_build/cli/main_convert.py | 2 +- conda_build/cli/main_debug.py | 2 +- conda_build/cli/main_develop.py | 2 +- conda_build/cli/main_index.py | 4 +- conda_build/cli/main_inspect.py | 2 +- conda_build/cli/main_metapackage.py | 2 +- conda_build/cli/main_render.py | 2 +- conda_build/cli/main_skeleton.py | 2 +- conda_build/conda_interface.py | 12 +- conda_build/config.py | 24 ++-- conda_build/deprecations.py | 55 ++++++++- conda_build/environ.py | 2 +- conda_build/index.py | 2 +- conda_build/inspect_pkg.py | 6 +- conda_build/metadata.py | 2 +- conda_build/os_utils/ldd.py | 2 +- conda_build/os_utils/liefldd.py | 8 +- conda_build/os_utils/pyldd.py | 14 +-- conda_build/post.py | 16 +-- conda_build/utils.py | 2 +- tests/test_deprecations.py | 171 ++++++++++++++++++++++------ 24 files changed, 246 insertions(+), 98 deletions(-) diff --git a/conda_build/__version__.py b/conda_build/__version__.py index 72533fc63e..e835e1be9d 100644 --- a/conda_build/__version__.py +++ b/conda_build/__version__.py @@ -2,9 +2,9 
@@ # SPDX-License-Identifier: BSD-3-Clause """Placeholder for the actual version code injected by hatch-vcs. -The logic here is used during development installs only so keep it simple. Conda-build -uses SemVer so our development version is simply: - MAJOR.MINOR.MICRO.devN+gHASH[.dirty] +The logic here is used during development installs only so keep it simple. +Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: + YY.MM.MICRO.devN+gHASH[.dirty] """ try: from setuptools_scm import get_version diff --git a/conda_build/api.py b/conda_build/api.py index e310e11b6b..522aa1b08d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -521,7 +521,7 @@ def create_metapackage( ) -@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") +@deprecated("3.25.0", "24.1.0", addendum="Use standalone conda-index.") def update_index( dir_paths, config=None, diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 25bca5a6fd..5b5bb67ef6 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -587,7 +587,7 @@ def execute(args): return outputs -@deprecated("3.26.0", "4.0.0", addendum="Use `conda build` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda build` instead.") def main(): try: execute(sys.argv[1:]) diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 4c09cfc1da..34b748d407 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -129,6 +129,6 @@ def execute(args): api.convert(f, **args.__dict__) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda convert` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda convert` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 00c6eeb230..dd29f988ee 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -118,6 +118,6 @@ def execute(args): sys.exit(1) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda debug` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda debug` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 46c8384826..68abbbce00 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -88,6 +88,6 @@ def execute(args): ) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda develop` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda develop` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index b0eefa8aa7..79cff86fa6 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -102,7 +102,7 @@ def execute(args): deprecated.topic( "3.25.0", - "4.0.0", + "24.1.0", topic="`conda index` and `conda-index`", addendum="Use the `conda-index` project instead.", ) @@ -121,6 +121,6 @@ def execute(args): ) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda index` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda index` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 79e0594a4f..aa38ce51f1 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -222,6 +222,6 @@ def execute(args): raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") -@deprecated("3.26.0", "4.0.0", addendum="Use `conda inspect` 
instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda inspect` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 71be2e7d3d..4b7e4df110 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -112,6 +112,6 @@ def execute(args): api.create_metapackage(channel_urls=channel_urls, **args.__dict__) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda metapackage` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda metapackage` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 4647d43996..9026fb1b57 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -243,7 +243,7 @@ def execute(args, print_results=True): return metadata_tuples -@deprecated("3.26.0", "4.0.0", addendum="Use `conda render` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda render` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 7bb9e3369f..219c8d1734 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -62,7 +62,7 @@ def execute(args): ) -@deprecated("3.26.0", "4.0.0", addendum="Use `conda skeleton` instead.") +@deprecated("3.26.0", "24.1.0", addendum="Use `conda skeleton` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 833a4339f6..10bd300ed3 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -82,7 +82,7 @@ deprecated.constant( "3.28.0", - "4.0.0", + "24.1.0", "IndexRecord", PackageRecord, addendum="Use `conda.models.records.PackageRecord` instead.", @@ -115,12 +115,12 @@ def __call__(self, path: str | os.PathLike) -> int: return self.st_nlink(path) @staticmethod - @deprecated("3.24.0", "4.0.0", addendum="Use `os.stat().st_nlink` instead.") + @deprecated("3.24.0", "24.1.0", addendum="Use `os.stat().st_nlink` instead.") def st_nlink(path: str | os.PathLike) -> int: return os.stat(path).st_nlink -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") class SignatureError(Exception): # TODO: What is this? 
🤔 pass @@ -128,7 +128,7 @@ class SignatureError(Exception): @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda_build.inspect_pkg.which_package` instead.", ) def which_package(path: str | os.PathLike | Path) -> Iterable[PrefixRecord]: @@ -137,7 +137,7 @@ def which_package(path: str | os.PathLike | Path) -> Iterable[PrefixRecord]: return which_package(path, which_prefix(path)) -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def which_prefix(path: str | os.PathLike | Path) -> Path: """ Given the path (to a (presumably) conda installed file) return the @@ -158,7 +158,7 @@ def which_prefix(path: str | os.PathLike | Path) -> Path: raise RuntimeError("could not determine conda prefix from: %s" % path) -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def get_installed_version(prefix, pkgs): """ Primarily used by conda-forge, but may be useful in general for checking when diff --git a/conda_build/config.py b/conda_build/config.py index 6756e16d58..67f0e9f6a0 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -60,14 +60,14 @@ def set_invocation_time(): zstd_compression_level_default = 19 -@deprecated("3.25.0", "4.0.0") +@deprecated("3.25.0", "24.1.0") def python2_fs_encode(strin): return strin @deprecated( "3.25.0", - "4.0.0", + "24.1.0", addendum=( "Use `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` " "instead." @@ -497,56 +497,56 @@ def build_folder(self): # back compat for conda-build-all - expects CONDA_* vars to be attributes of the config object @property - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_LUA(self): return self.variant.get("lua", get_default_variant(self)["lua"]) @CONDA_LUA.setter - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_LUA(self, value): self.variant["lua"] = value @property - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_PY(self): value = self.variant.get("python", get_default_variant(self)["python"]) return int("".join(value.split("."))) @CONDA_PY.setter - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_PY(self, value): value = str(value) self.variant["python"] = ".".join((value[0], value[1:])) @property - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_NPY(self): value = self.variant.get("numpy", get_default_variant(self)["numpy"]) return int("".join(value.split("."))) @CONDA_NPY.setter - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_NPY(self, value): value = str(value) self.variant["numpy"] = ".".join((value[0], value[1:])) @property - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_PERL(self): return self.variant.get("perl", get_default_variant(self)["perl"]) @CONDA_PERL.setter - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_PERL(self, value): self.variant["perl"] = value @property - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_R(self): return self.variant.get("r_base", get_default_variant(self)["r_base"]) @CONDA_R.setter - @deprecated("3.0.28", "4.0.0") + @deprecated("3.0.28", "24.1.0") def CONDA_R(self, value): self.variant["r_base"] = value diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index e81f6e654d..4d09205da5 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -1,17 +1,18 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -# Copyright (C) 2012 
Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause +"""Tools to aid in deprecating code.""" from __future__ import annotations +import sys import warnings +from argparse import Action from functools import wraps from types import ModuleType from typing import Any, Callable from packaging.version import Version, parse -from .__version__ import __version__ +from . import __version__ class DeprecatedError(RuntimeError): @@ -127,6 +128,50 @@ def inner(*args, **kwargs): return deprecated_decorator + def action( + self, + deprecate_in: str, + remove_in: str, + action: Action, + *, + addendum: str | None = None, + stack: int = 0, + ): + class DeprecationMixin: + def __init__(inner_self, *args, **kwargs): + super().__init__(*args, **kwargs) + + category, message = self._generate_message( + deprecate_in, + remove_in, + ( + # option_string are ordered shortest to longest, + # use the longest as it's the most descriptive + f"`{inner_self.option_strings[-1]}`" + if inner_self.option_strings + # if not a flag/switch, use the destination itself + else f"`{inner_self.dest}`" + ), + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + inner_self.category = category + inner_self.help = message + + def __call__(inner_self, parser, namespace, values, option_string=None): + # alert user that it's time to remove something + warnings.warn( + inner_self.help, inner_self.category, stacklevel=7 + stack + ) + + super().__call__(parser, namespace, values, option_string) + + return type(action.__name__, (DeprecationMixin, action), {}) + def module( self, deprecate_in: str, @@ -236,8 +281,8 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: import inspect # expensive try: - frame = inspect.stack()[2 + stack] - module = inspect.getmodule(frame[0]) + frame = sys._getframe(2 + stack) + module = inspect.getmodule(frame) return (module, module.__name__) except (IndexError, AttributeError): raise DeprecatedError("unable to determine the calling module") from None diff --git a/conda_build/environ.py b/conda_build/environ.py index 9e128ad511..c165bdeba2 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1212,7 +1212,7 @@ def remove_existing_packages(dirs, fns, config): utils.rm_rf(entry) -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def clean_pkg_cache(dist, config): locks = [] diff --git a/conda_build/index.py b/conda_build/index.py index edf0bdedba..5f296a164d 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -332,7 +332,7 @@ def _delegated_update_index( # Everything below is deprecated to maintain API/feature compatibility. 
-@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") +@deprecated("3.25.0", "24.1.0", addendum="Use standalone conda-index.") def update_index( dir_path, check_md5=False, diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index a189931940..7c8eddc7de 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -46,7 +46,7 @@ from .utils import on_mac, on_win, samefile -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") @lru_cache(maxsize=None) def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: if (prec := PrefixData(prefix).get(dist.name, None)) is None: @@ -57,7 +57,7 @@ def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: return set() -@deprecated.argument("3.28.0", "4.0.0", "avoid_canonical_channel_name") +@deprecated.argument("3.28.0", "24.1.0", "avoid_canonical_channel_name") def which_package( path: str | os.PathLike | Path, prefix: str | os.PathLike | Path, @@ -242,7 +242,7 @@ def test_installable(channel="defaults"): return success -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def _installed(prefix: str | os.PathLike | Path) -> dict[str, Dist]: return {dist.name: dist for dist in linked_data(str(prefix))} diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 71021a1d4f..9f9e11eefe 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1429,7 +1429,7 @@ def check_field(key, section): check_field(key_or_dict, section) return True - @deprecated.argument("3.28.0", "4.0.0", "fail_ok") + @deprecated.argument("3.28.0", "24.1.0", "fail_ok") def name(self) -> str: name = self.get_value("package/name", "") if not name and self.final: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index f3597d065a..ed68a461aa 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -109,7 +109,7 @@ def _get_linkages( return linkages -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") @lru_cache(maxsize=None) def get_package_files( prec: PrefixRecord, prefix: str | os.PathLike | Path diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9e5c9836bb..0c47fd2533 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -33,7 +33,7 @@ have_lief = False -@deprecated("3.28.0", "4.0.0", addendum="Use `isinstance(value, str)` instead.") +@deprecated("3.28.0", "24.1.0", addendum="Use `isinstance(value, str)` instead.") def is_string(s): return isinstance(s, str) @@ -102,7 +102,7 @@ def codefile_class( @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", ) def codefile_type_liefldd(*args, **kwargs) -> str | None: @@ -112,14 +112,14 @@ def codefile_type_liefldd(*args, **kwargs) -> str | None: deprecated.constant( "3.28.0", - "4.0.0", + "24.1.0", "codefile_type_pyldd", _codefile_type, addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", ) deprecated.constant( "3.28.0", - "4.0.0", + "24.1.0", "codefile_type", _codefile_type, addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 427622dbb5..d65e0cbc3b 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -365,7 +365,7 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) -@deprecated("3.28.0", "4.0.0") 
+@deprecated("3.28.0", "24.1.0") def mach_o_change(path, arch, what, value): """ Replace a given name (what) in any LC_LOAD_DYLIB command found in @@ -1066,7 +1066,7 @@ def _get_magic_bit(path: Path) -> bytes: @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", ) def is_codefile(path: str | os.PathLike | Path, skip_symlinks: bool = True) -> bool: @@ -1075,7 +1075,7 @@ def is_codefile(path: str | os.PathLike | Path, skip_symlinks: bool = True) -> b @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", ) def codefile_type( @@ -1136,7 +1136,7 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): return cf.uniqueness_key(), orig_names, resolved_names -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def inspect_rpaths( filename, resolve_dirnames=True, use_os_varnames=True, sysroot="", arch="native" ): @@ -1168,7 +1168,7 @@ def inspect_rpaths( return cf.rpaths_nontransitive -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def get_runpaths(filename, arch="native"): if not os.path.exists(filename): return [] @@ -1256,14 +1256,14 @@ def otool(*args): return 1 -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def otool_sys(*args): import subprocess return subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") -@deprecated("3.28.0", "4.0.0") +@deprecated("3.28.0", "24.1.0") def ldd_sys(*args): return [] diff --git a/conda_build/post.py b/conda_build/post.py index 018c1f7e58..6bdb043da7 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -660,7 +660,7 @@ def assert_relative_osx(path, host_prefix, build_prefix): @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead.", ) def determine_package_nature( @@ -721,10 +721,10 @@ def get_run_exports( return () -@deprecated.argument("3.28.0", "4.0.0", "subdir") -@deprecated.argument("3.28.0", "4.0.0", "bldpkgs_dirs") -@deprecated.argument("3.28.0", "4.0.0", "output_folder") -@deprecated.argument("3.28.0", "4.0.0", "channel_urls") +@deprecated.argument("3.28.0", "24.1.0", "subdir") +@deprecated.argument("3.28.0", "24.1.0", "bldpkgs_dirs") +@deprecated.argument("3.28.0", "24.1.0", "output_folder") +@deprecated.argument("3.28.0", "24.1.0", "channel_urls") def library_nature( prec: PrefixRecord, prefix: str | os.PathLike | Path ) -> Literal[ @@ -782,7 +782,7 @@ def library_nature( @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Query `conda.core.prefix_data.PrefixData` instead.", ) def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): @@ -794,7 +794,7 @@ def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `conda.models.records.PrefixRecord` instead.", ) class FakeDist: @@ -1026,7 +1026,7 @@ def _map_file_to_package( @deprecated( - "3.28.0", "4.0.0", addendum="Use `conda.models.records.PrefixRecord` instead." + "3.28.0", "24.1.0", addendum="Use `conda.models.records.PrefixRecord` instead." 
) def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, files): return ( diff --git a/conda_build/utils.py b/conda_build/utils.py index 509803351c..6c300737c1 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -795,7 +795,7 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): @deprecated( "3.28.0", - "4.0.0", + "24.1.0", addendum="Use `os.path.relpath` or `pathlib.Path.relative_to` instead.", ) def relative(f, d="lib"): diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index d0f97370fb..0c6d8b11b4 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -1,8 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause import sys +from argparse import ArgumentParser, _StoreTrueAction import pytest @@ -10,42 +9,47 @@ @pytest.fixture(scope="module") -def deprecated_v1(): +def deprecated_v1() -> DeprecationHandler: + """Fixture mocking the conda_build.deprecations.deprecated object with `version=1.0`.""" return DeprecationHandler("1.0") @pytest.fixture(scope="module") -def deprecated_v2(): +def deprecated_v2() -> DeprecationHandler: + """Fixture mocking the conda_build.deprecations.deprecated object with `version=2.0`.""" return DeprecationHandler("2.0") @pytest.fixture(scope="module") -def deprecated_v3(): +def deprecated_v3() -> DeprecationHandler: + """Fixture mocking the conda_build.deprecations.deprecated object with `version=3.0`.""" return DeprecationHandler("3.0") -def test_pending(deprecated_v1): +def test_function_pending(deprecated_v1: DeprecationHandler): + """Calling a pending deprecation function displays associated warning.""" + @deprecated_v1("2.0", "3.0") def foo(): return True - # alerting user that a function will be unavailable with pytest.deprecated_call(match="pending deprecation"): assert foo() -def test_deprecated(deprecated_v2): +def test_function_deprecated(deprecated_v2: DeprecationHandler): + """Calling a deprecated function displays associated warning.""" + @deprecated_v2("2.0", "3.0") def foo(): return True - # alerting user that a function will be unavailable with pytest.deprecated_call(match="deprecated"): assert foo() -def test_remove(deprecated_v3): - # alerting developer that a function needs to be removed +def test_function_remove(deprecated_v3: DeprecationHandler): + """A function existing past its removal version raises an error.""" with pytest.raises(DeprecatedError): @deprecated_v3("2.0", "3.0") @@ -53,7 +57,74 @@ def foo(): return True -def test_arguments_pending(deprecated_v1): +def test_method_pending(deprecated_v1: DeprecationHandler): + """Calling a pending deprecation method displays associated warning.""" + + class Bar: + @deprecated_v1("2.0", "3.0") + def foo(self): + return True + + with pytest.deprecated_call(match="pending deprecation"): + assert Bar().foo() + + +def test_method_deprecated(deprecated_v2: DeprecationHandler): + """Calling a deprecated method displays associated warning.""" + + class Bar: + @deprecated_v2("2.0", "3.0") + def foo(self): + return True + + with pytest.deprecated_call(match="deprecated"): + assert Bar().foo() + + +def test_method_remove(deprecated_v3: DeprecationHandler): + """A method existing past its removal version raises an error.""" + with pytest.raises(DeprecatedError): + + class Bar: + @deprecated_v3("2.0", "3.0") + def foo(self): + return True + + +def test_class_pending(deprecated_v1: DeprecationHandler): + 
"""Calling a pending deprecation class displays associated warning.""" + + @deprecated_v1("2.0", "3.0") + class Foo: + pass + + with pytest.deprecated_call(match="pending deprecation"): + assert Foo() + + +def test_class_deprecated(deprecated_v2: DeprecationHandler): + """Calling a deprecated class displays associated warning.""" + + @deprecated_v2("2.0", "3.0") + class Foo: + pass + + with pytest.deprecated_call(match="deprecated"): + assert Foo() + + +def test_class_remove(deprecated_v3: DeprecationHandler): + """A class existing past its removal version raises an error.""" + with pytest.raises(DeprecatedError): + + @deprecated_v3("2.0", "3.0") + class Foo: + pass + + +def test_arguments_pending(deprecated_v1: DeprecationHandler): + """Calling a pending deprecation argument displays associated warning.""" + @deprecated_v1.argument("2.0", "3.0", "three") def foo(one, two): return True @@ -70,7 +141,9 @@ def foo(one, two): assert foo(1, 2) -def test_arguments_deprecated(deprecated_v2): +def test_arguments_deprecated(deprecated_v2: DeprecationHandler): + """Calling a deprecated argument displays associated warning.""" + @deprecated_v2.argument("2.0", "3.0", "three") def foo(one, two): return True @@ -87,8 +160,8 @@ def foo(one, two): assert foo(1, 2) -def test_arguments_remove(deprecated_v3): - # alerting developer that a keyword argument needs to be removed +def test_arguments_remove(deprecated_v3: DeprecationHandler): + """An argument existing past its removal version raises an error.""" with pytest.raises(DeprecatedError): @deprecated_v3.argument("2.0", "3.0", "three") @@ -96,67 +169,97 @@ def foo(one, two): return True -def test_module_pending(deprecated_v1): - # alerting user to pending deprecation +def test_action_pending(deprecated_v1: DeprecationHandler): + """Calling a pending deprecation argparse.Action displays associated warning.""" + parser = ArgumentParser() + parser.add_argument( + "--foo", action=deprecated_v1.action("2.0", "3.0", _StoreTrueAction) + ) + + with pytest.deprecated_call(match="pending deprecation"): + parser.parse_args(["--foo"]) + + +def test_action_deprecated(deprecated_v2: DeprecationHandler): + """Calling a deprecated argparse.Action displays associated warning.""" + parser = ArgumentParser() + parser.add_argument( + "--foo", action=deprecated_v2.action("2.0", "3.0", _StoreTrueAction) + ) + + with pytest.deprecated_call(match="deprecated"): + parser.parse_args(["--foo"]) + + +def test_action_remove(deprecated_v3: DeprecationHandler): + """An argparse.Action existing past its removal version raises an error.""" + with pytest.raises(DeprecatedError): + ArgumentParser().add_argument( + "--foo", action=deprecated_v3.action("2.0", "3.0", _StoreTrueAction) + ) + + +def test_module_pending(deprecated_v1: DeprecationHandler): + """Importing a pending deprecation module displays associated warning.""" with pytest.deprecated_call(match="pending deprecation"): deprecated_v1.module("2.0", "3.0") -def test_module_deprecated(deprecated_v2): - # alerting user to pending deprecation +def test_module_deprecated(deprecated_v2: DeprecationHandler): + """Importing a deprecated module displays associated warning.""" with pytest.deprecated_call(match="deprecated"): deprecated_v2.module("2.0", "3.0") -def test_module_remove(deprecated_v3): - # alerting developer that a module needs to be removed +def test_module_remove(deprecated_v3: DeprecationHandler): + """A module existing past its removal version raises an error.""" with pytest.raises(DeprecatedError): 
deprecated_v3.module("2.0", "3.0") -def test_constant_pending(deprecated_v1): +def test_constant_pending(deprecated_v1: DeprecationHandler): + """Using a pending deprecation constant displays associated warning.""" deprecated_v1.constant("2.0", "3.0", "SOME_CONSTANT", 42) module = sys.modules[__name__] - # alerting user to pending deprecation with pytest.deprecated_call(match="pending deprecation"): module.SOME_CONSTANT -def test_constant_deprecated(deprecated_v2): +def test_constant_deprecated(deprecated_v2: DeprecationHandler): + """Using a deprecated constant displays associated warning.""" deprecated_v2.constant("2.0", "3.0", "SOME_CONSTANT", 42) module = sys.modules[__name__] - # alerting user to pending deprecation with pytest.deprecated_call(match="deprecated"): module.SOME_CONSTANT -def test_constant_remove(deprecated_v3): - # alerting developer that a module needs to be removed +def test_constant_remove(deprecated_v3: DeprecationHandler): + """A constant existing past its removal version raises an error.""" with pytest.raises(DeprecatedError): deprecated_v3.constant("2.0", "3.0", "SOME_CONSTANT", 42) -def test_topic_pending(deprecated_v1): - # alerting user to pending deprecation +def test_topic_pending(deprecated_v1: DeprecationHandler): + """Reaching a pending deprecation topic displays associated warning.""" with pytest.deprecated_call(match="pending deprecation"): deprecated_v1.topic("2.0", "3.0", topic="Some special topic") -def test_topic_deprecated(deprecated_v2): - # alerting user to pending deprecation +def test_topic_deprecated(deprecated_v2: DeprecationHandler): + """Reaching a deprecated topic displays associated warning.""" with pytest.deprecated_call(match="deprecated"): deprecated_v2.topic("2.0", "3.0", topic="Some special topic") -def test_topic_remove(deprecated_v3): - # alerting developer that a module needs to be removed +def test_topic_remove(deprecated_v3: DeprecationHandler): + """A topic reached past its removal version raises an error.""" with pytest.raises(DeprecatedError): deprecated_v3.topic("2.0", "3.0", topic="Some special topic") def test_version_fallback(): - """Test that conda can run even if deprecations can't parse the version.""" + """Test that conda_build can run even if deprecations can't parse the version.""" version = DeprecationHandler(None)._version # type: ignore assert version.major == version.minor == version.micro == 0 From 0fa3053efc3de1e3f10b6cb7a4ef283a87681288 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Nov 2023 21:32:11 -0500 Subject: [PATCH 230/366] Correct & sync dependencies (#5089) * Sync dependencies * Add missing build::review label * Update builds-review.yaml to latest from conda/conda --- .github/labels.yml | 17 +++++++++++------ .github/workflows/builds-review.yaml | 4 ++-- pyproject.toml | 8 ++++---- recipe/meta.yaml | 7 +++---- tests/requirements-linux.txt | 2 -- tests/requirements-macos.txt | 2 -- tests/requirements.txt | 10 ++++++---- 7 files changed, 26 insertions(+), 24 deletions(-) diff --git a/.github/labels.yml b/.github/labels.yml index b072d62896..ba799038bb 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -1,3 +1,14 @@ +# Builds +- name: build::review + description: trigger a build for this PR + color: "7B4052" + +# Tags +- name: tag::noarch + description: related to noarch builds + color: "86C579" + aliases: [] + # Deprecated - name: 3_In_Progress description: "[deprecated] use milestones/project boards" @@ -26,9 +37,3 @@ - name: knowledge-medium description: "[deprecated]" color: 
"888888" - -# Tags -- name: tag::noarch - description: related to noarch builds - color: "86C579" - aliases: [] diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 672657ed26..c10129b56f 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -53,12 +53,12 @@ jobs: fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@v22.10.0 + uses: conda/actions/canary-release@v23.7.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} anaconda-org-channel: conda-canary anaconda-org-label: ${{ github.repository_owner }}-${{ github.event.repository.name }}-pr-${{ github.event.number }} - anaconda-org-token: ${{ secrets.ANACONDA_ORG_TOKEN }} + anaconda-org-token: ${{ secrets.ANACONDA_ORG_CONDA_CANARY_TOKEN }} comment-headline: Review build status comment-token: ${{ secrets.CANARY_ACTION_TOKEN }} diff --git a/pyproject.toml b/pyproject.toml index edf6f493b2..fe8d97e26c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,23 +29,23 @@ requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", - "conda >=4.13", + "conda >=22.11", "conda-index", "conda-package-handling >=1.3", "filelock", "jinja2", + "jsonschema >=4.19", "libarchive-c", + "lief", + "menuinst", "packaging", "pkginfo", "psutil", "pytz", "pyyaml", "requests", - "six", "tomli ; python_version<'3.11'", "tqdm", - "jsonschema >=4.19", - "menuinst >=2" ] dynamic = ["version"] diff --git a/recipe/meta.yaml b/recipe/meta.yaml index c2451656da..886e8390a5 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -36,23 +36,22 @@ requirements: - conda-package-handling >=1.3 - filelock - jinja2 + - jsonschema >=4.19 - m2-patch >=2.6 # [win] + - menuinst - packaging - patch >=2.6 # [not win] - patchelf # [linux] - pkginfo - psutil - - py-lief # [not win] + - py-lief - python - python-libarchive-c - pytz - pyyaml - requests - - six - tomli # [py<311] - tqdm - - menuinst >=2 - - jsonschema >=4.19 run_constrained: - conda-verify >=3.1.0 diff --git a/tests/requirements-linux.txt b/tests/requirements-linux.txt index b1785e2c4f..149ce09bad 100644 --- a/tests/requirements-linux.txt +++ b/tests/requirements-linux.txt @@ -1,5 +1,3 @@ -# TEMP -conda-forge::menuinst >=2 patch patchelf shellcheck diff --git a/tests/requirements-macos.txt b/tests/requirements-macos.txt index caa4235c84..133b191333 100644 --- a/tests/requirements-macos.txt +++ b/tests/requirements-macos.txt @@ -1,4 +1,2 @@ -# TEMP -conda-forge::menuinst >=2 patch shellcheck diff --git a/tests/requirements.txt b/tests/requirements.txt index 3e230a6c24..d50ae7fec9 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,18 +1,20 @@ anaconda-client beautifulsoup4 chardet -conda >=4.13 +conda >=22.11.0 conda-forge::allure-pytest conda-index -conda-package-handling +conda-package-handling >=1.3 conda-verify contextlib2 cytoolz filelock git jinja2 -jsonschema +jsonschema >=4.19 +menuinst numpy +packaging perl pip pkginfo @@ -32,5 +34,5 @@ requests ripgrep ruamel.yaml setuptools_scm # needed for devenv version detection -tomli +tomli # [py<3.11] for coverage pyproject.toml tqdm From 91d10fc7d1ba27fe97cb0e37fb4d60be6490c3b0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 30 Nov 2023 23:43:28 -0500 Subject: [PATCH 231/366] Release 3.28.0 (#5090) --- .authors.yml | 36 ++++++++---- .mailmap | 4 +- AUTHORS.md | 2 + CHANGELOG.md | 72 ++++++++++++++++++++++++ news/5009-use-conda-known-subdirs | 24 -------- news/5015-ruff | 19 ------- 
news/5021-get_hash_input | 19 ------- news/5031-post-conda-5708-cleanup | 19 ------- news/5037-conda-libmamba-solver-pins | 19 ------- news/5039-dont-clobber-multiple-config | 19 ------- news/5040-codefile | 21 ------- news/5041-refactor-which_package | 32 ----------- news/5042-deprecate-relative | 19 ------- news/5050-missing-anaconda-client-bugfix | 19 ------- news/5052-ruff-format | 19 ------- news/5059-ci-conda-libmamba-solver | 19 ------- news/5066-fix-conda_index-log-spam | 19 ------- news/5067-use-conda-sphinx-theme | 20 ------- 18 files changed, 103 insertions(+), 298 deletions(-) delete mode 100644 news/5009-use-conda-known-subdirs delete mode 100644 news/5015-ruff delete mode 100644 news/5021-get_hash_input delete mode 100644 news/5031-post-conda-5708-cleanup delete mode 100644 news/5037-conda-libmamba-solver-pins delete mode 100644 news/5039-dont-clobber-multiple-config delete mode 100644 news/5040-codefile delete mode 100644 news/5041-refactor-which_package delete mode 100644 news/5042-deprecate-relative delete mode 100644 news/5050-missing-anaconda-client-bugfix delete mode 100644 news/5052-ruff-format delete mode 100644 news/5059-ci-conda-libmamba-solver delete mode 100644 news/5066-fix-conda_index-log-spam delete mode 100644 news/5067-use-conda-sphinx-theme diff --git a/.authors.yml b/.authors.yml index e0a69846c8..b70c534e36 100644 --- a/.authors.yml +++ b/.authors.yml @@ -20,7 +20,7 @@ email: isuruf@gmail.com alternate_emails: - isuru.11@cse.mrt.ac.lk - num_commits: 83 + num_commits: 84 first_commit: 2017-06-16 15:14:34 github: isuruf - name: Dan Blanchard @@ -611,7 +611,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 73 + num_commits: 76 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 73 + num_commits: 74 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -1060,10 +1060,12 @@ first_commit: 2019-06-07 02:44:13 github: dbast - name: Duncan Macleod - email: duncan.macleod@ligo.org - num_commits: 6 + num_commits: 7 + email: duncanmmacleod+github@gmail.com first_commit: 2019-06-13 08:07:25 github: duncanmmacleod + alternate_emails: + - duncan.macleod@ligo.org - name: Chris Osborn email: csosborn@users.noreply.github.com num_commits: 1 @@ -1199,7 +1201,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 133 + num_commits: 153 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1237,7 +1239,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 52 + num_commits: 56 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1259,7 +1261,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 38 + num_commits: 42 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1278,7 +1280,7 @@ - name: Travis Hathaway email: travis.j.hathaway@gmail.com github: travishathaway - num_commits: 2 + num_commits: 5 first_commit: 2022-05-12 05:53:02 - name: Kyle Leaders email: remkade@users.noreply.github.com @@ -1324,7 +1326,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 4 + num_commits: 7 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1415,3 +1417,17 @@ github: scdub num_commits: 2 first_commit: 2023-08-18 02:53:28 +- name: Daniel Petry + email: dpetry@anaconda.com + github: danpetry + aliases: + - danpetry + num_commits: 1 + first_commit: 2023-11-02 13:57:57 +- name: H. Vetinari + email: h.vetinari@gmx.com + github: h-vetinari + aliases: + - h-vetinari + num_commits: 1 + first_commit: 2023-10-25 09:33:34 diff --git a/.mailmap b/.mailmap index 2f7457f813..34499e8009 100644 --- a/.mailmap +++ b/.mailmap @@ -70,6 +70,7 @@ Dan Lovell dlovell Daniel Bast <2790401+dbast@users.noreply.github.com> Daniel Damiani Daniel Holth +Daniel Petry danpetry Darren Dale Dave Clements Dave Hirschfeld David Hirschfeld @@ -80,7 +81,7 @@ Derek Ludwig Devon Ryan dpryan79 Diogo de Campos Dougal J. Sutherland -Duncan Macleod +Duncan Macleod Duncan Macleod Ed Campbell Eli Rykoff erykoff Elliot Hughes @@ -98,6 +99,7 @@ Geir Ove Myhr Greg Brener Guilherme Quentel Melo Guillaume Baty +H. Vetinari h-vetinari Harsh Gupta Harun Reşit Zafer Heather Kelly heather999 diff --git a/AUTHORS.md b/AUTHORS.md index cbfba08e20..7667f98c40 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -53,6 +53,7 @@ Authors are sorted alphabetically. * Daniel Bast * Daniel Damiani * Daniel Holth +* Daniel Petry * Darren Dale * Dave Clements * Dave Hirschfeld @@ -81,6 +82,7 @@ Authors are sorted alphabetically. * Greg Brener * Guilherme Quentel Melo * Guillaume Baty +* H. Vetinari * Harsh Gupta * Harun Reşit Zafer * Heather Kelly diff --git a/CHANGELOG.md b/CHANGELOG.md index 3d14e6556f..35d3181c95 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,77 @@ [//]: # (current developments) +## 3.28.0 (2023-11-30) + +### Special announcement + +In the upcoming January 2024 release of conda-build, significant changes are underway. We're set to transition to the [CalVer](https://calver.org/) versioning system. Additionally, we'll be formally embracing [CEP 8](https://github.com/conda-incubator/ceps/blob/main/cep-8.md) to manage our release schedule. Moreover, an expedited version of [CEP 9](https://github.com/conda-incubator/ceps/blob/main/cep-8.md) will be adopted for deprecation handling, omitting the pending deprecation phase and streamlining the period from deprecation to removal to a mere 2 months. + +### Enhancements + +* Add `stblib` jinja function similar to `compiler` to explicitly define sysroot dependencies. (#4999) +* Utilize conda-known subdirs for selector definitions, enabling conda_build to support new architectures with only an updated conda version. New OS support requires additional information for proper conda_build functionality, including UNIX-like platform designation, shared library prefix, and binary archive format for the platform. (#5009) +* Eliminate unnecessary cache clearing from `conda_build.build.test`. (#5031) +* Consolidate `which_package` implementations and replace `conda.models.dist.Dist` usage with `conda.models.records.PrefixRecords`. (#5041) + +### Bug fixes + +* Display package file name in `get_hash_input`. 
(#5021) +* Fall back to solved record filename to locate the downloaded tarball in `get_upstream_pins`. (#4991 via #5037) +* Prevent overwriting of variants in high priority cbc.yaml entries when absent in lower priority cbc.yamls. (#5039) +* Correct the check for a missing anaconda-client to display a useful error message. (#5050) +* Fix conda_index.index verbose DEBUG/INFO message logging. (#5066) + +### Deprecations + +* Mark `conda_build.environ.clean_pkg_cache` for pending deprecation. (#5031) +* Mark `conda_build.conda_interface.IndexRecord` for pending deprecation. Use `conda.models.records.PackageRecord` instead. (#5032) +* Mark `conda_build.os_utils.pyldd.is_string` for pending deprecation. Use `isinstance(value, str)` instead. (#5040) +* Mark `conda_build.os_utils.pyldd.is_codefile` for pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) +* Mark `conda_build.os_utils.pyldd.codefile_type` for pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) +* Mark `conda_build.inspect_pkg.dist_files` for pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)` for pending deprecation. (#5041) +* Mark `conda_build.inspect_pkg._installed` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.ldd.get_package_files` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.mach_o_change` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.inspect_rpath` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.get_runpaths` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.otool_sys` for pending deprecation. (#5041) +* Mark `conda_build.os_utils.pyldd.ldd_sys` for pending deprecation. (#5041) +* Mark `conda_build.post.determine_package_nature` for pending deprecation. Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead. (#5041) +* Mark `conda_build.post.library_nature(subdir, bldpkgs_dirs, output_folder, channel_urls)` for pending deprecation. (#5041) +* Mark `conda_build.post.dist_from_names` for pending deprecation. Query `conda.core.prefix_data.PrefixData` instead. (#5041) +* Mark `conda_build.post.FakeDist` for pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.post._get_fake_pkg_dist` for pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) +* Mark `conda_build.utils.relative` for pending deprecation. Use `os.path.relpath` or `pathlib.Path.relative_to` instead. (#5042) + +### Docs + +* Incorporate the conda-sphinx-theme into conda-build documentation. (#5067) +* Update certain pages to remove redundant TOC entries. (#5067) + +### Other + +* Implement Ruff linter in pre-commit configuration. (#5015) +* Replace `black` with `ruff format` in pre-commit setup. (#5052) +* Identify Unicode tests as incompatible with `libmamba`. 
(#5059) + + +### Contributors + +* @conda-bot +* @danpetry made their first contribution in https://github.com/conda/conda-build/pull/5039 +* @duncanmmacleod +* @h-vetinari made their first contribution in https://github.com/conda/conda-build/pull/4999 +* @isuruf +* @jaimergp +* @jakirkham +* @kenodegard +* @mbargull +* @travishathaway +* @pre-commit-ci[bot] + + + ## 3.27.0 (2023-09-26) ### Enhancements diff --git a/news/5009-use-conda-known-subdirs b/news/5009-use-conda-known-subdirs deleted file mode 100644 index a9423202f4..0000000000 --- a/news/5009-use-conda-known-subdirs +++ /dev/null @@ -1,24 +0,0 @@ -### Enhancements - -* Use subdirs known to conda for selector definitions. (#5009) - This allows conda_build to support new architectures with just - a new version of conda. For new OSes, there are more information - needed for conda_build to work properly, including whether the - new OS is a UNIX-like platform, the shared library prefix, and - the binary archive format for the platform. - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5015-ruff b/news/5015-ruff deleted file mode 100644 index a2ae3705d1..0000000000 --- a/news/5015-ruff +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Use Ruff linter in pre-commit configuration. (#5015) diff --git a/news/5021-get_hash_input b/news/5021-get_hash_input deleted file mode 100644 index f92b78db55..0000000000 --- a/news/5021-get_hash_input +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Print package file name in `get_hash_input`. (#5021) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5031-post-conda-5708-cleanup b/news/5031-post-conda-5708-cleanup deleted file mode 100644 index f698066c97..0000000000 --- a/news/5031-post-conda-5708-cleanup +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Remove unnecessary cache clearing from `conda_buidl.build.test`. (#5031) - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.environ.clean_pkg_cache` as pending deprecation. (#5031) - -### Docs - -* - -### Other - -* diff --git a/news/5037-conda-libmamba-solver-pins b/news/5037-conda-libmamba-solver-pins deleted file mode 100644 index d4044fac0f..0000000000 --- a/news/5037-conda-libmamba-solver-pins +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fallback to solved record filename to find the downloaded tarball in `get_upstream_pins`. (#4991 via #5037) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5039-dont-clobber-multiple-config b/news/5039-dont-clobber-multiple-config deleted file mode 100644 index 630868093d..0000000000 --- a/news/5039-dont-clobber-multiple-config +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Avoid clobbering of variants in high priority cbc.yaml entries when they aren't present in lower priority cbc.yamls. (#5039) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5040-codefile b/news/5040-codefile deleted file mode 100644 index c4f85ca7cf..0000000000 --- a/news/5040-codefile +++ /dev/null @@ -1,21 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.os_utils.pyldd.is_string` as pending deprecation. Use `isinstance(value, str)` instead. (#5040) -* Mark `conda_build.os_utils.pyldd.is_codefile` as pending deprecation. 
Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) -* Mark `conda_build.os_utils.pyldd.codefile_type` as pending deprecation. Use `conda_build.os_utils.pyldd.codefile_class` instead. (#5040) - -### Docs - -* - -### Other - -* diff --git a/news/5041-refactor-which_package b/news/5041-refactor-which_package deleted file mode 100644 index 0b060e3e51..0000000000 --- a/news/5041-refactor-which_package +++ /dev/null @@ -1,32 +0,0 @@ -### Enhancements - -* Consolidate `which_package` implementations and replacing `conda.models.dist.Dist` usage in favor of `conda.models.records.PrefixRecords`. (#5041) - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.inspect_pkg.dist_files` as pending deprecation. (#5041) -* Mark `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)` as pending deprecation. (#5041) -* Mark `conda_build.inspect_pkg._installed` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.ldd.get_package_files` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.pyldd.mach_o_change` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.pyldd.inspect_rpath` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.pyldd.get_runpaths` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.pyldd.otool_sys` as pending deprecation. (#5041) -* Mark `conda_build.os_utils.pyldd.ldd_sys` as pending deprecation. (#5041) -* Mark `conda_build.post.determine_package_nature` as pending deprecation. Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead. (#5041) -* Mark `conda_build.post.library_nature(subdir, bldpkgs_dirs, output_folder, channel_urls)` as pending deprecation. (#5041) -* Mark `conda_build.post.dist_from_names` as pending deprecation. Query `conda.core.prefix_data.PrefixData` instead. (#5041) -* Mark `conda_build.post.FakeDist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) -* Mark `conda_build.post._get_fake_pkg_dist` as pending deprecation. Use `conda.models.records.PrefixRecord` instead. (#5041) - -### Docs - -* - -### Other - -* diff --git a/news/5042-deprecate-relative b/news/5042-deprecate-relative deleted file mode 100644 index 2061f242b2..0000000000 --- a/news/5042-deprecate-relative +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.utils.relative` as pending deprecation. Use `os.path.relpath` or `pathlib.Path.relative_to` instead. (#5042) - -### Docs - -* - -### Other - -* diff --git a/news/5050-missing-anaconda-client-bugfix b/news/5050-missing-anaconda-client-bugfix deleted file mode 100644 index 99df06709d..0000000000 --- a/news/5050-missing-anaconda-client-bugfix +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fixes the check for a missing anaconda-client so a useful error message is shown - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5052-ruff-format b/news/5052-ruff-format deleted file mode 100644 index ae88f823e9..0000000000 --- a/news/5052-ruff-format +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Replace `black` with `ruff format` in pre-commit. 
(#5052) diff --git a/news/5059-ci-conda-libmamba-solver b/news/5059-ci-conda-libmamba-solver deleted file mode 100644 index daf2d919bf..0000000000 --- a/news/5059-ci-conda-libmamba-solver +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Mark Unicode tests as incompatible with `libmamba`. (#5059) diff --git a/news/5066-fix-conda_index-log-spam b/news/5066-fix-conda_index-log-spam deleted file mode 100644 index aceb93cc2a..0000000000 --- a/news/5066-fix-conda_index-log-spam +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix conda_index.index verbose DEBUG/INFO message logging. (#5066) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5067-use-conda-sphinx-theme b/news/5067-use-conda-sphinx-theme deleted file mode 100644 index da1257358b..0000000000 --- a/news/5067-use-conda-sphinx-theme +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Adds the conda-sphinx-theme to conda-build documentation -* Updates some pages to remove redundant TOC - -### Other - -* From 5015b17f9ef0cdbce7b7ccd792aefb9105c7969b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 6 Dec 2023 02:08:22 -0500 Subject: [PATCH 232/366] Relax `package/version` check for multi `outputs` (#5096) * Relax package/version check for multi outputs * Skip GitHub + SVN failure * Add news --- conda_build/metadata.py | 2 +- news/5096-relax-metadata-version-checks | 19 +++++++++++++++++++ .../meta.yaml | 6 ++++++ .../meta.yaml | 12 ++++++++++++ .../split-packages/_order/meta.yaml | 2 +- tests/test_api_build.py | 3 +++ tests/test_subpackages.py | 17 +++++++++++++++++ 7 files changed, 59 insertions(+), 2 deletions(-) create mode 100644 news/5096-relax-metadata-version-checks create mode 100644 tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml create mode 100644 tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 9f9e11eefe..9539dc5d14 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1442,7 +1442,7 @@ def name(self) -> str: def version(self) -> str: version = self.get_value("package/version", "") - if not version and self.final: + if not version and not self.get_section("outputs") and self.final: sys.exit("Error: package/version missing in: %r" % self.meta_path) version = str(version) check_bad_chrs(version, "package/version") diff --git a/news/5096-relax-metadata-version-checks b/news/5096-relax-metadata-version-checks new file mode 100644 index 0000000000..12325a70e1 --- /dev/null +++ b/news/5096-relax-metadata-version-checks @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Relax `conda_build.metadata.MetaData.version` checks when `outputs` have been defined. 
(#5096) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml b/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml new file mode 100644 index 0000000000..2cb957bfff --- /dev/null +++ b/tests/test-recipes/split-packages/_empty_outputs_requires_package_version/meta.yaml @@ -0,0 +1,6 @@ +package: + name: _empty_outputs_requires_package_version + # when there are not outputs, package/version is required + # version: 0 + +outputs: diff --git a/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml b/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml new file mode 100644 index 0000000000..6943d411f7 --- /dev/null +++ b/tests/test-recipes/split-packages/_multi_outputs_without_package_version/meta.yaml @@ -0,0 +1,12 @@ +package: + name: _multi_outputs_without_package_version + # when there are outputs, package/version is not required + # version: 0 + +outputs: + - name: a + version: 1 + - name: b + version: 2 + - name: c + version: 3 diff --git a/tests/test-recipes/split-packages/_order/meta.yaml b/tests/test-recipes/split-packages/_order/meta.yaml index 0db9f6bbce..fb171942a8 100644 --- a/tests/test-recipes/split-packages/_order/meta.yaml +++ b/tests/test-recipes/split-packages/_order/meta.yaml @@ -1,6 +1,6 @@ package: name: toplevel-ab - version: 0.0.1 + version: 1 outputs: - name: a diff --git a/tests/test_api_build.py b/tests/test_api_build.py index ff3e431ff3..f7e6864412 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -412,6 +412,9 @@ def dummy_executable(folder, exename): return exename +@pytest.mark.skip( + reason="GitHub discontinued SVN, see https://github.com/conda/conda-build/issues/5098" +) def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatch): # "hide" svn by putting a known bad one on PATH exename = dummy_executable(testing_workdir, "svn") diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index a27402cb47..2d63042cb5 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -442,3 +442,20 @@ def test_build_string_does_not_incorrectly_add_hash(testing_config): assert len(output_files) == 4 assert any("clang_variant-1.0-cling.tar.bz2" in f for f in output_files) assert any("clang_variant-1.0-default.tar.bz2" in f for f in output_files) + + +def test_multi_outputs_without_package_version(testing_config): + # outputs without package/version is allowed + recipe = os.path.join(subpackage_dir, "_multi_outputs_without_package_version") + outputs = api.build(recipe, config=testing_config) + assert len(outputs) == 3 + assert outputs[0].endswith("a-1-0.tar.bz2") + assert outputs[1].endswith("b-2-0.tar.bz2") + assert outputs[2].endswith("c-3-0.tar.bz2") + + +def test_empty_outputs_requires_package_version(testing_config): + # no outputs means package/version is required + recipe = os.path.join(subpackage_dir, "_empty_outputs_requires_package_version") + with pytest.raises(SystemExit, match="package/version missing"): + api.build(recipe, config=testing_config) From 37d2ca6d451e5c1b26df61d0c3f1bb1598b952f0 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Wed, 6 Dec 2023 11:37:31 -0500 Subject: [PATCH 233/366] Add `pip check` to recipe tests (#5099) Also removes `lief` from `pyproject.toml`, see https://github.com/conda/conda-build/issues/5101. 
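For illustration only (not part of the diff itself), a minimal sketch of what a recipe's test section looks like once `pip check` is wired in; the actual change to recipe/meta.yaml is in the diff below, and the package names here simply mirror it:

    test:
      requires:
        - setuptools
        - pip
      commands:
        # pip check exits non-zero if any installed distribution has
        # missing or conflicting requirements
        - python -m pip check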
--- pyproject.toml | 1 - recipe/meta.yaml | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fe8d97e26c..a3477043d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ dependencies = [ "jinja2", "jsonschema >=4.19", "libarchive-c", - "lief", "menuinst", "packaging", "pkginfo", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 886e8390a5..1fd9801ca5 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -61,9 +61,13 @@ test: - conda_build # new/updated submodules (can be dropped after 1-2 releases) - conda_build.index + requires: + - setuptools + - pip files: - test_bdist_conda_setup.py commands: + - python -m pip check # builtin subcommands - conda --help - conda build --help From 6f3ad10618ae4792aa232fda417978d1c8880b0a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 6 Dec 2023 12:30:42 -0500 Subject: [PATCH 234/366] Release 3.28.1 (#5100) --- .authors.yml | 4 ++-- CHANGELOG.md | 14 ++++++++++++++ news/5096-relax-metadata-version-checks | 19 ------------------- 3 files changed, 16 insertions(+), 21 deletions(-) delete mode 100644 news/5096-relax-metadata-version-checks diff --git a/.authors.yml b/.authors.yml index b70c534e36..e361a2a63e 100644 --- a/.authors.yml +++ b/.authors.yml @@ -1201,7 +1201,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 153 + num_commits: 152 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1270,7 +1270,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 12 + num_commits: 13 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com diff --git a/CHANGELOG.md b/CHANGELOG.md index 35d3181c95..9ad846230f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ [//]: # (current developments) +## 3.28.1 (2023-12-06) + +### Bug fixes + +* Relax `conda_build.metadata.MetaData.version` checks when `outputs` have been defined. (#5096) +* Remove `lief` from `pyproject.toml` since it causes `pip check` to fail. To be re-added in the future after an update to `py-lief` package. (#5099) + +### Contributors + +* @dholth +* @kenodegard + + + ## 3.28.0 (2023-11-30) ### Special announcement diff --git a/news/5096-relax-metadata-version-checks b/news/5096-relax-metadata-version-checks deleted file mode 100644 index 12325a70e1..0000000000 --- a/news/5096-relax-metadata-version-checks +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Relax `conda_build.metadata.MetaData.version` checks when `outputs` have been defined. 
(#5096) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From 5348582a1007daa9bbb1e65190d471555292a551 Mon Sep 17 00:00:00 2001 From: Travis Hathaway Date: Wed, 6 Dec 2023 19:00:47 +0100 Subject: [PATCH 235/366] Adding goat counter and bumping docs theme version (#5093) Co-authored-by: Ken Odegard --- docs/requirements.txt | 2 +- docs/source/conf.py | 2 ++ news/5093-add-goat-counter | 19 +++++++++++++++++++ 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 news/5093-add-goat-counter diff --git a/docs/requirements.txt b/docs/requirements.txt index b6f1b46b9c..81d30818d9 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -conda-sphinx-theme==0.1.1 +conda-sphinx-theme==0.2.0 linkify-it-py==2.0.2 myst-parser==2.0.0 Pillow==10.0.1 diff --git a/docs/source/conf.py b/docs/source/conf.py index 0aaacec6f0..8680d7451f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -119,6 +119,7 @@ # Navbar icon links "navbar_start": ["navbar-logo"], "use_edit_page_button": True, + "goatcounter_url": "https://docs-conda-io.goatcounter.com/count", "icon_links": [ { "name": "GitHub", @@ -146,6 +147,7 @@ "github_repo": "conda-build", "github_version": "main", "doc_path": "docs/source", + "goatcounter_dashboard_url": "https://docs-conda-io.goatcounter.com", } html_short_title = "conda-build" diff --git a/news/5093-add-goat-counter b/news/5093-add-goat-counter new file mode 100644 index 0000000000..37b3a9b3b9 --- /dev/null +++ b/news/5093-add-goat-counter @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* Add goat counter (https://www.goatcounter.com/) as an analytics tool. (#5093) + +### Other + +* From 37ab8d3de084d32b907b726ba2ad4570e91d326b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 8 Dec 2023 16:47:29 -0500 Subject: [PATCH 236/366] Update `conda inspect channels` (#5033) Use modern `solver` & `transactions` logic instead of legacy `get_index` & `actions`. --------- Co-authored-by: jaimergp --- conda_build/cli/main_inspect.py | 16 ++--- conda_build/inspect_pkg.py | 96 ++++++++++--------------- news/5033-update-conda-inspect-channels | 22 ++++++ 3 files changed, 65 insertions(+), 69 deletions(-) create mode 100644 news/5033-update-conda-inspect-channels diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index aa38ce51f1..7f9a30c847 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -134,8 +134,10 @@ def parse_args(args): "--test-installable", "-t", action="store_true", - help="""Test every package in the channel to see if it is installable - by conda.""", + help=( + "DEPRECATED. This is the default (and only) behavior. " + "Test every package in the channel to see if it is installable by conda." 
+ ), ) channels.add_argument( "channel", @@ -184,13 +186,9 @@ def execute(args): if not args.subcommand: parser.print_help() - exit() - + sys.exit(0) elif args.subcommand == "channels": - if not args.test_installable: - parser.error("At least one option (--test-installable) is required.") - else: - print(api.test_installable(args.channel)) + print(api.test_installable(args.channel)) elif args.subcommand == "linkages": print( api.inspect_linkages( @@ -219,7 +217,7 @@ def execute(args): elif args.subcommand == "hash-inputs": pprint(api.inspect_hash_inputs(args.packages)) else: - raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") + parser.error(f"Unrecognized subcommand: {args.subcommand}.") @deprecated("3.26.0", "24.1.0", addendum="Use `conda inspect` instead.") diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7c8eddc7de..ac20e1f65a 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -4,26 +4,24 @@ import json import os -import re import sys -import tempfile from collections import defaultdict from functools import lru_cache from itertools import groupby from operator import itemgetter from os.path import abspath, basename, dirname, exists, join from pathlib import Path +from tempfile import TemporaryDirectory from typing import Iterable, Literal +from conda.api import Solver +from conda.core.index import get_index from conda.core.prefix_data import PrefixData from conda.models.dist import Dist from conda.models.records import PrefixRecord from conda.resolve import MatchSpec from conda_build.conda_interface import ( - display_actions, - get_index, - install_actions, linked_data, specs_from_args, ) @@ -39,12 +37,14 @@ ensure_list, get_logger, package_has_file, - rm_rf, ) +from . import conda_interface from .deprecations import deprecated from .utils import on_mac, on_win, samefile +log = get_logger(__name__) + @deprecated("3.28.0", "24.1.0") @lru_cache(maxsize=None) @@ -103,23 +103,21 @@ def __str__(self): untracked_package = _untracked_package() +@deprecated.argument("24.1.0", "24.3.0", "platform", rename="subdir") +@deprecated.argument("24.1.0", "24.3.0", "prepend") +@deprecated.argument("24.1.0", "24.3.0", "minimal_hint") def check_install( - packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False -): - prefix = tempfile.mkdtemp("conda") - try: - specs = specs_from_args(packages) - index = get_index( - channel_urls=channel_urls, prepend=prepend, platform=platform, prefix=prefix - ) - actions = install_actions( - prefix, index, specs, pinned=False, minimal_hint=minimal_hint - ) - display_actions(actions, index) - return actions - finally: - rm_rf(prefix) - return None + packages: Iterable[str], + subdir: str | None = None, + channel_urls: Iterable[str] = (), +) -> None: + with TemporaryDirectory() as prefix: + Solver( + prefix, + channel_urls, + [subdir or conda_interface.subdir], + specs_from_args(packages), + ).solve_for_transaction(ignore_pinned=True).print_transaction_summary() def print_linkages( @@ -183,61 +181,39 @@ def replace_path(binary, path, prefix): return "not found" -def test_installable(channel="defaults"): +def test_installable(channel: str = "defaults") -> bool: success = True - log = get_logger(__name__) - has_py = re.compile(r"py(\d)(\d)") - for platform in ["osx-64", "linux-32", "linux-64", "win-32", "win-64"]: - log.info("######## Testing platform %s ########", platform) - channels = [channel] - index = get_index(channel_urls=channels, prepend=False, platform=platform) - for _, rec in 
index.items(): - # If we give channels at the command line, only look at - # packages from those channels (not defaults). - if channel != "defaults" and rec.get("schannel", "defaults") == "defaults": - continue - name = rec["name"] + for subdir in ["osx-64", "linux-32", "linux-64", "win-32", "win-64"]: + log.info("######## Testing subdir %s ########", subdir) + for prec in get_index(channel_urls=[channel], prepend=False, platform=subdir): + name = prec["name"] if name in {"conda", "conda-build"}: # conda can only be installed in the root environment continue - if name.endswith("@"): + elif name.endswith("@"): # this is a 'virtual' feature record that conda adds to the index for the solver # and should be ignored here continue - # Don't fail just because the package is a different version of Python - # than the default. We should probably check depends rather than the - # build string. - build = rec["build"] - match = has_py.search(build) - assert match if "py" in build else True, build - if match: - additional_packages = [f"python={match.group(1)}.{match.group(2)}"] - else: - additional_packages = [] - version = rec["version"] + version = prec["version"] log.info("Testing %s=%s", name, version) try: - install_steps = check_install( - [name + "=" + version] + additional_packages, - channel_urls=channels, + check_install( + [f"{name}={version}"], + channel_urls=[channel], prepend=False, - platform=platform, + subdir=subdir, ) - success &= bool(install_steps) - except KeyboardInterrupt: - raise - # sys.exit raises an exception that doesn't subclass from Exception - except BaseException as e: + except Exception as err: success = False log.error( - "FAIL: %s %s on %s with %s (%s)", + "[%s/%s::%s=%s] %s", + channel, + subdir, name, version, - platform, - additional_packages, - e, + repr(err), ) return success diff --git a/news/5033-update-conda-inspect-channels b/news/5033-update-conda-inspect-channels new file mode 100644 index 0000000000..13fba0b6b4 --- /dev/null +++ b/news/5033-update-conda-inspect-channels @@ -0,0 +1,22 @@ +### Enhancements + +* Update `conda inspect channels` to use updated solver/transaction logic. (#5033) + +### Bug fixes + +* + +### Deprecations + +* Mark `conda inspect channels --test-installable` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) +* Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. 
(#5033) + +### Docs + +* + +### Other + +* From 950278ba7bee6e962f96a6ee3d6b4eeca2b06d59 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 15:04:58 -0600 Subject: [PATCH 237/366] [pre-commit.ci] pre-commit autoupdate (#5104) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.7) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aa5a565768..50de7f9e3d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.6 + rev: v0.1.7 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From 061063f10c51162707c7c5253daa39b2752fcfcc Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 12 Dec 2023 04:34:27 +0100 Subject: [PATCH 238/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5107)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 371b874431..6284ac0c42 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -39,7 +39,7 @@ jobs: with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v8 + - uses: actions/stale@v9 id: stale with: # Only issues with these labels are checked whether they are stale From c049d6c170277291a277b773777357fbe907955e Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Wed, 13 Dec 2023 13:01:18 -0500 Subject: [PATCH 239/366] Allure removal (#5113) * remove allure * add news --- .github/workflows/tests.yml | 42 ------------------------------------- news/allure-removal | 19 +++++++++++++++++ tests/requirements.txt | 1 - 3 files changed, 19 insertions(+), 43 deletions(-) create mode 100644 news/allure-removal diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5fcaf26458..448fde3bce 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -92,7 +92,6 @@ jobs: CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }} REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} REPLAY_DIR: ${{ github.workspace }}/pytest-replay - ALLURE_DIR: ${{ github.workspace }}/allure-results PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} @@ -145,7 +144,6 @@ jobs: --cov-report xml \ --replay-record-dir="${{ env.REPLAY_DIR }}" \ --replay-base-name="${{ env.REPLAY_NAME }}" \ - --alluredir="${{ env.ALLURE_DIR }}" \ -m "${{ env.PYTEST_MARKER }}" \ ./tests @@ -153,17 +151,6 @@ jobs: with: flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64 - - name: Tar Allure Results - if: '!cancelled()' - run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR 
}}" - - - name: Upload Allure Results - if: '!cancelled()' - uses: actions/upload-artifact@v3 - with: - name: allure-Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - path: allure-results.tar.gz - - name: Upload Pytest Replay if: '!cancelled()' uses: actions/upload-artifact@v3 @@ -196,7 +183,6 @@ jobs: CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} REPLAY_NAME: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }} REPLAY_DIR: ${{ github.workspace }}\pytest-replay - ALLURE_DIR: ${{ github.workspace }}\allure-results PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial and not slow' }} PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} @@ -256,7 +242,6 @@ jobs: --cov-report xml ` --replay-record-dir="${{ env.REPLAY_DIR }}" ` --replay-base-name="${{ env.REPLAY_NAME }}" ` - --alluredir="${{ env.ALLURE_DIR }}" ` -m "${{ env.PYTEST_MARKER }}" ` .\tests @@ -264,20 +249,6 @@ jobs: with: flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64 - - name: Tar Allure Results - if: '!cancelled()' - run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - # windows-2019/powershell ships with GNU tar 1.28 which struggles with Windows paths - # window-2019/cmd ships with bsdtar 3.5.2 which doesn't have this problem - shell: cmd - - - name: Upload Allure Results - if: '!cancelled()' - uses: actions/upload-artifact@v3 - with: - name: allure-Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - path: allure-results.tar.gz - - name: Upload Pytest Replay if: '!cancelled()' uses: actions/upload-artifact@v3 @@ -313,7 +284,6 @@ jobs: CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} REPLAY_NAME: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} REPLAY_DIR: ${{ github.workspace }}/pytest-replay - ALLURE_DIR: ${{ github.workspace }}/allure-results PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} @@ -367,7 +337,6 @@ jobs: --cov-report xml \ --replay-record-dir="${{ env.REPLAY_DIR }}" \ --replay-base-name="${{ env.REPLAY_NAME }}" \ - --alluredir="${{ env.ALLURE_DIR }}" \ -m "${{ env.PYTEST_MARKER }}" \ ./tests @@ -375,17 +344,6 @@ jobs: with: flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64 - - name: Tar Allure Results - if: '!cancelled()' - run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - - - name: Upload Allure Results - if: '!cancelled()' - uses: actions/upload-artifact@v3 - with: - name: allure-macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} - path: allure-results.tar.gz - - name: Upload Pytest Replay if: '!cancelled()' uses: actions/upload-artifact@v3 diff --git a/news/allure-removal b/news/allure-removal new file mode 100644 index 0000000000..3a2df35fba --- /dev/null +++ b/news/allure-removal @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Remove unused Allure test report collection. 
diff --git a/tests/requirements.txt b/tests/requirements.txt index d50ae7fec9..3498103928 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -2,7 +2,6 @@ anaconda-client beautifulsoup4 chardet conda >=22.11.0 -conda-forge::allure-pytest conda-index conda-package-handling >=1.3 conda-verify From ca9de6013cf7d6909f063247512546d351f7a757 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Thu, 14 Dec 2023 02:28:25 +0100 Subject: [PATCH 240/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5114)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/ISSUE_TEMPLATE/2_documentation.yml | 37 ++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/2_documentation.yml diff --git a/.github/ISSUE_TEMPLATE/2_documentation.yml b/.github/ISSUE_TEMPLATE/2_documentation.yml new file mode 100644 index 0000000000..68ae890de9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2_documentation.yml @@ -0,0 +1,37 @@ +--- +name: Documentation +description: Create a documentation related issue. +labels: + - type::documentation +body: + - type: markdown + attributes: + value: | + > [!NOTE] + > Documentation requests that are incomplete or missing information may be closed as inactionable. + + Since there are already a lot of open issues, please also take a moment to search existing ones to see if your bug has already been reported. If you find something related, please upvote that issue and provide additional details as necessary. + + 💐 Thank you for helping to make conda better. We would be unable to improve conda without our community! + - type: checkboxes + id: checks + attributes: + label: Checklist + description: Please confirm and check all of the following options. + options: + - label: I added a descriptive title + required: true + - label: I searched open reports and couldn't find a duplicate + required: true + - type: textarea + id: what + attributes: + label: What happened? + description: Mention here any typos, broken links, or missing, incomplete, or outdated information, etc. that you have noticed in the conda docs or CLI help. + validations: + required: true + - type: textarea + id: context + attributes: + label: Additional Context + description: Include any additional information (or screenshots) that you think would be valuable. 
From b52f4a0f9f19523bffaf44c9785b61b945c72914 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Thu, 14 Dec 2023 11:14:50 +0100 Subject: [PATCH 241/366] Clean up after menuinst v2 compatible releases (#5110) --- .github/workflows/tests.yml | 4 ---- pyproject.toml | 2 +- recipe/meta.yaml | 2 +- tests/requirements.txt | 2 +- 4 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 448fde3bce..80caa4cdea 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -218,10 +218,6 @@ jobs: --file .\tests\requirements.txt ^ --file .\tests\requirements-windows.txt ^ ${{ env.CONDA_CHANNEL_LABEL }}::conda || exit 1 - :: TEMPORARY - if "${{ matrix.python-version }}" == "3.8" CALL conda install "https://anaconda.org/conda-forge/menuinst/2.0.0/download/win-64/menuinst-2.0.0-py38hd3f51b4_1.conda" || exit 1 - if "${{ matrix.python-version }}" == "3.11" CALL conda install "https://anaconda.org/conda-forge/menuinst/2.0.0/download/win-64/menuinst-2.0.0-py311h12c1d0e_1.conda" || exit 1 - :: /TEMPORARY CALL pip install -e . --no-deps || exit 1 - name: Show info diff --git a/pyproject.toml b/pyproject.toml index a3477043d0..fc9969adc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ dependencies = [ "jinja2", "jsonschema >=4.19", "libarchive-c", - "menuinst", + "menuinst >=2", "packaging", "pkginfo", "psutil", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 1fd9801ca5..54c792b9c9 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -38,7 +38,7 @@ requirements: - jinja2 - jsonschema >=4.19 - m2-patch >=2.6 # [win] - - menuinst + - menuinst >=2 - packaging - patch >=2.6 # [not win] - patchelf # [linux] diff --git a/tests/requirements.txt b/tests/requirements.txt index 3498103928..a7140e8673 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -11,7 +11,7 @@ filelock git jinja2 jsonschema >=4.19 -menuinst +menuinst >=2 numpy packaging perl From f57088484578b5821c860caa94a5da648c544764 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Fri, 15 Dec 2023 00:15:07 +0100 Subject: [PATCH 242/366] Catch `ImportError` if `menuinst` cannot be imported (#5116) --- conda_build/post.py | 2 +- news/5116-menuinst-importerror | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 news/5116-menuinst-importerror diff --git a/conda_build/post.py b/conda_build/post.py index 6bdb043da7..4c54c9341a 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1753,7 +1753,7 @@ def check_menuinst_json(files, prefix) -> None: try: import jsonschema from menuinst.utils import data_path - except ModuleNotFoundError as exc: + except ImportError as exc: log.warning( "Found 'Menu/*.json' files but couldn't validate: %s", ", ".join(json_files), diff --git a/news/5116-menuinst-importerror b/news/5116-menuinst-importerror new file mode 100644 index 0000000000..7d8f3f060d --- /dev/null +++ b/news/5116-menuinst-importerror @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Catch the more general `ImportError` instead of `ModuleNotFoundError`, so we do handle the cases where `menuinst 1.x` is found. 
(#5116) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From a22d6ef75918b68c3e87ff8b6e15881ae4e68a54 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 14 Dec 2023 17:17:09 -0600 Subject: [PATCH 243/366] `which_package` to yield unique collection of package records (#5108) * Add `which_package` unittest * `which_package` only yields package record once * Prefer `str` over `repr` in `_lookup_in_prefix_packages` --------- Co-authored-by: Marcel Bargull --- conda_build/inspect_pkg.py | 5 +- conda_build/post.py | 19 ++---- news/5108-fix-which_package | 20 ++++++ tests/test_inspect_pkg.py | 119 ++++++++++++++++++++++++++++++++++++ 4 files changed, 147 insertions(+), 16 deletions(-) create mode 100644 news/5108-fix-which_package create mode 100644 tests/test_inspect_pkg.py diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7c8eddc7de..f8fa57a1b5 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -73,9 +73,8 @@ def which_package( path = prefix / path for prec in PrefixData(str(prefix)).iter_records(): - for file in prec["files"]: - if samefile(prefix / file, path): - yield prec + if any(samefile(prefix / file, path) for file in prec["files"]): + yield prec def print_object_info(info, key): diff --git a/conda_build/post.py b/conda_build/post.py index 4c54c9341a..18e2723531 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1187,9 +1187,7 @@ def _lookup_in_prefix_packages( if len(precs_in_reqs) == 1: _print_msg( errors, - "{}: {} found in {}{}".format( - info_prelude, n_dso_p, precs_in_reqs[0], and_also - ), + f"{info_prelude}: {n_dso_p} found in {precs_in_reqs[0]}{and_also}", verbose=verbose, ) elif in_whitelist: @@ -1201,25 +1199,20 @@ def _lookup_in_prefix_packages( elif len(precs_in_reqs) == 0 and len(precs) > 0: _print_msg( errors, - "{}: {} found in {}{}".format( - msg_prelude, n_dso_p, [prec.name for prec in precs], and_also - ), + f"{msg_prelude}: {n_dso_p} found in {[str(prec) for prec in precs]}{and_also}", verbose=verbose, ) _print_msg( errors, - "{}: .. but {} not in reqs/run, (i.e. it is overlinking)" - " (likely) or a missing dependency (less likely)".format( - msg_prelude, [prec.name for prec in precs] - ), + f"{msg_prelude}: .. but {[str(prec) for prec in precs]} not in reqs/run, " + "(i.e. it is overlinking) (likely) or a missing dependency (less likely)", verbose=verbose, ) elif len(precs_in_reqs) > 1: _print_msg( errors, - "{}: {} found in multiple packages in run/reqs: {}{}".format( - warn_prelude, in_prefix_dso, precs_in_reqs, and_also - ), + f"{warn_prelude}: {in_prefix_dso} found in multiple packages in run/reqs: " + f"{[str(prec) for prec in precs_in_reqs]}{and_also}", verbose=verbose, ) else: diff --git a/news/5108-fix-which_package b/news/5108-fix-which_package new file mode 100644 index 0000000000..6123309d2a --- /dev/null +++ b/news/5108-fix-which_package @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* Fix `conda_build.inspect_pkg.which_package` so it does not return duplicate package records. (#5108) +* Fix `conda_build.post._lookup_in_prefix_packages` to display `str(PackageRecord)` instead of `repr(PackageRecord)`. 
(#5108) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py new file mode 100644 index 0000000000..edefa96a54 --- /dev/null +++ b/tests/test_inspect_pkg.py @@ -0,0 +1,119 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import json +from pathlib import Path + +from conda.core.prefix_data import PrefixData + +from conda_build.inspect_pkg import which_package + + +def test_which_package(tmp_path: Path): + # create a dummy environment + (tmp_path / "conda-meta").mkdir() + (tmp_path / "conda-meta" / "history").touch() + + # dummy files + (tmp_path / "hardlinkA").touch() # packageA + (tmp_path / "shared").touch() # packageA & packageB + (tmp_path / "internal").symlink_to(tmp_path / "hardlinkA") # packageA + (tmp_path / "external").symlink_to(tmp_path / "hardlinkB") # packageA + (tmp_path / "hardlinkB").touch() # packageB + + # a dummy package with a hardlink file, shared file, internal softlink, and external softlink + (tmp_path / "conda-meta" / "packageA-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageA-channel", + "files": ["hardlinkA", "shared", "internal", "external"], + "name": "packageA", + "paths_data": { + "paths": [ + { + "_path": "hardlinkA", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "internal", + "path_type": "softlink", + "size_in_bytes": 0, + }, + { + "_path": "external", + "path_type": "softlink", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + # a dummy package with a hardlink file and shared file + (tmp_path / "conda-meta" / "packageB-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageB-channel", + "files": ["hardlinkB", "shared"], + "name": "packageB", + "paths_data": { + "paths": [ + { + "_path": "hardlinkB", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # fetch package records + pd = PrefixData(tmp_path) + precA = pd.get("packageA") + precB = pd.get("packageB") + + # test returned package records given a path + precs_missing = list(which_package(tmp_path / "missing", tmp_path)) + assert not precs_missing + + precs_hardlinkA = list(which_package(tmp_path / "hardlinkA", tmp_path)) + assert len(precs_hardlinkA) == 1 + assert precs_hardlinkA[0] == precA + + precs_shared = list(which_package(tmp_path / "shared", tmp_path)) + assert len(precs_shared) == 2 + assert set(precs_shared) == {precA, precB} + + precs_internal = list(which_package(tmp_path / "internal", tmp_path)) + assert len(precs_internal) == 1 + assert precs_internal[0] == precA + + precs_external = list(which_package(tmp_path / "external", tmp_path)) + assert len(precs_external) == 2 + assert set(precs_external) == {precA, precB} + + precs_hardlinkB = list(which_package(tmp_path / "hardlinkB", tmp_path)) + assert len(precs_hardlinkB) == 2 + assert set(precs_hardlinkB) == {precA, precB} From f2087e6ac1786b1119183246ee28a3a4fba2821c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 15 Dec 2023 10:52:26 -0600 Subject: [PATCH 244/366] Fix `MetaData.get_section` handling for "source" & "outputs" (#5112) * Correct variant handling in MetaData.fromstring * Ensure get_section returns 
list for source and outputs * Add MetaData tests --------- Co-authored-by: jaimergp --- conda_build/build.py | 10 ++--- conda_build/metadata.py | 71 ++++++++++++++++++++++++---------- conda_build/render.py | 10 ++--- conda_build/source.py | 11 +----- news/5112-fix-multiple-sources | 19 +++++++++ tests/test_metadata.py | 23 ++++++++++- 6 files changed, 102 insertions(+), 42 deletions(-) create mode 100644 news/5112-fix-multiple-sources diff --git a/conda_build/build.py b/conda_build/build.py index 47600ffff4..c007286474 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1564,15 +1564,11 @@ def create_info_files(m, replacements, files, prefix): write_no_link(m, files) - sources = m.get_section("source") - if hasattr(sources, "keys"): - sources = [sources] - with open(join(m.config.info_dir, "git"), "w", encoding="utf-8") as fo: - for src in sources: - if src.get("git_url"): + for source_dict in m.get_section("source"): + if source_dict.get("git_url"): source.git_info( - os.path.join(m.config.work_dir, src.get("folder", "")), + os.path.join(m.config.work_dir, source_dict.get("folder", "")), m.config.build_prefix, git=None, verbose=m.config.verbose, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 9539dc5d14..f25e57f280 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -13,7 +13,7 @@ from collections import OrderedDict from functools import lru_cache from os.path import isfile, join -from typing import Literal +from typing import Literal, overload from bs4 import UnicodeDammit @@ -622,6 +622,7 @@ def parse(data, config, path=None): "prelink_message": None, "readme": None, }, + "extra": {}, } # Fields that may either be a dictionary or a list of dictionaries. @@ -1316,9 +1317,11 @@ def parse_until_resolved( @classmethod def fromstring(cls, metadata, config=None, variant=None): m = super().__new__(cls) - if not config: - config = Config() - m.meta = parse(metadata, config=config, path="", variant=variant) + m.path = "" + m._meta_path = "" + m.requirements_path = "" + config = config or Config(variant=variant) + m.meta = parse(metadata, config=config, path="") m.config = config m.parse_again(permit_undefined_jinja=True) return m @@ -1333,18 +1336,45 @@ def fromdict(cls, metadata, config=None, variant=None): m._meta_path = "" m.requirements_path = "" m.meta = sanitize(metadata) - - if not config: - config = Config(variant=variant) - - m.config = config + m.config = config or Config(variant=variant) m.undefined_jinja_vars = [] m.final = False - return m - def get_section(self, section): - return self.meta.get(section, {}) + @overload + def get_section(self, section: Literal["source", "outputs"]) -> list[dict]: + ... + + @overload + def get_section( + self, + section: Literal[ + "package", + "build", + "requirements", + "app", + "test", + "about", + "extra", + ], + ) -> dict: + ... 
+ + def get_section(self, name): + section = self.meta.get(name) + if name in OPTIONALLY_ITERABLE_FIELDS: + if not section: + return [] + elif isinstance(section, dict): + return [section] + elif not isinstance(section, list): + raise ValueError(f"Expected {name} to be a list") + else: + if not section: + return {} + elif not isinstance(section, dict): + raise ValueError(f"Expected {name} to be a dict") + return section def get_value(self, name, default=None, autotype=True): """ @@ -1364,7 +1394,9 @@ def get_value(self, name, default=None, autotype=True): index = None elif len(names) == 3: section, index, key = names - assert section == "source", "Section is not a list: " + section + assert section in OPTIONALLY_ITERABLE_FIELDS, ( + "Section is not a list: " + section + ) index = int(index) # get correct default @@ -1386,7 +1418,7 @@ def get_value(self, name, default=None, autotype=True): ) index = 0 - if len(section_data) == 0: + if not section_data: section_data = {} else: section_data = section_data[index] @@ -1475,7 +1507,7 @@ def get_depends_top_and_out(self, typ): if not self.is_output: matching_output = [ out - for out in self.meta.get("outputs", []) + for out in self.get_section("outputs") if out.get("name") == self.name() ] if matching_output: @@ -2014,7 +2046,7 @@ def uses_jinja(self): return len(matches) > 0 @property - def uses_vcs_in_meta(self) -> Literal["git" | "svn" | "mercurial"] | None: + def uses_vcs_in_meta(self) -> Literal["git", "svn", "mercurial"] | None: """returns name of vcs used if recipe contains metadata associated with version control systems. If this metadata is present, a download/copy will be forced in parse_or_try_download. """ @@ -2034,7 +2066,7 @@ def uses_vcs_in_meta(self) -> Literal["git" | "svn" | "mercurial"] | None: return vcs @property - def uses_vcs_in_build(self) -> Literal["git" | "svn" | "mercurial"] | None: + def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None: # TODO :: Re-work this. Is it even useful? We can declare any vcs in our build deps. build_script = "bld.bat" if on_win else "build.sh" build_script = os.path.join(self.path, build_script) @@ -2271,9 +2303,8 @@ def pin_depends(self): @property def source_provided(self): - return not bool(self.meta.get("source")) or ( - os.path.isdir(self.config.work_dir) - and len(os.listdir(self.config.work_dir)) > 0 + return not self.get_section("source") or ( + os.path.isdir(self.config.work_dir) and os.listdir(self.config.work_dir) ) def reconcile_metadata_with_output_dict(self, output_metadata, output_dict): diff --git a/conda_build/render.py b/conda_build/render.py index c0f1d8be73..1e8ddae08a 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -721,17 +721,17 @@ def finalize_metadata( # if source/path is relative, then the output package makes no sense at all. The next # best thing is to hard-code the absolute path. This probably won't exist on any # system other than the original build machine, but at least it will work there. 
- if source_path := m.get_value("source/path"): - if not isabs(source_path): - m.meta["source"]["path"] = normpath(join(m.path, source_path)) + for source_dict in m.get_section("source"): + if (source_path := source_dict.get("path")) and not isabs(source_path): + source_dict["path"] = normpath(join(m.path, source_path)) elif ( - (git_url := m.get_value("source/git_url")) + (git_url := source_dict.get("git_url")) # absolute paths are not relative paths and not isabs(git_url) # real urls are not relative paths and ":" not in git_url ): - m.meta["source"]["git_url"] = normpath(join(m.path, git_url)) + source_dict["git_url"] = normpath(join(m.path, git_url)) m.meta.setdefault("build", {}) diff --git a/conda_build/source.py b/conda_build/source.py index c8d21a4c2e..85e64c8292 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -1027,18 +1027,11 @@ def provide(metadata): - unpack - apply patches (if any) """ - meta = metadata.get_section("source") - if not os.path.isdir(metadata.config.build_folder): - os.makedirs(metadata.config.build_folder) + os.makedirs(metadata.config.build_folder, exist_ok=True) git = None - if hasattr(meta, "keys"): - dicts = [meta] - else: - dicts = meta - try: - for source_dict in dicts: + for source_dict in metadata.get_section("source"): folder = source_dict.get("folder") src_dir = os.path.join(metadata.config.work_dir, folder if folder else "") if any(k in source_dict for k in ("fn", "url")): diff --git a/news/5112-fix-multiple-sources b/news/5112-fix-multiple-sources new file mode 100644 index 0000000000..f988080a8b --- /dev/null +++ b/news/5112-fix-multiple-sources @@ -0,0 +1,19 @@ +### Enhancements + +* Update `conda_build.metadata.MetaData.get_section` to always return lists for "source" and "outputs". (#5112) + +### Bug fixes + +* Fix finalizing recipes with multiple sources. 
(#5111 via #5112) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 0fd89a22c3..7ac5bbdf01 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -15,6 +15,8 @@ from conda_build import api from conda_build.config import Config from conda_build.metadata import ( + FIELDS, + OPTIONALLY_ITERABLE_FIELDS, MetaData, _hash_dependencies, get_selectors, @@ -23,7 +25,7 @@ ) from conda_build.utils import DEFAULT_SUBDIRS -from .utils import metadata_dir, thisdir +from .utils import metadata_dir, metadata_path, thisdir def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): @@ -459,3 +461,22 @@ def test_get_selectors( # override with True values **{key: True for key in expected}, } + + +def test_fromstring(): + MetaData.fromstring((metadata_path / "multiple_sources" / "meta.yaml").read_text()) + + +def test_fromdict(): + MetaData.fromdict( + yamlize((metadata_path / "multiple_sources" / "meta.yaml").read_text()) + ) + + +def test_get_section(testing_metadata: MetaData): + for name in FIELDS: + section = testing_metadata.get_section(name) + if name in OPTIONALLY_ITERABLE_FIELDS: + assert isinstance(section, list) + else: + assert isinstance(section, dict) From d0ee29349f0b18f52f8ca98fd01c706c06afcfca Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 18 Dec 2023 11:12:04 -0600 Subject: [PATCH 245/366] Release 3.28.2 (#5117) --- .authors.yml | 4 ++-- CHANGELOG.md | 20 ++++++++++++++++++++ news/5108-fix-which_package | 20 -------------------- news/5112-fix-multiple-sources | 19 ------------------- news/5116-menuinst-importerror | 19 ------------------- 5 files changed, 22 insertions(+), 60 deletions(-) delete mode 100644 news/5108-fix-which_package delete mode 100644 news/5112-fix-multiple-sources delete mode 100644 news/5116-menuinst-importerror diff --git a/.authors.yml b/.authors.yml index e361a2a63e..b22e4b31e4 100644 --- a/.authors.yml +++ b/.authors.yml @@ -1201,7 +1201,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 152 + num_commits: 155 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1326,7 +1326,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 7 + num_commits: 8 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ad846230f..dfc774b884 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ [//]: # (current developments) +## 3.28.2 (2023-12-15) + +### Enhancements + +* Update `conda_build.metadata.MetaData.get_section` to consistently return lists for "source" and "outputs". (#5111 via #5112) + +### Bug fixes + +* Resolve duplicate package record issue in `conda_build.inspect_pkg.which_package`. (#5106 via #5108) +* Ensure `conda_build.post._lookup_in_prefix_packages` displays `str(PackageRecord)` instead of `repr(PackageRecord)`. (#5106 via #5108) +* Fix finalization of recipes with multiple sources. (#5111 via #5112) +* Improve handling by catching the more general `ImportError` instead of `ModuleNotFoundError` to cover cases involving `menuinst 1.x`. 
(#5116) + +### Contributors + +* @jaimergp +* @kenodegard + + + ## 3.28.1 (2023-12-06) ### Bug fixes diff --git a/news/5108-fix-which_package b/news/5108-fix-which_package deleted file mode 100644 index 6123309d2a..0000000000 --- a/news/5108-fix-which_package +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix `conda_build.inspect_pkg.which_package` so it does not return duplicate package records. (#5108) -* Fix `conda_build.post._lookup_in_prefix_packages` to display `str(PackageRecord)` instead of `repr(PackageRecord)`. (#5108) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5112-fix-multiple-sources b/news/5112-fix-multiple-sources deleted file mode 100644 index f988080a8b..0000000000 --- a/news/5112-fix-multiple-sources +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Update `conda_build.metadata.MetaData.get_section` to always return lists for "source" and "outputs". (#5112) - -### Bug fixes - -* Fix finalizing recipes with multiple sources. (#5111 via #5112) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5116-menuinst-importerror b/news/5116-menuinst-importerror deleted file mode 100644 index 7d8f3f060d..0000000000 --- a/news/5116-menuinst-importerror +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Catch the more general `ImportError` instead of `ModuleNotFoundError`, so we do handle the cases where `menuinst 1.x` is found. (#5116) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From bc1973dbf6c447918df409d391ded953ee701c56 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 11:41:28 -0600 Subject: [PATCH 246/366] [pre-commit.ci] pre-commit autoupdate (#5120) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.7 → v0.1.8](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.7...v0.1.8) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 50de7f9e3d..4260f8d246 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.7 + rev: v0.1.8 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From f230865caae511677afab1a4c4017643619f0e14 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 18 Dec 2023 15:23:31 -0600 Subject: [PATCH 247/366] Correct typing in `conda_build.deprecations` (#5118) --- conda_build/deprecations.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index 4d09205da5..372f700876 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -41,7 +41,7 @@ def __call__( *, addendum: str | None = None, stack: int = 0, - ) -> Callable[(Callable), Callable]: + ) -> Callable[[Callable], Callable]: """Deprecation decorator for functions, methods, & classes. :param deprecate_in: Version in which code will be marked as deprecated. 
@@ -83,7 +83,7 @@ def argument( rename: str | None = None, addendum: str | None = None, stack: int = 0, - ) -> Callable[(Callable), Callable]: + ) -> Callable[[Callable], Callable]: """Deprecation decorator for keyword arguments. :param deprecate_in: Version in which code will be marked as deprecated. @@ -132,7 +132,7 @@ def action( self, deprecate_in: str, remove_in: str, - action: Action, + action: type[Action], *, addendum: str | None = None, stack: int = 0, @@ -262,7 +262,10 @@ def topic( """ # detect function name and generate message category, message = self._generate_message( - deprecate_in, remove_in, topic, addendum + deprecate_in, + remove_in, + topic, + addendum, ) # alert developer that it's time to remove something @@ -283,12 +286,20 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: try: frame = sys._getframe(2 + stack) module = inspect.getmodule(frame) - return (module, module.__name__) - except (IndexError, AttributeError): - raise DeprecatedError("unable to determine the calling module") from None + if module is not None: + return (module, module.__name__) + except IndexError: + # IndexError: 2 + stack is out of range + pass + + raise DeprecatedError("unable to determine the calling module") def _generate_message( - self, deprecate_in: str, remove_in: str, prefix: str, addendum: str + self, + deprecate_in: str, + remove_in: str, + prefix: str, + addendum: str | None, ) -> tuple[type[Warning] | None, str]: """Deprecation decorator for functions, methods, & classes. @@ -301,6 +312,7 @@ def _generate_message( deprecate_version = parse(deprecate_in) remove_version = parse(remove_in) + category: type[Warning] | None if self._version < deprecate_version: category = PendingDeprecationWarning warning = f"is pending deprecation and will be removed in {remove_in}." From 06380d7ce67142e5c4eb93afdc6f967875eb2805 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Tue, 19 Dec 2023 10:43:22 +0100 Subject: [PATCH 248/366] Add tests to ensure that context is properly reset (#5083) Co-authored-by: Marcel Bargull --- tests/test_api_build.py | 43 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/tests/test_api_build.py b/tests/test_api_build.py index f7e6864412..0cc85f01c3 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -12,6 +12,7 @@ import tarfile import uuid from collections import OrderedDict +from contextlib import nullcontext from glob import glob from pathlib import Path from shutil import which @@ -28,8 +29,10 @@ import conda_build from conda_build import __version__, api, exceptions from conda_build.conda_interface import ( + CONDA_VERSION, CondaError, LinkError, + VersionOrder, cc_conda_build, context, reset_context, @@ -1902,3 +1905,43 @@ def test_activated_prefixes_in_actual_path(testing_metadata): if path in expected_paths ] assert actual_paths == expected_paths + + +@pytest.mark.parametrize("add_pip_as_python_dependency", [False, True]) +def test_add_pip_as_python_dependency_from_condarc_file( + testing_metadata, testing_workdir, add_pip_as_python_dependency, monkeypatch +): + """ + Test whether settings from .condarc files are heeded. + ref: https://github.com/conda/conda-libmamba-solver/issues/393 + """ + if VersionOrder(CONDA_VERSION) <= VersionOrder("23.10.0"): + if not add_pip_as_python_dependency and context.solver == "libmamba": + pytest.xfail( + "conda.plan.install_actions from conda<=23.10.0 ignores .condarc files." 
+ ) + from conda.base.context import context_stack + + # ContextStack's pop/replace methods don't call self.apply. + context_stack.apply() + + # TODO: SubdirData._cache_ clearing might not be needed for future conda versions. + # See https://github.com/conda/conda/pull/13365 for proposed changes. + from conda.core.subdir_data import SubdirData + + # SubdirData's cache doesn't distinguish on add_pip_as_python_dependency. + SubdirData._cache_.clear() + + testing_metadata.meta["build"]["script"] = ['python -c "import pip"'] + testing_metadata.meta["requirements"]["host"] = ["python"] + del testing_metadata.meta["test"] + if add_pip_as_python_dependency: + check_build_fails = nullcontext() + else: + check_build_fails = pytest.raises(subprocess.CalledProcessError) + + conda_rc = Path(testing_workdir, ".condarc") + conda_rc.write_text(f"add_pip_as_python_dependency: {add_pip_as_python_dependency}") + with env_var("CONDARC", conda_rc, reset_context): + with check_build_fails: + api.build(testing_metadata) From 75b0e70f64f66fb1ae3e988e35354d6d2cc84840 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 26 Dec 2023 09:09:30 -0600 Subject: [PATCH 249/366] Fix `conda_build.os_utils.liefldd.ensure_binary` to handle `None` (#5124) --- conda_build/os_utils/liefldd.py | 8 +++++--- news/5124-fix-ensure_binary-None-handling | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 news/5124-fix-ensure_binary-None-handling diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 0c47fd2533..9d638d055c 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -43,10 +43,12 @@ def is_string(s): # these are to be avoided, or if not avoided they # should be passed a binary when possible as that # will prevent having to parse it multiple times. -def ensure_binary(file: str | os.PathLike | Path | lief.Binary) -> lief.Binary | None: +def ensure_binary( + file: str | os.PathLike | Path | lief.Binary | None, +) -> lief.Binary | None: if isinstance(file, lief.Binary): return file - elif not Path(file).exists(): + elif not file or not Path(file).exists(): return None try: return lief.parse(str(file)) @@ -525,9 +527,9 @@ def inspect_linkages_lief( todo.pop(0) filename2 = element[0] binary = element[1] - uniqueness_key = get_uniqueness_key(binary) if not binary: continue + uniqueness_key = get_uniqueness_key(binary) if uniqueness_key not in already_seen: parent_exe_dirname = None if binary.format == lief.EXE_FORMATS.PE: diff --git a/news/5124-fix-ensure_binary-None-handling b/news/5124-fix-ensure_binary-None-handling new file mode 100644 index 0000000000..edd4b9d9a4 --- /dev/null +++ b/news/5124-fix-ensure_binary-None-handling @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Update `conda_build.os_utils.liefldd.ensure_binary` to handle `None` inputs. 
(#5123 via #5124) + +### Deprecations + +* + +### Docs + +* + +### Other + +* From 5e26382d9cbfa3b27cda06adb3d1b4dde48f7432 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 09:19:34 -0600 Subject: [PATCH 250/366] [pre-commit.ci] pre-commit autoupdate (#5127) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.8 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.8...v0.1.9) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4260f8d246..030c783909 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.8 + rev: v0.1.9 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From 739bfa1933ae4b52d75e87272e0de34ecd01d0fa Mon Sep 17 00:00:00 2001 From: Mike Sarahan Date: Wed, 3 Jan 2024 10:21:15 -0600 Subject: [PATCH 251/366] Reduce unset script_env variable to warning (#5105) --- conda_build/build.py | 7 +++++-- conda_build/environ.py | 4 +++- news/5105-script-env-warn | 20 +++++++++++++++++++ .../_build_script_missing_var/meta.yaml | 9 +++++++++ tests/test_subpackages.py | 10 ++++++++++ 5 files changed, 47 insertions(+), 3 deletions(-) create mode 100644 news/5105-script-env-warn create mode 100644 tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml diff --git a/conda_build/build.py b/conda_build/build.py index c007286474..c6575bada0 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1919,9 +1919,12 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): val = var.split("=", 1)[1] var = var.split("=", 1)[0] elif var not in os.environ: - raise ValueError( - f"env var '{var}' specified in script_env, but is not set." + warnings.warn( + "The environment variable '%s' specified in script_env is undefined." + % var, + UserWarning, ) + val = "" else: val = os.environ[var] env_output[var] = val diff --git a/conda_build/environ.py b/conda_build/environ.py index c165bdeba2..85733d97d1 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -491,7 +491,9 @@ def meta_vars(meta: MetaData, skip_build_id=False): value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' is undefined." % var_name, UserWarning + "The environment variable '%s' specified in script_env is undefined." + % var_name, + UserWarning, ) else: d[var_name] = value diff --git a/news/5105-script-env-warn b/news/5105-script-env-warn new file mode 100644 index 0000000000..5b7d66bad5 --- /dev/null +++ b/news/5105-script-env-warn @@ -0,0 +1,20 @@ +### Enhancements + +* Relax script_env error in outputs when variable referenced in script_env is not defined. + This unifies current behavior with the top-level build. 
(#5105) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml b/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml new file mode 100644 index 0000000000..d1c2bfbe57 --- /dev/null +++ b/tests/test-recipes/split-packages/_build_script_missing_var/meta.yaml @@ -0,0 +1,9 @@ +package: + name: test_build_script_in_output + version: 1.0 + +outputs: + - name: test_1 + build: + script_env: + - TEST_FN_DOESNT_EXIST diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 2d63042cb5..db75006a6d 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -335,6 +335,16 @@ def test_build_script_and_script_env(testing_config): api.build(recipe, config=testing_config) +@pytest.mark.sanity +def test_build_script_and_script_env_warn_empty_script_env(testing_config): + recipe = os.path.join(subpackage_dir, "_build_script_missing_var") + with pytest.warns( + UserWarning, + match="The environment variable 'TEST_FN_DOESNT_EXIST' specified in script_env is undefined", + ): + api.build(recipe, config=testing_config) + + @pytest.mark.sanity @pytest.mark.skipif(sys.platform != "darwin", reason="only implemented for mac") def test_strong_run_exports_from_build_applies_to_host(testing_config): From a323ce5684c819d198e7f3feaef9cb3f71dab196 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 4 Jan 2024 12:27:27 -0600 Subject: [PATCH 252/366] Misc cleanup (#5129) * Deprecate `conda_build.noarch_python._force_dir`, use `os.makedirs(exist_ok=True)` instead * Deprecate `conda_build.noarch_python._error_exit` * Combine `on_win` & `ISWIN` constants * Always use `on_*` constants * Remove Python 2.7 and conda<4.7.0 fallback imports * Relative imports * Add pyupgrade * Add flake8-implicit-str-concat --- conda_build/api.py | 88 ++++++------- conda_build/bdist_conda.py | 17 +-- conda_build/build.py | 206 +++++++++++++------------------ conda_build/cli/main_build.py | 4 +- conda_build/cli/main_render.py | 5 +- conda_build/cli/validators.py | 5 +- conda_build/conda_interface.py | 14 +-- conda_build/config.py | 18 +-- conda_build/convert.py | 32 ++--- conda_build/create_test.py | 18 ++- conda_build/develop.py | 8 +- conda_build/environ.py | 41 +++--- conda_build/exceptions.py | 4 +- conda_build/features.py | 4 +- conda_build/index.py | 15 +-- conda_build/inspect_pkg.py | 24 ++-- conda_build/jinja_context.py | 17 ++- conda_build/license_family.py | 8 +- conda_build/metadata.py | 70 +++++------ conda_build/metapackage.py | 6 +- conda_build/noarch_python.py | 31 +++-- conda_build/os_utils/external.py | 10 +- conda_build/os_utils/ldd.py | 7 +- conda_build/os_utils/liefldd.py | 16 ++- conda_build/os_utils/macho.py | 13 +- conda_build/os_utils/pyldd.py | 29 ++--- conda_build/post.py | 90 +++++--------- conda_build/render.py | 32 +++-- conda_build/skeletons/cpan.py | 76 +++++------- conda_build/skeletons/cran.py | 130 +++++-------------- conda_build/skeletons/pypi.py | 36 +++--- conda_build/skeletons/rpm.py | 33 ++--- conda_build/source.py | 44 +++---- conda_build/tarcheck.py | 6 +- conda_build/utils.py | 128 +++++++------------ conda_build/variants.py | 24 ++-- conda_build/windows.py | 10 +- pyproject.toml | 5 +- tests/cli/test_main_render.py | 2 +- tests/test_api_build.py | 20 ++- tests/test_api_convert.py | 48 +++---- tests/test_api_render.py | 4 +- tests/test_jinja_context.py | 2 +- tests/test_subpackages.py | 4 +- tests/test_utils.py | 6 +- tests/utils.py | 5 +- 46 files 
changed, 572 insertions(+), 843 deletions(-) diff --git a/conda_build/api.py b/conda_build/api.py index 522aa1b08d..8f55488708 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -10,21 +10,27 @@ """ from __future__ import annotations -import sys as _sys - # imports are done locally to keep the api clean and limited strictly # to conda-build's functionality. +import os +import sys from os.path import dirname, expanduser, join from pathlib import Path # make the Config class available in the api namespace -from conda_build.config import DEFAULT_PREFIX_LENGTH as _prefix_length -from conda_build.config import Config, get_channel_urls, get_or_merge_config -from conda_build.utils import ensure_list as _ensure_list -from conda_build.utils import expand_globs as _expand_globs -from conda_build.utils import get_logger as _get_logger - +from .config import DEFAULT_PREFIX_LENGTH as _prefix_length +from .config import Config, get_channel_urls, get_or_merge_config from .deprecations import deprecated +from .utils import ( + CONDA_PACKAGE_EXTENSIONS, + LoggingContext, + ensure_list, + expand_globs, + find_recipe, + get_logger, + get_skip_message, + on_win, +) def render( @@ -42,9 +48,9 @@ def render( Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" from collections import OrderedDict - from conda_build.conda_interface import NoPackagesFoundError - from conda_build.exceptions import DependencyNeedsBuildingError - from conda_build.render import finalize_metadata, render_recipe + from .conda_interface import NoPackagesFoundError + from .exceptions import DependencyNeedsBuildingError + from .render import finalize_metadata, render_recipe config = get_or_merge_config(config, **kwargs) @@ -104,7 +110,7 @@ def render( def output_yaml(metadata, file_path=None, suppress_outputs=False): """Save a rendered recipe in its final form to the path given by file_path""" - from conda_build.render import output_yaml + from .render import output_yaml return output_yaml(metadata, file_path, suppress_outputs=suppress_outputs) @@ -121,8 +127,7 @@ def get_output_file_paths( Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ - from conda_build.render import bldpkg_path - from conda_build.utils import get_skip_message + from .render import bldpkg_path config = get_or_merge_config(config, **kwargs) @@ -176,7 +181,7 @@ def get_output_file_path( Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ - log = _get_logger(__name__) + log = get_logger(__name__) log.warn( "deprecation warning: this function has been renamed to get_output_file_paths, " "to reflect that potentially multiple paths are returned. This function will be " @@ -222,10 +227,7 @@ def build( If recipe paths are provided, renders recipe before building. Tests built packages by default. notest=True to skip test.""" - import os - - from conda_build.build import build_tree - from conda_build.utils import find_recipe + from .build import build_tree assert post in (None, True, False), ( "post must be boolean or None. 
Remember, you must pass " @@ -233,9 +235,9 @@ def build( ) recipes = [] - for recipe in _ensure_list(recipe_paths_or_metadata): + for recipe in ensure_list(recipe_paths_or_metadata): if isinstance(recipe, str): - for recipe in _expand_globs(recipe, os.getcwd()): + for recipe in expand_globs(recipe, os.getcwd()): try: recipe = find_recipe(recipe) except OSError: @@ -275,7 +277,7 @@ def test( For a recipe folder, it renders the recipe enough to know what package to download, and obtains it from your currently configuured channels.""" - from conda_build.build import test + from .build import test if hasattr(recipedir_or_package_or_metadata, "config"): config = recipedir_or_package_or_metadata.config @@ -335,7 +337,7 @@ def skeletonize( # only relevant ones below config = get_or_merge_config(config, **kwargs) config.compute_build_id("skeleton") - packages = _ensure_list(packages) + packages = ensure_list(packages) # This is a little bit of black magic. The idea is that for any keyword argument that # we inspect from the given module's skeletonize function, we should hoist the argument @@ -370,7 +372,7 @@ def skeletonize( def develop( recipe_dir, - prefix=_sys.prefix, + prefix=sys.prefix, no_pth_file=False, build_ext=False, clean=False, @@ -381,7 +383,7 @@ def develop( This works by creating a conda.pth file in site-packages.""" from .develop import execute - recipe_dir = _ensure_list(recipe_dir) + recipe_dir = ensure_list(recipe_dir) return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) @@ -400,7 +402,7 @@ def convert( portable, such as pure python, or header-only C/C++ libraries.""" from .convert import conda_convert - platforms = _ensure_list(platforms) + platforms = ensure_list(platforms) if package_file.endswith("tar.bz2"): return conda_convert( package_file, @@ -431,7 +433,7 @@ def test_installable(channel="defaults"): def inspect_linkages( packages, - prefix=_sys.prefix, + prefix=sys.prefix, untracked=False, all_packages=False, show_files=False, @@ -440,7 +442,7 @@ def inspect_linkages( ): from .inspect_pkg import inspect_linkages - packages = _ensure_list(packages) + packages = ensure_list(packages) return inspect_linkages( packages, prefix=prefix, @@ -452,18 +454,18 @@ def inspect_linkages( ) -def inspect_objects(packages, prefix=_sys.prefix, groupby="filename"): +def inspect_objects(packages, prefix=sys.prefix, groupby="filename"): from .inspect_pkg import inspect_objects - packages = _ensure_list(packages) + packages = ensure_list(packages) return inspect_objects(packages, prefix=prefix, groupby=groupby) def inspect_prefix_length(packages, min_prefix_length=_prefix_length): - from conda_build.tarcheck import check_prefix_lengths + from .tarcheck import check_prefix_lengths config = Config(prefix_length=min_prefix_length) - packages = _ensure_list(packages) + packages = ensure_list(packages) prefix_lengths = check_prefix_lengths(packages, config) if prefix_lengths: print( @@ -538,14 +540,11 @@ def update_index( current_index_versions=None, **kwargs, ): - import os - import yaml - from conda_build.index import update_index as legacy_update_index - from conda_build.utils import ensure_list + from .index import update_index as legacy_update_index - dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)] + dir_paths = [os.path.abspath(path) for path in ensure_list(dir_paths)] if isinstance(current_index_versions, str): with open(current_index_versions) as f: @@ -580,14 +579,11 @@ def debug( your package's build or test phase. 
""" import logging - import os import time from fnmatch import fnmatch - from conda_build.build import build as run_build - from conda_build.build import test as run_test - from conda_build.utils import CONDA_PACKAGE_EXTENSIONS, LoggingContext, on_win - + from .build import build as run_build + from .build import test as run_test from .metadata import MetaData is_package = False @@ -702,15 +698,11 @@ def debug( os.symlink(link_target, debug_source_loc) except PermissionError as e: raise Exception( - "You do not have the necessary permissions to create symlinks in {}\nerror: {}".format( - dn, str(e) - ) + f"You do not have the necessary permissions to create symlinks in {dn}\nerror: {str(e)}" ) except Exception as e: raise Exception( - "Unknown error creating symlinks in {}\nerror: {}".format( - dn, str(e) - ) + f"Unknown error creating symlinks in {dn}\nerror: {str(e)}" ) ext = ".bat" if on_win else ".sh" diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 6e4a5335b9..b10e4758c4 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -1,10 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -""" -bdist_conda - -""" - import sys import time from collections import defaultdict @@ -13,12 +8,12 @@ from setuptools.dist import Distribution from setuptools.errors import BaseError, OptionError -from conda_build import api -from conda_build.build import handle_anaconda_upload -from conda_build.conda_interface import StringIO, configparser, spec_from_line -from conda_build.config import Config -from conda_build.metadata import MetaData -from conda_build.skeletons import pypi +from . import api +from .build import handle_anaconda_upload +from .conda_interface import StringIO, configparser, spec_from_line +from .config import Config +from .metadata import MetaData +from .skeletons import pypi # TODO: Add support for all the options that conda build has diff --git a/conda_build/build.py b/conda_build/build.py index c6575bada0..fe0b5fe5a4 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3,12 +3,6 @@ """ Module that does most of the heavy lifting for the ``conda build`` command. """ - -# this is to compensate for a requests idna encoding error. 
Conda is a better place to fix, -# eventually -# exception is raises: "LookupError: unknown encoding: idna" -# http://stackoverflow.com/a/13057751/1170370 -import encodings.idna # NOQA import fnmatch import json import os @@ -29,39 +23,8 @@ from bs4 import UnicodeDammit from conda import __version__ as conda_version -import conda_build.noarch_python as noarch_python -import conda_build.os_utils.external as external -from conda_build import __version__ as conda_build_version -from conda_build import environ, source, tarcheck, utils -from conda_build.config import Config -from conda_build.create_test import create_all_test_files -from conda_build.exceptions import CondaBuildException, DependencyNeedsBuildingError -from conda_build.index import _delegated_update_index, get_build_index -from conda_build.metadata import FIELDS, MetaData -from conda_build.post import ( - fix_permissions, - get_build_metadata, - post_build, - post_process, -) -from conda_build.render import ( - add_upstream_pins, - bldpkg_path, - distribute_variants, - execute_download_actions, - expand_outputs, - output_yaml, - render_recipe, - reparse, - try_download, -) -from conda_build.variants import ( - dict_of_lists_to_list_of_dicts, - get_package_variants, - set_language_env_vars, -) - -# used to get version +from . import __version__ as conda_build_version +from . import environ, noarch_python, source, tarcheck, utils from .conda_interface import ( CondaError, EntityEncoder, @@ -80,18 +43,50 @@ root_dir, url_path, ) +from .config import Config +from .create_test import create_all_test_files +from .exceptions import CondaBuildException, DependencyNeedsBuildingError +from .index import _delegated_update_index, get_build_index +from .metadata import FIELDS, MetaData +from .os_utils import external +from .post import ( + fix_permissions, + get_build_metadata, + post_build, + post_process, +) +from .render import ( + add_upstream_pins, + bldpkg_path, + distribute_variants, + execute_download_actions, + expand_outputs, + output_yaml, + render_recipe, + reparse, + try_download, +) from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSIONS, env_var, glob, + on_linux, + on_mac, + on_win, shutil_move_more_retrying, tmp_chdir, + write_bat_activation_text, +) +from .variants import ( + dict_of_lists_to_list_of_dicts, + get_package_variants, + set_language_env_vars, ) -if sys.platform == "win32": - import conda_build.windows as windows +if on_win: + from . import windows if "bsd" in sys.platform: shell_path = "/bin/sh" @@ -181,7 +176,7 @@ def create_post_scripts(m: MetaData): def prefix_replacement_excluded(path): if path.endswith((".pyc", ".pyo")) or not isfile(path): return True - if sys.platform != "darwin" and islink(path): + if not on_mac and islink(path): # OSX does not allow hard-linking symbolic links, so we cannot # skip symbolic links (as we can on Linux) return True @@ -746,9 +741,7 @@ def perform_replacements(matches, prefix, verbose=False, diff=None): if match["type"] == "binary": if len(original) < len(new_string): print( - "ERROR :: Cannot replace {} with {} in binary file {}".format( - original, new_string, filename - ) + f"ERROR :: Cannot replace {original} with {new_string} in binary file {filename}" ) new_string = new_string.ljust(len(original), b"\0") assert len(new_string) == len(original) @@ -1041,9 +1034,7 @@ def copy_test_source_files(m, destination): except OSError as e: log = utils.get_logger(__name__) log.warn( - "Failed to copy {} into test files. 
Error was: {}".format( - f, str(e) - ) + f"Failed to copy {f} into test files. Error was: {str(e)}" ) for ext in ".pyc", ".pyo": for f in utils.get_ext_files(destination, ext): @@ -1313,9 +1304,9 @@ def record_prefix_files(m, files_with_prefix): and detect_binary_files_with_prefix ): print( - "File {} force-identified as 'binary', " + f"File {fn} force-identified as 'binary', " "But it is 'binary' anyway, suggest removing it from " - "`build/binary_has_prefix_files`".format(fn) + "`build/binary_has_prefix_files`" ) if fn in binary_has_prefix_files: binary_has_prefix_files.remove(fn) @@ -1328,9 +1319,9 @@ def record_prefix_files(m, files_with_prefix): mode = "text" elif fn in text_has_prefix_files and not len_text_has_prefix_files: print( - "File {} force-identified as 'text', " + f"File {fn} force-identified as 'text', " "But it is 'text' anyway, suggest removing it from " - "`build/has_prefix_files`".format(fn) + "`build/has_prefix_files`" ) if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) @@ -1472,13 +1463,10 @@ def write_info_json(m: MetaData): fo.write( "# This file as created when building:\n" "#\n" - "# {}.tar.bz2 (on '{}')\n" + f"# {m.dist()}.tar.bz2 (on '{m.config.build_subdir}')\n" "#\n" "# It can be used to create the runtime environment of this package using:\n" - "# $ conda create --name --file ".format( - m.dist(), - m.config.build_subdir, - ) + "# $ conda create --name --file " ) for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): fo.write("%s\n" % "=".join(dist.split())) @@ -1756,11 +1744,11 @@ def post_process_files(m: MetaData, initial_prefix_files): if len(missing): log = utils.get_logger(__name__) log.warning( - "The install/build script(s) for {} deleted the following " - "files (from dependencies) from the prefix:\n{}\n" + f"The install/build script(s) for {package_name} deleted the following " + f"files (from dependencies) from the prefix:\n{missing}\n" "This will cause the post-link checks to mis-report. Please " "try not to delete and files (DSOs in particular) from the " - "prefix".format(package_name, missing) + "prefix" ) get_build_metadata(m) create_post_scripts(m) @@ -1793,7 +1781,7 @@ def post_process_files(m: MetaData, initial_prefix_files): if m.noarch == 'python' and m.config.subdir == 'win-32': # Delete any PIP-created .exe launchers and fix entry_points.txt # .. but we need to provide scripts instead here. - from conda_build.post import caseless_sepless_fnmatch + from .post import caseless_sepless_fnmatch exes = caseless_sepless_fnmatch(new_files, 'Scripts/*.exe') for ff in exes: os.unlink(os.path.join(m.config.host_prefix, ff)) @@ -1806,11 +1794,9 @@ def post_process_files(m: MetaData, initial_prefix_files): tuple(f for f in new_files if m.config.meta_dir in join(host_prefix, f)), ) sys.exit( - "Error: Untracked file(s) {} found in conda-meta directory. This error usually comes " + f"Error: Untracked file(s) {meta_files} found in conda-meta directory. This error usually comes " "from using conda in the build script. Avoid doing this, as it can lead to packages " - "that include their dependencies.".format( - meta_files, - ) + "that include their dependencies." ) post_build(m, new_files, build_python=python) @@ -1976,11 +1962,11 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): for dep, env_var_name in dangerous_double_deps.items(): if all(dep in pkgs_list for pkgs_list in (build_pkgs, host_pkgs)): raise CondaBuildException( - "Empty package; {0} present in build and host deps. 
" - "You probably picked up the build environment's {0} " + f"Empty package; {dep} present in build and host deps. " + f"You probably picked up the build environment's {dep} " " executable. You need to alter your recipe to " - " use the {1} env var in your recipe to " - "run that executable.".format(dep, env_var_name) + f" use the {env_var_name} env var in your recipe to " + "run that executable." ) elif dep in build_pkgs and metadata.uses_new_style_compiler_activation: link = ( @@ -1988,10 +1974,10 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): "define-metadata.html#host" ) raise CondaBuildException( - "Empty package; {0} dep present in build but not " - "host requirements. You need to move your {0} dep " - "to the host requirements section. See {1} for more " - "info.".format(dep, link) + f"Empty package; {dep} dep present in build but not " + f"host requirements. You need to move your {dep} dep " + f"to the host requirements section. See {link} for more " + "info." ) initial_files = set(utils.prefix_files(metadata.config.host_prefix)) @@ -2081,9 +2067,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): except KeyError as e: log.warn( "Package doesn't have necessary files. It might be too old to inspect." - "Legacy noarch packages are known to fail. Full message was {}".format( - e - ) + f"Legacy noarch packages are known to fail. Full message was {e}" ) try: crossed_subdir = metadata.config.target_subdir @@ -2242,7 +2226,7 @@ def _write_sh_activation_text(file_handle, m): stack = "--stack" if m.is_cross else "" file_handle.write(f'conda activate {stack} "{build_prefix_path}"\n') - from conda_build.os_utils.external import find_executable + from .os_utils.external import find_executable ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: @@ -2303,16 +2287,14 @@ def _write_activation_text(script_path, m): data = fh.read() fh.seek(0) if os.path.splitext(script_path)[1].lower() == ".bat": - if m.config.build_subdir.startswith("win"): - from conda_build.utils import write_bat_activation_text write_bat_activation_text(fh, m) elif os.path.splitext(script_path)[1].lower() == ".sh": _write_sh_activation_text(fh, m) else: log = utils.get_logger(__name__) log.warn( - "not adding activation to {} - I don't know how to do so for " - "this file type".format(script_path) + f"not adding activation to {script_path} - I don't know how to do so for " + "this file type" ) fh.write(data) @@ -2491,11 +2473,9 @@ def build( print( "Packages for ", m.path or m.name(), - "with variant {} " + f"with variant {m.get_hash_contents()} " "are already built and available from your configured channels " - "(including local) or are otherwise specified to be skipped.".format( - m.get_hash_contents() - ), + "(including local) or are otherwise specified to be skipped.", ) return default_return @@ -2760,10 +2740,8 @@ def build( if test_script: if not os.path.isfile(os.path.join(m.path, test_script)): raise ValueError( - "test script specified as {} does not exist. Please " - "check for typos or create the file and try again.".format( - test_script - ) + f"test script specified as {test_script} does not exist. Please " + "check for typos or create the file and try again." ) utils.copy_into( os.path.join(m.path, test_script), @@ -2956,8 +2934,8 @@ def guess_interpreter(script_filename): break else: raise NotImplementedError( - "Don't know how to run {} file. 
Please specify " - "script_interpreter for {} output".format(file_ext, script_filename) + f"Don't know how to run {file_ext} file. Please specify " + f"script_interpreter for {script_filename} output" ) return interpreter_command @@ -3243,7 +3221,7 @@ def _write_test_run_script( if py_files: test_python = metadata.config.test_python # use pythonw for import tests when osx_is_app is set - if metadata.get_value("build/osx_is_app") and sys.platform == "darwin": + if metadata.get_value("build/osx_is_app") and on_mac: test_python = test_python + "w" tf.write( '"{python}" -s "{test_file}"\n'.format( @@ -3298,11 +3276,7 @@ def _write_test_run_script( ) elif os.path.splitext(shell_file)[1] == ".sh": # TODO: Run the test/commands here instead of in run_test.py - tf.write( - '"{shell_path}" {trace}-e "{test_file}"\n'.format( - shell_path=shell_path, test_file=shell_file, trace=trace - ) - ) + tf.write(f'"{shell_path}" {trace}-e "{shell_file}"\n') def write_test_scripts( @@ -3565,7 +3539,7 @@ def test( env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.test_run_post: - from conda_build.utils import get_installed_packages + from .utils import get_installed_packages installed = get_installed_packages(metadata.config.test_prefix) files = installed[metadata.meta["package"]["name"]]["files"] @@ -3659,9 +3633,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): try: shutil.move(pkg, dest) log.warn( - "Tests failed for {} - moving package to {}".format( - os.path.basename(pkg), broken_dir - ) + f"Tests failed for {os.path.basename(pkg)} - moving package to {broken_dir}" ) except OSError: pass @@ -3672,17 +3644,15 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): def check_external(): - if sys.platform.startswith("linux"): + if on_linux: patchelf = external.find_executable("patchelf") if patchelf is None: sys.exit( "Error:\n" - " Did not find 'patchelf' in: {}\n" + f" Did not find 'patchelf' in: {os.pathsep.join(external.dir_paths)}\n" " 'patchelf' is necessary for building conda packages on Linux with\n" " relocatable ELF libraries. You can install patchelf using conda install\n" - " patchelf.\n".format( - os.pathsep.join(external.dir_paths), - ) + " patchelf.\n" ) @@ -3862,8 +3832,8 @@ def build_tree( DependencyNeedsBuildingError, ) as e: log.warn( - "Skipping downstream test for spec {}; was " - "unsatisfiable. Error was {}".format(dep, e) + f"Skipping downstream test for spec {dep}; was " + f"unsatisfiable. Error was {e}" ) continue # make sure to download that package to the local cache if not there @@ -3936,9 +3906,7 @@ def build_tree( if pkg in to_build_recursive: cfg.clean(remove_folders=False) raise RuntimeError( - "Can't build {} due to environment creation error:\n".format( - recipe - ) + f"Can't build {recipe} due to environment creation error:\n" + str(e.message) + "\n" + extra_help @@ -3979,11 +3947,9 @@ def build_tree( MatchSpec(matchspec), dep_meta[0], metadata ): print( - ( - "Missing dependency {0}, but found" - + " recipe directory, so building " - + "{0} first" - ).format(pkg) + f"Missing dependency {pkg}, but found " + f"recipe directory, so building " + f"{pkg} first" ) add_recipes.append(recipe_dir) available = True @@ -4015,7 +3981,7 @@ def build_tree( handle_pypi_upload(wheels, config=config) # Print the variant information for each package because it is very opaque and never printed. 
- from conda_build.inspect_pkg import get_hash_input + from .inspect_pkg import get_hash_input hash_inputs = get_hash_input(tarballs) print( @@ -4061,7 +4027,7 @@ def build_tree( def handle_anaconda_upload(paths, config): - from conda_build.os_utils.external import find_executable + from .os_utils.external import find_executable paths = utils.ensure_list(paths) @@ -4092,7 +4058,7 @@ def handle_anaconda_upload(paths, config): no_upload_message += ( "\n" "# To have conda build upload to anaconda.org automatically, use\n" - "# {}conda config --set anaconda_upload yes\n".format(prompter) + f"# {prompter}conda config --set anaconda_upload yes\n" ) no_upload_message += f"anaconda upload{joiner}" + joiner.join(paths) @@ -4105,7 +4071,7 @@ def handle_anaconda_upload(paths, config): sys.exit( "Error: cannot locate anaconda command (required for upload)\n" "# Try:\n" - "# {}conda install anaconda-client".format(prompter) + f"# {prompter}conda install anaconda-client" ) cmd = [ anaconda, diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 5b5bb67ef6..e66ff0e11b 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -596,8 +596,8 @@ def main(): sys.exit(1) except filelock.Timeout as e: print( - "File lock on {} could not be obtained. You might need to try fewer builds at once." - " Otherwise, run conda clean --lock".format(e.lock_file) + f"File lock on {e.lock_file} could not be obtained. You might need to try fewer builds at once." + " Otherwise, run conda clean --lock" ) sys.exit(1) return diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 9026fb1b57..cdd831021b 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -15,7 +15,6 @@ from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars -on_win = sys.platform == "win32" log = logging.getLogger(__name__) @@ -194,7 +193,7 @@ def execute(args, print_results=True): config = get_or_merge_config(None, **args.__dict__) variants = get_package_variants(args.recipe, config, variants=args.variants) - from conda_build.build import get_all_replacements + from ..build import get_all_replacements get_all_replacements(variants) set_language_env_vars(variants) @@ -217,7 +216,7 @@ def execute(args, print_results=True): if args.file and len(metadata_tuples) > 1: log.warning( "Multiple variants rendered. " - "Only one will be written to the file you specified ({}).".format(args.file) + f"Only one will be written to the file you specified ({args.file})." 
) if print_results: diff --git a/conda_build/cli/validators.py b/conda_build/cli/validators.py index b1c1144662..e21304e074 100644 --- a/conda_build/cli/validators.py +++ b/conda_build/cli/validators.py @@ -5,8 +5,7 @@ import os from argparse import ArgumentError -from conda_build import utils -from conda_build.utils import CONDA_PACKAGE_EXTENSIONS +from ..utils import CONDA_PACKAGE_EXTENSIONS, is_conda_pkg CONDA_PKG_OR_RECIPE_ERROR_MESSAGE = ( "\nUnable to parse provided recipe directory or package file.\n\n" @@ -21,7 +20,7 @@ def validate_is_conda_pkg_or_recipe_dir(arg_val: str) -> str: """ if os.path.isdir(arg_val): return arg_val - elif utils.is_conda_pkg(arg_val): + elif is_conda_pkg(arg_val): return arg_val else: raise ArgumentError(None, CONDA_PKG_OR_RECIPE_ERROR_MESSAGE) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 10bd300ed3..78eeb03f95 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -26,11 +26,11 @@ UnsatisfiableError, ) from conda.exports import ( # noqa: F401 - ArgumentParser, # noqa: F401 + ArgumentParser, Channel, Completer, - CondaSession, # noqa: F401 - EntityEncoder, # noqa: F401 + CondaSession, + EntityEncoder, FileMode, InstalledPackages, MatchSpec, @@ -41,15 +41,15 @@ TemporaryDirectory, TmpDownload, Unsatisfiable, - VersionOrder, # noqa: F401 - _toposort, # noqa: F401 + VersionOrder, + _toposort, add_parser_channels, add_parser_prefix, display_actions, download, execute_actions, execute_plan, - get_index, # noqa: F401 + get_index, handle_proxy_407, hashsum_file, human_bytes, @@ -164,7 +164,7 @@ def get_installed_version(prefix, pkgs): Primarily used by conda-forge, but may be useful in general for checking when a package needs to be updated """ - from conda_build.utils import ensure_list + from .utils import ensure_list pkgs = ensure_list(pkgs) linked_pkgs = linked(prefix) diff --git a/conda_build/config.py b/conda_build/config.py index 67f0e9f6a0..89c158e52d 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -10,7 +10,6 @@ import os import re import shutil -import sys import time from collections import namedtuple from os.path import abspath, expanduser, expandvars, join @@ -26,10 +25,15 @@ url_path, ) from .deprecations import deprecated -from .utils import get_build_folders, get_conda_operation_locks, get_logger, rm_rf +from .utils import ( + get_build_folders, + get_conda_operation_locks, + get_logger, + on_win, + rm_rf, +) from .variants import get_default_variant -on_win = sys.platform == "win32" invocation_time = "" @@ -551,9 +555,7 @@ def CONDA_R(self, value): self.variant["r_base"] = value def _get_python(self, prefix, platform): - if platform.startswith("win") or ( - platform == "noarch" and sys.platform == "win32" - ): + if platform.startswith("win") or (platform == "noarch" and on_win): if os.path.isfile(os.path.join(prefix, "python_d.exe")): res = join(prefix, "python_d.exe") else: @@ -580,9 +582,7 @@ def _get_lua(self, prefix, platform): return res def _get_r(self, prefix, platform): - if platform.startswith("win") or ( - platform == "noarch" and sys.platform == "win32" - ): + if platform.startswith("win") or (platform == "noarch" and on_win): res = join(prefix, "Scripts", "R.exe") # MRO test: if not os.path.exists(res): diff --git a/conda_build/convert.py b/conda_build/convert.py index 5c283cb98d..c2882d1508 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -14,7 +14,7 @@ import tempfile from pathlib import Path -from conda_build.utils import 
filter_info_files, walk +from .utils import filter_info_files, walk def retrieve_c_extensions(file_path, show_imports=False): @@ -113,9 +113,7 @@ def retrieve_python_version(file_path): ) build_version = re.sub(r"\A.*py\d\d.*\Z", "python", index["build"]) - return "{}{}.{}".format( - build_version, build_version_number[0], build_version_number[1] - ) + return f"{build_version}{build_version_number[0]}.{build_version_number[1]}" def extract_temporary_directory(file_path): @@ -200,11 +198,7 @@ def update_index_file(temp_dir, target_platform, dependencies, verbose): if verbose: print("Updating platform from {} to {}".format(index["platform"], platform)) print("Updating subdir from {} to {}".format(index["subdir"], target_platform)) - print( - "Updating architecture from {} to {}".format( - source_architecture, architecture - ) - ) + print(f"Updating architecture from {source_architecture} to {architecture}") index["platform"] = platform index["subdir"] = target_platform @@ -719,9 +713,7 @@ def convert_from_unix_to_windows( ) prefixes.add( - "/opt/anaconda1anaconda2anaconda3 text Scripts/{}-script.py\n".format( - retrieve_executable_name(script) - ) + f"/opt/anaconda1anaconda2anaconda3 text Scripts/{retrieve_executable_name(script)}-script.py\n" ) new_bin_path = os.path.join(temp_dir, "Scripts") @@ -766,9 +758,7 @@ def convert_from_windows_to_unix( remove_executable(directory, script) prefixes.add( - "/opt/anaconda1anaconda2anaconda3 text bin/{}\n".format( - retrieve_executable_name(script) - ) + f"/opt/anaconda1anaconda2anaconda3 text bin/{retrieve_executable_name(script)}\n" ) new_bin_path = os.path.join(temp_dir, "bin") @@ -824,8 +814,8 @@ def conda_convert( if len(retrieve_c_extensions(file_path)) > 0 and not force: sys.exit( - "WARNING: Package {} contains C extensions; skipping conversion. " - "Use -f to force conversion.".format(os.path.basename(file_path)) + f"WARNING: Package {os.path.basename(file_path)} contains C extensions; skipping conversion. " + "Use -f to force conversion." ) conversion_platform, source_platform, architecture = retrieve_package_platform( @@ -853,16 +843,14 @@ def conda_convert( for platform in platforms: if platform == source_platform_architecture: print( - "Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion.".format(source_platform_architecture, platform) + f"Source platform '{source_platform_architecture}' and target platform '{platform}' are identical. " + "Skipping conversion." ) continue if not quiet: print( - "Converting {} from {} to {}".format( - os.path.basename(file_path), source_platform_architecture, platform - ) + f"Converting {os.path.basename(file_path)} from {source_platform_architecture} to {platform}" ) if platform.startswith(("osx", "linux")) and conversion_platform == "unix": diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 35511ef503..334645dcac 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -229,16 +229,14 @@ def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: # Don't try to print version for complex imports if " " not in name: print( - ( - "if (defined {0}->VERSION) {{\n" - + "\tmy $given_version = {0}->VERSION;\n" - + "\t$given_version =~ s/0+$//;\n" - + "\tdie('Expected version ' . $expected_version . ' but" - + " found ' . $given_version) unless ($expected_version " - + "eq $given_version);\n" - + "\tprint('\tusing version ' . {0}->VERSION . 
'\n');\n" - + "\n}}" - ).format(name), + f"if (defined {name}->VERSION) {{\n" + f"\tmy $given_version = {name}->VERSION;\n" + f"\t$given_version =~ s/0+$//;\n" + f"\tdie('Expected version ' . $expected_version . ' but" + f" found ' . $given_version) unless ($expected_version " + f"eq $given_version);\n" + f"\tprint('\tusing version ' . {name}->VERSION . '\n');\n" + f"\n}}", file=fo, ) return tf if (tf_exists or imports) else False diff --git a/conda_build/develop.py b/conda_build/develop.py index 5e9c892e36..5b83185fdc 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -4,9 +4,9 @@ import sys from os.path import abspath, exists, expanduser, isdir, join -from conda_build.os_utils.external import find_executable -from conda_build.post import mk_relative_osx -from conda_build.utils import check_call_env, get_site_packages, rec_glob +from .os_utils.external import find_executable +from .post import mk_relative_osx +from .utils import check_call_env, get_site_packages, on_mac, rec_glob def relink_sharedobjects(pkg_path, build_prefix): @@ -24,7 +24,7 @@ def relink_sharedobjects(pkg_path, build_prefix): # find binaries in package dir and make them relocatable bin_files = rec_glob(pkg_path, [".so"]) for b_file in bin_files: - if sys.platform == "darwin": + if on_mac: mk_relative_osx(b_file, build_prefix) else: print("Nothing to do on Linux or Windows.") diff --git a/conda_build/environ.py b/conda_build/environ.py index 85733d97d1..319b4e7bf4 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -14,14 +14,7 @@ from glob import glob from os.path import join, normpath -from conda_build import utils -from conda_build.exceptions import BuildLockError, DependencyNeedsBuildingError -from conda_build.features import feature_list -from conda_build.index import get_build_index -from conda_build.os_utils import external -from conda_build.utils import ensure_list, env_var, prepend_bin_path -from conda_build.variants import get_default_variant - +from . import utils from .conda_interface import ( CondaError, LinkError, @@ -42,7 +35,19 @@ root_dir, ) from .deprecations import deprecated +from .exceptions import BuildLockError, DependencyNeedsBuildingError +from .features import feature_list +from .index import get_build_index from .metadata import MetaData +from .os_utils import external +from .utils import ( + ensure_list, + env_var, + on_mac, + on_win, + prepend_bin_path, +) +from .variants import get_default_variant # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. @@ -149,7 +154,7 @@ def verify_git_repo( stderr=stderr, ) except subprocess.CalledProcessError: - if sys.platform == "win32" and cache_dir.startswith("/"): + if on_win and cache_dir.startswith("/"): cache_dir = utils.convert_unix_path_to_win(cache_dir) remote_details = utils.check_output_env( [git_exe, "--git-dir", cache_dir, "remote", "-v"], @@ -161,7 +166,7 @@ def verify_git_repo( # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. - if sys.platform == "win32" and remote_url.startswith("/"): + if on_win and remote_url.startswith("/"): remote_url = utils.convert_unix_path_to_win(git_url) if os.path.exists(remote_url): @@ -498,10 +503,10 @@ def meta_vars(meta: MetaData, skip_build_id=False): else: d[var_name] = value warnings.warn( - "The environment variable '%s' is being passed through with value '%s'. 
" + f"The environment variable '{var_name}' is being passed through with value " + f"'{'' if meta.config.suppress_variables else value}'. " "If you are splitting build and test phases with --no-test, please ensure " - "that this value is also set similarly at test time." - % (var_name, "" if meta.config.suppress_variables else value), + "that this value is also set similarly at test time.", UserWarning, ) @@ -520,7 +525,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): git_url = meta.get_value("source/0/git_url") if os.path.exists(git_url): - if sys.platform == "win32": + if on_win: git_url = utils.convert_unix_path_to_win(git_url) # If git_url is a relative path instead of a url, convert it to an abspath git_url = normpath(join(meta.path, git_url)) @@ -560,7 +565,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): @lru_cache(maxsize=None) def get_cpu_count(): - if sys.platform == "darwin": + if on_mac: # multiprocessing.cpu_count() is not reliable on OSX # See issue #645 on github.com/conda/conda-build out, _ = subprocess.Popen( @@ -756,7 +761,7 @@ def os_vars(m, prefix): if not m.config.activate: d = prepend_bin_path(d, m.config.host_prefix) - if sys.platform == "win32": + if on_win: windows_vars(m, get_default, prefix) else: unix_vars(m, get_default, prefix) @@ -1226,8 +1231,8 @@ def clean_pkg_cache(dist, config): locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) with utils.try_acquire_locks(locks, timeout=config.timeout): rmplan = [ - "RM_EXTRACTED {0} local::{0}".format(dist), - "RM_FETCHED {0} local::{0}".format(dist), + f"RM_EXTRACTED {dist} local::{dist}", + f"RM_FETCHED {dist} local::{dist}", ] execute_plan(rmplan) diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index 857141fb4f..f38706786a 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -96,9 +96,7 @@ def __str__(self): @property def message(self): - return "Unsatisfiable dependencies for platform {}: {}".format( - self.subdir, set(self.matchspecs) - ) + return f"Unsatisfiable dependencies for platform {self.subdir}: {set(self.matchspecs)}" class RecipeError(CondaBuildException): diff --git a/conda_build/features.py b/conda_build/features.py index 4b506cbc80..414b15333f 100644 --- a/conda_build/features.py +++ b/conda_build/features.py @@ -16,7 +16,7 @@ if key in env_vars: if value not in ("0", "1"): sys.exit( - "Error: did not expect environment variable '%s' " - "being set to '%s' (not '0' or '1')" % (key, value) + f"Error: did not expect environment variable '{key}' " + f"being set to '{value}' (not '0' or '1')" ) feature_list.append((key[8:].lower(), bool(int(value)))) diff --git a/conda_build/index.py b/conda_build/index.py index 5f296a164d..8ac164dccf 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -48,8 +48,7 @@ from yaml.reader import ReaderError from yaml.scanner import ScannerError -from conda_build import conda_interface, utils - +from . 
import conda_interface, utils from .conda_interface import ( CondaError, CondaHTTPError, @@ -67,10 +66,10 @@ CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSIONS, - FileNotFoundError, JSONDecodeError, get_logger, glob, + on_win, ) log = get_logger(__name__) @@ -123,9 +122,7 @@ def map(self, func, *iterables): MAX_THREADS_DEFAULT = ( os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 ) -if ( - sys.platform == "win32" -): # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a +if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT) LOCK_TIMEOUT_SECS = 3 * 3600 LOCKFILE_NAME = ".lock" @@ -1725,10 +1722,8 @@ def _create_patch_instructions(self, subdir, repodata, patch_generator=None): else: if patch_generator: raise ValueError( - "Specified metadata patch file '{}' does not exist. Please try an absolute " - "path, or examine your relative path carefully with respect to your cwd.".format( - patch_generator - ) + f"Specified metadata patch file '{patch_generator}' does not exist. Please try an absolute " + "path, or examine your relative path carefully with respect to your cwd." ) return {} diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index c531bcc9d1..59772d1f7f 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -21,28 +21,30 @@ from conda.models.records import PrefixRecord from conda.resolve import MatchSpec -from conda_build.conda_interface import ( +from . import conda_interface +from .conda_interface import ( linked_data, specs_from_args, ) -from conda_build.os_utils.ldd import ( +from .deprecations import deprecated +from .os_utils.ldd import ( get_linkages, get_package_obj_files, get_untracked_obj_files, ) -from conda_build.os_utils.liefldd import codefile_class, machofile -from conda_build.os_utils.macho import get_rpaths, human_filetype -from conda_build.utils import ( +from .os_utils.liefldd import codefile_class, machofile +from .os_utils.macho import get_rpaths, human_filetype +from .utils import ( comma_join, ensure_list, get_logger, + on_linux, + on_mac, + on_win, package_has_file, + samefile, ) -from . import conda_interface -from .deprecations import deprecated -from .utils import on_mac, on_win, samefile - log = get_logger(__name__) @@ -156,9 +158,9 @@ def print_linkages( def replace_path(binary, path, prefix): - if sys.platform.startswith("linux"): + if on_linux: return abspath(path) - elif sys.platform.startswith("darwin"): + elif on_mac: if path == basename(binary): return abspath(join(prefix, binary)) if "@rpath" in path: diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index eaadc3a100..6933f631ad 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -214,7 +214,7 @@ def load_setup_py_data( else: raise CondaBuildException( "Could not render recipe - need modules " - 'installed in root env. Import error was "{}"'.format(e) + f'installed in root env. 
Import error was "{e}"' ) # cleanup: we must leave the source tree empty unless the source code is already present rm_rf(os.path.join(m.config.work_dir, "_load_setup_py_data.py")) @@ -349,11 +349,11 @@ def pin_compatible( compatibility = apply_pin_expressions(version, min_pin, max_pin) if not compatibility and not permit_undefined_jinja and not bypass_env_check: - check = re.compile(r"pin_compatible\s*\(\s*[" '"]{}[' '"]'.format(package_name)) + check = re.compile(rf'pin_compatible\s*\(\s*["]{package_name}["]') if check.search(m.extract_requirements_text()): raise RuntimeError( - "Could not get compatibility information for {} package. " - "Is it one of your host dependencies?".format(package_name) + f"Could not get compatibility information for {package_name} package. " + "Is it one of your host dependencies?" ) return ( " ".join((package_name, compatibility)) @@ -409,10 +409,7 @@ def pin_subpackage_against_outputs( ] ) else: - pin = "{} {}".format( - sp_m.name(), - apply_pin_expressions(sp_m.version(), min_pin, max_pin), - ) + pin = f"{sp_m.name()} {apply_pin_expressions(sp_m.version(), min_pin, max_pin)}" else: pin = matching_package_keys[0][0] return pin @@ -463,9 +460,9 @@ def pin_subpackage( pin = subpackage_name if not permit_undefined_jinja and not allow_no_other_outputs: raise ValueError( - "Didn't find subpackage version info for '{}', which is used in a" + f"Didn't find subpackage version info for '{subpackage_name}', which is used in a" " pin_subpackage expression. Is it actually a subpackage? If not, " - "you want pin_compatible instead.".format(subpackage_name) + "you want pin_compatible instead." ) return pin diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 2833974066..976cc1b33a 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -3,8 +3,8 @@ import re import string -from conda_build import exceptions -from conda_build.utils import comma_join +from . import exceptions +from .utils import comma_join allowed_license_families = """ AGPL @@ -109,7 +109,7 @@ def ensure_valid_license_family(meta): if remove_special_characters(normalize(license_family)) not in allowed_families: raise RuntimeError( exceptions.indent( - "about/license_family '%s' not allowed. Allowed families are %s." - % (license_family, comma_join(sorted(allowed_license_families))) + f"about/license_family '{license_family}' not allowed. " + f"Allowed families are {comma_join(sorted(allowed_license_families))}." ) ) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index f25e57f280..906ce0b628 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -17,11 +17,13 @@ from bs4 import UnicodeDammit -from conda_build import exceptions, utils, variants -from conda_build.config import Config, get_or_merge_config -from conda_build.features import feature_list -from conda_build.license_family import ensure_valid_license_family -from conda_build.utils import ( +from . 
import exceptions, utils, variants +from .conda_interface import MatchSpec, envs_dirs, md5_file +from .config import Config, get_or_merge_config +from .deprecations import deprecated +from .features import feature_list +from .license_family import ensure_valid_license_family +from .utils import ( DEFAULT_SUBDIRS, HashableDict, ensure_list, @@ -29,11 +31,9 @@ find_recipe, get_installed_packages, insert_variant_versions, + on_win, ) -from .conda_interface import MatchSpec, envs_dirs, md5_file -from .deprecations import deprecated - try: import yaml except ImportError: @@ -75,8 +75,6 @@ def remove_constructor(cls, tag): StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:float") StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:int") -on_win = sys.platform == "win32" - # arches that don't follow exact names in the subdir need to be mapped here ARCH_MAP = {"32": "x86", "64": "x86_64"} @@ -347,9 +345,7 @@ def _trim_None_strings(meta_dict): meta_dict[key] = keep else: log.debug( - "found unrecognized data type in dictionary: {}, type: {}".format( - value, type(value) - ) + f"found unrecognized data type in dictionary: {value}, type: {type(value)}" ) return meta_dict @@ -466,14 +462,14 @@ def parse(data, config, path=None): or (hasattr(res[field], "__iter__") and not isinstance(res[field], str)) ): raise RuntimeError( - "The %s field should be a dict or list of dicts, not " - "%s in file %s." % (field, res[field].__class__.__name__, path) + f"The {field} field should be a dict or list of dicts, not " + f"{res[field].__class__.__name__} in file {path}." ) else: if not isinstance(res[field], dict): raise RuntimeError( - "The %s field should be a dict, not %s in file %s." - % (field, res[field].__class__.__name__, path) + f"The {field} field should be a dict, not " + f"{res[field].__class__.__name__} in file {path}." ) ensure_valid_fields(res) @@ -973,7 +969,7 @@ def finalize_outputs_pass( log = utils.get_logger(__name__) log.warn( "Could not finalize metadata due to missing dependencies: " - "{}".format(e.packages) + f"{e.packages}" ) outputs[ ( @@ -1302,8 +1298,8 @@ def parse_until_resolved( bypass_env_check=bypass_env_check, ) sys.exit( - "Undefined Jinja2 variables remain ({}). Please enable " - "source downloading and try again.".format(self.undefined_jinja_vars) + f"Undefined Jinja2 variables remain ({self.undefined_jinja_vars}). Please enable " + "source downloading and try again." ) # always parse again at the end, too. 
@@ -1562,20 +1558,18 @@ def ms_depends(self, typ="run"): for c in "=!@#$%^&*:;\"'\\|<>?/": if c in ms.name: sys.exit( - "Error: bad character '%s' in package name " - "dependency '%s'" % (c, ms.name) + f"Error: bad character '{c}' in package name " + f"dependency '{ms.name}'" ) parts = spec.split() if len(parts) >= 2: if parts[1] in {">", ">=", "=", "==", "!=", "<", "<="}: msg = ( - "Error: bad character '%s' in package version " - "dependency '%s'" % (parts[1], ms.name) + f"Error: bad character '{parts[1]}' in package version " + f"dependency '{ms.name}'" ) if len(parts) >= 3: - msg += "\nPerhaps you meant '{} {}{}'".format( - ms.name, parts[1], parts[2] - ) + msg += f"\nPerhaps you meant '{ms.name} {parts[1]}{parts[2]}'" sys.exit(msg) specs[spec] = ms return list(specs.values()) @@ -1780,7 +1774,7 @@ def has_prefix_files(self): ret = ensure_list(self.get_value("build/has_prefix_files", [])) if not isinstance(ret, list): raise RuntimeError("build/has_prefix_files should be a list of paths") - if sys.platform == "win32": + if on_win: if any("\\" in i for i in ret): raise RuntimeError( "build/has_prefix_files paths must use / " @@ -1795,7 +1789,7 @@ def ignore_prefix_files(self): "build/ignore_prefix_files should be boolean or a list of paths " "(optionally globs)" ) - if sys.platform == "win32": + if on_win: if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/ignore_prefix_files paths must use / " @@ -1827,7 +1821,7 @@ def binary_relocation(self): "build/binary_relocation should be boolean or a list of paths " "(optionally globs)" ) - if sys.platform == "win32": + if on_win: if isinstance(ret, list) and any("\\" in i for i in ret): raise RuntimeError( "build/binary_relocation paths must use / " @@ -1846,7 +1840,7 @@ def binary_has_prefix_files(self): raise RuntimeError( "build/binary_has_prefix_files should be a list of paths" ) - if sys.platform == "win32": + if on_win: if any("\\" in i for i in ret): raise RuntimeError( "build/binary_has_prefix_files paths must use / " @@ -1886,7 +1880,7 @@ def _get_contents( with open(self.meta_path) as fd: return fd.read() - from conda_build.jinja_context import ( + from .jinja_context import ( FilteredLoader, UndefinedNeverFail, context_processor, @@ -1968,9 +1962,7 @@ def _get_contents( if "'None' has not attribute" in str(ex): ex = "Failed to run jinja context function" sys.exit( - "Error: Failed to render jinja template in {}:\n{}".format( - self.meta_path, str(ex) - ) + f"Error: Failed to render jinja template in {self.meta_path}:\n{str(ex)}" ) finally: if "CONDA_BUILD_STATE" in os.environ: @@ -2102,7 +2094,7 @@ def get_recipe_text( self.name(), getattr(self, "type", None) ) else: - from conda_build.render import output_yaml + from .render import output_yaml recipe_text = output_yaml(self) recipe_text = _filter_recipe_text(recipe_text, extract_pattern) @@ -2496,7 +2488,7 @@ def get_output_metadata_set( permit_unsatisfiable_variants=False, bypass_env_check=False, ): - from conda_build.source import provide + from .source import provide out_metadata_map = {} if self.final: @@ -2902,8 +2894,8 @@ def _get_used_vars_output_script(self): else: log = utils.get_logger(__name__) log.warn( - "Not detecting used variables in output script {}; conda-build only knows " - "how to search .sh and .bat files right now.".format(script) + f"Not detecting used variables in output script {script}; conda-build only knows " + "how to search .sh and .bat files right now." 
) return used_vars diff --git a/conda_build/metapackage.py b/conda_build/metapackage.py index 0566836030..5c7b57c7b5 100644 --- a/conda_build/metapackage.py +++ b/conda_build/metapackage.py @@ -2,8 +2,8 @@ # SPDX-License-Identifier: BSD-3-Clause from collections import defaultdict -from conda_build.config import Config -from conda_build.metadata import MetaData +from .config import Config +from .metadata import MetaData def create_metapackage( @@ -19,7 +19,7 @@ def create_metapackage( config=None, ): # local import to avoid circular import, we provide create_metapackage in api - from conda_build.api import build + from .api import build if not config: config = Config() diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index 30efb3d45d..daaf163490 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -8,14 +8,25 @@ import sys from os.path import basename, dirname, isdir, isfile, join -ISWIN = sys.platform.startswith("win") +from .deprecations import deprecated +from .utils import on_win +deprecated.constant( + "24.1", + "24.3", + "ISWIN", + on_win, + addendum="Use `conda_build.utils.on_win` instead.", +) + +@deprecated("24.1", "24.3", addendum="Use `os.makedirs(exist_ok=True)` instead.") def _force_dir(dirname): if not isdir(dirname): os.makedirs(dirname) +@deprecated("24.1", "24.3") def _error_exit(exit_message): sys.exit("[noarch_python] %s" % exit_message) @@ -26,7 +37,7 @@ def rewrite_script(fn, prefix): noarch pacakges""" # Load and check the source file for not being a binary - src = join(prefix, "Scripts" if ISWIN else "bin", fn) + src = join(prefix, "Scripts" if on_win else "bin", fn) encoding = locale.getpreferredencoding() # if default locale is ascii, allow UTF-8 (a reasonably modern ASCII extension) if encoding == "ANSI_X3.4-1968": @@ -35,17 +46,17 @@ def rewrite_script(fn, prefix): try: data = fi.read() except UnicodeDecodeError: # file is binary - _error_exit("Noarch package contains binary script: %s" % fn) + sys.exit("[noarch_python] Noarch package contains binary script: %s" % fn) src_mode = os.stat(src).st_mode os.unlink(src) # Get rid of '-script.py' suffix on Windows - if ISWIN and fn.endswith("-script.py"): + if on_win and fn.endswith("-script.py"): fn = fn[:-10] # Rewrite the file to the python-scripts directory dst_dir = join(prefix, "python-scripts") - _force_dir(dst_dir) + os.makedirs(dst_dir, exist_ok=True) dst = join(dst_dir, fn) with open(dst, "w") as fo: fo.write(data) @@ -69,12 +80,12 @@ def handle_file(f, d, prefix): elif "site-packages" in f: nsp = join(prefix, "site-packages") - _force_dir(nsp) + os.makedirs(nsp, exist_ok=True) g = f[f.find("site-packages") :] dst = join(prefix, g) dst_dir = dirname(dst) - _force_dir(dst_dir) + os.makedirs(dst_dir, exist_ok=True) shutil.move(path, dst) d["site-packages"].append(g[14:]) @@ -103,7 +114,7 @@ def populate_files(m, files, prefix, entry_point_scripts=None): handle_file(f, d, prefix) # Windows path conversion - if ISWIN: + if on_win: for fns in (d["site-packages"], d["Examples"]): for i, fn in enumerate(fns): fns[i] = fn.replace("\\", "/") @@ -119,10 +130,10 @@ def populate_files(m, files, prefix, entry_point_scripts=None): def transform(m, files, prefix): bin_dir = join(prefix, "bin") - _force_dir(bin_dir) + os.makedirs(bin_dir, exist_ok=True) scripts_dir = join(prefix, "Scripts") - _force_dir(scripts_dir) + os.makedirs(scripts_dir, exist_ok=True) name = m.name() diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 
215f395f00..8b84833c00 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause import os import stat -import sys from glob import glob from os.path import expanduser, isfile, join -from conda_build.conda_interface import root_dir +from ..conda_interface import root_dir +from ..utils import on_win def find_executable(executable, prefix=None, all_matches=False): @@ -14,7 +14,7 @@ def find_executable(executable, prefix=None, all_matches=False): # in other code global dir_paths result = None - if sys.platform == "win32": + if on_win: dir_paths = [ join(root_dir, "Scripts"), join(root_dir, "Library\\mingw-w64\\bin"), @@ -36,7 +36,7 @@ def find_executable(executable, prefix=None, all_matches=False): dir_paths.insert(0, join(prefix, "bin")) dir_paths.extend(os.environ["PATH"].split(os.pathsep)) - if sys.platform == "win32": + if on_win: exts = (".exe", ".bat", "") else: exts = ("",) @@ -47,7 +47,7 @@ def find_executable(executable, prefix=None, all_matches=False): path = expanduser(join(dir_path, executable + ext)) if isfile(path): st = os.stat(path) - if sys.platform == "win32" or st.st_mode & stat.S_IEXEC: + if on_win or st.st_mode & stat.S_IEXEC: if all_matches: all_matches_found.append(path) else: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index ed68a461aa..70267d08f4 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -12,12 +12,11 @@ from conda.models.records import PrefixRecord -from conda_build.conda_interface import untracked -from conda_build.os_utils.macho import otool -from conda_build.os_utils.pyldd import codefile_class, inspect_linkages, machofile - +from ..conda_interface import untracked from ..deprecations import deprecated from ..utils import on_linux, on_mac +from .macho import otool +from .pyldd import codefile_class, inspect_linkages, machofile LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 0c47fd2533..535b924771 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -6,7 +6,6 @@ import json import os import struct -import sys import threading from collections.abc import Hashable from fnmatch import fnmatch @@ -15,6 +14,7 @@ from subprocess import PIPE, Popen from ..deprecations import deprecated +from ..utils import on_mac, on_win, rec_glob from .external import find_executable # lief cannot handle files it doesn't know about gracefully @@ -923,12 +923,12 @@ def get_static_lib_exports_nope(file): def get_static_lib_exports_nm(filename): nm_exe = find_executable("nm") - if sys.platform == "win32" and not nm_exe: + if on_win and not nm_exe: nm_exe = "C:\\msys64\\mingw64\\bin\\nm.exe" if not nm_exe or not os.path.exists(nm_exe): return None flags = "-Pg" - if sys.platform == "darwin": + if on_mac: flags = "-PgUj" try: out, _ = Popen( @@ -971,8 +971,6 @@ def get_static_lib_exports_dumpbin(filename): ] results = [] for p in programs: - from conda_build.utils import rec_glob - dumpbin = rec_glob(os.path.join(pfx86, p), ("dumpbin.exe",)) for result in dumpbin: try: @@ -984,7 +982,7 @@ def get_static_lib_exports_dumpbin(filename): results.append((result, version)) except: pass - from conda_build.conda_interface import VersionOrder + from ..conda_interface import VersionOrder results = sorted(results, key=lambda x: VersionOrder(x[1])) dumpbin_exe = results[-1][0] 
@@ -1042,7 +1040,7 @@ def get_exports(filename, arch="native", enable_static=False): os.path.exists(filename) and (filename.endswith(".a") or filename.endswith(".lib")) and is_archive(filename) - ) and sys.platform != "win32": + ) and not on_win: # syms = os.system('nm -g {}'.filename) # on macOS at least: # -PgUj is: @@ -1050,11 +1048,11 @@ def get_exports(filename, arch="native", enable_static=False): # g: global (exported) only # U: not undefined # j: name only - if debug_static_archives or sys.platform == "win32": + if debug_static_archives or on_win: exports = get_static_lib_exports_externally(filename) # Now, our own implementation which does not require nm and can # handle .lib files. - if sys.platform == "win32": + if on_win: # Sorry, LIEF does not handle COFF (only PECOFF) and object files are COFF. exports2 = exports else: diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index eb13669049..950ebd6d57 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -7,8 +7,9 @@ from itertools import islice from subprocess import PIPE, STDOUT, CalledProcessError, Popen, check_output -from conda_build import utils -from conda_build.os_utils.external import find_preferably_prefixed_executable +from .. import utils +from ..utils import on_mac +from .external import find_preferably_prefixed_executable NO_EXT = ( ".py", @@ -76,7 +77,7 @@ def human_filetype(path, build_prefix): if not lines[0].startswith((path, "Mach header")): raise ValueError( "Expected `otool -h` output to start with" - " Mach header or {}, got:\n{}".format(path, output) + f" Mach header or {path}, got:\n{output}" ) assert lines[0].startswith((path, "Mach header")), path @@ -183,8 +184,8 @@ def find_apple_cctools_executable(name, build_prefix, nofail=False): except Exception as e: log = utils.get_logger(__name__) log.error( - "ERROR :: Found `{}` but is is an Apple Xcode stub executable\n" - "and it returned an error:\n{}".format(tool, e.output) + f"ERROR :: Found `{tool}` but is is an Apple Xcode stub executable\n" + f"and it returned an error:\n{e.output}" ) raise e tool = tool_xcr @@ -356,6 +357,6 @@ def install_name_change(path, build_prefix, cb_func, dylibs, verbose=False): if __name__ == "__main__": - if sys.platform == "darwin": + if on_mac: for path in "/bin/ls", "/etc/locate.rc": print(path, is_macho(path)) diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index d65e0cbc3b..7f1eb81a8b 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -11,9 +11,8 @@ import sys from pathlib import Path -from conda_build.utils import ensure_list, get_logger - from ..deprecations import deprecated +from ..utils import ensure_list, get_logger, on_linux, on_mac, on_win logging.basicConfig(level=logging.INFO) @@ -1095,7 +1094,7 @@ def _trim_sysroot(sysroot): def _get_arch_if_native(arch): if arch == "native": - if sys.platform == "win32": + if on_win: arch = "x86_64" if sys.maxsize > 2**32 else "i686" else: _, _, _, _, arch = os.uname() @@ -1248,9 +1247,7 @@ def otool(*args): args.filename, resolve_filenames=False, recurse=False, arch=args.arch_type ) print( - "Shared libs used (non-recursively) by {} are:\n{}".format( - args.filename, shared_libs - ) + f"Shared libs used (non-recursively) by {args.filename} are:\n{shared_libs}" ) return 0 return 1 @@ -1280,11 +1277,7 @@ def ldd(*args): shared_libs = inspect_linkages( args.filename, resolve_filenames=False, recurse=True ) - print( - "Shared libs used (recursively) by {} 
are:\n{}".format( - args.filename, shared_libs - ) - ) + print(f"Shared libs used (recursively) by {args.filename} are:\n{shared_libs}") return 0 return 1 @@ -1311,7 +1304,7 @@ def main_maybe_test(): tool = sys.argv[2] if tool != "otool" and tool != "ldd": - if sys.platform == "darwin": + if on_mac: tool = "otool" else: tool = "ldd" @@ -1333,14 +1326,14 @@ def main_maybe_test(): resolve_filenames=False, recurse=False, ) - if sys.platform == "darwin": + if on_mac: test_that = functools.partial(inspect_linkages_otool) SOEXT = "dylib" elif tool == "ldd": test_this = functools.partial( inspect_linkages, sysroot=sysroot, resolve_filenames=True, recurse=True ) - if sys.platform.startswith("linux"): + if on_linux: test_that = functools.partial(inspect_linkages_ldd) SOEXT = "so" # Find a load of dylibs or elfs and compare @@ -1363,11 +1356,9 @@ def main_maybe_test(): else: that = this print("\n".join(this)) - assert set(this) == set( - that - ), "py-ldd result incorrect for {}, this:\n{}\nvs that:\n{}".format( - codefile, set(this), set(that) - ) + assert ( + set(this) == set(that) + ), f"py-ldd result incorrect for {codefile}, this:\n{set(this)}\nvs that:\n{set(that)}" else: return main(sys.argv) diff --git a/conda_build/post.py b/conda_build/post.py index 18e2723531..558ae50bc9 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -37,17 +37,19 @@ from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord -from conda_build import utils -from conda_build.conda_interface import ( +from . import utils +from .conda_interface import ( TemporaryDirectory, lchmod, md5_file, walk_prefix, ) -from conda_build.exceptions import OverDependingError, OverLinkingError, RunPathError -from conda_build.inspect_pkg import which_package -from conda_build.os_utils import external, macho -from conda_build.os_utils.liefldd import ( +from .deprecations import deprecated +from .exceptions import OverDependingError, OverLinkingError, RunPathError +from .inspect_pkg import which_package +from .metadata import MetaData +from .os_utils import external, macho +from .os_utils.liefldd import ( get_exports_memoized, get_linkages_memoized, get_rpaths_raw, @@ -55,16 +57,14 @@ have_lief, set_rpath, ) -from conda_build.os_utils.pyldd import ( +from .os_utils.pyldd import ( DLLfile, EXEfile, codefile_class, elffile, machofile, ) - -from .deprecations import deprecated -from .metadata import MetaData +from .utils import linked_data_no_multichannels, on_mac, on_win, prefix_files filetypes_for_platform = { "win": (DLLfile, EXEfile), @@ -127,7 +127,7 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): py_exec = "#!" + ( "/bin/bash " + prefix + "/bin/pythonw" - if sys.platform == "darwin" and osx_is_app + if on_mac and osx_is_app else prefix + "/bin/" + basename(build_python) ) if bytes_ and hasattr(py_exec, "encode"): @@ -198,13 +198,11 @@ def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): except OSError as e: fn = basename(str(e).split()[-1]) raise OSError( - "Tried to merge folder {egg_path} into {sp_dir}, but {fn}" + f"Tried to merge folder {egg_path} into {sp_dir}, but {fn}" " exists in both locations. Please either add " "build/preserve_egg_dir: True to meta.yaml, or manually " "remove the file during your install process to avoid " - "this conflict.".format( - egg_path=egg_path, sp_dir=sp_dir, fn=fn - ) + "this conflict." 
) else: shutil.move(join(egg_path, fn), join(sp_dir, fn)) @@ -276,7 +274,7 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): unskipped_files = set(files) - skipped_files for fn in unskipped_files: # omit files in Library/bin, Scripts, and the root prefix - they are not generally imported - if sys.platform == "win32": + if on_win: if any( [ fn.lower().startswith(start) @@ -300,7 +298,7 @@ def compile_missing_pyc(files, cwd, python_exe, skip_compile_pyc=()): else: print("compiling .pyc files...") # We avoid command lines longer than 8190 - if sys.platform == "win32": + if on_win: limit = 8190 else: limit = 32760 @@ -400,8 +398,8 @@ def find_lib(link, prefix, files, path=None): else: file_names[link].sort() print( - "Found multiple instances of %s (%s). " - "Choosing the first one." % (link, file_names[link]) + f"Found multiple instances of {link} ({file_names[link]}). " + "Choosing the first one." ) return file_names[link][0] print("Don't know how to find %s, skipping" % link) @@ -594,15 +592,11 @@ def mk_relative_linux(f, prefix, rpaths=("lib",), method=None): except CalledProcessError: if method == "patchelf": print( - "ERROR :: `patchelf --print-rpath` failed for {}, but patchelf was specified".format( - elf - ) + f"ERROR :: `patchelf --print-rpath` failed for {elf}, but patchelf was specified" ) elif method != "LIEF": print( - "WARNING :: `patchelf --print-rpath` failed for {}, will proceed with LIEF (was {})".format( - elf, method - ) + f"WARNING :: `patchelf --print-rpath` failed for {elf}, will proceed with LIEF (was {method})" ) method = "LIEF" else: @@ -612,9 +606,7 @@ def mk_relative_linux(f, prefix, rpaths=("lib",), method=None): existing2, _, _ = get_rpaths_raw(elf) if existing_pe and existing_pe != existing2: print( - "WARNING :: get_rpaths_raw()={} and patchelf={} disagree for {} :: ".format( - existing2, existing_pe, elf - ) + f"WARNING :: get_rpaths_raw()={existing2} and patchelf={existing_pe} disagree for {elf} :: " ) # Use LIEF if method is LIEF to get the initial value? if method == "LIEF": @@ -786,8 +778,6 @@ def library_nature( addendum="Query `conda.core.prefix_data.PrefixData` instead.", ) def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): - from conda_build.utils import linked_data_no_multichannels - names = utils.ensure_list(names) return [prec for prec in linked_data_no_multichannels(prefix) if prec.name in names] @@ -1123,16 +1113,14 @@ def _lookup_in_sysroots_and_whitelist( if len(pkgs): _print_msg( errors, - "{}: {} found in CDT/compiler package {}".format( - info_prelude, n_dso_p, pkgs[0] - ), + f"{info_prelude}: {n_dso_p} found in CDT/compiler package {pkgs[0]}", verbose=verbose, ) else: _print_msg( errors, - "{}: {} not found in any CDT/compiler package," - " nor the whitelist?!".format(msg_prelude, n_dso_p), + f"{msg_prelude}: {n_dso_p} not found in any CDT/compiler package," + " nor the whitelist?!", verbose=verbose, ) if not in_sysroots: @@ -1154,8 +1142,8 @@ def _lookup_in_sysroots_and_whitelist( if not in_whitelist and not in_sysroots: _print_msg( errors, - "{}: {} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" - ".. is this binary repackaging?".format(msg_prelude, needed_dso), + f"{msg_prelude}: {needed_dso} not found in packages, sysroot(s) nor the missing_dso_whitelist.\n" + ".. 
is this binary repackaging?", verbose=verbose, ) @@ -1253,9 +1241,7 @@ def _show_linking_messages( for sysroot, sr_files in sysroots.items(): _print_msg( errors, - " INFO: sysroot: '{}' files: '{}'".format( - sysroot, sorted(list(sr_files), reverse=True)[1:5] - ), + f" INFO: sysroot: '{sysroot}' files: '{sorted(list(sr_files), reverse=True)[1:5]}'", verbose=verbose, ) for f in files: @@ -1309,9 +1295,7 @@ def _show_linking_messages( elif needed_dso.startswith("$PATH"): _print_msg( errors, - "{}: {} found in build prefix; should never happen".format( - err_prelude, needed_dso - ), + f"{err_prelude}: {needed_dso} found in build prefix; should never happen", verbose=verbose, ) else: @@ -1440,11 +1424,11 @@ def check_overlinking_impl( # .. and in that sysroot there are 3 suddirs in which we may search for DSOs. sysroots = ["/usr/lib", "/opt/X11", "/System/Library/Frameworks"] whitelist = DEFAULT_MAC_WHITELIST - build_is_host = True if sys.platform == "darwin" else False + build_is_host = True if on_mac else False elif subdir.startswith("win"): sysroots = ["C:/Windows"] whitelist = DEFAULT_WIN_WHITELIST - build_is_host = True if sys.platform == "win-32" else False + build_is_host = True if on_win else False whitelist += missing_dso_whitelist or [] @@ -1452,8 +1436,6 @@ def check_overlinking_impl( # the first sysroot is more important than others. sysroots_files = dict() for sysroot in sysroots: - from conda_build.utils import prefix_files - srs = sysroot if sysroot.endswith("/") else sysroot + "/" sysroot_files = prefix_files(sysroot) sysroot_files = [p.replace("\\", "/") for p in sysroot_files] @@ -1590,19 +1572,15 @@ def check_overlinking_impl( if found_interpreted_and_interpreter: _print_msg( errors, - "{}: Interpreted package '{}' is interpreted by '{}'".format( - info_prelude, pkg_vendored_dist.name, lib.name - ), + f"{info_prelude}: Interpreted package '{pkg_vendored_dist.name}' is interpreted by '{lib.name}'", verbose=verbose, ) elif package_nature[lib] != "non-library": _print_msg( errors, - "{}: {} package {} in requirements/run but it is not used " + f"{msg_prelude}: {package_nature[lib]} package {lib} in requirements/run but it is not used " "(i.e. it is overdepending or perhaps statically linked? " - "If that is what you want then add it to `build/ignore_run_exports`)".format( - msg_prelude, package_nature[lib], lib - ), + "If that is what you want then add it to `build/ignore_run_exports`)", verbose=verbose, ) if len(errors): @@ -1852,8 +1830,8 @@ def check_symlinks(files, prefix, croot): # Symlinks to absolute paths on the system (like /usr) are fine. if real_link_path.startswith(croot): msgs.append( - "%s is a symlink to a path that may not " - "exist after the build is completed (%s)" % (f, link_path) + f"{f} is a symlink to a path that may not " + f"exist after the build is completed ({link_path})" ) if msgs: diff --git a/conda_build/render.py b/conda_build/render.py index 1e8ddae08a..c97f3bbe9f 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -26,17 +26,7 @@ import yaml -import conda_build.index -import conda_build.source as source -from conda_build import environ, exceptions, utils -from conda_build.exceptions import DependencyNeedsBuildingError -from conda_build.metadata import MetaData, combine_top_level_metadata_with_output -from conda_build.variants import ( - filter_by_key_value, - get_package_variants, - list_of_dicts_to_dict_of_lists, -) - +from . 
import environ, exceptions, source, utils from .conda_interface import ( ProgressiveFetchExtract, TemporaryDirectory, @@ -45,9 +35,15 @@ pkgs_dirs, specs_from_url, ) +from .exceptions import DependencyNeedsBuildingError +from .index import get_build_index +from .metadata import MetaData, combine_top_level_metadata_with_output from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 - -# from conda_build.jinja_context import pin_subpackage_against_outputs +from .variants import ( + filter_by_key_value, + get_package_variants, + list_of_dicts_to_dict_of_lists, +) def odict_representer(dumper, data): @@ -329,7 +325,7 @@ def _read_specs_from_package(pkg_loc, pkg_dist): def execute_download_actions(m, actions, env, package_subset=None, require_files=False): subdir = getattr(m.config, f"{env}_subdir") - index, _, _ = conda_build.index.get_build_index( + index, _, _ = get_build_index( subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, output_folder=m.config.output_folder, @@ -741,8 +737,8 @@ def finalize_metadata( m.final = False log = utils.get_logger(__name__) log.warn( - "Returning non-final recipe for {}; one or more dependencies " - "was unsatisfiable:".format(m.dist()) + f"Returning non-final recipe for {m.dist()}; one or more dependencies " + "was unsatisfiable:" ) if build_unsat: log.warn(f"Build: {build_unsat}") @@ -851,7 +847,7 @@ def distribute_variants( top_loop = metadata.get_reduced_variant_set(used_variables) for variant in top_loop: - from conda_build.build import get_all_replacements + from .build import get_all_replacements get_all_replacements(variant) mv = metadata.copy() @@ -921,7 +917,7 @@ def expand_outputs(metadata_tuples): expanded_outputs = OrderedDict() for _m, download, reparse in metadata_tuples: - from conda_build.build import get_all_replacements + from .build import get_all_replacements get_all_replacements(_m.config) from copy import deepcopy diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 7cd48e08ef..e1c061bf73 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -3,8 +3,6 @@ """ Tools for converting CPAN packages to conda recipes. """ - - import codecs import gzip import hashlib @@ -21,20 +19,21 @@ import requests -from conda_build import environ -from conda_build.conda_interface import ( +from .. import environ +from ..conda_interface import ( CondaError, CondaHTTPError, MatchSpec, Resolve, + TemporaryDirectory, TmpDownload, download, get_index, ) -from conda_build.config import get_or_merge_config -from conda_build.utils import check_call_env, on_win -from conda_build.variants import get_default_variant -from conda_build.version import _parse as parse_version +from ..config import Config, get_or_merge_config +from ..utils import check_call_env, on_linux, on_win +from ..variants import get_default_variant +from ..version import _parse as parse_version CPAN_META = """\ {{% set name = "{packagename}" %}} @@ -205,7 +204,7 @@ def __enter__(self): def get_build_dependencies_from_src_archive(package_url, sha256, src_cache): import tarfile - from conda_build import source + from .. 
import source cached_path, _ = source.download_to_cache( src_cache, "", {"url": package_url, "sha256": sha256} @@ -334,14 +333,11 @@ def load_or_pickle(filename_prefix, base_folder, data_partial, key): def install_perl_get_core_modules(version): try: - from conda_build.conda_interface import TemporaryDirectory - from conda_build.config import Config - config = Config() - if sys.platform.startswith("win"): + if on_win: subdirs = ("win-64", "Library", "bin", "perl.exe") - elif sys.platform.startswith("linux"): + elif on_linux: subdirs = ("linux-64", "bin", "perl") else: subdirs = ("osx-64", "bin", "perl") @@ -361,10 +357,8 @@ def install_perl_get_core_modules(version): "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); " 'print join "\n", @modules;', ] - from subprocess import check_output - all_core_modules = ( - check_output(args, shell=False) + subprocess.check_output(args, shell=False) .decode("utf-8") .replace("\r\n", "\n") .split("\n") @@ -456,19 +450,15 @@ def skeletonize( ) if package == "perl": print( - ( - "WARNING: {0} is a Perl core module that is not developed " - + "outside of Perl, so we are skipping creating a recipe " - + "for it." - ).format(orig_package) + f"WARNING: {orig_package} is a Perl core module that is not developed " + f"outside of Perl, so we are skipping creating a recipe " + f"for it." ) continue elif package not in {orig_package, orig_package.replace("::", "-")}: print( - ( - "WARNING: {0} was part of the {1} distribution, so we are " - + "making a recipe for {1} instead." - ).format(orig_package, package) + f"WARNING: {orig_package} was part of the {package} distribution, so we are " + f"making a recipe for {package} instead." ) latest_release_data = get_release_info( @@ -823,13 +813,10 @@ def deps_for_package( ) except InvalidReleaseError: print( - ( - "WARNING: The version of %s listed as a " - + "dependency for %s, %s, is not available on MetaCPAN, " - + "so we are just assuming the latest version is " - + "okay." - ) - % (orig_dist, package, str(dep_version)) + f"WARNING: The version of {orig_dist} listed as a " + f"dependency for {package}, {dep_version}, is not available on MetaCPAN, " + f"so we are just assuming the latest version is " + f"okay." ) dep_version = parse_version("0") @@ -968,10 +955,8 @@ def release_module_dict_direct(cpan_url, cache_dir, module): print(f"INFO :: OK, found 'dependency' in module {module}") if not rel_dict or "dependency" not in rel_dict: print( - "WARNING :: No dependencies found for module {} in distribution {}\n" - "WARNING :: Please check {} and {}".format( - module, distribution, url_module, url_release - ) + f"WARNING :: No dependencies found for module {module} in distribution {distribution}\n" + f"WARNING :: Please check {url_module} and {url_release}" ) return rel_dict @@ -1035,11 +1020,8 @@ def core_module_dict_old(cpan_url, module): # If there was an error, report it except CondaHTTPError as e: sys.exit( - ( - "Error: Could not find module or distribution named" - " %s on MetaCPAN. Error was: %s" - ) - % (module, e.message) + f"Error: Could not find module or distribution named" + f" {module} on MetaCPAN. 
Error was: {e.message}" ) else: mod_dict = {"distribution": "perl"} @@ -1106,12 +1088,10 @@ def get_release_info(cpan_url, cache_dir, core_modules, package, version): core_version = metacpan_api_is_core_version(cpan_url, package) if core_version is not None and (version is None or (version == core_version)): print( - ( - "WARNING: {0} is not available on MetaCPAN, but it's a " - + "core module, so we do not actually need the source file, " - + "and are omitting the URL and MD5 from the recipe " - + "entirely." - ).format(orig_package) + f"WARNING: {orig_package} is not available on MetaCPAN, but it's a " + f"core module, so we do not actually need the source file, " + f"and are omitting the URL and MD5 from the recipe " + f"entirely." ) rel_dict = { "version": str(core_version), diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index d942013d65..e1a4406252 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -41,14 +41,13 @@ from conda.common.io import dashlist -from conda_build import source -from conda_build.conda_interface import TemporaryDirectory, cc_conda_build -from conda_build.config import get_or_merge_config -from conda_build.license_family import allowed_license_families, guess_license_family -from conda_build.utils import ensure_list, rm_rf -from conda_build.variants import DEFAULT_VARIANTS, get_package_variants - +from .. import source +from ..conda_interface import TemporaryDirectory, cc_conda_build +from ..config import get_or_merge_config +from ..license_family import allowed_license_families, guess_license_family from ..metadata import MetaData +from ..utils import ensure_list, rm_rf +from ..variants import DEFAULT_VARIANTS, get_package_variants SOURCE_META = """\ {archive_keys} @@ -790,9 +789,7 @@ def package_to_inputs_dict( commp = commonprefix((package, output_dir)) if commp != output_dir: raise RuntimeError( - "package {} specified with abs path outside of output-dir {}".format( - package, output_dir - ) + f"package {package} specified with abs path outside of output-dir {output_dir}" ) location = package existing_location = existing_recipe_dir( @@ -995,8 +992,8 @@ def skeletonize( stderr = stderr.decode("utf-8") if p.returncode: sys.exit( - "Error: 'git checkout %s' failed (%s).\nInvalid tag?" - % (new_git_tag, stderr.strip()) + f"Error: 'git checkout {new_git_tag}' failed ({stderr.strip()}).\n" + "Invalid tag?" ) if stdout: print(stdout, file=sys.stdout) @@ -1014,9 +1011,8 @@ def skeletonize( DESCRIPTION = sub_description_name else: sys.exit( - "%s does not appear to be a valid R package " - "(no DESCRIPTION file in %s, %s)" - % (location, sub_description_pkg, sub_description_name) + f"{location} does not appear to be a valid R package " + f"(no DESCRIPTION file in {sub_description_pkg}, {sub_description_name})" ) cran_package = get_archive_metadata(DESCRIPTION) @@ -1098,9 +1094,7 @@ def skeletonize( build_number = m.build_number() build_number += 1 if update_policy == "merge-incr-build-num" else 0 if add_maintainer: - new_maintainer = "{indent}{add_maintainer}".format( - indent=INDENT, add_maintainer=add_maintainer - ) + new_maintainer = f"{INDENT}{add_maintainer}" if new_maintainer not in extra_recipe_maintainers: if not len(extra_recipe_maintainers): # We hit this case when there is no existing recipe. 
@@ -1198,9 +1192,7 @@ def skeletonize( ) except: print( - "logic error, file {} should exist, we found it in a dir listing earlier.".format( - package_url - ) + f"logic error, file {package_url} should exist, we found it in a dir listing earlier." ) sys.exit(1) if description_path is None or archive_type == "source": @@ -1387,11 +1379,7 @@ def skeletonize( for s in list(chain(imports, depends, links)): match = VERSION_DEPENDENCY_REGEX.match(s) if not match: - sys.exit( - "Could not parse version from dependency of {}: {}".format( - package, s - ) - ) + sys.exit(f"Could not parse version from dependency of {package}: {s}") name = match.group("name") if name in seen: continue @@ -1406,7 +1394,7 @@ def skeletonize( if archs: sys.exit( "Don't know how to handle archs from dependency of " - "package %s: %s" % (package, s) + f"package {package}: {s}" ) dep_dict[name] = f"{relop}{ver}" @@ -1476,44 +1464,28 @@ def skeletonize( if dep_type == "build": if need_c: deps.append( - "{indent}{{{{ compiler('c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win - ) + f"{INDENT}{{{{ compiler('c') }}}} {sel_src_not_win}" ) deps.append( - "{indent}{{{{ compiler('m2w64_c') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ compiler('m2w64_c') }}}} {sel_src_and_win}" ) if need_cxx: deps.append( - "{indent}{{{{ compiler('cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win - ) + f"{INDENT}{{{{ compiler('cxx') }}}} {sel_src_not_win}" ) deps.append( - "{indent}{{{{ compiler('m2w64_cxx') }}}} {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ compiler('m2w64_cxx') }}}} {sel_src_and_win}" ) if need_f: deps.append( - "{indent}{{{{ compiler('fortran') }}}} {sel}".format( - indent=INDENT, sel=sel_src_not_win - ) + f"{INDENT}{{{{ compiler('fortran') }}}} {sel_src_not_win}" ) deps.append( - "{indent}{{{{ compiler('m2w64_fortran') }}}}{sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ compiler('m2w64_fortran') }}}}{sel_src_and_win}" ) if use_rtools_win: need_c = need_cxx = need_f = need_autotools = need_make = False - deps.append( - "{indent}rtools {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) - ) + deps.append(f"{INDENT}rtools {sel_src_and_win}") # extsoft is legacy. R packages will download rwinlib subprojects # as necessary according to Jeroen Ooms. (may need to disable that # for non-MRO builds or maybe switch to Jeroen's toolchain?) 
@@ -1521,69 +1493,41 @@ def skeletonize( # indent=INDENT, sel=sel_src_and_win)) if need_autotools or need_make or need_git: deps.append( - "{indent}{{{{ posix }}}}filesystem {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ posix }}}}filesystem {sel_src_and_win}" ) if need_git: deps.append(f"{INDENT}{{{{ posix }}}}git") if need_autotools: deps.append( - "{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ posix }}}}sed {sel_src_and_win}" ) deps.append( - "{indent}{{{{ posix }}}}grep {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ posix }}}}grep {sel_src_and_win}" ) + deps.append(f"{INDENT}{{{{ posix }}}}autoconf {sel_src}") deps.append( - "{indent}{{{{ posix }}}}autoconf {sel}".format( - indent=INDENT, sel=sel_src - ) + f"{INDENT}{{{{ posix }}}}automake {sel_src_not_win}" ) deps.append( - "{indent}{{{{ posix }}}}automake {sel}".format( - indent=INDENT, sel=sel_src_not_win - ) - ) - deps.append( - "{indent}{{{{ posix }}}}automake-wrapper{sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ posix }}}}automake-wrapper{sel_src_and_win}" ) deps.append(f"{INDENT}{{{{ posix }}}}pkg-config") if need_make: - deps.append( - "{indent}{{{{ posix }}}}make {sel}".format( - indent=INDENT, sel=sel_src - ) - ) + deps.append(f"{INDENT}{{{{ posix }}}}make {sel_src}") if not need_autotools: deps.append( - "{indent}{{{{ posix }}}}sed {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{ posix }}}}sed {sel_src_and_win}" ) deps.append( - "{indent}{{{{ posix }}}}coreutils {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) - ) - deps.append( - "{indent}{{{{ posix }}}}zip {sel}".format( - indent=INDENT, sel=sel_src_and_win + f"{INDENT}{{{{ posix }}}}coreutils {sel_src_and_win}" ) - ) + deps.append(f"{INDENT}{{{{ posix }}}}zip {sel_src_and_win}") if add_cross_r_base: deps.append(f"{INDENT}cross-r-base {{{{ r_base }}}} {sel_cross}") elif dep_type == "run": if need_c or need_cxx or need_f: deps.append( - "{indent}{{{{native}}}}gcc-libs {sel}".format( - indent=INDENT, sel=sel_src_and_win - ) + f"{INDENT}{{{{native}}}}gcc-libs {sel_src_and_win}" ) if dep_type == "host" or dep_type == "run": @@ -1605,13 +1549,7 @@ def skeletonize( conda_name = "r-" + name.lower() if dep_dict[name]: - deps.append( - "{indent}{name} {version}".format( - name=conda_name, - version=dep_dict[name], - indent=INDENT, - ) - ) + deps.append(f"{INDENT}{conda_name} {dep_dict[name]}") else: deps.append(f"{INDENT}{conda_name}") if recursive: diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index b1194e6a8b..fe69e09d23 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -3,8 +3,6 @@ """ Tools for converting PyPI packages to conda recipes. 
""" - - import keyword import logging import os @@ -23,7 +21,7 @@ import yaml from requests.packages.urllib3.util.url import parse_url -from conda_build.conda_interface import ( +from ..conda_interface import ( StringIO, configparser, default_python, @@ -34,20 +32,21 @@ normalized_version, spec_from_line, ) -from conda_build.config import Config -from conda_build.environ import create_env -from conda_build.license_family import allowed_license_families, guess_license_family -from conda_build.metadata import MetaData -from conda_build.render import FIELDS as EXPECTED_SECTION_ORDER -from conda_build.source import apply_patch -from conda_build.utils import ( +from ..config import Config +from ..environ import create_env +from ..license_family import allowed_license_families, guess_license_family +from ..metadata import MetaData +from ..render import FIELDS as EXPECTED_SECTION_ORDER +from ..source import apply_patch +from ..utils import ( check_call_env, decompressible_exts, ensure_list, + on_win, rm_rf, tar_xf, ) -from conda_build.version import _parse as parse_version +from ..version import _parse as parse_version pypi_example = """ Examples: @@ -336,8 +335,7 @@ def skeletonize( if version: if version not in versions: sys.exit( - "Error: Version %s of %s is not available on PyPI." - % (version, package) + f"Error: Version {version} of {package} is not available on PyPI." ) d["version"] = version else: @@ -1283,9 +1281,9 @@ def get_pkginfo( download(pypiurl, join(config.src_cache, filename)) if hashsum_file(download_path, hash_type) != hash_value: raise RuntimeError( - " Download of {} failed" - " checksum type {} expected value {}. Please" - " try again.".format(package, hash_type, hash_value) + f" Download of {package} failed" + f" checksum type {hash_type} expected value {hash_value}. Please" + " try again." 
) else: print("Using cached download") @@ -1366,7 +1364,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op ) stdlib_dir = join( config.host_prefix, - "Lib" if sys.platform == "win32" else "lib/python%s" % python_version, + "Lib" if on_win else "lib/python%s" % python_version, ) patch = join(temp_dir, "pypi-distutils.patch") @@ -1385,7 +1383,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op stdlib_dir, "distutils", "__pycache__", - "core.cpython-%s%s.pyc" % sys.version_info[:2], + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyc", ) ) rm_rf( @@ -1393,7 +1391,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op stdlib_dir, "distutils", "__pycache__", - "core.cpython-%s%s.pyo" % sys.version_info[:2], + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyo", ) ) else: diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index 76f2e5ea86..f0abb8c747 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -1,32 +1,21 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import argparse -from copy import copy - -from conda_build.license_family import guess_license_family -from conda_build.source import download_to_cache - -try: - import cPickle as pickle -except: - import pickle as pickle - import gzip import hashlib +import pickle import re +from copy import copy from os import chmod, makedirs from os.path import basename, dirname, exists, join, splitext from textwrap import wrap +from urllib.request import urlopen from xml.etree import ElementTree as ET +from ..license_family import guess_license_family +from ..source import download_to_cache from .cran import yaml_quote_string -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen - - # This is used in two places default_architecture = "x86_64" default_distro = "centos6" @@ -337,9 +326,7 @@ def get_repo_dict(repomd_url, data_type, dict_massager, cdt, src_cache): ) assert ( csum == cached_csum - ), "Checksum for {} does not match value in {}".format( - xmlgz_file, repomd_url - ) + ), f"Checksum for {xmlgz_file} does not match value in {repomd_url}" with gzip.open(cached_path, "rb") as gz: xml_content = gz.read() xml_csum = cdt["checksummer"]() @@ -547,9 +534,7 @@ def write_conda_recipes( depends.append(copy_provides) else: print( - "WARNING: Additional dependency of {}, {} not found".format( - package, missing_dep - ) + f"WARNING: Additional dependency of {package}, {missing_dep} not found" ) for depend in depends: dep_entry, dep_name, dep_arch = find_repo_entry_and_arch( @@ -798,9 +783,7 @@ def distro(distro_name): "--distro", type=distro, default=default_distro, - help="Distro to use. Applies to all packages, valid values are: {}".format( - valid_distros() - ), + help=f"Distro to use. 
Applies to all packages, valid values are: {valid_distros()}", ) rpm.add_argument( diff --git a/conda_build/source.py b/conda_build/source.py index 85e64c8292..0db306fb75 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -13,14 +13,23 @@ from pathlib import Path from subprocess import CalledProcessError from typing import Iterable - -from conda_build.conda_interface import CondaHTTPError, url_path -from conda_build.os_utils import external -from conda_build.utils import ( +from urllib.parse import urljoin + +from .conda_interface import ( + CondaHTTPError, + TemporaryDirectory, + download, + hashsum_file, + url_path, +) +from .exceptions import MissingDependency +from .os_utils import external +from .utils import ( LoggingContext, check_call_env, check_output_env, convert_path_for_cygwin_or_msys2, + convert_unix_path_to_win, copy_into, decompressible_exts, ensure_list, @@ -31,17 +40,7 @@ tar_xf, ) -from .conda_interface import TemporaryDirectory, download, hashsum_file -from .exceptions import MissingDependency - log = get_logger(__name__) -if on_win: - from conda_build.utils import convert_unix_path_to_win - -if sys.version_info[0] == 3: - from urllib.parse import urljoin -else: - from urlparse import urljoin git_submod_re = re.compile(r"(?:.+)\.(.+)\.(?:.+)\s(.+)") ext_re = re.compile(r"(.*?)(\.(?:tar\.)?[^.]+)$") @@ -74,8 +73,8 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: log.warn( - "No hash (md5, sha1, sha256) provided for {}. Source download forced. " - "Add hash to recipe to use source cache.".format(unhashed_fn) + f"No hash (md5, sha1, sha256) provided for {unhashed_fn}. Source download forced. " + "Add hash to recipe to use source cache." ) path = join(cache_folder, fn) if isfile(path): @@ -122,9 +121,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): if expected_hash != hashed: rm_rf(path) raise RuntimeError( - "{} mismatch: '{}' != '{}'".format( - tp.upper(), hashed, expected_hash - ) + f"{tp.upper()} mismatch: '{hashed}' != '{expected_hash}'" ) break @@ -253,8 +250,7 @@ def git_mirror_checkout_recursive( if not mirror_dir.startswith(git_cache + os.sep): sys.exit( - "Error: Attempting to mirror to %s which is outside of GIT_CACHE %s" - % (mirror_dir, git_cache) + f"Error: Attempting to mirror to {mirror_dir} which is outside of GIT_CACHE {git_cache}" ) # This is necessary for Cygwin git and m2-git, although it is fixed in newer MSYS2. @@ -302,7 +298,7 @@ def git_mirror_checkout_recursive( except CalledProcessError: msg = ( "Failed to update local git cache. " - "Deleting local cached repo: {} ".format(mirror_dir) + f"Deleting local cached repo: {mirror_dir} " ) print(msg) @@ -323,7 +319,7 @@ def git_mirror_checkout_recursive( except CalledProcessError: # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. 
- if sys.platform == "win32" and git_url.startswith("/"): + if on_win and git_url.startswith("/"): git_url = convert_unix_path_to_win(git_url) if os.path.exists(git_url): @@ -438,7 +434,7 @@ def git_source(source_dict, git_cache, src_dir, recipe_path=None, verbose=True): if git_url.startswith("."): # It's a relative path from the conda recipe git_url = abspath(normpath(os.path.join(recipe_path, git_url))) - if sys.platform == "win32": + if on_win: git_dn = git_url.replace(":", "_") else: git_dn = git_url[1:] diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 3fc363986e..3a98559187 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -4,7 +4,7 @@ import tarfile from os.path import basename, normpath -from conda_build.utils import codec, filter_info_files +from .utils import codec, filter_info_files def dist_fn(fn): @@ -61,9 +61,7 @@ def index_json(self): for varname in "name", "version": if info[varname] != getattr(self, varname): raise Exception( - "{}: {!r} != {!r}".format( - varname, info[varname], getattr(self, varname) - ) + f"{varname}: {info[varname]!r} != {getattr(self, varname)!r}" ) assert isinstance(info["build_number"], int) diff --git a/conda_build/utils.py b/conda_build/utils.py index 6c300737c1..5ceff5f762 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -18,10 +18,15 @@ import tarfile import tempfile import time +import urllib.parse as urlparse +import urllib.request as urllib from collections import OrderedDict, defaultdict from functools import lru_cache +from glob import glob from itertools import filterfalse +from json.decoder import JSONDecodeError from locale import getpreferredencoding +from os import walk from os.path import ( abspath, dirname, @@ -38,58 +43,28 @@ from threading import Thread from typing import Iterable -import libarchive - -from .deprecations import deprecated - -try: - from json.decoder import JSONDecodeError -except ImportError: - JSONDecodeError = ValueError - import conda_package_handling.api import filelock +import libarchive import yaml - -try: - from conda.base.constants import ( - CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, - ) -except Exception: - from conda.base.constants import ( - CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1, - ) - - CONDA_PACKAGE_EXTENSION_V2 = ".conda" - CONDA_PACKAGE_EXTENSIONS = (CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) - -import urllib.parse as urlparse -import urllib.request as urllib -from contextlib import ExitStack # noqa: F401 -from glob import glob - -from conda.api import PackageCacheData # noqa -from conda.base.constants import KNOWN_SUBDIRS +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSION_V1, # noqa: F401 + CONDA_PACKAGE_EXTENSION_V2, # noqa: F401 + CONDA_PACKAGE_EXTENSIONS, + KNOWN_SUBDIRS, +) from conda.core.prefix_data import PrefixData from conda.models.dist import Dist from conda.models.records import PrefixRecord -# NOQA because it is not used in this file. 
-from conda_build.conda_interface import rm_rf as _rm_rf # noqa -from conda_build.exceptions import BuildLockError # noqa -from conda_build.os_utils import external # noqa - -from .conda_interface import ( # noqa +from .conda_interface import ( CondaHTTPError, - Dist, # noqa MatchSpec, - StringIO, # noqa + StringIO, TemporaryDirectory, VersionOrder, - cc_conda_build, # noqa - context, # noqa + cc_conda_build, + context, download, get_conda_channel, hashsum_file, @@ -99,9 +74,9 @@ unix_path_to_win, win_path_to_unix, ) - -PermissionError = PermissionError # NOQA -FileNotFoundError = FileNotFoundError +from .conda_interface import rm_rf as _rm_rf +from .deprecations import deprecated +from .exceptions import BuildLockError on_win = sys.platform == "win32" on_mac = sys.platform == "darwin" @@ -138,11 +113,6 @@ # filenames accepted as recipe meta files VALID_METAS = ("meta.yaml", "meta.yml", "conda.yaml", "conda.yml") -try: - from os import scandir, walk # NOQA -except ImportError: - from scandir import walk - @lru_cache(maxsize=None) def stat_file(path): @@ -230,7 +200,7 @@ def _setup_rewrite_pipe(env): r_fd, w_fd = os.pipe() r = os.fdopen(r_fd, "rt") - if sys.platform == "win32": + if on_win: replacement_t = "%{}%" else: replacement_t = "${}" @@ -714,8 +684,8 @@ def merge_tree( assert not dst.startswith(src), ( "Can't merge/copy source into subdirectory of itself. " "Please create separate spaces for these things.\n" - " src: {}\n" - " dst: {}".format(src, dst) + f" src: {src}\n" + f" dst: {dst}" ) new_files = copytree(src, dst, symlinks=symlinks, dry_run=True) @@ -833,10 +803,12 @@ def relative(f, d="lib"): def _tar_xf_fallback(tarball, dir_path, mode="r:*"): + from .os_utils.external import find_executable + if tarball.lower().endswith(".tar.z"): - uncompress = external.find_executable("uncompress") + uncompress = find_executable("uncompress") if not uncompress: - uncompress = external.find_executable("gunzip") + uncompress = find_executable("gunzip") if not uncompress: sys.exit( """\ @@ -863,8 +835,6 @@ def _tar_xf_fallback(tarball, dir_path, mode="r:*"): def tar_xf_file(tarball, entries): - from conda_build.utils import ensure_list - entries = ensure_list(entries) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) @@ -992,7 +962,9 @@ def rec_glob(path, patterns, ignores=None): def convert_unix_path_to_win(path): - if external.find_executable("cygpath"): + from .os_utils.external import find_executable + + if find_executable("cygpath"): cmd = f"cygpath -w {path}" path = subprocess.getoutput(cmd) @@ -1002,7 +974,9 @@ def convert_unix_path_to_win(path): def convert_win_path_to_unix(path): - if external.find_executable("cygpath"): + from .os_utils.external import find_executable + + if find_executable("cygpath"): cmd = f"cygpath -u {path}" path = subprocess.getoutput(cmd) @@ -1018,7 +992,7 @@ def path2url(path): def get_stdlib_dir(prefix, py_ver): - if sys.platform == "win32": + if on_win: lib_dir = os.path.join(prefix, "Lib") else: lib_dir = os.path.join(prefix, "lib") @@ -1042,7 +1016,7 @@ def get_build_folders(croot): def prepend_bin_path(env, prefix, prepend_prefix=False): env["PATH"] = join(prefix, "bin") + os.pathsep + env["PATH"] - if sys.platform == "win32": + if on_win: env["PATH"] = ( join(prefix, "Library", "mingw-w64", "bin") + os.pathsep @@ -1093,7 +1067,7 @@ def path_prepended(prefix, prepend_prefix=True): os.environ["PATH"] = old_path -bin_dirname = "Scripts" if sys.platform == "win32" else "bin" +bin_dirname = "Scripts" if on_win else "bin" entry_pat = 
re.compile(r"\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$") @@ -1152,7 +1126,7 @@ def get_ext_files(start_path, pattern): def convert_path_for_cygwin_or_msys2(exe, path): "If exe is a Cygwin or MSYS2 executable then filters it through `cygpath -u`" - if sys.platform != "win32": + if not on_win: return path if exe not in _posix_exes_cache: with open(exe, "rb") as exe_file: @@ -1361,8 +1335,8 @@ def find_recipe(path): if len(metas) == 1: get_logger(__name__).warn( "Multiple meta files found. " - "The %s file in the base directory (%s) " - "will be used." % (metas[0], path) + f"The {metas[0]} file in the base directory ({path}) " + "will be used." ) return os.path.join(path, metas[0]) @@ -1801,9 +1775,7 @@ def merge_or_update_dict( and raise_on_clobber ): log.debug( - "clobbering key {} (original value {}) with value {}".format( - key, base_value, value - ) + f"clobbering key {key} (original value {base_value}) with value {value}" ) if value is None and key in base: del base[key] @@ -1957,14 +1929,12 @@ def ensure_valid_spec(spec, warn=False): if match.group(1) not in ("python", "vc") and warn: log = get_logger(__name__) log.warn( - "Adding .* to spec '{}' to ensure satisfiability. Please " + f"Adding .* to spec '{spec}' to ensure satisfiability. Please " "consider putting {{{{ var_name }}}}.* or some relational " "operator (>/=/<=) on this spec in meta.yaml, or if req is " "also a build req, using {{{{ pin_compatible() }}}} jinja2 " "function instead. See " - "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level".format( # NOQA - spec - ) + "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level" ) spec = spec_needing_star_re.sub(r"\1 \2.*", spec) return spec @@ -2055,6 +2025,8 @@ def sha256_checksum(filename, buffersize=65536): def write_bat_activation_text(file_handle, m): + from .os_utils.external import find_executable + file_handle.write(f'call "{root_script_dir}\\..\\condabin\\conda_hook.bat"\n') if m.is_cross: # HACK: we need both build and host envs "active" - i.e. on PATH, @@ -2087,7 +2059,6 @@ def write_bat_activation_text(file_handle, m): file_handle.write( f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate --stack "{m.config.build_prefix}"\n' ) - from conda_build.os_utils.external import find_executable ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: @@ -2127,10 +2098,7 @@ def write_bat_activation_text(file_handle, m): file_handle.write(f"mklink gcc-ranlib{ext} {ccache}\n") file_handle.write("popd\n") file_handle.write( - "set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n".format( - dirname_ccache_ln=dirname_ccache_ln_bin, - dirname_ccache=os.path.dirname(ccache), - ) + f"set PATH={dirname_ccache_ln_bin};{os.path.dirname(ccache)};%PATH%\n" ) elif method == "native": pass @@ -2190,17 +2158,15 @@ def shutil_move_more_retrying(src, dest, debug_name): shutil.move(src, dest) if attempts_left != 5: log.warning( - "shutil.move({}={}, dest={}) succeeded on attempt number {}".format( - debug_name, src, dest, 6 - attempts_left - ) + f"shutil.move({debug_name}={src}, dest={dest}) succeeded on attempt number {6 - attempts_left}" ) attempts_left = -1 except: attempts_left = attempts_left - 1 if attempts_left > 0: log.warning( - "Failed to rename {} directory, check with strace, struss or procmon. " - "Will sleep for 3 seconds and try again!".format(debug_name) + f"Failed to rename {debug_name} directory, check with strace, struss or procmon. 
" + "Will sleep for 3 seconds and try again!" ) import time diff --git a/conda_build/variants.py b/conda_build/variants.py index 7e9dfc7ff0..8cf2c007cc 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" - import os.path import re import sys @@ -13,9 +12,9 @@ import yaml -from conda_build.conda_interface import cc_conda_build, subdir -from conda_build.utils import ensure_list, get_logger, islist, on_win, trim_empty_keys -from conda_build.version import _parse as parse_version +from .conda_interface import cc_conda_build, subdir +from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys +from .version import _parse as parse_version DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", @@ -125,7 +124,7 @@ def get_default_variant(config): def parse_config_file(path, config): - from conda_build.metadata import get_selectors, select_lines + from .metadata import get_selectors, select_lines with open(path) as f: contents = f.read() @@ -161,8 +160,8 @@ def validate_spec(src, spec): # check for duplicate keys unique = set() errors.extend( - " zip_key entry {} in group {} is a duplicate, keys can only occur " - "in one group".format(k, zg) + f" zip_key entry {k} in group {zg} is a duplicate, keys can only occur " + "in one group" # include error if key has already been seen, otherwise add to unique keys if k in unique else unique.add(k) @@ -498,13 +497,8 @@ def filter_by_key_value(variants, key, values, source_name): else: log = get_logger(__name__) log.debug( - "Filtering variant with key {key} not matching target value(s) " - "({tgt_vals}) from {source_name}, actual {actual_val}".format( - key=key, - tgt_vals=values, - source_name=source_name, - actual_val=variant.get(key), - ) + f"Filtering variant with key {key} not matching target value(s) " + f"({values}) from {source_name}, actual {variant.get(key)}" ) return reduced_variants @@ -646,7 +640,7 @@ def get_package_combined_spec(recipedir_or_metadata, config=None, variants=None) if hasattr(recipedir_or_metadata, "config"): config = recipedir_or_metadata.config if not config: - from conda_build.config import Config + from .config import Config config = Config() files = find_config_files(recipedir_or_metadata, config) diff --git a/conda_build/windows.py b/conda_build/windows.py index 84da4a0f0d..ba53abf80a 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -18,15 +18,15 @@ # Allow some imports to work for cross or CONDA_SUBDIR usage. pass -from conda_build import environ -from conda_build.utils import ( +from . import environ +from .utils import ( check_call_env, copy_into, get_logger, path_prepended, write_bat_activation_text, ) -from conda_build.variants import get_default_variant, set_language_env_vars +from .variants import get_default_variant, set_language_env_vars VS_VERSION_STRING = { "8.0": "Visual Studio 8 2005", @@ -203,9 +203,7 @@ def build_vcvarsall_cmd(cmd, arch=arch_selector): # If the WindowsSDKDir environment variable has not been successfully # set then try activating VS2010 msvc_env_lines.append( - 'if not "%WindowsSDKDir%" == "{}" ( {} )'.format( - WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path) - ) + f'if not "%WindowsSDKDir%" == "{WIN_SDK_71_PATH}" ( {build_vcvarsall_cmd(vcvarsall_vs_path)} )' ) # sdk is not installed. 
Fall back to only trying VS 2010 except KeyError: diff --git a/pyproject.toml b/pyproject.toml index fc9969adc7..602c15be73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,10 @@ pycodestyle = {max-line-length = 120} # E, W = pycodestyle errors and warnings # F = pyflakes # I = isort -select = ["E", "W", "F", "I"] +# UP = pyupgrade +# ISC = flake8-implicit-str-concat +# see also https://docs.astral.sh/ruff/rules/ +select = ["E", "W", "F", "I", "UP", "ISC"] # E402 module level import not at top of file # E722 do not use bare 'except' # E731 do not assign a lambda expression, use a def diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 10ed9f803e..59fff7901c 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -37,7 +37,7 @@ def test_render_add_channel(): required_package_details = required_package_string.split(" ") assert len(required_package_details) > 1, ( "Expected version number on successful " - "rendering, but got only {}".format(required_package_details) + f"rendering, but got only {required_package_details}" ) assert ( required_package_details[1] == "1.0" diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 0cc85f01c3..856cc4fa1c 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -48,7 +48,6 @@ from conda_build.os_utils.external import find_executable from conda_build.render import finalize_metadata from conda_build.utils import ( - FileNotFoundError, check_call_env, check_output_env, convert_path_for_cygwin_or_msys2, @@ -400,12 +399,12 @@ def dummy_executable(folder, exename): with open(dummyfile, "w") as f: f.write( prefix - + """ - echo ******* You have reached the dummy {}. It is likely there is a bug in + + f""" + echo ******* You have reached the dummy {exename}. 
It is likely there is a bug in echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 - """.format(exename) + """ ) if sys.platform != "win32": import stat @@ -614,10 +613,9 @@ def test_numpy_setup_py_data(testing_config): subprocess.check_call(["conda", "install", "-y", "cython"]) m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] _hash = m.hash_dependencies() - assert os.path.basename( - api.get_output_file_path(m)[0] - ) == "load_setup_py_test-0.1.0-np116py{}{}{}_0.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash + assert ( + os.path.basename(api.get_output_file_path(m)[0]) + == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2" ) @@ -816,9 +814,9 @@ def test_disable_pip(testing_metadata): with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) - testing_metadata.meta["build"]["script"] = ( - 'python -c "import setuptools; ' 'print(setuptools.__version__)"' - ) + testing_metadata.meta["build"][ + "script" + ] = 'python -c "import setuptools; print(setuptools.__version__)"' with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index bc17db6ffe..7da9ede2d3 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -55,9 +55,7 @@ def test_show_imports(base_platform, package, capfd): if platform == source_platform: platforms.remove(platform) - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -78,9 +76,7 @@ def test_show_imports(base_platform, package, capfd): def test_no_imports_found(base_platform, package, capfd): package_name, example_file = package - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -96,9 +92,7 @@ def test_no_imports_found(base_platform, package, capfd): def test_no_platform(base_platform, package): package_name, example_file = package - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -122,9 +116,7 @@ def test_c_extension_error(base_platform, package): if platform == source_platform: platforms.remove(platform) - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -133,8 +125,8 @@ def test_c_extension_error(base_platform, package): api.convert(fn, platforms=platform) assert ( - "WARNING: Package {} contains C extensions; skipping conversion. " - "Use -f to force conversion.".format(fn) + f"WARNING: Package {fn} contains C extensions; skipping conversion. " + "Use -f to force conversion." 
) in str(e.value) @@ -150,9 +142,7 @@ def test_c_extension_conversion(base_platform, package): if platform == source_platform: platforms.remove(platform) - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-py36_0.tar.bz2" fn = f"{package_name}-py36_0.tar.bz2" download(f, fn) @@ -170,9 +160,7 @@ def test_c_extension_conversion(base_platform, package): def test_convert_platform_to_others(base_platform, package): package_name, example_file = package subdir = f"{base_platform}-64" - f = "http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2".format( - subdir, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-py27_0.tar.bz2" fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) expected_paths_json = package_has_file(fn, "info/paths.json") @@ -254,9 +242,7 @@ def test_convert_from_unix_to_win_creates_entry_points(testing_config, request): def test_convert_dependencies(base_platform, package): package_name, example_file = package subdir = f"{base_platform}-64" - f = "http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2".format( - subdir, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) @@ -291,9 +277,7 @@ def test_convert_dependencies(base_platform, package): def test_convert_no_dependencies(base_platform, package): package_name, example_file = package subdir = f"{base_platform}-64" - f = "http://repo.anaconda.com/pkgs/free/{}/{}-np112py36_0.tar.bz2".format( - subdir, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) @@ -324,9 +308,7 @@ def test_skip_conversion(base_platform, package, capfd): package_name, example_file = package source_plat_arch = f"{base_platform}-64" - f = "http://repo.anaconda.com/pkgs/free/{}-64/{}-np112py36_0.tar.bz2".format( - base_platform, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{base_platform}-64/{package_name}-np112py36_0.tar.bz2" fn = f"{package_name}-np112py36_0.tar.bz2" download(f, fn) @@ -337,8 +319,8 @@ def test_skip_conversion(base_platform, package, capfd): output, error = capfd.readouterr() skip_message = ( - "Source platform '{}' and target platform '{}' are identical. " - "Skipping conversion.\n".format(source_plat_arch, source_plat_arch) + f"Source platform '{source_plat_arch}' and target platform '{source_plat_arch}' are identical. 
" + "Skipping conversion.\n" ) package = os.path.join(source_plat_arch, fn) @@ -361,9 +343,7 @@ def test_renaming_executables(base_platform, package): """ package_name, example_file = package subdir = f"{base_platform}-64" - f = "http://repo.anaconda.com/pkgs/free/{}/{}-py27_0.tar.bz2".format( - subdir, package_name - ) + f = f"http://repo.anaconda.com/pkgs/free/{subdir}/{package_name}-py27_0.tar.bz2" fn = f"{package_name}-py27_0.tar.bz2" download(f, fn) expected_paths_json = package_has_file(fn, "info/paths.json") diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 1451fbbbe0..878617e78d 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -99,9 +99,7 @@ def test_get_output_file_path_jinja2(testing_config): assert build_path == os.path.join( testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-py{}{}_0_g262d444.tar.bz2".format( - python, _hash - ), + f"conda-build-test-source-git-jinja2-1.20.2-py{python}{_hash}_0_g262d444.tar.bz2", ) diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 3a82ed0227..8393bc6ba5 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -126,7 +126,7 @@ def test_load_setup_py_data_from_setup_cfg(testing_metadata, tmp_path: Path): setup_py = tmp_path / "setup.py" setup_cfg = tmp_path / "setup.cfg" setup_py.write_text( - "from setuptools import setup\n" 'setup(name="name_from_setup_py")\n' + 'from setuptools import setup\nsetup(name="name_from_setup_py")\n' ) setup_cfg.write_text( "[metadata]\n" diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index db75006a6d..ca6004eefd 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -116,9 +116,7 @@ def test_intradependencies(testing_config): outputs2_set = {os.path.basename(p) for p in outputs2} assert ( outputs1_set == outputs2_set - ), "pkgs differ :: get_output_file_paths()={} but build()={}".format( - outputs1_set, outputs2_set - ) + ), f"pkgs differ :: get_output_file_paths()={outputs1_set} but build()={outputs2_set}" def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): diff --git a/tests/test_utils.py b/tests/test_utils.py index baa5bf5a34..727859501a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -274,7 +274,7 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): test_file = os.path.join(testing_workdir, "build_log_config.yaml") with open(test_file, "w") as f: f.write( - """ + f""" version: 1 formatters: simple: @@ -286,14 +286,14 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): formatter: simple stream: ext://sys.stdout loggers: - {}: + {__name__}: level: WARN handlers: [console] propagate: no root: level: DEBUG handlers: [console] -""".format(__name__) +""" ) cc_conda_build = mocker.patch.object(utils, "cc_conda_build") cc_conda_build.get.return_value = test_file diff --git a/tests/utils.py b/tests/utils.py index 4819d3c76b..d7bd5b479d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -53,8 +53,9 @@ def get_valid_recipes(*parts: Path | str) -> Generator[Path, None, None]: def add_mangling(filename): - filename = os.path.splitext(filename)[0] + ".cpython-{}{}.py".format( - sys.version_info.major, sys.version_info.minor + filename = ( + os.path.splitext(filename)[0] + + f".cpython-{sys.version_info.major}{sys.version_info.minor}.py" ) filename = os.path.join( os.path.dirname(filename), "__pycache__", os.path.basename(filename) From 
6edc3dc4af4c131c2e8455dabd42d9d2b5059c43 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 4 Jan 2024 12:29:36 -0600 Subject: [PATCH 253/366] Deprecate `conda_build.utils.samefile` in favor of path to package mapping (#5130) * Deprecate `conda_build.utils.samefile` * Implement path (using lstat) to package mapping * Add benchmark for `which_package` --- conda_build/inspect_pkg.py | 45 +++++++++++++++++---- conda_build/utils.py | 1 + news/5126-samefile-regression | 19 +++++++++ pyproject.toml | 1 + tests/test_config.py | 17 +++++--- tests/test_inspect_pkg.py | 74 ++++++++++++++++++++++++++++++++--- 6 files changed, 137 insertions(+), 20 deletions(-) create mode 100644 news/5126-samefile-regression diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index f8fa57a1b5..87bc372766 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -43,13 +43,13 @@ ) from .deprecations import deprecated -from .utils import on_mac, on_win, samefile +from .utils import on_mac, on_win @deprecated("3.28.0", "24.1.0") @lru_cache(maxsize=None) def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: - if (prec := PrefixData(prefix).get(dist.name, None)) is None: + if (prec := PrefixData(str(prefix)).get(dist.name, None)) is None: return set() elif MatchSpec(dist).match(prec): return set(prec["files"]) @@ -62,19 +62,48 @@ def which_package( path: str | os.PathLike | Path, prefix: str | os.PathLike | Path, ) -> Iterable[PrefixRecord]: - """ + """Detect which package(s) a path belongs to. + Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. + + We use lstat since a symlink doesn't clobber the file it points to. """ prefix = Path(prefix) - # historically, path was relative to prefix just to be safe we append to prefix - # (pathlib correctly handles this even if path is absolute) - path = prefix / path + # historically, path was relative to prefix, just to be safe we append to prefix + # get lstat before calling _file_package_mapping in case path doesn't exist + try: + lstat = (prefix / path).lstat() + except FileNotFoundError: + # FileNotFoundError: path doesn't exist + return + else: + yield from _file_package_mapping(prefix).get(lstat, ()) + + +@lru_cache(maxsize=None) +def _file_package_mapping(prefix: Path) -> dict[os.stat_result, set[PrefixRecord]]: + """Map paths to package records. + + We use lstat since a symlink doesn't clobber the file it points to. 
+ """ + mapping: dict[os.stat_result, set[PrefixRecord]] = {} for prec in PrefixData(str(prefix)).iter_records(): - if any(samefile(prefix / file, path) for file in prec["files"]): - yield prec + for file in prec["files"]: + # packages are capable of removing files installed by other dependencies from + # the build prefix, in those cases lstat will fail, while which_package wont + # return the correct package(s) in such a condition we choose to not worry about + # it since this file to package lookup exists primarily to detect clobbering + try: + lstat = (prefix / file).lstat() + except FileNotFoundError: + # FileNotFoundError: path doesn't exist + continue + else: + mapping.setdefault(lstat, set()).add(prec) + return mapping def print_object_info(info, key): diff --git a/conda_build/utils.py b/conda_build/utils.py index 6c300737c1..3961e38bd6 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -2222,6 +2222,7 @@ def is_conda_pkg(pkg_path: str) -> bool: ) +@deprecated("3.28.3", "24.1.0") def samefile(path1: Path, path2: Path) -> bool: try: return path1.samefile(path2) diff --git a/news/5126-samefile-regression b/news/5126-samefile-regression new file mode 100644 index 0000000000..e3ec790882 --- /dev/null +++ b/news/5126-samefile-regression @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Update `which_package` to use a cached mapping of files to packages (`O(1)`) instead of relying on `Path.samefile` comparisons (`O(n * m)`). (#5126 via #5130) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index a3477043d0..825ae5627f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -129,4 +129,5 @@ markers = [ "slow: execute the slow tests if active", "sanity: execute the sanity tests", "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "benchmark: execute the benchmark tests", ] diff --git a/tests/test_config.py b/tests/test_config.py index 7c46ca0693..fa362a0b4f 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -7,7 +7,7 @@ import pytest from conda_build.config import Config, get_or_merge_config -from conda_build.utils import on_win, samefile +from conda_build.utils import on_win @pytest.fixture @@ -39,20 +39,25 @@ def test_keep_old_work(config: Config, build_id: str, tmp_path: Path): config.croot = tmp_path config.build_id = build_id + magic = "a_touched_file.magic" + # empty working directory orig_dir = Path(config.work_dir) + assert orig_dir.exists() assert not len(os.listdir(config.work_dir)) # touch a file so working directory is not empty - (orig_dir / "a_touched_file.magic").touch() - assert len(os.listdir(config.work_dir)) + (orig_dir / magic).touch() + assert orig_dir.exists() + assert len(os.listdir(config.work_dir)) == 1 + assert Path(config.work_dir, magic).exists() config.compute_build_id("a_new_name", reset=True) - # working directory should still exist and have the touched file - assert not samefile(orig_dir, config.work_dir) + # working directory should still exist (in new location) and have the touched file assert not orig_dir.exists() - assert len(os.listdir(config.work_dir)) + assert len(os.listdir(config.work_dir)) == 1 + assert Path(config.work_dir, magic).exists() @pytest.mark.skipif(on_win, reason="Windows uses only the short prefix") diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py index edefa96a54..97aa9228db 100644 --- a/tests/test_inspect_pkg.py +++ b/tests/test_inspect_pkg.py @@ -3,8 +3,11 @@ from __future__ import 
annotations import json +import os from pathlib import Path +from uuid import uuid4 +import pytest from conda.core.prefix_data import PrefixData from conda_build.inspect_pkg import which_package @@ -100,7 +103,7 @@ def test_which_package(tmp_path: Path): precs_hardlinkA = list(which_package(tmp_path / "hardlinkA", tmp_path)) assert len(precs_hardlinkA) == 1 - assert precs_hardlinkA[0] == precA + assert set(precs_hardlinkA) == {precA} precs_shared = list(which_package(tmp_path / "shared", tmp_path)) assert len(precs_shared) == 2 @@ -108,12 +111,71 @@ def test_which_package(tmp_path: Path): precs_internal = list(which_package(tmp_path / "internal", tmp_path)) assert len(precs_internal) == 1 - assert precs_internal[0] == precA + assert set(precs_internal) == {precA} precs_external = list(which_package(tmp_path / "external", tmp_path)) - assert len(precs_external) == 2 - assert set(precs_external) == {precA, precB} + assert len(precs_external) == 1 + assert set(precs_external) == {precA} precs_hardlinkB = list(which_package(tmp_path / "hardlinkB", tmp_path)) - assert len(precs_hardlinkB) == 2 - assert set(precs_hardlinkB) == {precA, precB} + assert len(precs_hardlinkB) == 1 + assert set(precs_hardlinkB) == {precB} + + +@pytest.mark.benchmark +def test_which_package_battery(tmp_path: Path): + # regression: https://github.com/conda/conda-build/issues/5126 + # create a dummy environment + (tmp_path / "conda-meta").mkdir() + (tmp_path / "conda-meta" / "history").touch() + (tmp_path / "lib").mkdir() + + # dummy packages with files + removed = [] + for _ in range(100): + name = f"package_{uuid4().hex}" + + # mock a package with 100 files + files = [f"lib/{uuid4().hex}" for _ in range(100)] + for file in files: + (tmp_path / file).touch() + + # mock a removed file + remove = f"lib/{uuid4().hex}" + files.append(remove) + removed.append(remove) + + (tmp_path / "conda-meta" / f"{name}-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": f"{name}-channel", + "files": files, + "name": name, + "paths_data": { + "paths": [ + {"_path": file, "path_type": "hardlink", "size_in_bytes": 0} + for file in files + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # every path should return exactly one package + for subdir, _, files in os.walk(tmp_path / "lib"): + for file in files: + path = Path(subdir, file) + + assert len(list(which_package(path, tmp_path))) == 1 + + # removed files should return no packages + # this occurs when, e.g., a package removes files installed by another package + for file in removed: + assert not len(list(which_package(tmp_path / file, tmp_path))) + + # missing files should return no packages + assert not len(list(which_package(tmp_path / "missing", tmp_path))) From 31cc7d5ce15e8ac01e2befa82f7695498c801e44 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 5 Jan 2024 11:34:35 -0600 Subject: [PATCH 254/366] Release 3.28.3 (#5128) --- .authors.yml | 2 +- CHANGELOG.md | 13 +++++++++++++ news/5124-fix-ensure_binary-None-handling | 19 ------------------- news/5126-samefile-regression | 19 ------------------- 4 files changed, 14 insertions(+), 39 deletions(-) delete mode 100644 news/5124-fix-ensure_binary-None-handling delete mode 100644 news/5126-samefile-regression diff --git a/.authors.yml b/.authors.yml index b22e4b31e4..4e6c03865d 100644 --- a/.authors.yml +++ b/.authors.yml @@ -1201,7 +1201,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 155 + num_commits: 158 email: kodegard@anaconda.com first_commit: 
2020-09-08 19:53:41 github: kenodegard diff --git a/CHANGELOG.md b/CHANGELOG.md index dfc774b884..e376dc1422 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ [//]: # (current developments) +## 3.28.3 (2024-01-04) + +### Bug fixes + +* Update `conda_build.os_utils.liefldd.ensure_binary` to handle `None` inputs. (#5123 via #5124) +* Update `conda_build.inspect_pkg.which_package` to use a cached mapping of paths to packages (first call: `O(n)`, subsequent calls: `O(1)`) instead of relying on `Path.samefile` comparisons (`O(n * m)`). (#5126 via #5130) + +### Contributors + +* @kenodegard + + + ## 3.28.2 (2023-12-15) ### Enhancements diff --git a/news/5124-fix-ensure_binary-None-handling b/news/5124-fix-ensure_binary-None-handling deleted file mode 100644 index edd4b9d9a4..0000000000 --- a/news/5124-fix-ensure_binary-None-handling +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Update `conda_build.os_utils.liefldd.ensure_binary` to handle `None` inputs. (#5123 via #5124) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5126-samefile-regression b/news/5126-samefile-regression deleted file mode 100644 index e3ec790882..0000000000 --- a/news/5126-samefile-regression +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Update `which_package` to use a cached mapping of files to packages (`O(1)`) instead of relying on `Path.samefile` comparisons (`O(n * m)`). (#5126 via #5130) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From 835c4cd06de587e53ea3b29b3dc44771dac2a3b2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 11:13:31 -0600 Subject: [PATCH 255/366] [pre-commit.ci] pre-commit autoupdate (#5133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.1.11](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.9...v0.1.11) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 030c783909..d154e2e06b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.1.11 hooks: # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff From c79640164a58317ff63c1c3ff78896d580df2303 Mon Sep 17 00:00:00 2001 From: Johnny Date: Wed, 10 Jan 2024 20:23:00 +0100 Subject: [PATCH 256/366] Add support for Python 3.12 (#4998) Co-authored-by: Ken Odegard Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 18 ++--- conda_build/_load_setup_py_data.py | 2 + conda_build/skeletons/pypi.py | 63 +++++++-------- conda_build/variants.py | 19 +++-- pyproject.toml | 1 + recipe/conda_build_config.yaml | 9 ++- tests/cli/test_main_build.py | 77 +++++++++++-------- tests/cli/test_main_skeleton.py | 16 ++-- tests/requirements.txt | 2 +- .../metadata/source_setup_py_data/bld.bat | 4 +- .../metadata/source_setup_py_data/build.sh | 4 +- .../metadata/source_setup_py_data/meta.yaml | 2 +- .../building_jinja2_setup_py_data/meta.yaml | 2 +- tests/test_api_build.py | 57 +++++++++++--- tests/test_api_skeleton.py | 26 ++++--- tests/test_metadata.py | 30 ++++---- tests/test_subpackages.py | 2 +- tests/test_variants.py | 4 +- tests/utils.py | 10 +++ 19 files changed, 213 insertions(+), 135 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 80caa4cdea..cfa10453e6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,7 +69,7 @@ jobs: fail-fast: false matrix: # test all lower versions (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.9', '3.10'] + python-version: ['3.9', '3.10', '3.11'] conda-version: [release] test-type: [serial, parallel] include: @@ -81,10 +81,10 @@ jobs: conda-version: 22.11.0 test-type: parallel # maximum Python/conda combo - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: serial - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: parallel env: @@ -173,10 +173,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: serial - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: parallel env: @@ -270,10 +270,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: serial - - python-version: '3.11' + - python-version: '3.12' conda-version: canary test-type: parallel env: @@ -426,10 +426,10 @@ jobs: clean: true fetch-depth: 0 - # Explicitly use Python 3.11 since each of the OSes has a different default Python + # Explicitly use Python 3.12 since each of the OSes has a different default Python - uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.12' - name: Detect label shell: python diff --git a/conda_build/_load_setup_py_data.py b/conda_build/_load_setup_py_data.py index efeb14c69d..9180c404fc 100644 --- a/conda_build/_load_setup_py_data.py +++ b/conda_build/_load_setup_py_data.py @@ -90,6 +90,8 @@ def setup(**kw): del sys.modules["versioneer"] try: + # numpy.distutils deprecated in Python 3.12+ + # see https://numpy.org/doc/stable/reference/distutils_status_migration.html import numpy.distutils.core numpy_setup = numpy.distutils.core.setup diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index fe69e09d23..fbe59199b3 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -558,7 +558,7 @@ def add_parser(repos): action="store", default=default_python, help="""Version of 
Python to use to run setup.py. Default is %(default)s.""", - choices=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"], + choices=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], ) pypi.add_argument( @@ -1371,39 +1371,40 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op with open(patch, "wb") as f: f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")).encode("utf-8")) - if exists(join(stdlib_dir, "distutils", "core.py-copy")): - rm_rf(join(stdlib_dir, "distutils", "core.py")) - copy2( - join(stdlib_dir, "distutils", "core.py-copy"), - join(stdlib_dir, "distutils", "core.py"), - ) - # Avoid race conditions. Invalidate the cache. - rm_rf( - join( - stdlib_dir, - "distutils", - "__pycache__", - f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyc", + # distutils deprecated in Python 3.10+, removed in Python 3.12+ + distutils = join(stdlib_dir, "distutils") + if isdir(distutils): + if exists(join(distutils, "core.py-copy")): + rm_rf(join(distutils, "core.py")) + copy2( + join(distutils, "core.py-copy"), + join(distutils, "core.py"), ) - ) - rm_rf( - join( - stdlib_dir, - "distutils", - "__pycache__", - f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyo", + # Avoid race conditions. Invalidate the cache. + rm_rf( + join( + distutils, + "__pycache__", + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyc", + ) ) - ) - else: - copy2( - join(stdlib_dir, "distutils", "core.py"), - join(stdlib_dir, "distutils", "core.py-copy"), - ) - apply_patch(join(stdlib_dir, "distutils"), patch, config=config) + rm_rf( + join( + distutils, + "__pycache__", + f"core.cpython-{sys.version_info[0]}{sys.version_info[1]}.pyo", + ) + ) + else: + copy2( + join(distutils, "core.py"), + join(distutils, "core.py-copy"), + ) + apply_patch(distutils, patch, config=config) - vendored = join(stdlib_dir, "site-packages", "setuptools", "_distutils") - if os.path.isdir(vendored): - apply_patch(vendored, patch, config=config) + setuptools = join(stdlib_dir, "site-packages", "setuptools", "_distutils") + if isdir(setuptools): + apply_patch(setuptools, patch, config=config) # Save PYTHONPATH for later env = os.environ.copy() diff --git a/conda_build/variants.py b/conda_build/variants.py index 8cf2c007cc..d798a6e79a 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -18,14 +18,24 @@ DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", - "numpy": "1.22", + "numpy": { + # (python): numpy_version, # range of versions built for given python + (3, 8): "1.22", # 1.19-1.24 + (3, 9): "1.22", # 1.19-1.26 + (3, 10): "1.22", # 1.21-1.26 + (3, 11): "1.23", # 1.23-1.26 + (3, 12): "1.26", # 1.26- + }.get(sys.version_info[:2], "1.26"), # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. 
"perl": "5.26.2", "lua": "5", "r_base": "3.4" if on_win else "3.5", "cpu_optimization_target": "nocona", - "pin_run_as_build": OrderedDict(python=OrderedDict(min_pin="x.x", max_pin="x.x")), + "pin_run_as_build": { + "python": {"min_pin": "x.x", "max_pin": "x.x"}, + "r-base": {"min_pin": "x.x", "max_pin": "x.x"}, + }, "ignore_version": [], "ignore_build_only_deps": ["python", "numpy"], "extend_keys": [ @@ -37,11 +47,6 @@ "cran_mirror": "https://cran.r-project.org", } -# set this outside the initialization because of the dash in the key -DEFAULT_VARIANTS["pin_run_as_build"]["r-base"] = OrderedDict( - min_pin="x.x", max_pin="x.x" -) - # map python version to default compiler on windows, to match upstream python # This mapping only sets the "native" compiler, and can be overridden by specifying a compiler # in the conda-build variant configuration diff --git a/pyproject.toml b/pyproject.toml index 8b55ee4168..5fc2f3eac5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy" ] diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml index a75aff37d1..42847d7ead 100644 --- a/recipe/conda_build_config.yaml +++ b/recipe/conda_build_config.yaml @@ -1,5 +1,6 @@ python: - - 3.8 - - 3.9 - - 3.10 - - 3.11 + - "3.8" + - "3.9" + - "3.10" + - "3.11" + - "3.12" diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 59a080eace..90d1c4a629 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -1,33 +1,29 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os import re from pathlib import Path import pytest +from pytest import FixtureRequest, MonkeyPatch +from pytest_mock import MockerFixture -import conda_build from conda_build import api from conda_build.cli import main_build, main_render -from conda_build.conda_interface import ( - TemporaryDirectory, - cc_conda_build, - context, - reset_context, +from conda_build.conda_interface import TemporaryDirectory +from conda_build.config import ( + Config, + zstd_compression_level_default, ) -from conda_build.config import Config, zstd_compression_level_default from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.metadata import MetaData +from conda_build.os_utils.external import find_executable from conda_build.utils import get_build_folders, on_win, package_has_file from ..utils import metadata_dir - - -def _reset_config(search_path=None): - reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, "conda_build") else {} - ) +from ..utils import reset_config as _reset_config @pytest.mark.sanity @@ -266,25 +262,42 @@ def test_purge_all(testing_workdir, testing_metadata): @pytest.mark.serial -def test_no_force_upload(mocker, testing_workdir, testing_metadata, request): - with open(os.path.join(testing_workdir, ".condarc"), "w") as f: - f.write("anaconda_upload: True\n") - f.write("conda_build:\n") - f.write(" force_upload: False\n") - del testing_metadata.meta["test"] - api.output_yaml(testing_metadata, "meta.yaml") - args = ["--no-force-upload", testing_workdir] - call = mocker.patch.object(conda_build.build.subprocess, 
"call") +def test_no_force_upload( + mocker: MockerFixture, + monkeypatch: MonkeyPatch, + testing_workdir: str | os.PathLike | Path, + testing_metadata: MetaData, + request: FixtureRequest, +): + # this is nearly identical to tests/test_api_build.py::test_no_force_upload + # only difference is this tests `conda_build.cli.main_build.execute` request.addfinalizer(_reset_config) - _reset_config([os.path.join(testing_workdir, ".condarc")]) - main_build.execute(args) + call = mocker.patch("subprocess.call") + anaconda = find_executable("anaconda") + + # render recipe + api.output_yaml(testing_metadata, "meta.yaml") pkg = api.get_output_file_path(testing_metadata) - assert call.called_once_with(["anaconda", "upload", pkg]) - args = [testing_workdir] - with open(os.path.join(testing_workdir, ".condarc"), "w") as f: - f.write("anaconda_upload: True\n") - main_build.execute(args) - assert call.called_once_with(["anaconda", "upload", "--force", pkg]) + + # mock Config.set_keys to always set anaconda_upload to True + # conda's Context + conda_build's MetaData & Config objects interact in such an + # awful way that mocking these configurations is ugly and confusing, all of it + # needs major refactoring + set_keys = Config.set_keys # store original method + monkeypatch.setattr( + Config, + "set_keys", + lambda self, **kwargs: set_keys(self, **{**kwargs, "anaconda_upload": True}), + ) + + # check for normal upload + main_build.execute(["--no-force-upload", testing_workdir]) + call.assert_called_once_with([anaconda, "upload", *pkg]) + call.reset_mock() + + # check for force upload + main_build.execute([testing_workdir]) + call.assert_called_once_with([anaconda, "upload", "--force", *pkg]) @pytest.mark.slow diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py index 64eb300878..0333d77c1f 100644 --- a/tests/cli/test_main_skeleton.py +++ b/tests/cli/test_main_skeleton.py @@ -34,20 +34,26 @@ def test_skeleton_pypi_arguments_work(testing_workdir): https://github.com/conda/conda-build/pull/1384 """ - args = ["pypi", "msumastro", "--version=1.1.6", "--pin-numpy"] + args = ["pypi", "fasttext", "--version=0.9.2", "--pin-numpy"] main_skeleton.execute(args) - assert os.path.isdir("msumastro") + assert os.path.isdir("fasttext") # Deliberately bypass metadata reading in conda build to get as # close to the "ground truth" as possible. - with open(os.path.join("msumastro", "meta.yaml")) as f: + with open(os.path.join("fasttext", "meta.yaml")) as f: assert f.read().count("numpy x.x") == 2 - args = ["pypi", "photutils", "--version=0.2.2", "--setup-options=--offline"] + args = [ + "pypi", + "photutils", + "--version=1.10.0", + "--setup-options=--offline", + "--extra-specs=extension-helpers", + ] main_skeleton.execute(args) assert os.path.isdir("photutils") # Check that the setup option occurs in bld.bat and build.sh. 
m = api.render("photutils")[0][0] assert "--offline" in m.meta["build"]["script"] - assert m.version() == "0.2.2" + assert m.version() == "1.10.0" diff --git a/tests/requirements.txt b/tests/requirements.txt index a7140e8673..a4ecdd07a8 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,7 +1,7 @@ -anaconda-client beautifulsoup4 chardet conda >=22.11.0 +conda-forge::anaconda-client conda-index conda-package-handling >=1.3 conda-verify diff --git a/tests/test-recipes/metadata/source_setup_py_data/bld.bat b/tests/test-recipes/metadata/source_setup_py_data/bld.bat index 4168d5d6f0..3399daa92d 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/bld.bat +++ b/tests/test-recipes/metadata/source_setup_py_data/bld.bat @@ -6,6 +6,6 @@ if errorlevel 1 exit 1 for /f "delims=" %%i in ('git describe') do set gitdesc=%%i if errorlevel 1 exit 1 echo "%gitdesc%" -if not "%gitdesc%"=="1.21.0" exit 1 +if not "%gitdesc%"=="1.22.0" exit 1 echo "%PKG_VERSION%" -if not "%PKG_VERSION%"=="1.21.0" exit 1 +if not "%PKG_VERSION%"=="1.22.0" exit 1 diff --git a/tests/test-recipes/metadata/source_setup_py_data/build.sh b/tests/test-recipes/metadata/source_setup_py_data/build.sh index ecde5ca3c3..3c8cd9361a 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/build.sh +++ b/tests/test-recipes/metadata/source_setup_py_data/build.sh @@ -3,6 +3,6 @@ # Ensure we are in a git repo [ -d .git ] git describe -[ "$(git describe)" = 1.21.0 ] +[ "$(git describe)" = 1.22.0 ] echo "\$PKG_VERSION = $PKG_VERSION" -[ "${PKG_VERSION}" = 1.21.0 ] +[ "${PKG_VERSION}" = 1.22.0 ] diff --git a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml index d4a3b21e7b..459c19ba74 100644 --- a/tests/test-recipes/metadata/source_setup_py_data/meta.yaml +++ b/tests/test-recipes/metadata/source_setup_py_data/meta.yaml @@ -12,7 +12,7 @@ package: source: git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} - git_tag: 1.21.0 + git_tag: 1.22.0 build: entry_points: diff --git a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml index 8596e5c574..9d7b9dd2df 100644 --- a/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml +++ b/tests/test-recipes/published_code/building_jinja2_setup_py_data/meta.yaml @@ -8,7 +8,7 @@ package: # Example assumes that this folder has setup.py in it source: git_url: {{ environ.get('CONDA_BUILD_TEST_RECIPE_PATH') }} - git_tag: 1.21.0 + git_tag: 1.22.0 requirements: build: diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 856cc4fa1c..1ac2ca06d2 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -3,6 +3,8 @@ """ This module tests the build API. These are high-level integration tests. 
""" +from __future__ import annotations + import json import logging import os @@ -25,15 +27,15 @@ from binstar_client.errors import NotFound from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaMultiError +from pytest import FixtureRequest, MonkeyPatch +from pytest_mock import MockerFixture -import conda_build from conda_build import __version__, api, exceptions from conda_build.conda_interface import ( CONDA_VERSION, CondaError, LinkError, VersionOrder, - cc_conda_build, context, reset_context, url_path, @@ -45,6 +47,7 @@ OverDependingError, OverLinkingError, ) +from conda_build.metadata import MetaData from conda_build.os_utils.external import find_executable from conda_build.render import finalize_metadata from conda_build.utils import ( @@ -66,6 +69,7 @@ get_valid_recipes, metadata_dir, metadata_path, + reset_config, ) @@ -437,7 +441,7 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = sorted({"32", *platforms}) - compilers = ["3.10", "3.11"] + compilers = ["3.10", "3.11", "3.12"] msvc_vers = ["14.0"] else: msvc_vers = [] @@ -599,6 +603,10 @@ def test_build_metadata_object(testing_metadata): @pytest.mark.serial +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="numpy.distutils deprecated in Python 3.12+", +) def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, "_numpy_setup_py_data") # this shows an error that is OK to ignore: @@ -1486,17 +1494,44 @@ def test_runtime_dependencies(testing_config): @pytest.mark.sanity -def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata): - testing_metadata.config.anaconda_upload = True - del testing_metadata.meta["test"] +def test_no_force_upload( + mocker: MockerFixture, + monkeypatch: MonkeyPatch, + testing_workdir: str | os.PathLike | Path, + testing_metadata: MetaData, + request: FixtureRequest, +): + # this is nearly identical to tests/cli/test_main_build.py::test_no_force_upload + # only difference is this tests `conda_build.api.build` + request.addfinalizer(reset_config) + call = mocker.patch("subprocess.call") + anaconda = find_executable("anaconda") + + # render recipe api.output_yaml(testing_metadata, "meta.yaml") - call = mocker.patch.object(conda_build.build.subprocess, "call") - cc_conda_build["force_upload"] = False + + # mock Config.set_keys to always set anaconda_upload to True + # conda's Context + conda_build's MetaData & Config objects interact in such an + # awful way that mocking these configurations is ugly and confusing, all of it + # needs major refactoring + set_keys = Config.set_keys # store original method + override = {"anaconda_upload": True} + monkeypatch.setattr( + Config, + "set_keys", + lambda self, **kwargs: set_keys(self, **{**kwargs, **override}), + ) + + # check for normal upload + override["force_upload"] = False pkg = api.build(testing_workdir) - assert call.called_once_with(["anaconda", "upload", pkg]) - del cc_conda_build["force_upload"] + call.assert_called_once_with([anaconda, "upload", *pkg]) + call.reset_mock() + + # check for force upload + override["force_upload"] = True pkg = api.build(testing_workdir) - assert call.called_once_with(["anaconda", "upload", "--force", pkg]) + call.assert_called_once_with([anaconda, "upload", "--force", *pkg]) @pytest.mark.sanity diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 514d469c56..52039b9ed4 
100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -11,7 +11,7 @@ import ruamel.yaml from conda_build import api -from conda_build.exceptions import DependencyNeedsBuildingError +from conda_build.config import Config from conda_build.skeletons.pypi import ( clean_license_name, convert_to_flat_list, @@ -321,10 +321,11 @@ def test_pypi_with_setup_options(tmp_path: Path, testing_config): api.skeletonize( packages="photutils", repo="pypi", - version="0.2.2", + version="1.10.0", setup_options="--offline", config=testing_config, output_dir=tmp_path, + extra_specs=["extension-helpers"], ) # Check that the setup option occurs in bld.bat and build.sh. @@ -332,30 +333,31 @@ def test_pypi_with_setup_options(tmp_path: Path, testing_config): assert "--offline" in m.meta["build"]["script"] -def test_pypi_pin_numpy(tmp_path: Path, testing_config): +def test_pypi_pin_numpy(tmp_path: Path, testing_config: Config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. api.skeletonize( - packages="msumastro", + packages="fasttext", repo="pypi", - version="0.9.0", + version="0.9.2", config=testing_config, pin_numpy=True, output_dir=tmp_path, ) - assert (tmp_path / "msumastro" / "meta.yaml").read_text().count("numpy x.x") == 2 - with pytest.raises(DependencyNeedsBuildingError): - api.build("msumastro") + assert (tmp_path / "fasttext" / "meta.yaml").read_text().count("numpy x.x") == 2 -def test_pypi_version_sorting(tmp_path: Path, testing_config): +def test_pypi_version_sorting(tmp_path: Path, testing_config: Config): # The package used here must have a numpy dependence for pin-numpy to have # any effect. api.skeletonize( - packages="impyla", repo="pypi", config=testing_config, output_dir=tmp_path + packages="fasttext", + repo="pypi", + config=testing_config, + output_dir=tmp_path, ) - m = api.render(str(tmp_path / "impyla"))[0][0] - assert parse_version(m.version()) >= parse_version("0.13.8") + m = api.render(str(tmp_path / "fasttext"))[0][0] + assert parse_version(m.version()) >= parse_version("0.9.2") def test_list_skeletons(): diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 7ac5bbdf01..05e67b540b 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -24,6 +24,7 @@ yamlize, ) from conda_build.utils import DEFAULT_SUBDIRS +from conda_build.variants import DEFAULT_VARIANTS from .utils import metadata_dir, metadata_path, thisdir @@ -189,10 +190,11 @@ def test_build_bootstrap_env_by_path(testing_metadata): ("win", "x86_64", "3.9", {"vs2017_win-x86_64"}), ("win", "x86_64", "3.10", {"vs2017_win-x86_64"}), ("win", "x86_64", "3.11", {"vs2017_win-x86_64"}), - ("linux", "32", "3.11", {"gcc_linux-32", "gxx_linux-32"}), - ("linux", "64", "3.11", {"gcc_linux-64", "gxx_linux-64"}), - ("osx", "32", "3.11", {"clang_osx-32", "clangxx_osx-32"}), - ("osx", "64", "3.11", {"clang_osx-64", "clangxx_osx-64"}), + ("win", "x86_64", "3.12", {"vs2017_win-x86_64"}), + ("linux", "32", "3.12", {"gcc_linux-32", "gxx_linux-32"}), + ("linux", "64", "3.12", {"gcc_linux-64", "gxx_linux-64"}), + ("osx", "32", "3.12", {"clang_osx-32", "clangxx_osx-32"}), + ("osx", "64", "3.12", {"clang_osx-64", "clangxx_osx-64"}), ], ) def test_native_compiler_metadata( @@ -438,19 +440,19 @@ def test_get_selectors( assert get_selectors(config) == { # defaults "build_platform": context.subdir, - "lua": "5", # see conda_build.variants.DEFAULT_VARIANTS["lua"] - "luajit": False, # lua[0] == 2 - "np": 122, # see conda_build.variants.DEFAULT_VARIANTS["numpy"] + "lua": 
DEFAULT_VARIANTS["lua"], + "luajit": DEFAULT_VARIANTS["lua"] == 2, + "np": int(float(DEFAULT_VARIANTS["numpy"]) * 100), "os": os, - "pl": "5.26.2", # see conda_build.variants.DEFAULT_VARIANTS["perl"] + "pl": DEFAULT_VARIANTS["perl"], "py": int(f"{sys.version_info.major}{sys.version_info.minor}"), - "py26": sys.version_info.major == 2 and sys.version_info.minor == 6, - "py27": sys.version_info.major == 2 and sys.version_info.minor == 7, + "py26": sys.version_info[:2] == (2, 6), + "py27": sys.version_info[:2] == (2, 7), "py2k": sys.version_info.major == 2, - "py33": sys.version_info.major == 3 and sys.version_info.minor == 3, - "py34": sys.version_info.major == 3 and sys.version_info.minor == 4, - "py35": sys.version_info.major == 3 and sys.version_info.minor == 5, - "py36": sys.version_info.major == 3 and sys.version_info.minor == 6, + "py33": sys.version_info[:2] == (3, 3), + "py34": sys.version_info[:2] == (3, 4), + "py35": sys.version_info[:2] == (3, 5), + "py36": sys.version_info[:2] == (3, 6), "py3k": sys.version_info.major == 3, "nomkl": bool(nomkl), # default OS/arch values diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index ca6004eefd..d89b888758 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -125,7 +125,7 @@ def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: s recipe, config=testing_config, finalize=False, bypass_env_check=True ) assert len(outputs) == 1 - assert outputs[0][0].version() == "1.21.11" + assert outputs[0][0].version() == "1.22.0" def test_intradep_with_templated_output_name(testing_config): diff --git a/tests/test_variants.py b/tests/test_variants.py index 819f39d793..89ebb67999 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -60,7 +60,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): python 3.5 -> python >=3.5,<3.6.0a0 otherPackages 3.5 -> otherPackages 3.5 """ - variants = {"python": ["3.10", "3.11"]} + variants = {"python": ["3.11", "3.12"]} testing_config.ignore_system_config = True # write variants to disk @@ -87,7 +87,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): assert { *metadata[0][0].meta["requirements"]["run"], *metadata[1][0].meta["requirements"]["run"], - } == {"python >=3.10,<3.11.0a0", "python >=3.11,<3.12.0a0"} + } == {"python >=3.11,<3.12.0a0", "python >=3.12,<3.13.0a0"} def test_use_selectors_in_variants(testing_workdir, testing_config): diff --git a/tests/utils.py b/tests/utils.py index d7bd5b479d..692f852fff 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,8 +8,10 @@ from pathlib import Path from typing import Generator +from conda.base.context import context, reset_context from conda.common.compat import on_mac +from conda_build.conda_interface import cc_conda_build from conda_build.metadata import MetaData tests_path = Path(__file__).parent @@ -144,3 +146,11 @@ def get_noarch_python_meta(meta): d = meta.meta d["build"]["noarch"] = "python" return MetaData.fromdict(d, config=meta.config) + + +def reset_config(search_path=None): + reset_context(search_path) + cc_conda_build.clear() + cc_conda_build.update( + context.conda_build if hasattr(context, "conda_build") else {} + ) From d0daa2b446ddd41dcecdd1b791754386277f5673 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 17 Jan 2024 21:34:42 +0100 Subject: [PATCH 257/366] Restore pre-`3.28` behavior for `which_package` (#5141) * Add test for deleted files in which_package * Add which_package case-insensitivity test on Win * Restore pre-3.28 
behavior for which_package * Add regression test for #5136 --------- Signed-off-by: Marcel Bargull Co-authored-by: Ken Odegard --- conda_build/inspect_pkg.py | 45 +++++------------ news/5141-fix-which_package | 19 +++++++ tests/conftest.py | 8 ++- tests/test_inspect_pkg.py | 98 ++++++++++++++++++++++++++++++++++--- 4 files changed, 128 insertions(+), 42 deletions(-) create mode 100644 news/5141-fix-which_package diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 87bc372766..2769bc6927 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -11,7 +11,7 @@ from functools import lru_cache from itertools import groupby from operator import itemgetter -from os.path import abspath, basename, dirname, exists, join +from os.path import abspath, basename, dirname, exists, join, normcase from pathlib import Path from typing import Iterable, Literal @@ -67,43 +67,20 @@ def which_package( Given the path (of a (presumably) conda installed file) iterate over the conda packages the file came from. Usually the iteration yields only one package. - - We use lstat since a symlink doesn't clobber the file it points to. """ - prefix = Path(prefix) - - # historically, path was relative to prefix, just to be safe we append to prefix - # get lstat before calling _file_package_mapping in case path doesn't exist try: - lstat = (prefix / path).lstat() - except FileNotFoundError: - # FileNotFoundError: path doesn't exist - return - else: - yield from _file_package_mapping(prefix).get(lstat, ()) - + path = Path(path).relative_to(prefix) + except ValueError: + # ValueError: path is already relative to prefix + pass + # On Windows, be lenient and allow case-insensitive path comparisons. + # NOTE: On macOS, although case-insensitive filesystem is default, still + # require case-sensitive matches (i.e., normcase on macOS is a no-op). + normcase_path = normcase(path) -@lru_cache(maxsize=None) -def _file_package_mapping(prefix: Path) -> dict[os.stat_result, set[PrefixRecord]]: - """Map paths to package records. - - We use lstat since a symlink doesn't clobber the file it points to. - """ - mapping: dict[os.stat_result, set[PrefixRecord]] = {} for prec in PrefixData(str(prefix)).iter_records(): - for file in prec["files"]: - # packages are capable of removing files installed by other dependencies from - # the build prefix, in those cases lstat will fail, while which_package wont - # return the correct package(s) in such a condition we choose to not worry about - # it since this file to package lookup exists primarily to detect clobbering - try: - lstat = (prefix / file).lstat() - except FileNotFoundError: - # FileNotFoundError: path doesn't exist - continue - else: - mapping.setdefault(lstat, set()).add(prec) - return mapping + if normcase_path in (normcase(file) for file in prec["files"]): + yield prec def print_object_info(info, key): diff --git a/news/5141-fix-which_package b/news/5141-fix-which_package new file mode 100644 index 0000000000..934235b0d1 --- /dev/null +++ b/news/5141-fix-which_package @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix linking check regressions by restoring pre-3.28 behavior for `conda_build.inspect_pkg.which_package`. 
(#5141) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/conftest.py b/tests/conftest.py index f347317d90..1511afd662 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -227,7 +227,13 @@ def conda_build_test_recipe_path(tmp_path_factory: pytest.TempPathFactory) -> Pa # clone conda_build_test_recipe locally repo = tmp_path_factory.mktemp("conda_build_test_recipe", numbered=False) subprocess.run( - ["git", "clone", "https://github.com/conda/conda_build_test_recipe", str(repo)], + [ + "git", + "clone", + "https://github.com/conda/conda_build_test_recipe", + "--branch=1.21.11", + str(repo), + ], check=True, ) return repo diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py index 97aa9228db..2f35bd3b0e 100644 --- a/tests/test_inspect_pkg.py +++ b/tests/test_inspect_pkg.py @@ -11,6 +11,7 @@ from conda.core.prefix_data import PrefixData from conda_build.inspect_pkg import which_package +from conda_build.utils import on_win def test_which_package(tmp_path: Path): @@ -24,15 +25,27 @@ def test_which_package(tmp_path: Path): (tmp_path / "internal").symlink_to(tmp_path / "hardlinkA") # packageA (tmp_path / "external").symlink_to(tmp_path / "hardlinkB") # packageA (tmp_path / "hardlinkB").touch() # packageB + # Files might be deleted from the prefix during the build, but they should + # still be recognized since they will be present in the run environment. + (tmp_path / "deleted").unlink(missing_ok=True) # packageA + (tmp_path / "deleted_shared").unlink(missing_ok=True) # packageA & packageB - # a dummy package with a hardlink file, shared file, internal softlink, and external softlink + # a dummy package with a hardlink file, shared file, internal softlink, + # external softlink, deleted file, and deleted shared file (tmp_path / "conda-meta" / "packageA-1-0.json").write_text( json.dumps( { "build": "0", "build_number": 0, "channel": "packageA-channel", - "files": ["hardlinkA", "shared", "internal", "external"], + "files": [ + "hardlinkA", + "shared", + "internal", + "external", + "deleted", + "deleted_shared", + ], "name": "packageA", "paths_data": { "paths": [ @@ -56,6 +69,16 @@ def test_which_package(tmp_path: Path): "path_type": "softlink", "size_in_bytes": 0, }, + { + "_path": "deleted", + "path_type": "hardlink", + "size_in_bytes": 0, + }, + { + "_path": "deleted_shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, ], "paths_version": 1, }, @@ -63,14 +86,14 @@ def test_which_package(tmp_path: Path): } ) ) - # a dummy package with a hardlink file and shared file + # a dummy package with a hardlink file, shared file, and deleted shared file (tmp_path / "conda-meta" / "packageB-1-0.json").write_text( json.dumps( { "build": "0", "build_number": 0, "channel": "packageB-channel", - "files": ["hardlinkB", "shared"], + "files": ["hardlinkB", "shared", "deleted_shared"], "name": "packageB", "paths_data": { "paths": [ @@ -84,6 +107,11 @@ def test_which_package(tmp_path: Path): "path_type": "hardlink", "size_in_bytes": 0, }, + { + "_path": "deleted_shared", + "path_type": "hardlink", + "size_in_bytes": 0, + }, ], "paths_version": 1, }, @@ -101,6 +129,14 @@ def test_which_package(tmp_path: Path): precs_missing = list(which_package(tmp_path / "missing", tmp_path)) assert not precs_missing + precs_Hardlinka = list(which_package(tmp_path / "Hardlinka", tmp_path)) + if on_win: + # On Windows, be lenient and allow case-insensitive path comparisons. 
+ assert len(precs_Hardlinka) == 1 + assert set(precs_Hardlinka) == {precA} + else: + assert not precs_Hardlinka + precs_hardlinkA = list(which_package(tmp_path / "hardlinkA", tmp_path)) assert len(precs_hardlinkA) == 1 assert set(precs_hardlinkA) == {precA} @@ -121,6 +157,52 @@ def test_which_package(tmp_path: Path): assert len(precs_hardlinkB) == 1 assert set(precs_hardlinkB) == {precB} + precs_deleted = list(which_package(tmp_path / "deleted", tmp_path)) + assert len(precs_deleted) == 1 + assert set(precs_deleted) == {precA} + + precs_deleted_shared = list(which_package(tmp_path / "deleted_shared", tmp_path)) + assert len(precs_deleted_shared) == 2 + assert set(precs_deleted_shared) == {precA, precB} + + # reuse environment, regression test for #5136 + (tmp_path / "conda-meta" / "packageA-1-0.json").unlink() + (tmp_path / "conda-meta" / "packageB-1-0.json").unlink() + + # a dummy package with a hardlink file + (tmp_path / "conda-meta" / "packageC-1-0.json").write_text( + json.dumps( + { + "build": "0", + "build_number": 0, + "channel": "packageC-channel", + "files": ["hardlinkA"], + "name": "packageC", + "paths_data": { + "paths": [ + { + "_path": "hardlinkA", + "path_type": "hardlink", + "size_in_bytes": 0, + } + ], + "paths_version": 1, + }, + "version": "1", + } + ) + ) + + # fetch package records + PrefixData._cache_.clear() + pd = PrefixData(tmp_path) + precC = pd.get("packageC") + + # test returned package records given a path + precs_reused = list(which_package(tmp_path / "hardlinkA", tmp_path)) + assert len(precs_reused) == 1 + assert set(precs_reused) == {precC} + @pytest.mark.benchmark def test_which_package_battery(tmp_path: Path): @@ -172,10 +254,12 @@ def test_which_package_battery(tmp_path: Path): assert len(list(which_package(path, tmp_path))) == 1 - # removed files should return no packages - # this occurs when, e.g., a package removes files installed by another package + # removed files should still return a package + # this occurs when, e.g., a build script removes files installed by another package + # (post-install scripts removing files from the run environment is less + # likely and not covered) for file in removed: - assert not len(list(which_package(tmp_path / file, tmp_path))) + assert len(list(which_package(tmp_path / file, tmp_path))) == 1 # missing files should return no packages assert not len(list(which_package(tmp_path / "missing", tmp_path))) From bd79925912778fd2d92c19e39414a79a43b30f03 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Jan 2024 02:49:57 -0600 Subject: [PATCH 258/366] Release 3.28.4 (#5143) --- .authors.yml | 4 ++-- CHANGELOG.md | 12 ++++++++++++ news/5141-fix-which_package | 19 ------------------- 3 files changed, 14 insertions(+), 21 deletions(-) delete mode 100644 news/5141-fix-which_package diff --git a/.authors.yml b/.authors.yml index 4e6c03865d..e662a35c1a 100644 --- a/.authors.yml +++ b/.authors.yml @@ -611,7 +611,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 76 + num_commits: 77 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1201,7 +1201,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 158 + num_commits: 159 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard diff --git a/CHANGELOG.md b/CHANGELOG.md index e376dc1422..62edf32ae8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ [//]: # (current developments) +## 3.28.4 (2024-01-17) + +### Bug fixes + +* Fix linking 
check regressions by restoring pre-3.28 behavior for `conda_build.inspect_pkg.which_package`. (#5141) + +### Contributors + +* @mbargull + + + ## 3.28.3 (2024-01-04) ### Bug fixes diff --git a/news/5141-fix-which_package b/news/5141-fix-which_package deleted file mode 100644 index 934235b0d1..0000000000 --- a/news/5141-fix-which_package +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix linking check regressions by restoring pre-3.28 behavior for `conda_build.inspect_pkg.which_package`. (#5141) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From c061feb240ede1d88ecdad2eb5aa954d3a8c21a2 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Jan 2024 09:37:35 -0600 Subject: [PATCH 259/366] Revert git clone modification --- tests/conftest.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1511afd662..f347317d90 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -227,13 +227,7 @@ def conda_build_test_recipe_path(tmp_path_factory: pytest.TempPathFactory) -> Pa # clone conda_build_test_recipe locally repo = tmp_path_factory.mktemp("conda_build_test_recipe", numbered=False) subprocess.run( - [ - "git", - "clone", - "https://github.com/conda/conda_build_test_recipe", - "--branch=1.21.11", - str(repo), - ], + ["git", "clone", "https://github.com/conda/conda_build_test_recipe", str(repo)], check=True, ) return repo From 304daae0730f3ec2da1ee92c8ccbc91f97a88240 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 17:37:06 +0100 Subject: [PATCH 260/366] [pre-commit.ci] pre-commit autoupdate (#5137) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.11 → v0.1.14](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.11...v0.1.14) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d154e2e06b..919c73cdea 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.11 + rev: v0.1.14 hooks: # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff From a6ebdc6e52f38fb473cfd0bd689668516af21f8e Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Thu, 25 Jan 2024 18:11:16 +0100 Subject: [PATCH 261/366] Release 24.1.0 (#5149) Co-authored-by: Bianca Henderson --- .authors.yml | 21 +++++++------ .mailmap | 4 ++- CHANGELOG.md | 40 +++++++++++++++++++++++++ news/5033-update-conda-inspect-channels | 22 -------------- news/5093-add-goat-counter | 19 ------------ news/5105-script-env-warn | 20 ------------- news/allure-removal | 19 ------------ 7 files changed, 55 insertions(+), 90 deletions(-) delete mode 100644 news/5033-update-conda-inspect-channels delete mode 100644 news/5093-add-goat-counter delete mode 100644 news/5105-script-env-warn delete mode 100644 news/allure-removal diff --git a/.authors.yml b/.authors.yml index e662a35c1a..28f364059e 100644 --- a/.authors.yml +++ b/.authors.yml @@ -4,10 +4,11 @@ alternate_emails: - msarahan@continuum.io - msarahan@gmail.com + - msarahan@nvidia.com aliases: - Mike Sarahan - Michael Sarahan - num_commits: 2000 + num_commits: 2001 first_commit: 2015-09-04 21:31:08 - name: Jonathan J. Helmus email: jjhelmus@gmail.com @@ -1201,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 159 + num_commits: 164 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1224,7 +1225,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 30 + num_commits: 33 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1239,7 +1240,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 56 + num_commits: 61 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1261,7 +1262,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 42 + num_commits: 44 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1270,7 +1271,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 13 + num_commits: 14 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1280,7 +1281,7 @@ - name: Travis Hathaway email: travis.j.hathaway@gmail.com github: travishathaway - num_commits: 5 + num_commits: 6 first_commit: 2022-05-12 05:53:02 - name: Kyle Leaders email: remkade@users.noreply.github.com @@ -1326,7 +1327,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 8 + num_commits: 10 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1365,7 +1366,9 @@ github: peetw - name: Johnny email: johnnync13@gmail.com - num_commits: 1 + alternate_emails: + - johnnynuca14@gmail.com + num_commits: 2 first_commit: 2023-03-22 00:34:22 github: johnnynunez - name: Ryan Keith diff --git a/.mailmap b/.mailmap index 34499e8009..17e816d480 100644 --- a/.mailmap +++ b/.mailmap @@ -132,7 +132,7 @@ John Kirkham jakirkham John Kirkham John Kirkham John Kirkham John Kirkham John Omotani -Johnny +Johnny Johnny Jonathan J. 
Helmus Jonathan Helmus Jose Diaz-Gonzalez Joseph Crail @@ -183,8 +183,10 @@ Michael Cormier Michael Maltese Michael Sarahan Mike Sarahan Michael Sarahan Mike Sarahan +Michael Sarahan Mike Sarahan Michael Sarahan Michael Sarahan Michael Sarahan Michael Sarahan +Michael Sarahan Michael Sarahan Min RK MinRK Morten Enemark Lund mel Morten Lund diff --git a/CHANGELOG.md b/CHANGELOG.md index 62edf32ae8..7f2b7d5298 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,45 @@ [//]: # (current developments) +## 24.1.0 (2024-01-25) + +### Enhancements + +* Update `conda inspect channels` to use updated solver/transaction logic. (#5033) +* Relax `script_env` error in outputs when variable referenced in `script_env` is not defined. + This unifies current behavior with the top-level build. (#5105) +* Add support for Python 3.12. (#4997 via #4998) +* Adopt calender versioning (CalVer) per CEP-8 for consistency with conda. (#4975) +* Adopt expedited CEP-9 deprecation policy. (#5064) + +### Deprecations + +* Mark `conda inspect channels --test-installable` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) +* Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. (#5033) +* Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. (#5033) + +### Docs + +* Add GoatCounter (https://www.goatcounter.com/) as an analytics tool. (#5093) + +### Other + +* Remove unused Allure test report collection. (#5113) + +### Contributors + +* @conda-bot +* @dholth +* @jaimergp +* @jezdez +* @johnnynunez +* @kenodegard +* @msarahan +* @travishathaway +* @pre-commit-ci[bot] + + + ## 3.28.4 (2024-01-17) ### Bug fixes diff --git a/news/5033-update-conda-inspect-channels b/news/5033-update-conda-inspect-channels deleted file mode 100644 index 13fba0b6b4..0000000000 --- a/news/5033-update-conda-inspect-channels +++ /dev/null @@ -1,22 +0,0 @@ -### Enhancements - -* Update `conda inspect channels` to use updated solver/transaction logic. (#5033) - -### Bug fixes - -* - -### Deprecations - -* Mark `conda inspect channels --test-installable` as pending deprecation. (#5033) -* Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) -* Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. (#5033) -* Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. (#5033) - -### Docs - -* - -### Other - -* diff --git a/news/5093-add-goat-counter b/news/5093-add-goat-counter deleted file mode 100644 index 37b3a9b3b9..0000000000 --- a/news/5093-add-goat-counter +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* Add goat counter (https://www.goatcounter.com/) as an analytics tool. (#5093) - -### Other - -* diff --git a/news/5105-script-env-warn b/news/5105-script-env-warn deleted file mode 100644 index 5b7d66bad5..0000000000 --- a/news/5105-script-env-warn +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* Relax script_env error in outputs when variable referenced in script_env is not defined. - This unifies current behavior with the top-level build. 
(#5105) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/allure-removal b/news/allure-removal deleted file mode 100644 index 3a2df35fba..0000000000 --- a/news/allure-removal +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Remove unused Allure test report collection. From 3efdccbf9801d58c41011956a1f0aa9b94a17afb Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 26 Jan 2024 13:19:18 +0100 Subject: [PATCH 262/366] Trigger test runs on CalVer release branches. (#5150) --- .github/workflows/tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cfa10453e6..6bf8249c4c 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,6 +10,7 @@ on: - main - feature/** - '[0-9].*.x' # e.g., 3.24.x + - '[0-9][0-9].*.x' # e.g., 23.3.x # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: From 0fd0f0bc75a4c479722d8084939966d12bdefeec Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 26 Jan 2024 12:38:55 -0600 Subject: [PATCH 263/366] Update `pylint` from 2.3.1 to 2.7.4 (#5153) --- tests/test_api_skeleton.py | 53 +++++++++++++++++++++++++------------- 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 52039b9ed4..b19a2b2dde 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -33,14 +33,12 @@ "#sha256=6cf85a5cfe8fff69553e745b05128de6fc8de8f291965c63871c79701dc6efc9" ) -PYLINT_VERSION = "2.3.1" +PYLINT_VERSION = "2.7.4" # last version to use setup.py without setup.cfg PYLINT_HASH_TYPE = "sha256" -PYLINT_HASH_VALUE = "723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" -PYLINT_HASH_VALUE_BLAKE2 = ( - "018b538911c0ebc2529f15004f4cb07e3ca562bb9aacea5df89cc25b62e01891" -) +PYLINT_SHA256 = "bd38914c7731cdc518634a8d3c5585951302b6e2b6de60fbb3f7a0220e21eeee" +PYLINT_BLAKE2 = "2d5b491cf9e85288c29759a6535e6009938c2141b137b27a0653e435dcbad6a2" PYLINT_FILENAME = f"pylint-{PYLINT_VERSION}.tar.gz" -PYLINT_URL = f"https://files.pythonhosted.org/packages/{PYLINT_HASH_VALUE_BLAKE2[:2]}/{PYLINT_HASH_VALUE_BLAKE2[2:4]}/{PYLINT_HASH_VALUE_BLAKE2[4:]}/{PYLINT_FILENAME}" +PYLINT_URL = f"https://files.pythonhosted.org/packages/{PYLINT_BLAKE2[:2]}/{PYLINT_BLAKE2[2:4]}/{PYLINT_BLAKE2[4:]}/{PYLINT_FILENAME}" @pytest.fixture @@ -54,7 +52,7 @@ def mock_metadata(): "version": "UNKNOWN", "pypiurl": PYLINT_URL, "filename": PYLINT_FILENAME, - "digest": [PYLINT_HASH_TYPE, PYLINT_HASH_VALUE], + "digest": [PYLINT_HASH_TYPE, PYLINT_SHA256], "import_tests": "", "summary": "", } @@ -94,23 +92,30 @@ def pylint_pkginfo(): "extras_require": {':sys_platform=="win32"': ["colorama"]}, "home": "https://github.com/PyCQA/pylint", "install_requires": [ - "astroid>=2.2.0,<3", - "isort>=4.2.5,<5", - "mccabe>=0.6,<0.7", + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", + "mccabe >=0.6,<0.7", + "toml >=0.7.1", ], "license": "GPL", "name": "pylint", "packages": [ "pylint", "pylint.checkers", - "pylint.pyreverse", + "pylint.checkers.refactoring", + "pylint.config", "pylint.extensions", + "pylint.lint", + "pylint.message", + "pylint.pyreverse", "pylint.reporters", "pylint.reporters.ureports", + "pylint.testutils", + "pylint.utils", ], "setuptools": True, "summary": "python code static checker", - "tests_require": ["pytest"], + "tests_require": ["pytest", 
"pytest-benchmark"], "version": "2.3.1", } @@ -118,12 +123,18 @@ def pylint_pkginfo(): @pytest.fixture def pylint_metadata(): return { - "run_depends": ["astroid >=2.2.0,<3", "isort >=4.2.5,<5", "mccabe >=0.6,<0.7"], + "run_depends": [ + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", + "mccabe >=0.6,<0.7", + "toml >=0.7.1", + ], "build_depends": [ "pip", - "astroid >=2.2.0,<3", - "isort >=4.2.5,<5", + "astroid >=2.5.2,<2.7", + "isort >=4.2.5,<6", "mccabe >=0.6,<0.7", + "toml >=0.7.1", ], "entry_points": [ "pylint = pylint:run_pylint", @@ -137,18 +148,24 @@ def pylint_metadata(): "pyreverse --help", "symilar --help", ], - "tests_require": ["pytest"], + "tests_require": ["pytest", "pytest-benchmark"], "version": PYLINT_VERSION, "pypiurl": PYLINT_URL, "filename": PYLINT_FILENAME, - "digest": [PYLINT_HASH_TYPE, PYLINT_HASH_VALUE], + "digest": [PYLINT_HASH_TYPE, PYLINT_SHA256], "import_tests": [ "pylint", "pylint.checkers", + "pylint.checkers.refactoring", + "pylint.config", "pylint.extensions", + "pylint.lint", + "pylint.message", "pylint.pyreverse", "pylint.reporters", "pylint.reporters.ureports", + "pylint.testutils", + "pylint.utils", ], "summary": "python code static checker", "packagename": "pylint", @@ -297,7 +314,7 @@ def test_get_package_metadata(testing_config, mock_metadata, pylint_metadata): mock_metadata, {}, ".", - "3.7", + "3.9", False, False, [PYLINT_URL], From e4fd8d7022bf3ac868e86815750b37f447464461 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 26 Jan 2024 15:07:07 -0600 Subject: [PATCH 264/366] Remove deprecations (#5151) Co-authored-by: Jannis Leidel Co-authored-by: Marcel Bargull --- CHANGELOG.md | 52 ++++++++ conda_build/api.py | 43 ------ conda_build/cli/main_build.py | 196 +++++++++++++--------------- conda_build/cli/main_convert.py | 46 +++---- conda_build/cli/main_debug.py | 23 ++-- conda_build/cli/main_develop.py | 48 ++++--- conda_build/cli/main_index.py | 126 ------------------ conda_build/cli/main_inspect.py | 64 +++++---- conda_build/cli/main_metapackage.py | 51 ++++---- conda_build/cli/main_render.py | 61 ++++----- conda_build/cli/main_skeleton.py | 41 +++--- conda_build/conda_interface.py | 83 ------------ conda_build/config.py | 73 ----------- conda_build/environ.py | 48 ------- conda_build/index.py | 62 --------- conda_build/inspect_pkg.py | 21 --- conda_build/metadata.py | 2 - conda_build/os_utils/ldd.py | 9 -- conda_build/os_utils/liefldd.py | 33 ----- conda_build/os_utils/pyldd.py | 92 ------------- conda_build/plugin.py | 15 --- conda_build/post.py | 76 +---------- conda_build/utils.py | 31 ----- recipe/meta.yaml | 30 +++-- tests/cli/test_main_debug.py | 6 +- tests/cli/test_main_index.py | 11 -- tests/test_api_build.py | 5 +- tests/test_api_consistency.py | 39 ------ tests/test_api_update_index.py | 12 -- tests/test_conda_interface.py | 9 -- tests/test_misc.py | 17 +-- tests/test_subpackages.py | 1 - tests/test_utils.py | 91 ------------- 33 files changed, 326 insertions(+), 1191 deletions(-) delete mode 100644 conda_build/cli/main_index.py delete mode 100644 tests/cli/test_main_index.py delete mode 100644 tests/test_api_update_index.py delete mode 100644 tests/test_conda_interface.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f2b7d5298..a1937ae4c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,58 @@ * Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) * Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. 
(#5033) * Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. (#5033) +* Remove `conda_build.api.update_index`. (#5151) +* Remove `conda_build.cli.main_build.main`. (#5151) +* Remove `conda_build.cli.main_convert.main`. (#5151) +* Remove `conda_build.cli.main_debug.main`. (#5151) +* Remove `conda_build.cli.main_develop.main`. (#5151) +* Remove `conda_build.cli.main_index`. (#5151) +* Remove `conda_build.cli.main_inspect.main`. (#5151) +* Remove `conda_build.cli.main_metapackage.main`. (#5151) +* Remove `conda_build.cli.main_render.main`. (#5151) +* Remove `conda_build.cli.main_skeleton.main`. (#5151) +* Remove `conda_build.conda_interface.IndexRecord`. (#5151) +* Remove `conda_build.conda_interface.CrossPlatformStLink`. (#5151) +* Remove `conda_build.conda_interface.SignatureError`. (#5151) +* Remove `conda_build.conda_interface.which_package`. (#5151) +* Remove `conda_build.conda_interface.which_prefix`. (#5151) +* Remove `conda_build.conda_interface.get_installed_version`. (#5151) +* Remove `conda_build.config.python2_fs_encode`. (#5151) +* Remove `conda_build.config._ensure_dir`. (#5151) +* Remove `conda_build.config.Config.CONDA_LUA`. (#5151) +* Remove `conda_build.config.Config.CONDA_PY`. (#5151) +* Remove `conda_build.config.Config.CONDA_NPY`. (#5151) +* Remove `conda_build.config.Config.CONDA_PERL`. (#5151) +* Remove `conda_build.config.Config.CONDA_R`. (#5151) +* Remove `conda_build.environ.clean_pkg_cache`. (#5151) +* Remove `conda_build.index.update_index`. (#5151) +* Remove `conda_build.inspect_pkg.dist_files`. (#5151) +* Remove `conda_build.inspect_pkg.which_package(avoid_canonical_channel_name)`. (#5151) +* Remove `conda_build.inspect_pkg._installed`. (#5151) +* Remove `conda_build.metadata.Metadata.name(fail_ok)`. (#5151) +* Remove `conda_build.os_utils.ldd.get_package_files`. (#5151) +* Remove `conda_build.os_utils.liefldd.is_string`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type_liefldd`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type_pyldd`. (#5151) +* Remove `conda_build.os_utils.liefldd.codefile_type`. (#5151) +* Remove `conda_build.os_utils.pyldd.mach_o_change`. (#5151) +* Remove `conda_build.os_utils.pyldd.is_codefile`. (#5151) +* Remove `conda_build.os_utils.pyldd.codefile_type`. (#5151) +* Remove `conda_build.os_utils.pyldd.inspect_rpaths`. (#5151) +* Remove `conda_build.os_utils.pyldd.get_runpaths`. (#5151) +* Remove `conda_build.os_utils.pyldd.otool_sys`. (#5151) +* Remove `conda_build.os_utils.pyldd.ldd_sys`. (#5151) +* Remove `conda_build.plugin.index`. (#5151) +* Remove `conda_build.post.determine_package_nature`. (#5151) +* Remove `conda_build.post.library_nature(subdir)`. (#5151) +* Remove `conda_build.post.library_nature(bldpkgs_dirs)`. (#5151) +* Remove `conda_build.post.library_nature(output_folder)`. (#5151) +* Remove `conda_build.post.library_nature(channel_urls)`. (#5151) +* Remove `conda_build.post.dists_from_names`. (#5151) +* Remove `conda_build.post.FakeDist`. (#5151) +* Remove `conda_build.post._get_fake_pkg_dist`. (#5151) +* Remove `conda_build.utils.relative`. (#5151) +* Remove `conda_build.utils.samefile`. 
(#5151) ### Docs diff --git a/conda_build/api.py b/conda_build/api.py index 8f55488708..8a1298bbe9 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -20,7 +20,6 @@ # make the Config class available in the api namespace from .config import DEFAULT_PREFIX_LENGTH as _prefix_length from .config import Config, get_channel_urls, get_or_merge_config -from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSIONS, LoggingContext, @@ -523,48 +522,6 @@ def create_metapackage( ) -@deprecated("3.25.0", "24.1.0", addendum="Use standalone conda-index.") -def update_index( - dir_paths, - config=None, - force=False, - check_md5=False, - remove=False, - channel_name=None, - subdir=None, - threads=None, - patch_generator=None, - verbose=False, - progress=False, - hotfix_source_repo=None, - current_index_versions=None, - **kwargs, -): - import yaml - - from .index import update_index as legacy_update_index - - dir_paths = [os.path.abspath(path) for path in ensure_list(dir_paths)] - - if isinstance(current_index_versions, str): - with open(current_index_versions) as f: - current_index_versions = yaml.safe_load(f) - - for path in dir_paths: - legacy_update_index( - path, - check_md5=check_md5, - channel_name=channel_name, - patch_generator=patch_generator, - threads=threads, - verbose=verbose, - progress=progress, - subdirs=ensure_list(subdir), - current_index_versions=current_index_versions, - index_file=kwargs.get("index_file", None), - ) - - def debug( recipe_or_package_path_or_metadata_tuples, path=None, diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index e66ff0e11b..f84024df48 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -1,35 +1,42 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import logging import sys import warnings +from argparse import Namespace from glob import glob from itertools import chain from os.path import abspath, expanduser, expandvars from pathlib import Path +from typing import Sequence -import filelock from conda.auxlib.ish import dals from conda.common.io import dashlist from .. import api, build, source, utils -from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build +from ..conda_interface import ( + ArgumentParser, + add_parser_channels, + binstar_upload, + cc_conda_build, +) from ..config import ( get_channel_urls, get_or_merge_config, zstd_compression_level_default, ) -from ..deprecations import deprecated from ..utils import LoggingContext from .actions import KeyValueAction from .main_render import get_render_parser -def parse_args(args): - p = get_render_parser() - p.prog = "conda build" - p.description = dals( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = get_render_parser() + parser.prog = "conda build" + parser.description = dals( """ Tool for building conda packages. A conda package is a binary tarball containing system-level libraries, Python modules, executable programs, or @@ -38,71 +45,71 @@ def parse_args(args): different sets of packages. 
""" ) - p.add_argument( + parser.add_argument( "--check", action="store_true", help="Only check (validate) the recipe.", ) - p.add_argument( + parser.add_argument( "--no-anaconda-upload", action="store_false", help="Do not ask to upload the package to anaconda.org.", dest="anaconda_upload", default=binstar_upload, ) - p.add_argument( + parser.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, dest="anaconda_upload", default=binstar_upload, ) - p.add_argument( + parser.add_argument( "--no-include-recipe", action="store_false", help="Don't include the recipe inside the built package.", dest="include_recipe", default=cc_conda_build.get("include_recipe", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "-s", "--source", action="store_true", help="Only obtain the source (but don't build).", ) - p.add_argument( + parser.add_argument( "-t", "--test", action="store_true", help="Test package (assumes package is already built). RECIPE_DIR argument must be a " "path to built package .tar.bz2 file.", ) - p.add_argument( + parser.add_argument( "--no-test", action="store_true", dest="notest", help="Do not test the package.", ) - p.add_argument( + parser.add_argument( "-b", "--build-only", action="store_true", help="""Only run the build, without any post processing or testing. Implies --no-test and --no-anaconda-upload.""", ) - p.add_argument( + parser.add_argument( "-p", "--post", action="store_true", help="Run the post-build logic. Implies --no-anaconda-upload.", ) - p.add_argument( + parser.add_argument( "-p", "--test-run-post", action="store_true", help="Run the post-build logic during testing.", ) - p.add_argument( + parser.add_argument( "recipe", metavar="RECIPE_PATH", nargs="+", @@ -110,7 +117,7 @@ def parse_args(args): "work and test intermediates. Pass 'purge-all' to also remove " "previously built packages.", ) - p.add_argument( + parser.add_argument( "--skip-existing", action="store_true", help=( @@ -119,56 +126,56 @@ def parse_args(args): ), default=cc_conda_build.get("skip_existing", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--keep-old-work", action="store_true", dest="keep_old_work", help="Do not remove anything from environment, even after successful " "build and test.", ) - p.add_argument( + parser.add_argument( "--dirty", action="store_true", help="Do not remove work directory or _build environment, " "to speed up debugging. 
Does not apply patches or download source.", ) - p.add_argument( + parser.add_argument( "-q", "--quiet", action="store_true", help="do not display progress bar", default=cc_conda_build.get("quiet", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--debug", action="store_true", help="Show debug output from source checkouts and conda", ) - p.add_argument( + parser.add_argument( "--token", help="Token to pass through to anaconda upload", default=cc_conda_build.get("anaconda_token"), ) - p.add_argument( + parser.add_argument( "--user", help="User/organization to upload packages to on anaconda.org or pypi", default=cc_conda_build.get("user"), ) - p.add_argument( + parser.add_argument( "--label", action="append", dest="labels", default=[], help="Label argument to pass through to anaconda upload", ) - p.add_argument( + parser.add_argument( "--no-force-upload", help="Disable force upload to anaconda.org, preventing overwriting any existing packages", dest="force_upload", default=True, action="store_false", ) - p.add_argument( + parser.add_argument( "--zstd-compression-level", help=( "When building v2 packages, set the compression level used by " @@ -181,7 +188,7 @@ def parse_args(args): "zstd_compression_level", zstd_compression_level_default ), ) - pypi_grp = p.add_argument_group("PyPI upload parameters (twine)") + pypi_grp = parser.add_argument_group("PyPI upload parameters (twine)") pypi_grp.add_argument( "--password", help="password to use when uploading packages to pypi", @@ -213,14 +220,14 @@ def parse_args(args): help="PyPI repository to upload to", default=cc_conda_build.get("pypi_repository", "pypitest"), ) - p.add_argument( + parser.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", dest="activate", default=cc_conda_build.get("activate", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-build-id", action="store_false", help=( @@ -231,7 +238,7 @@ def parse_args(args): # note: inverted - dest stores positive logic default=cc_conda_build.get("set_build_id", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--build-id-pat", help=( "specify a templated pattern to use as build folder names. Use if having issues with " @@ -240,34 +247,34 @@ def parse_args(args): dest="build_id_pat", default=cc_conda_build.get("build_id_pat", "{n}_{t}"), ) - p.add_argument( + parser.add_argument( "--croot", help=( "Build root folder. Equivalent to CONDA_BLD_PATH, but applies only " "to this call of conda-build." ), ) - p.add_argument( + parser.add_argument( "--verify", action="store_true", help="run verification on recipes or packages when building", default=cc_conda_build.get("verify", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", default=cc_conda_build.get("verify", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", default=cc_conda_build.get("exit_on_verify_error", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--output-folder", help=( "folder to dump output package to. Package are moved here if build or test succeeds." 
@@ -275,7 +282,7 @@ def parse_args(args): ), default=cc_conda_build.get("output_folder"), ) - p.add_argument( + parser.add_argument( "--no-prefix-length-fallback", dest="prefix_length_fallback", action="store_false", @@ -285,7 +292,7 @@ def parse_args(args): ), default=True, ) - p.add_argument( + parser.add_argument( "--prefix-length-fallback", dest="prefix_length_fallback", action="store_true", @@ -297,7 +304,7 @@ def parse_args(args): # had enough time to build long-prefix length packages. default=True, ) - p.add_argument( + parser.add_argument( "--prefix-length", dest="_prefix_length", help=( @@ -312,7 +319,7 @@ def parse_args(args): default=255, type=int, ) - p.add_argument( + parser.add_argument( "--no-locking", dest="locking", default=True, @@ -322,7 +329,7 @@ def parse_args(args): "builds at once on one system with this set." ), ) - p.add_argument( + parser.add_argument( "--no-remove-work-dir", dest="remove_work_dir", default=True, @@ -333,7 +340,7 @@ def parse_args(args): "tests, but ultimately fail on installed systems." ), ) - p.add_argument( + parser.add_argument( "--error-overlinking", dest="error_overlinking", action="store_true", @@ -344,7 +351,7 @@ def parse_args(args): ), default=cc_conda_build.get("error_overlinking", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-error-overlinking", dest="error_overlinking", action="store_false", @@ -355,7 +362,7 @@ def parse_args(args): ), default=cc_conda_build.get("error_overlinking", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--error-overdepending", dest="error_overdepending", action="store_true", @@ -366,7 +373,7 @@ def parse_args(args): ), default=cc_conda_build.get("error_overdepending", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-error-overdepending", dest="error_overdepending", action="store_false", @@ -377,7 +384,7 @@ def parse_args(args): ), default=cc_conda_build.get("error_overdepending", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--long-test-prefix", action="store_true", help=( @@ -387,7 +394,7 @@ def parse_args(args): ), default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--no-long-test-prefix", dest="long_test_prefix", action="store_false", @@ -397,7 +404,7 @@ def parse_args(args): ), default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", ) - p.add_argument( + parser.add_argument( "--keep-going", "-k", action="store_true", @@ -406,7 +413,7 @@ def parse_args(args): "failure." ), ) - p.add_argument( + parser.add_argument( "--cache-dir", help=( "Path to store the source files (archives, git clones, etc.) during the build." @@ -417,7 +424,7 @@ def parse_args(args): else cc_conda_build.get("cache_dir") ), ) - p.add_argument( + parser.add_argument( "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", @@ -430,7 +437,7 @@ def parse_args(args): "in the future." ), ) - p.add_argument( + parser.add_argument( "--merge-build-host", action="store_true", help=( @@ -439,11 +446,11 @@ def parse_args(args): ), default=cc_conda_build.get("merge_build_host", "false").lower() == "true", ) - p.add_argument( + parser.add_argument( "--stats-file", help="File path to save build statistics to. Stats are in JSON format", ) - p.add_argument( + parser.add_argument( "--extra-deps", nargs="+", help=( @@ -452,7 +459,7 @@ def parse_args(args): "meta.yaml or use templates otherwise." 
), ) - p.add_argument( + parser.add_argument( "--extra-meta", nargs="*", action=KeyValueAction, @@ -460,7 +467,7 @@ def parse_args(args): "defined as Key=Value with a space separating each pair.", metavar="KEY=VALUE", ) - p.add_argument( + parser.add_argument( "--suppress-variables", action="store_true", help=( @@ -468,12 +475,11 @@ def parse_args(args): ), ) - add_parser_channels(p) - args = p.parse_args(args) - - check_recipe(args.recipe) + add_parser_channels(parser) - return p, args + parsed = parser.parse_args(args) + check_recipe(parsed.recipe) + return parser, parsed def check_recipe(path_list): @@ -516,44 +522,44 @@ def check_action(recipe, config): return api.check(recipe, config=config) -def execute(args): - _parser, args = parse_args(args) - config = get_or_merge_config(None, **args.__dict__) +def execute(args: Sequence[str] | None = None): + _, parsed = parse_args(args) + config = get_or_merge_config(None, **parsed.__dict__) build.check_external() # change globals in build module, see comment there as well - config.channel_urls = get_channel_urls(args.__dict__) + config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = args.override_channels - config.verbose = not args.quiet or args.debug + config.override_channels = parsed.override_channels + config.verbose = not parsed.quiet or parsed.debug - if "purge" in args.recipe: + if "purge" in parsed.recipe: build.clean_build(config) return - if "purge-all" in args.recipe: + if "purge-all" in parsed.recipe: build.clean_build(config) config.clean_pkgs() return outputs = None - if args.output: + if parsed.output: config.verbose = False config.quiet = True config.debug = False - outputs = [output_action(recipe, config) for recipe in args.recipe] - elif args.test: + outputs = [output_action(recipe, config) for recipe in parsed.recipe] + elif parsed.test: outputs = [] failed_recipes = [] recipes = chain.from_iterable( glob(abspath(recipe), recursive=True) if "*" in recipe else [recipe] - for recipe in args.recipe + for recipe in parsed.recipe ) for recipe in recipes: try: test_action(recipe, config) except: - if not args.keep_going: + if not parsed.keep_going: raise else: failed_recipes.append(recipe) @@ -564,40 +570,24 @@ def execute(args): sys.exit(len(failed_recipes)) else: print("All tests passed") - elif args.source: - outputs = [source_action(recipe, config) for recipe in args.recipe] - elif args.check: - outputs = [check_action(recipe, config) for recipe in args.recipe] + elif parsed.source: + outputs = [source_action(recipe, config) for recipe in parsed.recipe] + elif parsed.check: + outputs = [check_action(recipe, config) for recipe in parsed.recipe] else: outputs = api.build( - args.recipe, - post=args.post, - test_run_post=args.test_run_post, - build_only=args.build_only, - notest=args.notest, + parsed.recipe, + post=parsed.post, + test_run_post=parsed.test_run_post, + build_only=parsed.build_only, + notest=parsed.notest, already_built=None, config=config, - verify=args.verify, - variants=args.variants, - cache_dir=args.cache_dir, + verify=parsed.verify, + variants=parsed.variants, + cache_dir=parsed.cache_dir, ) - if not args.output and len(utils.get_build_folders(config.croot)) > 0: + if not parsed.output and len(utils.get_build_folders(config.croot)) > 0: build.print_build_intermediate_warning(config) return outputs - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda build` instead.") -def main(): - try: - execute(sys.argv[1:]) - except RuntimeError as e: - print(str(e)) - sys.exit(1) - 
except filelock.Timeout as e: - print( - f"File lock on {e.lock_file} could not be obtained. You might need to try fewer builds at once." - " Otherwise, run conda clean --lock" - ) - sys.exit(1) - return diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 34b748d407..14b5b4f9b0 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -1,12 +1,14 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging -import sys +from argparse import Namespace from os.path import abspath, expanduser +from typing import Sequence from .. import api from ..conda_interface import ArgumentParser -from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) @@ -35,8 +37,8 @@ """ -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = ArgumentParser( prog="conda convert", description=""" Various tools to convert conda packages. Takes a pure Python package build for @@ -46,8 +48,8 @@ def parse_args(args): ) # TODO: Factor this into a subcommand, since it's python package specific - p.add_argument("files", nargs="+", help="Package files to convert.") - p.add_argument( + parser.add_argument("files", nargs="+", help="Package files to convert.") + parser.add_argument( "-p", "--platform", dest="platforms", @@ -71,7 +73,7 @@ def parse_args(args): help="Platform to convert the packages to.", default=None, ) - p.add_argument( + parser.add_argument( "--dependencies", "-d", nargs="*", @@ -79,19 +81,19 @@ def parse_args(args): package. To specify a version restriction for a dependency, wrap the dependency in quotes, like 'package >=2.0'.""", ) - p.add_argument( + parser.add_argument( "--show-imports", action="store_true", default=False, help="Show Python imports for compiled parts of the package.", ) - p.add_argument( + parser.add_argument( "-f", "--force", action="store_true", help="Force convert, even when a package has compiled C extensions.", ) - p.add_argument( + parser.add_argument( "-o", "--output-dir", default=".", @@ -99,36 +101,30 @@ def parse_args(args): organized in platform/ subdirectories, e.g., win-32/package-1.0-py27_0.tar.bz2.""", ) - p.add_argument( + parser.add_argument( "-v", "--verbose", default=False, action="store_true", help="Print verbose output.", ) - p.add_argument( + parser.add_argument( "--dry-run", action="store_true", help="Only display what would have been done.", ) - p.add_argument( + parser.add_argument( "-q", "--quiet", action="store_true", help="Don't print as much output." 
) - args = p.parse_args(args) - return p, args + return parser, parser.parse_args(args) -def execute(args): - _, args = parse_args(args) - files = args.files - del args.__dict__["files"] +def execute(args: Sequence[str] | None = None): + _, parsed = parse_args(args) + files = parsed.files + del parsed.__dict__["files"] for f in files: f = abspath(expanduser(f)) - api.convert(f, **args.__dict__) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda convert` instead.") -def main(): - return execute(sys.argv[1:]) + api.convert(f, **parsed.__dict__) diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index dd29f988ee..b01b8b5e03 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -1,11 +1,13 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging import sys from argparse import ArgumentParser +from typing import Sequence from .. import api -from ..deprecations import deprecated from ..utils import on_win from . import validators as valid from .main_render import get_render_parser @@ -86,25 +88,25 @@ def get_parser() -> ArgumentParser: return p -def execute(args): +def execute(args: Sequence[str] | None = None): parser = get_parser() - args = parser.parse_args(args) + parsed = parser.parse_args(args) try: activation_string = api.debug( - args.recipe_or_package_file_path, - verbose=(not args.activate_string_only), - **args.__dict__, + parsed.recipe_or_package_file_path, + verbose=(not parsed.activate_string_only), + **parsed.__dict__, ) - if not args.activate_string_only: + if not parsed.activate_string_only: print("#" * 80) print( "Test environment created for debugging. To enter a debugging environment:\n" ) print(activation_string) - if not args.activate_string_only: + if not parsed.activate_string_only: test_file = "conda_test_runner.bat" if on_win else "conda_test_runner.sh" print( f"To run your tests, you might want to start with running the {test_file} file." @@ -116,8 +118,3 @@ def execute(args): f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr ) sys.exit(1) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda debug` instead.") -def main(): - return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 68abbbce00..2c81a4edc1 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -1,19 +1,21 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging -import sys +from argparse import Namespace +from typing import Sequence from conda.base.context import context, determine_target_prefix from .. import api from ..conda_interface import ArgumentParser, add_parser_prefix -from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = ArgumentParser( prog="conda develop", description=""" @@ -23,10 +25,10 @@ def parse_args(args): # TODO: Use setup.py to determine any entry-points to install. ) - p.add_argument( + parser.add_argument( "source", metavar="PATH", nargs="+", help="Path to the source directory." ) - p.add_argument( + parser.add_argument( "-npf", "--no-pth-file", action="store_true", @@ -36,7 +38,7 @@ def parse_args(args): "Do not add source to conda.pth." 
), ) - p.add_argument( + parser.add_argument( "-b", "--build_ext", action="store_true", @@ -47,7 +49,7 @@ def parse_args(args): "environment's lib/." ), ) - p.add_argument( + parser.add_argument( "-c", "--clean", action="store_true", @@ -57,7 +59,7 @@ def parse_args(args): "use with build_ext to clean before building." ), ) - p.add_argument( + parser.add_argument( "-u", "--uninstall", action="store_true", @@ -68,26 +70,20 @@ def parse_args(args): ), ) - add_parser_prefix(p) - p.set_defaults(func=execute) + add_parser_prefix(parser) + parser.set_defaults(func=execute) - args = p.parse_args(args) - return p, args + return parser, parser.parse_args(args) -def execute(args): - _, args = parse_args(args) - prefix = determine_target_prefix(context, args) +def execute(args: Sequence[str] | None = None): + _, parsed = parse_args(args) + prefix = determine_target_prefix(context, parsed) api.develop( - args.source, + parsed.source, prefix=prefix, - no_pth_file=args.no_pth_file, - build_ext=args.build_ext, - clean=args.clean, - uninstall=args.uninstall, + no_pth_file=parsed.no_pth_file, + build_ext=parsed.build_ext, + clean=parsed.clean, + uninstall=parsed.uninstall, ) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda develop` instead.") -def main(): - return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py deleted file mode 100644 index 79cff86fa6..0000000000 --- a/conda_build/cli/main_index.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import logging -import os -import sys - -from conda_index.index import MAX_THREADS_DEFAULT -from conda_index.utils import DEFAULT_SUBDIRS - -from .. import api -from ..conda_interface import ArgumentParser -from ..deprecations import deprecated - -logging.basicConfig(level=logging.INFO) - - -def parse_args(args): - p = ArgumentParser( - prog="conda index", - description="Update package index metadata files in given directories. " - "Pending deprecated, please use the standalone conda-index project.", - ) - - p.add_argument( - "dir", - help="Directory that contains an index to be updated.", - nargs="*", - default=[os.getcwd()], - ) - - p.add_argument( - "-c", - "--check-md5", - action="store_true", - help="""Use hash values instead of file modification times for determining if a - package's metadata needs to be updated.""", - ) - p.add_argument( - "-n", - "--channel-name", - help="Customize the channel name listed in each channel's index.html.", - ) - p.add_argument( - "-s", - "--subdir", - action="append", - help="Optional. The subdir to index. Can be given multiple times. If not provided, will " - "default to all of %s. If provided, will not create channeldata.json for the channel." 
- "" % ", ".join(DEFAULT_SUBDIRS), - ) - p.add_argument( - "-t", - "--threads", - default=MAX_THREADS_DEFAULT, - type=int, - ) - p.add_argument( - "-p", - "--patch-generator", - help="Path to Python file that outputs metadata patch instructions from its " - "_patch_repodata function or a .tar.bz2/.conda file which contains a " - "patch_instructions.json file for each subdir", - ) - p.add_argument( - "--hotfix-source-repo", - help="Deprecated, will be removed in a future version of conda build", - ) - p.add_argument("--verbose", help="show extra debugging info", action="store_true") - p.add_argument( - "--no-progress", - help="Hide progress bars", - action="store_false", - dest="progress", - ) - p.add_argument( - "--current-index-versions-file", - "-m", - help=""" - YAML file containing name of package as key, and list of versions as values. The current_index.json - will contain the newest from this series of versions. For example: - - python: - - 2.7 - - 3.6 - - will keep python 2.7.X and 3.6.Y in the current_index.json, instead of only the very latest python version. - """, - ) - p.add_argument( - "-f", - "--file", - help="A file that contains a new line separated list of packages to add to repodata.", - action="store", - ) - - args = p.parse_args(args) - return p, args - - -def execute(args): - _, args = parse_args(args) - - deprecated.topic( - "3.25.0", - "24.1.0", - topic="`conda index` and `conda-index`", - addendum="Use the `conda-index` project instead.", - ) - - api.update_index( - args.dir, - check_md5=args.check_md5, - channel_name=args.channel_name, - threads=args.threads, - subdir=args.subdir, - patch_generator=args.patch_generator, - verbose=args.verbose, - progress=args.progress, - current_index_versions=args.current_index_versions_file, - index_file=args.file, - ) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda index` instead.") -def main(): - return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 7f9a30c847..58cba771dc 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -1,21 +1,24 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import logging import sys +from argparse import Namespace from os.path import expanduser from pprint import pprint +from typing import Sequence from conda.base.context import context, determine_target_prefix from .. import api from ..conda_interface import ArgumentParser, add_parser_prefix -from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = ArgumentParser( prog="conda inspect", description="Tools for inspecting conda packages.", epilog=""" @@ -23,7 +26,7 @@ def parse_args(args): options available. 
""", ) - subcommand = p.add_subparsers( + subcommand = parser.add_subparsers( dest="subcommand", ) @@ -177,49 +180,44 @@ def parse_args(args): nargs="*", help="Conda packages to inspect.", ) - args = p.parse_args(args) - return p, args + + return parser, parser.parse_args(args) -def execute(args): - parser, args = parse_args(args) +def execute(args: Sequence[str] | None = None): + parser, parsed = parse_args(args) - if not args.subcommand: + if not parsed.subcommand: parser.print_help() sys.exit(0) - elif args.subcommand == "channels": - print(api.test_installable(args.channel)) - elif args.subcommand == "linkages": + elif parsed.subcommand == "channels": + print(api.test_installable(parsed.channel)) + elif parsed.subcommand == "linkages": print( api.inspect_linkages( - args.packages, - prefix=determine_target_prefix(context, args), - untracked=args.untracked, - all_packages=args.all, - show_files=args.show_files, - groupby=args.groupby, - sysroot=expanduser(args.sysroot), + parsed.packages, + prefix=determine_target_prefix(context, parsed), + untracked=parsed.untracked, + all_packages=parsed.all, + show_files=parsed.show_files, + groupby=parsed.groupby, + sysroot=expanduser(parsed.sysroot), ) ) - elif args.subcommand == "objects": + elif parsed.subcommand == "objects": print( api.inspect_objects( - args.packages, - prefix=determine_target_prefix(context, args), - groupby=args.groupby, + parsed.packages, + prefix=determine_target_prefix(context, parsed), + groupby=parsed.groupby, ) ) - elif args.subcommand == "prefix-lengths": + elif parsed.subcommand == "prefix-lengths": if not api.inspect_prefix_length( - args.packages, min_prefix_length=args.min_prefix_length + parsed.packages, min_prefix_length=parsed.min_prefix_length ): sys.exit(1) - elif args.subcommand == "hash-inputs": - pprint(api.inspect_hash_inputs(args.packages)) + elif parsed.subcommand == "hash-inputs": + pprint(api.inspect_hash_inputs(parsed.packages)) else: - parser.error(f"Unrecognized subcommand: {args.subcommand}.") - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda inspect` instead.") -def main(): - return execute(sys.argv[1:]) + parser.error(f"Unrecognized subcommand: {parsed.subcommand}.") diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 4b7e4df110..b4c610aea8 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -1,18 +1,20 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import logging -import sys +from argparse import Namespace +from typing import Sequence from .. import api from ..conda_interface import ArgumentParser, add_parser_channels, binstar_upload -from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = ArgumentParser( prog="conda metapackage", description=""" Tool for building conda metapackages. 
A metapackage is a package with no @@ -25,51 +27,51 @@ def parse_args(args): """, ) - p.add_argument( + parser.add_argument( "--no-anaconda-upload", action="store_false", help="Do not ask to upload the package to anaconda.org.", dest="anaconda_upload", default=binstar_upload, ) - p.add_argument( + parser.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, dest="anaconda_upload", default=binstar_upload, ) - p.add_argument("--token", help="Token to pass through to anaconda upload") - p.add_argument( + parser.add_argument("--token", help="Token to pass through to anaconda upload") + parser.add_argument( "--user", help="User/organization to upload packages to on anaconda.org" ) - p.add_argument( + parser.add_argument( "--label", action="append", dest="labels", default=[], help="Label argument to pass through to anaconda upload", ) - p.add_argument( + parser.add_argument( "name", help="Name of the created package.", ) - p.add_argument( + parser.add_argument( "version", help="Version of the created package.", ) - p.add_argument( + parser.add_argument( "--build-number", type=int, default=0, help="Build number for the package (default is 0).", ) - p.add_argument( + parser.add_argument( "--build-string", default=None, help="Build string for the package (default is automatically generated).", ) - p.add_argument( + parser.add_argument( "--dependencies", "-d", nargs="*", @@ -77,21 +79,21 @@ def parse_args(args): help="""The dependencies of the package. To specify a version restriction for a dependency, wrap the dependency in quotes, like 'package >=2.0'.""", ) - p.add_argument( + parser.add_argument( "--home", help="The homepage for the metapackage.", ) - p.add_argument( + parser.add_argument( "--license", help="The license of the metapackage.", dest="license_name" ) - p.add_argument( + parser.add_argument( "--summary", help="""Summary of the package. Pass this in as a string on the command line, like --summary 'A metapackage for X'. It is recommended to use single quotes if you are not doing variable substitution to avoid interpretation of special characters.""", ) - p.add_argument( + parser.add_argument( "--entry-points", nargs="*", default=(), @@ -101,17 +103,12 @@ def parse_args(args): bsdiff4 that calls bsdiff4.cli.main_bsdiff4(). """, ) - add_parser_channels(p) - args = p.parse_args(args) - return p, args + add_parser_channels(parser) + + return parser, parser.parse_args(args) -def execute(args): +def execute(args: Sequence[str] | None = None): _, args = parse_args(args) channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or () api.create_metapackage(channel_urls=channel_urls, **args.__dict__) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda metapackage` instead.") -def main(): - return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index cdd831021b..155c0e7739 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -1,9 +1,12 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import logging -import sys +from argparse import Namespace from pprint import pprint +from typing import Sequence import yaml from yaml.parser import ParserError @@ -11,7 +14,6 @@ from .. 
import __version__, api from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config -from ..deprecations import deprecated from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars @@ -163,70 +165,70 @@ def get_render_parser(): return p -def parse_args(args): - p = get_render_parser() - p.add_argument( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = get_render_parser() + parser.add_argument( "-f", "--file", help="write YAML to file, given as argument here.\ Overwrites existing files.", ) # we do this one separately because we only allow one entry to conda render - p.add_argument( + parser.add_argument( "recipe", metavar="RECIPE_PATH", help="Path to recipe directory.", ) # this is here because we have a different default than build - p.add_argument( + parser.add_argument( "--verbose", action="store_true", help="Enable verbose output from download tools and progress updates", ) - args, _ = p.parse_known_args(args) - return p, args + + return parser, parser.parse_args(args) -def execute(args, print_results=True): - p, args = parse_args(args) +def execute(args: Sequence[str] | None = None, print_results: bool = True): + _, parsed = parse_args(args) - config = get_or_merge_config(None, **args.__dict__) + config = get_or_merge_config(None, **parsed.__dict__) - variants = get_package_variants(args.recipe, config, variants=args.variants) + variants = get_package_variants(parsed.recipe, config, variants=parsed.variants) from ..build import get_all_replacements get_all_replacements(variants) set_language_env_vars(variants) - config.channel_urls = get_channel_urls(args.__dict__) + config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = args.override_channels + config.override_channels = parsed.override_channels - if args.output: + if parsed.output: config.verbose = False config.debug = False metadata_tuples = api.render( - args.recipe, + parsed.recipe, config=config, - no_download_source=args.no_source, - variants=args.variants, + no_download_source=parsed.no_source, + variants=parsed.variants, ) - if args.file and len(metadata_tuples) > 1: + if parsed.file and len(metadata_tuples) > 1: log.warning( "Multiple variants rendered. " - f"Only one will be written to the file you specified ({args.file})." + f"Only one will be written to the file you specified ({parsed.file})." 
) if print_results: - if args.output: + if parsed.output: with LoggingContext(logging.CRITICAL + 1): paths = api.get_output_file_paths(metadata_tuples, config=config) print("\n".join(sorted(paths))) - if args.file: + if parsed.file: m = metadata_tuples[-1][0] - api.output_yaml(m, args.file, suppress_outputs=True) + api.output_yaml(m, parsed.file, suppress_outputs=True) else: logging.basicConfig(level=logging.INFO) for m, _, _ in metadata_tuples: @@ -237,15 +239,6 @@ def execute(args, print_results=True): print("----------") print("meta.yaml:") print("----------") - print(api.output_yaml(m, args.file, suppress_outputs=True)) + print(api.output_yaml(m, parsed.file, suppress_outputs=True)) else: return metadata_tuples - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda render` instead.") -def main(): - return execute(sys.argv[1:]) - - -if __name__ == "__main__": - main() diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 219c8d1734..ade4b14d0e 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -1,22 +1,25 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import importlib import logging import os import pkgutil import sys +from argparse import Namespace +from typing import Sequence from .. import api from ..conda_interface import ArgumentParser from ..config import Config -from ..deprecations import deprecated thisdir = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.INFO) -def parse_args(args): - p = ArgumentParser( +def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + parser = ArgumentParser( prog="conda skeleton", description=""" Generates a boilerplate/skeleton recipe, which you can then edit to create a @@ -28,7 +31,7 @@ def parse_args(args): """, ) - repos = p.add_subparsers(dest="repo") + repos = parser.add_subparsers(dest="repo") skeletons = [ name @@ -40,32 +43,22 @@ def parse_args(args): module = importlib.import_module("conda_build.skeletons." 
+ skeleton) module.add_parser(repos) - args = p.parse_args(args) - return p, args + return parser, parser.parse_args(args) -def execute(args): - parser, args = parse_args(args) - config = Config(**args.__dict__) +def execute(args: Sequence[str] | None = None): + parser, parsed = parse_args(args) + config = Config(**parsed.__dict__) - if not args.repo: + if not parsed.repo: parser.print_help() sys.exit() api.skeletonize( - args.packages, - args.repo, - output_dir=args.output_dir, - recursive=args.recursive, - version=args.version, + parsed.packages, + parsed.repo, + output_dir=parsed.output_dir, + recursive=parsed.recursive, + version=parsed.version, config=config, ) - - -@deprecated("3.26.0", "24.1.0", addendum="Use `conda skeleton` instead.") -def main(): - return execute(sys.argv[1:]) - - -if __name__ == "__main__": - main() diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 78eeb03f95..929384e459 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -6,8 +6,6 @@ import os from functools import partial from importlib import import_module # noqa: F401 -from pathlib import Path -from typing import Iterable from conda import __version__ as CONDA_VERSION # noqa: F401 from conda.auxlib.packaging import ( # noqa: F401 @@ -76,17 +74,6 @@ ) from conda.models.channel import get_conda_build_local_url # noqa: F401 from conda.models.dist import Dist # noqa: F401 -from conda.models.records import PackageRecord, PrefixRecord - -from .deprecations import deprecated - -deprecated.constant( - "3.28.0", - "24.1.0", - "IndexRecord", - PackageRecord, - addendum="Use `conda.models.records.PackageRecord` instead.", -) # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload @@ -109,76 +96,6 @@ os.environ["CONDA_ALLOW_SOFTLINKS"] = "false" reset_context() - -class CrossPlatformStLink: - def __call__(self, path: str | os.PathLike) -> int: - return self.st_nlink(path) - - @staticmethod - @deprecated("3.24.0", "24.1.0", addendum="Use `os.stat().st_nlink` instead.") - def st_nlink(path: str | os.PathLike) -> int: - return os.stat(path).st_nlink - - -@deprecated("3.28.0", "24.1.0") -class SignatureError(Exception): - # TODO: What is this? 
🤔 - pass - - -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda_build.inspect_pkg.which_package` instead.", -) -def which_package(path: str | os.PathLike | Path) -> Iterable[PrefixRecord]: - from .inspect_pkg import which_package - - return which_package(path, which_prefix(path)) - - -@deprecated("3.28.0", "24.1.0") -def which_prefix(path: str | os.PathLike | Path) -> Path: - """ - Given the path (to a (presumably) conda installed file) return the - environment prefix in which the file in located - """ - from conda.gateways.disk.test import is_conda_environment - - prefix = Path(path) - for _ in range(20): - if is_conda_environment(prefix): - return prefix - elif prefix == (parent := prefix.parent): - # we cannot chop off any more directories, so we didn't find it - break - else: - prefix = parent - - raise RuntimeError("could not determine conda prefix from: %s" % path) - - -@deprecated("3.28.0", "24.1.0") -def get_installed_version(prefix, pkgs): - """ - Primarily used by conda-forge, but may be useful in general for checking when - a package needs to be updated - """ - from .utils import ensure_list - - pkgs = ensure_list(pkgs) - linked_pkgs = linked(prefix) - versions = {} - for pkg in pkgs: - vers_inst = [ - dist.split("::", 1)[-1].rsplit("-", 2)[1] - for dist in linked_pkgs - if dist.split("::", 1)[-1].rsplit("-", 2)[0] == pkg - ] - versions[pkg] = vers_inst[0] if len(vers_inst) == 1 else None - return versions - - # When deactivating envs (e.g. switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None) diff --git a/conda_build/config.py b/conda_build/config.py index 89c158e52d..a631a69593 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -24,7 +24,6 @@ subdir, url_path, ) -from .deprecations import deprecated from .utils import ( get_build_folders, get_conda_operation_locks, @@ -64,23 +63,6 @@ def set_invocation_time(): zstd_compression_level_default = 19 -@deprecated("3.25.0", "24.1.0") -def python2_fs_encode(strin): - return strin - - -@deprecated( - "3.25.0", - "24.1.0", - addendum=( - "Use `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` " - "instead." - ), -) -def _ensure_dir(path: os.PathLike): - os.makedirs(path, exist_ok=True) - - # we need this to be accessible to the CLI, so it needs to be more static. 
DEFAULT_PREFIX_LENGTH = 255 @@ -499,61 +481,6 @@ def build_folder(self): It has the environments and work directories.""" return os.path.join(self.croot, self.build_id) - # back compat for conda-build-all - expects CONDA_* vars to be attributes of the config object - @property - @deprecated("3.0.28", "24.1.0") - def CONDA_LUA(self): - return self.variant.get("lua", get_default_variant(self)["lua"]) - - @CONDA_LUA.setter - @deprecated("3.0.28", "24.1.0") - def CONDA_LUA(self, value): - self.variant["lua"] = value - - @property - @deprecated("3.0.28", "24.1.0") - def CONDA_PY(self): - value = self.variant.get("python", get_default_variant(self)["python"]) - return int("".join(value.split("."))) - - @CONDA_PY.setter - @deprecated("3.0.28", "24.1.0") - def CONDA_PY(self, value): - value = str(value) - self.variant["python"] = ".".join((value[0], value[1:])) - - @property - @deprecated("3.0.28", "24.1.0") - def CONDA_NPY(self): - value = self.variant.get("numpy", get_default_variant(self)["numpy"]) - return int("".join(value.split("."))) - - @CONDA_NPY.setter - @deprecated("3.0.28", "24.1.0") - def CONDA_NPY(self, value): - value = str(value) - self.variant["numpy"] = ".".join((value[0], value[1:])) - - @property - @deprecated("3.0.28", "24.1.0") - def CONDA_PERL(self): - return self.variant.get("perl", get_default_variant(self)["perl"]) - - @CONDA_PERL.setter - @deprecated("3.0.28", "24.1.0") - def CONDA_PERL(self, value): - self.variant["perl"] = value - - @property - @deprecated("3.0.28", "24.1.0") - def CONDA_R(self): - return self.variant.get("r_base", get_default_variant(self)["r_base"]) - - @CONDA_R.setter - @deprecated("3.0.28", "24.1.0") - def CONDA_R(self, value): - self.variant["r_base"] = value - def _get_python(self, prefix, platform): if platform.startswith("win") or (platform == "noarch" and on_win): if os.path.isfile(os.path.join(prefix, "python_d.exe")): diff --git a/conda_build/environ.py b/conda_build/environ.py index 319b4e7bf4..52f5e835ed 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -26,15 +26,12 @@ create_default_packages, display_actions, execute_actions, - execute_plan, get_version_from_git_tag, install_actions, - package_cache, pkgs_dirs, reset_context, root_dir, ) -from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index @@ -1219,51 +1216,6 @@ def remove_existing_packages(dirs, fns, config): utils.rm_rf(entry) -@deprecated("3.28.0", "24.1.0") -def clean_pkg_cache(dist, config): - locks = [] - - conda_log_level = logging.WARN - if config.debug: - conda_log_level = logging.DEBUG - - with utils.LoggingContext(conda_log_level): - locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) - with utils.try_acquire_locks(locks, timeout=config.timeout): - rmplan = [ - f"RM_EXTRACTED {dist} local::{dist}", - f"RM_FETCHED {dist} local::{dist}", - ] - execute_plan(rmplan) - - # Conda does not seem to do a complete cleanup sometimes. This is supplemental. - # Conda's cleanup is still necessary - it keeps track of its own in-memory - # list of downloaded things. 
- for folder in pkgs_dirs: - if ( - os.path.exists(os.path.join(folder, dist)) - or os.path.exists(os.path.join(folder, dist + ".tar.bz2")) - or any( - pkg_id in package_cache() for pkg_id in [dist, "local::" + dist] - ) - ): - log = utils.get_logger(__name__) - log.debug( - "Conda caching error: %s package remains in cache after removal", - dist, - ) - log.debug("manually removing to compensate") - cache = package_cache() - keys = [key for key in cache.keys() if dist in key] - for pkg_id in keys: - if pkg_id in cache: - del cache[pkg_id] - - # Note that this call acquires the relevant locks, so this must be called - # outside the lock context above. - remove_existing_packages(pkgs_dirs, [dist], config) - - def get_pinned_deps(m, section): with TemporaryDirectory(prefix="_") as tmpdir: actions = get_install_actions( diff --git a/conda_build/index.py b/conda_build/index.py index 8ac164dccf..9a0004e771 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -61,7 +61,6 @@ human_bytes, url_path, ) -from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, @@ -329,67 +328,6 @@ def _delegated_update_index( # Everything below is deprecated to maintain API/feature compatibility. -@deprecated("3.25.0", "24.1.0", addendum="Use standalone conda-index.") -def update_index( - dir_path, - check_md5=False, - channel_name=None, - patch_generator=None, - threads=MAX_THREADS_DEFAULT, - verbose=False, - progress=False, - hotfix_source_repo=None, - subdirs=None, - warn=True, - current_index_versions=None, - debug=False, - index_file=None, -): - """ - If dir_path contains a directory named 'noarch', the path tree therein is treated - as though it's a full channel, with a level of subdirs, each subdir having an update - to repodata.json. The full channel will also have a channeldata.json file. - - If dir_path does not contain a directory named 'noarch', but instead contains at least - one '*.tar.bz2' file, the directory is assumed to be a standard subdir, and only repodata.json - information will be updated. - - """ - base_path, dirname = os.path.split(dir_path) - if dirname in utils.DEFAULT_SUBDIRS: - if warn: - log.warn( - "The update_index function has changed to index all subdirs at once. " - "You're pointing it at a single subdir. " - "Please update your code to point it at the channel root, rather than a subdir." 
- ) - return update_index( - base_path, - check_md5=check_md5, - channel_name=channel_name, - threads=threads, - verbose=verbose, - progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions, - ) - return ChannelIndex( - dir_path, - channel_name, - subdirs=subdirs, - threads=threads, - deep_integrity_check=check_md5, - debug=debug, - ).index( - patch_generator=patch_generator, - verbose=verbose, - progress=progress, - hotfix_source_repo=hotfix_source_repo, - current_index_versions=current_index_versions, - index_file=index_file, - ) - - def _determine_namespace(info): if info.get("namespace"): namespace = info["namespace"] diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index bb69707456..3f3fba7545 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -6,7 +6,6 @@ import os import sys from collections import defaultdict -from functools import lru_cache from itertools import groupby from operator import itemgetter from os.path import abspath, basename, dirname, exists, join, normcase @@ -17,13 +16,10 @@ from conda.api import Solver from conda.core.index import get_index from conda.core.prefix_data import PrefixData -from conda.models.dist import Dist from conda.models.records import PrefixRecord -from conda.resolve import MatchSpec from . import conda_interface from .conda_interface import ( - linked_data, specs_from_args, ) from .deprecations import deprecated @@ -47,18 +43,6 @@ log = get_logger(__name__) -@deprecated("3.28.0", "24.1.0") -@lru_cache(maxsize=None) -def dist_files(prefix: str | os.PathLike | Path, dist: Dist) -> set[str]: - if (prec := PrefixData(str(prefix)).get(dist.name, None)) is None: - return set() - elif MatchSpec(dist).match(prec): - return set(prec["files"]) - else: - return set() - - -@deprecated.argument("3.28.0", "24.1.0", "avoid_canonical_channel_name") def which_package( path: str | os.PathLike | Path, prefix: str | os.PathLike | Path, @@ -224,11 +208,6 @@ def test_installable(channel: str = "defaults") -> bool: return success -@deprecated("3.28.0", "24.1.0") -def _installed(prefix: str | os.PathLike | Path) -> dict[str, Dist]: - return {dist.name: dist for dist in linked_data(str(prefix))} - - def _underlined_text(text): return str(text) + "\n" + "-" * len(str(text)) + "\n\n" diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 906ce0b628..e02d09de23 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -20,7 +20,6 @@ from . 
import exceptions, utils, variants from .conda_interface import MatchSpec, envs_dirs, md5_file from .config import Config, get_or_merge_config -from .deprecations import deprecated from .features import feature_list from .license_family import ensure_valid_license_family from .utils import ( @@ -1457,7 +1456,6 @@ def check_field(key, section): check_field(key_or_dict, section) return True - @deprecated.argument("3.28.0", "24.1.0", "fail_ok") def name(self) -> str: name = self.get_value("package/name", "") if not name and self.final: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 70267d08f4..3ab78bc7fd 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -13,7 +13,6 @@ from conda.models.records import PrefixRecord from ..conda_interface import untracked -from ..deprecations import deprecated from ..utils import on_linux, on_mac from .macho import otool from .pyldd import codefile_class, inspect_linkages, machofile @@ -108,14 +107,6 @@ def _get_linkages( return linkages -@deprecated("3.28.0", "24.1.0") -@lru_cache(maxsize=None) -def get_package_files( - prec: PrefixRecord, prefix: str | os.PathLike | Path -) -> list[str]: - return prec["files"] - - @lru_cache(maxsize=None) def get_package_obj_files( prec: PrefixRecord, prefix: str | os.PathLike | Path diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 8898f45473..c44a03f864 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -13,7 +13,6 @@ from pathlib import Path from subprocess import PIPE, Popen -from ..deprecations import deprecated from ..utils import on_mac, on_win, rec_glob from .external import find_executable @@ -21,7 +20,6 @@ # TODO :: Remove all use of pyldd # Currently we verify the output of each against the other from .pyldd import DLLfile, EXEfile, elffile, machofile -from .pyldd import codefile_type as _codefile_type from .pyldd import inspect_linkages as inspect_linkages_pyldd try: @@ -33,11 +31,6 @@ have_lief = False -@deprecated("3.28.0", "24.1.0", addendum="Use `isinstance(value, str)` instead.") -def is_string(s): - return isinstance(s, str) - - # Some functions can operate on either file names # or an already loaded binary. 
Generally speaking # these are to be avoided, or if not avoided they @@ -102,32 +95,6 @@ def codefile_class( from .pyldd import codefile_class -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", -) -def codefile_type_liefldd(*args, **kwargs) -> str | None: - codefile = codefile_class(*args, **kwargs) - return codefile.__name__ if codefile else None - - -deprecated.constant( - "3.28.0", - "24.1.0", - "codefile_type_pyldd", - _codefile_type, - addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", -) -deprecated.constant( - "3.28.0", - "24.1.0", - "codefile_type", - _codefile_type, - addendum="Use `conda_build.os_utils.liefldd.codefile_class` instead.", -) - - def _trim_sysroot(sysroot): while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index 7f1eb81a8b..a3f0d609d0 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -11,7 +11,6 @@ import sys from pathlib import Path -from ..deprecations import deprecated from ..utils import ensure_list, get_logger, on_linux, on_mac, on_win logging.basicConfig(level=logging.INFO) @@ -364,21 +363,6 @@ def do_file(file, lc_operation, off_sz, arch, results, *args): results.append(do_macho(file, 64, LITTLE_ENDIAN, lc_operation, *args)) -@deprecated("3.28.0", "24.1.0") -def mach_o_change(path, arch, what, value): - """ - Replace a given name (what) in any LC_LOAD_DYLIB command found in - the given binary with a new name (value), provided it's shorter. - """ - - assert len(what) >= len(value) - - results = [] - with open(path, "r+b") as f: - do_file(f, replace_lc_load_dylib, offset_size(), arch, results, what, value) - return results - - def mach_o_find_dylibs(ofile, arch, regex=".*"): """ Finds the executable's view of where any dylibs live @@ -1063,28 +1047,6 @@ def _get_magic_bit(path: Path) -> bytes: return None -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", -) -def is_codefile(path: str | os.PathLike | Path, skip_symlinks: bool = True) -> bool: - return bool(codefile_class(path, skip_symlinks=skip_symlinks)) - - -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda_build.os_utils.pyldd.codefile_class` instead.", -) -def codefile_type( - path: str | os.PathLike | Path, - skip_symlinks: bool = True, -) -> str | None: - codefile = codefile_class(path, skip_symlinks=skip_symlinks) - return codefile.__name__ if codefile else None - - def _trim_sysroot(sysroot): if sysroot: while sysroot.endswith("/") or sysroot.endswith("\\"): @@ -1135,48 +1097,6 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): return cf.uniqueness_key(), orig_names, resolved_names -@deprecated("3.28.0", "24.1.0") -def inspect_rpaths( - filename, resolve_dirnames=True, use_os_varnames=True, sysroot="", arch="native" -): - if not os.path.exists(filename): - return [], [] - sysroot = _trim_sysroot(sysroot) - arch = _get_arch_if_native(arch) - with open(filename, "rb") as f: - # TODO :: Problems here: - # TODO :: 1. macOS can modify RPATH for children in each .so - # TODO :: 2. Linux can identify the program interpreter which can change the initial RPATHs - # TODO :: Should '/lib', '/usr/lib' not include (or be?!) `sysroot`(s) instead? 
- cf = codefile(f, arch, ["/lib", "/usr/lib"]) - if resolve_dirnames: - return [ - _get_resolved_location( - cf, - rpath, - os.path.dirname(filename), - os.path.dirname(filename), - sysroot, - )[0] - for rpath in cf.rpaths_nontransitive - ] - else: - if use_os_varnames: - return [cf.to_os_varnames(rpath) for rpath in cf.rpaths_nontransitive] - else: - return cf.rpaths_nontransitive - - -@deprecated("3.28.0", "24.1.0") -def get_runpaths(filename, arch="native"): - if not os.path.exists(filename): - return [] - arch = _get_arch_if_native(arch) - with open(filename, "rb") as f: - cf = codefile(f, arch, ["/lib", "/usr/lib"]) - return cf.get_runpaths() - - # TODO :: Consider returning a tree structure or a dict when recurse is True? def inspect_linkages( filename, resolve_filenames=True, recurse=True, sysroot="", arch="native" @@ -1253,18 +1173,6 @@ def otool(*args): return 1 -@deprecated("3.28.0", "24.1.0") -def otool_sys(*args): - import subprocess - - return subprocess.check_output("/usr/bin/otool", args).decode(encoding="ascii") - - -@deprecated("3.28.0", "24.1.0") -def ldd_sys(*args): - return [] - - def ldd(*args): parser = argparse.ArgumentParser(prog="ldd", add_help=False) parser.add_argument("-h", "--help", action="store_true") diff --git a/conda_build/plugin.py b/conda_build/plugin.py index 6ca5c34cc1..16ac40bbb1 100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -28,13 +28,6 @@ def develop(*args, **kwargs): execute(*args, **kwargs) -def index(*args, **kwargs): - # deprecated! use conda-index! - from .cli.main_index import execute - - execute(*args, **kwargs) - - def inspect(*args, **kwargs): from .cli.main_inspect import execute @@ -84,14 +77,6 @@ def conda_subcommands(): ), action=develop, ) - yield conda.plugins.CondaSubcommand( - name="index", - summary=( - "Update package index metadata files. Pending deprecation, " - "use https://github.com/conda/conda-index instead." 
- ), - action=index, - ) yield conda.plugins.CondaSubcommand( name="inspect", summary="Tools for inspecting conda packages.", diff --git a/conda_build/post.py b/conda_build/post.py index 558ae50bc9..5a05eda077 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -32,7 +32,7 @@ ) from pathlib import Path from subprocess import CalledProcessError, call, check_output -from typing import Iterable, Literal +from typing import Literal from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord @@ -44,7 +44,6 @@ md5_file, walk_prefix, ) -from .deprecations import deprecated from .exceptions import OverDependingError, OverLinkingError, RunPathError from .inspect_pkg import which_package from .metadata import MetaData @@ -64,7 +63,7 @@ elffile, machofile, ) -from .utils import linked_data_no_multichannels, on_mac, on_win, prefix_files +from .utils import on_mac, on_win, prefix_files filetypes_for_platform = { "win": (DLLfile, EXEfile), @@ -650,26 +649,6 @@ def assert_relative_osx(path, host_prefix, build_prefix): ) -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda_build.post.get_dsos` and `conda_build.post.get_run_exports` instead.", -) -def determine_package_nature( - prec: PrefixRecord, - prefix: str | os.PathLike | Path, - subdir, - bldpkgs_dir, - output_folder, - channel_urls, -) -> tuple[set[str], tuple[str, ...], bool]: - return ( - get_dsos(prec, prefix), - get_run_exports(prec, prefix), - prec.name.startswith("lib"), - ) - - def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: return { file @@ -713,10 +692,6 @@ def get_run_exports( return () -@deprecated.argument("3.28.0", "24.1.0", "subdir") -@deprecated.argument("3.28.0", "24.1.0", "bldpkgs_dirs") -@deprecated.argument("3.28.0", "24.1.0", "output_folder") -@deprecated.argument("3.28.0", "24.1.0", "channel_urls") def library_nature( prec: PrefixRecord, prefix: str | os.PathLike | Path ) -> Literal[ @@ -772,36 +747,6 @@ def library_nature( return "non-library" -@deprecated( - "3.28.0", - "24.1.0", - addendum="Query `conda.core.prefix_data.PrefixData` instead.", -) -def dists_from_names(names: Iterable[str], prefix: str | os.PathLike | Path): - names = utils.ensure_list(names) - return [prec for prec in linked_data_no_multichannels(prefix) if prec.name in names] - - -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `conda.models.records.PrefixRecord` instead.", -) -class FakeDist: - def __init__(self, name, version, build_number, build_str, channel, files): - self.name = name - self.quad = [name] - self.version = version - self.build_number = build_number - self.build_string = build_str - self.channel = channel - self.files = files - - def get(self, name): - if name == "files": - return self.files - - # This is really just a small, fixed sysroot and it is rooted at ''. `libcrypto.0.9.8.dylib` should not be in it IMHO. DEFAULT_MAC_WHITELIST = [ "/opt/X11/", @@ -1015,23 +960,6 @@ def _map_file_to_package( return prefix_owners, contains_dsos, contains_static_libs, all_lib_exports -@deprecated( - "3.28.0", "24.1.0", addendum="Use `conda.models.records.PrefixRecord` instead." 
-) -def _get_fake_pkg_dist(pkg_name, pkg_version, build_str, build_number, channel, files): - return ( - FakeDist( - pkg_name, - str(pkg_version), - build_number, - build_str, - channel, - files, - ), - f"{pkg_name}-{pkg_version}-{build_str}", - ) - - def _print_msg(errors, text, verbose): if text.startswith(" ERROR"): errors.append(text) diff --git a/conda_build/utils.py b/conda_build/utils.py index fc70e9660d..1c303a8b64 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -75,7 +75,6 @@ win_path_to_unix, ) from .conda_interface import rm_rf as _rm_rf -from .deprecations import deprecated from .exceptions import BuildLockError on_win = sys.platform == "win32" @@ -763,26 +762,6 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): return locks -@deprecated( - "3.28.0", - "24.1.0", - addendum="Use `os.path.relpath` or `pathlib.Path.relative_to` instead.", -) -def relative(f, d="lib"): - assert not f.startswith("/"), f - assert not d.startswith("/"), d - d = d.strip("/").split("/") - if d == ["."]: - d = [] - f = dirname(f).split("/") - if f == [""]: - f = [] - while d and f and d[0] == f[0]: - d.pop(0) - f.pop(0) - return "/".join((([".."] * len(f)) if f else ["."]) + d) - - # This is the lowest common denominator of the formats supported by our libarchive/python-libarchive-c # packages across all platforms decompressible_exts = ( @@ -2186,13 +2165,3 @@ def is_conda_pkg(pkg_path: str) -> bool: return path.is_file() and ( any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) ) - - -@deprecated("3.28.3", "24.1.0") -def samefile(path1: Path, path2: Path) -> bool: - try: - return path1.samefile(path2) - except (FileNotFoundError, PermissionError): - # FileNotFoundError: path doesn't exist - # PermissionError: don't have permissions to read path - return path1 == path2 diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 54c792b9c9..77a8034891 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -9,15 +9,15 @@ source: build: script: {{ PYTHON }} -m pip install . 
--no-deps --no-build-isolation -vv entry_points: - - conda-build = conda_build.cli.main_build:main - - conda-convert = conda_build.cli.main_convert:main - - conda-debug = conda_build.cli.main_debug:main - - conda-develop = conda_build.cli.main_develop:main + - conda-build = conda_build.cli.main_build:execute + - conda-convert = conda_build.cli.main_convert:execute + - conda-debug = conda_build.cli.main_debug:execute + - conda-develop = conda_build.cli.main_develop:execute - conda-index = conda_build.cli.main_index:main - - conda-inspect = conda_build.cli.main_inspect:main - - conda-metapackage = conda_build.cli.main_metapackage:main - - conda-render = conda_build.cli.main_render:main - - conda-skeleton = conda_build.cli.main_skeleton:main + - conda-inspect = conda_build.cli.main_inspect:execute + - conda-metapackage = conda_build.cli.main_metapackage:execute + - conda-render = conda_build.cli.main_render:execute + - conda-skeleton = conda_build.cli.main_skeleton:execute requirements: build: @@ -68,12 +68,11 @@ test: - test_bdist_conda_setup.py commands: - python -m pip check - # builtin subcommands + # subcommands - conda --help - conda build --help - conda convert --help - conda develop --help - - conda index --help - conda inspect --help - conda inspect linkages --help # [unix] - conda inspect objects --help # [osx] @@ -81,6 +80,17 @@ test: - conda render --help - conda skeleton --help - conda debug --help + # entrypoints + - conda-build --help + - conda-convert --help + - conda-develop --help + - conda-inspect --help + - conda-inspect linkages --help # [unix] + - conda-inspect objects --help # [osx] + - conda-metapackage --help + - conda-render --help + - conda-skeleton --help + - conda-debug --help # bdist_conda - python test_bdist_conda_setup.py bdist_conda --help diff --git a/tests/cli/test_main_debug.py b/tests/cli/test_main_debug.py index 22011bedf1..ae4f22441d 100644 --- a/tests/cli/test_main_debug.py +++ b/tests/cli/test_main_debug.py @@ -16,7 +16,7 @@ def test_main_debug_help_message(capsys: CaptureFixture, monkeypatch: MonkeyPatc help_blurb = debug.get_parser().format_help() with pytest.raises(SystemExit): - debug.main() + debug.execute() captured = capsys.readouterr() assert help_blurb in captured.out @@ -28,7 +28,7 @@ def test_main_debug_file_does_not_exist( monkeypatch.setattr(sys, "argv", ["conda-debug", "file-does-not-exist"]) with pytest.raises(SystemExit): - debug.main() + debug.execute() captured = capsys.readouterr() assert valid.CONDA_PKG_OR_RECIPE_ERROR_MESSAGE in captured.err @@ -45,7 +45,7 @@ def test_main_debug_happy_path( monkeypatch.setattr(sys, "argv", ["conda-debug", str(fake)]) with mock.patch("conda_build.api.debug") as mock_debug: - debug.main() + debug.execute() captured = capsys.readouterr() assert captured.err == "" diff --git a/tests/cli/test_main_index.py b/tests/cli/test_main_index.py deleted file mode 100644 index 75b1926f78..0000000000 --- a/tests/cli/test_main_index.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import os.path - -from conda_build.cli import main_index - - -def testing_index(testing_workdir): - args = ["."] - main_index.execute(args) - assert os.path.isfile("noarch/repodata.json") diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 1ac2ca06d2..c9bc2cd31e 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -27,6 +27,7 @@ from binstar_client.errors import NotFound from conda.common.compat import on_linux, on_mac, on_win from 
conda.exceptions import ClobberError, CondaMultiError +from conda_index.api import update_index from pytest import FixtureRequest, MonkeyPatch from pytest_mock import MockerFixture @@ -533,7 +534,7 @@ def test_skip_existing_url(testing_metadata, testing_workdir, capfd): copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0]))) # create the index so conda can find the file - api.update_index(output_dir) + update_index(output_dir) testing_metadata.config.skip_existing = True testing_metadata.config.channel_urls = [url_path(output_dir)] @@ -1467,7 +1468,7 @@ def test_run_constrained_stores_constrains_info(testing_config): @pytest.mark.sanity def test_no_locking(testing_config): recipe = os.path.join(metadata_dir, "source_git_jinja2") - api.update_index(os.path.join(testing_config.croot)) + update_index(os.path.join(testing_config.croot)) api.build(recipe, config=testing_config, locking=False) diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 502046e5ad..56685f66d1 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -2,8 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause # This file makes sure that our API has not changed. Doing so can not be accidental. Whenever it # happens, we should bump our major build number, because we may have broken someone. - -import inspect import sys from inspect import getfullargspec as getargspec @@ -183,40 +181,3 @@ def test_api_create_metapackage(): "config", ] assert argspec.defaults == ((), None, 0, (), None, None, None, None) - - -def test_api_update_index(): - # getfullargspec() isn't friends with functools.wraps - argspec = inspect.signature(api.update_index) - assert list(argspec.parameters) == [ - "dir_paths", - "config", - "force", - "check_md5", - "remove", - "channel_name", - "subdir", - "threads", - "patch_generator", - "verbose", - "progress", - "hotfix_source_repo", - "current_index_versions", - "kwargs", - ] - assert tuple(parameter.default for parameter in argspec.parameters.values()) == ( - inspect._empty, - None, - False, - False, - False, - None, - None, - None, - None, - False, - False, - None, - None, - inspect._empty, - ) diff --git a/tests/test_api_update_index.py b/tests/test_api_update_index.py deleted file mode 100644 index 6573d5533a..0000000000 --- a/tests/test_api_update_index.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from pathlib import Path - -from conda_build.api import update_index - - -def test_update_index(testing_workdir): - update_index(testing_workdir) - - for name in ("repodata.json", "repodata.json.bz2"): - assert Path(testing_workdir, "noarch", name).is_file() diff --git a/tests/test_conda_interface.py b/tests/test_conda_interface.py deleted file mode 100644 index 1c13d6faf9..0000000000 --- a/tests/test_conda_interface.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from conda_build import conda_interface as ci - - -def test_get_installed_version(): - versions = ci.get_installed_version(ci.root_dir, "conda") - assert versions.get("conda") - assert ci.VersionOrder(versions.get("conda")) diff --git a/tests/test_misc.py b/tests/test_misc.py index 7a8b56dced..bcdafcb196 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -1,13 +1,12 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -import os from pathlib import Path import pytest from conda_build._link import pyc_f -from 
conda_build.conda_interface import CrossPlatformStLink, EntityEncoder, PathType +from conda_build.conda_interface import EntityEncoder, PathType @pytest.mark.parametrize( @@ -40,17 +39,3 @@ def test_entity_encoder(tmp_path): json_file = json.loads(test_file.read_text()) assert json_file == {"a": "hardlink", "b": 1} - - -def test_crossplatform_st_link(tmp_path): - test_file = tmp_path / "test-file" - test_file_linked = tmp_path / "test-file-linked" - test_file_link = tmp_path / "test-file-link" - - test_file.touch() - test_file_link.touch() - os.link(test_file_link, test_file_linked) - - assert 1 == CrossPlatformStLink.st_nlink(test_file) - assert 2 == CrossPlatformStLink.st_nlink(test_file_link) - assert 2 == CrossPlatformStLink.st_nlink(test_file_linked) diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index d89b888758..3937036d14 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -89,7 +89,6 @@ def test_run_exports_in_subpackage(testing_metadata): p1 = testing_metadata.copy() p1.meta["outputs"] = [{"name": "has_run_exports", "run_exports": "bzip2 1.0"}] api.build(p1, config=testing_metadata.config)[0] - # api.update_index(os.path.dirname(output), config=testing_metadata.config) p2 = testing_metadata.copy() p2.meta["requirements"]["host"] = ["has_run_exports"] p2_final = finalize_metadata(p2) diff --git a/tests/test_utils.py b/tests/test_utils.py index 727859501a..d245e65796 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -79,97 +79,6 @@ def test_disallow_in_tree_merge(testing_workdir): utils.merge_tree(testing_workdir, os.path.join(testing_workdir, "subdir")) -def test_relative_default(): - for f, r in [ - ("bin/python", "../lib"), - ("lib/libhdf5.so", "."), - ("lib/python2.6/foobar.so", ".."), - ("lib/python2.6/lib-dynload/zlib.so", "../.."), - ("lib/python2.6/site-packages/pyodbc.so", "../.."), - ("lib/python2.6/site-packages/bsdiff4/core.so", "../../.."), - ("xyz", "./lib"), - ("bin/somedir/cmd", "../../lib"), - ]: - assert utils.relative(f) == r - - -def test_relative_lib(): - for f, r in [ - ("bin/python", "../lib"), - ("lib/libhdf5.so", "."), - ("lib/python2.6/foobar.so", ".."), - ("lib/python2.6/lib-dynload/zlib.so", "../.."), - ("lib/python2.6/site-packages/pyodbc.so", "../.."), - ("lib/python2.6/site-packages/bsdiff3/core.so", "../../.."), - ("xyz", "./lib"), - ("bin/somedir/cmd", "../../lib"), - ("bin/somedir/somedir2/cmd", "../../../lib"), - ]: - assert utils.relative(f, "lib") == r - - -def test_relative_subdir(): - for f, r in [ - ("lib/libhdf5.so", "./sub"), - ("lib/sub/libhdf5.so", "."), - ("bin/python", "../lib/sub"), - ("bin/somedir/cmd", "../../lib/sub"), - ]: - assert utils.relative(f, "lib/sub") == r - - -def test_relative_prefix(): - for f, r in [ - ("xyz", "."), - ("a/xyz", ".."), - ("a/b/xyz", "../.."), - ("a/b/c/xyz", "../../.."), - ("a/b/c/d/xyz", "../../../.."), - ]: - assert utils.relative(f, ".") == r - - -def test_relative_2(): - for f, r in [ - ("a/b/c/d/libhdf5.so", "../.."), - ("a/b/c/libhdf5.so", ".."), - ("a/b/libhdf5.so", "."), - ("a/libhdf5.so", "./b"), - ("x/x/libhdf5.so", "../../a/b"), - ("x/b/libhdf5.so", "../../a/b"), - ("x/libhdf5.so", "../a/b"), - ("libhdf5.so", "./a/b"), - ]: - assert utils.relative(f, "a/b") == r - - -def test_relative_3(): - for f, r in [ - ("a/b/c/d/libhdf5.so", ".."), - ("a/b/c/libhdf5.so", "."), - ("a/b/libhdf5.so", "./c"), - ("a/libhdf5.so", "./b/c"), - ("libhdf5.so", "./a/b/c"), - ("a/b/x/libhdf5.so", "../c"), - ("a/x/x/libhdf5.so", "../../b/c"), - 
("x/x/x/libhdf5.so", "../../../a/b/c"), - ("x/x/libhdf5.so", "../../a/b/c"), - ("x/libhdf5.so", "../a/b/c"), - ]: - assert utils.relative(f, "a/b/c") == r - - -def test_relative_4(): - for f, r in [ - ("a/b/c/d/libhdf5.so", "."), - ("a/b/c/x/libhdf5.so", "../d"), - ("a/b/x/x/libhdf5.so", "../../c/d"), - ("a/x/x/x/libhdf5.so", "../../../b/c/d"), - ("x/x/x/x/libhdf5.so", "../../../../a/b/c/d"), - ]: - assert utils.relative(f, "a/b/c/d") == r - - def test_expand_globs(testing_workdir): sub_dir = os.path.join(testing_workdir, "sub1") os.mkdir(sub_dir) From 3cba0b912aa6b860f7264af33140302d0c5231cb Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Sat, 27 Jan 2024 00:45:02 +0100 Subject: [PATCH 265/366] Dist removal (#5074) * Copy over Dist and related functions from conda * Copy over plan instructions from conda * Remove Dist * Inline one-off execute_/install_actions functions * Simplify _plan_from_actions * Collapse _plan_from_actions into execute_actions * Remove unused args for execute/install_actions * Use PackageRecord "prec" naming when unambiguous * Move install actions to .environ * Mark functions from conda.plan as non-public These will be changed/superseded/removed in a future version and thus should not be used elsewhere. --------- Signed-off-by: Marcel Bargull Co-authored-by: Ken Odegard Co-authored-by: Jannis Leidel --- CHANGELOG.md | 12 ++ conda_build/build.py | 2 +- conda_build/conda_interface.py | 31 +++-- conda_build/environ.py | 203 +++++++++++++++++++++++++++++++-- conda_build/index.py | 4 +- conda_build/render.py | 101 ++++++++-------- conda_build/utils.py | 13 ++- tests/test_api_build.py | 12 -- 8 files changed, 291 insertions(+), 87 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1937ae4c4..5777412269 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,15 @@ * Mark `conda_build.inspect_pkg.check_install(package)` as pending deprecation in favor of `conda_build.inspect_pkg.check_install(subdir)`. (#5033) * Mark `conda_build.inspect_pkg.check_install(prepend)` as pending deprecation. (#5033) * Mark `conda_build.inspect_pkg.check_install(minimal_hint)` as pending deprecation. (#5033) +* Mark `conda_build.conda_interface.Dist` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.display_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.execute_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.execute_plan` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.get_index` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.install_actions` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.linked` as pending deprecation. (#5074) +* Mark `conda_build.conda_interface.linked_data` as pending deprecation. (#5074) +* Mark `conda_build.utils.linked_data_no_multichannels` as pending deprecation. (#5074) * Remove `conda_build.api.update_index`. (#5151) * Remove `conda_build.cli.main_build.main`. (#5151) * Remove `conda_build.cli.main_convert.main`. (#5151) @@ -77,6 +86,9 @@ ### Other * Remove unused Allure test report collection. (#5113) +* Remove dependency on `conda.plan`. (#5074) +* Remove almost all dependency on `conda.models.dist`. (#5074) +* Replace usage of legacy `conda.models.dist.Dist` with `conda.models.records.PackageRecord`. 
(#5074) ### Contributors diff --git a/conda_build/build.py b/conda_build/build.py index fe0b5fe5a4..45f64995f2 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3841,7 +3841,7 @@ def build_tree( meta, actions, "host", - package_subset=dep, + package_subset=[dep], require_files=True, ) # test that package, using the local channel so that our new diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 929384e459..4fa9fb3777 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -33,6 +33,7 @@ InstalledPackages, MatchSpec, NoPackagesFound, + PackageRecord, PathType, Resolve, StringIO, @@ -43,23 +44,15 @@ _toposort, add_parser_channels, add_parser_prefix, - display_actions, download, - execute_actions, - execute_plan, - get_index, handle_proxy_407, hashsum_file, human_bytes, input, - install_actions, lchmod, - linked, - linked_data, md5_file, memoized, normalized_version, - package_cache, prefix_placeholder, rm_rf, spec_from_line, @@ -72,8 +65,28 @@ walk_prefix, win_path_to_unix, ) +from conda.exports import display_actions as _display_actions +from conda.exports import execute_actions as _execute_actions +from conda.exports import execute_plan as _execute_plan +from conda.exports import get_index as _get_index +from conda.exports import install_actions as _install_actions +from conda.exports import linked as _linked +from conda.exports import linked_data as _linked_data +from conda.exports import package_cache as _package_cache from conda.models.channel import get_conda_build_local_url # noqa: F401 -from conda.models.dist import Dist # noqa: F401 +from conda.models.dist import Dist as _Dist + +from .deprecations import deprecated + +deprecated.constant("24.1.0", "24.3.0", "Dist", _Dist) +deprecated.constant("24.1.0", "24.3.0", "display_actions", _display_actions) +deprecated.constant("24.1.0", "24.3.0", "execute_actions", _execute_actions) +deprecated.constant("24.1.0", "24.3.0", "execute_plan", _execute_plan) +deprecated.constant("24.1.0", "24.3.0", "get_index", _get_index) +deprecated.constant("24.1.0", "24.3.0", "install_actions", _install_actions) +deprecated.constant("24.1.0", "24.3.0", "linked", _linked) +deprecated.constant("24.1.0", "24.3.0", "linked_data", _linked_data) +deprecated.constant("24.1.0", "24.3.0", "package_cache", _package_cache) # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload diff --git a/conda_build/environ.py b/conda_build/environ.py index 52f5e835ed..c363588e3f 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -10,24 +10,35 @@ import subprocess import sys import warnings +from collections import defaultdict from functools import lru_cache from glob import glob +from logging import getLogger from os.path import join, normpath +from conda.base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL +from conda.common.io import env_vars +from conda.core.index import LAST_CHANNEL_URLS +from conda.core.link import PrefixSetup, UnlinkLinkTransaction +from conda.core.prefix_data import PrefixData +from conda.models.channel import prioritize_channels + from . 
import utils from .conda_interface import ( + Channel, CondaError, LinkError, LockError, + MatchSpec, NoPackagesFoundError, + PackageRecord, PaddingError, + ProgressiveFetchExtract, TemporaryDirectory, UnsatisfiableError, + context, create_default_packages, - display_actions, - execute_actions, get_version_from_git_tag, - install_actions, pkgs_dirs, reset_context, root_dir, @@ -42,10 +53,16 @@ env_var, on_mac, on_win, + package_record_to_requirement, prepend_bin_path, ) from .variants import get_default_variant +log = getLogger(__name__) + +PREFIX_ACTION = "PREFIX" +LINK_ACTION = "LINK" + # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") @@ -890,8 +907,8 @@ def get_install_actions( disable_pip, ) in cached_actions and last_index_ts >= index_ts: actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() - if "PREFIX" in actions: - actions["PREFIX"] = prefix + if PREFIX_ACTION in actions: + actions[PREFIX_ACTION] = prefix elif specs: # this is hiding output like: # Fetching package metadata ........... @@ -899,7 +916,7 @@ def get_install_actions( with utils.LoggingContext(conda_log_level): with capture(): try: - actions = install_actions(prefix, index, specs, force=True) + actions = _install_actions(prefix, index, specs) except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) except ( @@ -973,8 +990,8 @@ def get_install_actions( if not any( re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs ): - actions["LINK"] = [ - spec for spec in actions["LINK"] if spec.name != pkg + actions[LINK_ACTION] = [ + prec for prec in actions[LINK_ACTION] if prec.name != pkg ] utils.trim_empty_keys(actions) cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() @@ -1051,13 +1068,13 @@ def create_env( timeout=config.timeout, ) utils.trim_empty_keys(actions) - display_actions(actions, index) + _display_actions(actions) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) with env_var("CONDA_QUIET", not config.verbose, reset_context): with env_var("CONDA_JSON", not config.verbose, reset_context): - execute_actions(actions, index) + _execute_actions(actions) except ( SystemExit, PaddingError, @@ -1096,7 +1113,7 @@ def create_env( # Set this here and use to create environ # Setting this here is important because we use it below (symlink) prefix = config.host_prefix if host else config.build_prefix - actions["PREFIX"] = prefix + actions[PREFIX_ACTION] = prefix create_env( prefix, @@ -1234,6 +1251,168 @@ def get_pinned_deps(m, section): channel_urls=tuple(m.config.channel_urls), ) runtime_deps = [ - " ".join(link.dist_name.rsplit("-", 2)) for link in actions.get("LINK", []) + package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) ] return runtime_deps + + +# NOTE: The function has to retain the "install_actions" name for now since +# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build +# checks for this name in the call stack explicitly. +def install_actions(prefix, index, specs): + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471 + # but reduced to only the functionality actually used within conda-build. 
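+    # The solver-side check lives in conda-libmamba-solver and presumably amounts
+    # to something along the lines of
+    #     any(frame.function == "install_actions" for frame in inspect.stack())
+    # inside LibMambaSolver._called_from_conda_build, so the function's name (not
+    # just its behavior) is part of the de facto interface until that check changes.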
+ + with env_vars( + { + "CONDA_ALLOW_NON_CHANNEL_URLS": "true", + "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false", + }, + callback=reset_context, + ): + # a hack since in conda-build we don't track channel_priority_map + if LAST_CHANNEL_URLS: + channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) + # tuple(dict.fromkeys(...)) removes duplicates while preserving input order. + channels = tuple( + dict.fromkeys(Channel(url) for url in channel_priority_map) + ) + subdirs = ( + tuple( + dict.fromkeys( + subdir for subdir in (c.subdir for c in channels) if subdir + ) + ) + or context.subdirs + ) + else: + channels = subdirs = None + + specs = tuple(MatchSpec(spec) for spec in specs) + + PrefixData._cache_.clear() + + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs) + if index: + # Solver can modify the index (e.g., Solver._prepare adds virtual + # package) => Copy index (just outer container, not deep copy) + # to conserve it. + solver._index = index.copy() + txn = solver.solve_for_transaction(prune=False, ignore_pinned=False) + prefix_setup = txn.prefix_setups[prefix] + actions = { + PREFIX_ACTION: prefix, + LINK_ACTION: [prec for prec in prefix_setup.link_precs], + } + return actions + + +_install_actions = install_actions +del install_actions + + +def _execute_actions(actions): + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 + # but reduced to only the functionality actually used within conda-build. + + assert PREFIX_ACTION in actions and actions[PREFIX_ACTION] + prefix = actions[PREFIX_ACTION] + + if LINK_ACTION not in actions: + log.debug(f"action {LINK_ACTION} not in actions") + return + + link_precs = actions[LINK_ACTION] + if not link_precs: + log.debug(f"action {LINK_ACTION} has None value") + return + + # Always link menuinst first/last on windows in case a subsequent + # package tries to import it to create/remove a shortcut + link_precs = [p for p in link_precs if p.name == "menuinst"] + [ + p for p in link_precs if p.name != "menuinst" + ] + + progressive_fetch_extract = ProgressiveFetchExtract(link_precs) + progressive_fetch_extract.prepare() + + stp = PrefixSetup(prefix, (), link_precs, (), [], ()) + unlink_link_transaction = UnlinkLinkTransaction(stp) + + log.debug(" %s(%r)", "PROGRESSIVEFETCHEXTRACT", progressive_fetch_extract) + progressive_fetch_extract.execute() + log.debug(" %s(%r)", "UNLINKLINKTRANSACTION", unlink_link_transaction) + unlink_link_transaction.execute() + + +def _display_actions(actions): + # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 + # but reduced to only the functionality actually used within conda-build. 
+ + prefix = actions.get(PREFIX_ACTION) + builder = ["", "## Package Plan ##\n"] + if prefix: + builder.append(" environment location: %s" % prefix) + builder.append("") + print("\n".join(builder)) + + show_channel_urls = context.show_channel_urls + + def channel_str(rec): + if rec.get("schannel"): + return rec["schannel"] + if rec.get("url"): + return Channel(rec["url"]).canonical_name + if rec.get("channel"): + return Channel(rec["channel"]).canonical_name + return UNKNOWN_CHANNEL + + def channel_filt(s): + if show_channel_urls is False: + return "" + if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: + return "" + return s + + packages = defaultdict(lambda: "") + features = defaultdict(lambda: "") + channels = defaultdict(lambda: "") + + for prec in actions.get(LINK_ACTION, []): + assert isinstance(prec, PackageRecord) + pkg = prec["name"] + channels[pkg] = channel_filt(channel_str(prec)) + packages[pkg] = prec["version"] + "-" + prec["build"] + features[pkg] = ",".join(prec.get("features") or ()) + + fmt = {} + if packages: + maxpkg = max(len(p) for p in packages) + 1 + maxver = max(len(p) for p in packages.values()) + maxfeatures = max(len(p) for p in features.values()) + maxchannels = max(len(p) for p in channels.values()) + for pkg in packages: + # That's right. I'm using old-style string formatting to generate a + # string with new-style string formatting. + fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" + if maxchannels: + fmt[pkg] += " {channel:<%s}" % maxchannels + if features[pkg]: + fmt[pkg] += " [{features:<%s}]" % maxfeatures + + lead = " " * 4 + + def format(s, pkg): + return lead + s.format( + pkg=pkg + ":", + vers=packages[pkg], + channel=channels[pkg], + features=features[pkg], + ) + + if packages: + print("\nThe following NEW packages will be INSTALLED:\n") + for pkg in sorted(packages): + print(format(fmt[pkg], pkg)) + print() diff --git a/conda_build/index.py b/conda_build/index.py index 9a0004e771..aebc28fe21 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -37,6 +37,7 @@ from conda.common.compat import ensure_binary # BAD BAD BAD - conda internals +from conda.core.index import get_index from conda.core.subdir_data import SubdirData from conda.models.channel import Channel from conda_index.index import update_index as _update_index @@ -57,7 +58,6 @@ TemporaryDirectory, VersionOrder, context, - get_index, human_bytes, url_path, ) @@ -226,7 +226,7 @@ def get_build_index( platform=subdir, ) - expanded_channels = {rec.channel for rec in cached_index.values()} + expanded_channels = {rec.channel for rec in cached_index} superchannel = {} # we need channeldata.json too, as it is a more reliable source of run_exports data diff --git a/conda_build/render.py b/conda_build/render.py index c97f3bbe9f..c75838a65b 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -28,17 +28,22 @@ from . 
import environ, exceptions, source, utils from .conda_interface import ( + PackageRecord, ProgressiveFetchExtract, TemporaryDirectory, UnsatisfiableError, - execute_actions, pkgs_dirs, specs_from_url, ) +from .environ import LINK_ACTION from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output -from .utils import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + package_record_to_requirement, +) from .variants import ( filter_by_key_value, get_package_variants, @@ -86,11 +91,8 @@ def bldpkg_path(m): def actions_to_pins(actions): - if "LINK" in actions: - return [ - " ".join(spec.dist_name.split()[0].rsplit("-", 2)) - for spec in actions["LINK"] - ] + if LINK_ACTION in actions: + return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]] return [] @@ -340,37 +342,40 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files # this should be just downloading packages. We don't need to extract them - - download_actions = { - k: v for k, v in actions.items() if k in ("FETCH", "EXTRACT", "PREFIX") - } - if "FETCH" in actions or "EXTRACT" in actions: - # this is to force the download - execute_actions(download_actions, index, verbose=m.config.debug) + # NOTE: The following commented execute_actions is defunct + # (FETCH/EXTRACT were replaced by PROGRESSIVEFETCHEXTRACT). + # + # download_actions = { + # k: v for k, v in actions.items() if k in (FETCH, EXTRACT, PREFIX) + # } + # if FETCH in actions or EXTRACT in actions: + # # this is to force the download + # execute_actions(download_actions, index, verbose=m.config.debug) pkg_files = {} - packages = actions.get("LINK", []) - package_subset = utils.ensure_list(package_subset) + precs = actions.get(LINK_ACTION, []) + if isinstance(package_subset, PackageRecord): + package_subset = [package_subset] + else: + package_subset = utils.ensure_list(package_subset) selected_packages = set() if package_subset: for pkg in package_subset: - if hasattr(pkg, "name"): - if pkg in packages: - selected_packages.add(pkg) + if isinstance(pkg, PackageRecord): + prec = pkg + if prec in precs: + selected_packages.add(prec) else: pkg_name = pkg.split()[0] - for link_pkg in packages: - if pkg_name == link_pkg.name: - selected_packages.add(link_pkg) + for link_prec in precs: + if pkg_name == link_prec.name: + selected_packages.add(link_prec) break - packages = selected_packages + precs = selected_packages - for pkg in packages: - if hasattr(pkg, "dist_name"): - pkg_dist = pkg.dist_name - else: - pkg = strip_channel(pkg) - pkg_dist = pkg.split(" ")[0] + for prec in precs: + pkg_dist = "-".join((prec.name, prec.version, prec.build)) pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs( pkg_dist, m, files_only=require_files ) @@ -379,22 +384,21 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files # TODO: this is a vile hack reaching into conda's internals. Replace with # proper conda API when available. 
if not pkg_loc: - try: - pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0] - # the conda 4.4 API uses a single `link_prefs` kwarg - # whereas conda 4.3 used `index` and `link_dists` kwargs - pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],)) - except TypeError: - # TypeError: __init__() got an unexpected keyword argument 'link_prefs' - pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index) + link_prec = [ + rec + for rec in index + if (rec.name, rec.version, rec.build) + == (prec.name, prec.version, prec.build) + ][0] + pfe = ProgressiveFetchExtract(link_prefs=(link_prec,)) with utils.LoggingContext(): pfe.execute() for pkg_dir in pkgs_dirs: - _loc = join(pkg_dir, index.get(pkg, pkg).fn) + _loc = join(pkg_dir, prec.fn) if isfile(_loc): pkg_loc = _loc break - pkg_files[pkg] = pkg_loc, pkg_dist + pkg_files[prec] = pkg_loc, pkg_dist return pkg_files @@ -404,27 +408,30 @@ def get_upstream_pins(m: MetaData, actions, env): downstream dependency specs. Return these additional specs.""" env_specs = m.get_value(f"requirements/{env}", []) explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] - linked_packages = actions.get("LINK", []) - linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs] + linked_packages = actions.get(LINK_ACTION, []) + linked_packages = [prec for prec in linked_packages if prec.name in explicit_specs] ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) ignore_list = utils.ensure_list(m.get_value("build/ignore_run_exports")) additional_specs = {} - for pkg in linked_packages: - if any(pkg.name in req.split(" ")[0] for req in ignore_pkgs_list): + for prec in linked_packages: + if any(prec.name in req.split(" ")[0] for req in ignore_pkgs_list): continue run_exports = None if m.config.use_channeldata: - channeldata = utils.download_channeldata(pkg.channel) + channeldata = utils.download_channeldata(prec.channel) # only use channeldata if requested, channeldata exists and contains # a packages key, otherwise use run_exports from the packages themselves if "packages" in channeldata: - pkg_data = channeldata["packages"].get(pkg.name, {}) - run_exports = pkg_data.get("run_exports", {}).get(pkg.version, {}) + pkg_data = channeldata["packages"].get(prec.name, {}) + run_exports = pkg_data.get("run_exports", {}).get(prec.version, {}) if run_exports is None: loc, dist = execute_download_actions( - m, actions, env=env, package_subset=pkg - )[pkg] + m, + actions, + env=env, + package_subset=[prec], + )[prec] run_exports = _read_specs_from_package(loc, dist) specs = _filter_run_exports(run_exports, ignore_list) if specs: diff --git a/conda_build/utils.py b/conda_build/utils.py index 1c303a8b64..bc1c20634c 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -60,6 +60,7 @@ from .conda_interface import ( CondaHTTPError, MatchSpec, + PackageRecord, StringIO, TemporaryDirectory, VersionOrder, @@ -75,6 +76,7 @@ win_path_to_unix, ) from .conda_interface import rm_rf as _rm_rf +from .deprecations import deprecated from .exceptions import BuildLockError on_win = sys.platform == "win32" @@ -1956,13 +1958,11 @@ def match_peer_job(target_matchspec, other_m, this_m=None): for any keys that are shared between target_variant and m.config.variant""" name, version, build = other_m.name(), other_m.version(), "" matchspec_matches = target_matchspec.match( - Dist( + PackageRecord( name=name, - dist_name=f"{name}-{version}-{build}", version=version, - build_string=build, + build=build, 
build_number=other_m.build_number(), - channel=None, ) ) @@ -2110,6 +2110,7 @@ def download_channeldata(channel_url): return data +@deprecated("24.1.0", "24.3.0") def linked_data_no_multichannels( prefix: str | os.PathLike | Path, ) -> dict[Dist, PrefixRecord]: @@ -2165,3 +2166,7 @@ def is_conda_pkg(pkg_path: str) -> bool: return path.is_file() and ( any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) ) + + +def package_record_to_requirement(prec: PackageRecord) -> str: + return f"{prec.name} {prec.version} {prec.build}" diff --git a/tests/test_api_build.py b/tests/test_api_build.py index c9bc2cd31e..2e0f2b0224 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -33,10 +33,8 @@ from conda_build import __version__, api, exceptions from conda_build.conda_interface import ( - CONDA_VERSION, CondaError, LinkError, - VersionOrder, context, reset_context, url_path, @@ -1949,16 +1947,6 @@ def test_add_pip_as_python_dependency_from_condarc_file( Test whether settings from .condarc files are heeded. ref: https://github.com/conda/conda-libmamba-solver/issues/393 """ - if VersionOrder(CONDA_VERSION) <= VersionOrder("23.10.0"): - if not add_pip_as_python_dependency and context.solver == "libmamba": - pytest.xfail( - "conda.plan.install_actions from conda<=23.10.0 ignores .condarc files." - ) - from conda.base.context import context_stack - - # ContextStack's pop/replace methods don't call self.apply. - context_stack.apply() - # TODO: SubdirData._cache_ clearing might not be needed for future conda versions. # See https://github.com/conda/conda/pull/13365 for proposed changes. from conda.core.subdir_data import SubdirData From 0435053f61f8452b4d3e7709e04c68821a96dd49 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Tue, 30 Jan 2024 17:40:30 +0100 Subject: [PATCH 266/366] Fix entrypoint definition in pyproject.toml (#5158) * Fix entrypoint definition in pyproject.toml to match recipe/meta.yaml * Remove conda-index entrypoint --------- Signed-off-by: Marcel Bargull Co-authored-by: Ken Odegard --- pyproject.toml | 17 ++++++++--------- recipe/meta.yaml | 1 - 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5fc2f3eac5..72f657031e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,15 +55,14 @@ repository = "https://github.com/conda/conda-build" changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" [project.scripts] -conda-build = "conda_build.cli.main_build:main" -conda-convert = "conda_build.cli.main_convert:main" -conda-develop = "conda_build.cli.main_develop:main" -conda-index = "conda_build.cli.main_index:main" -conda-inspect = "conda_build.cli.main_inspect:main" -conda-metapackage = "conda_build.cli.main_metapackage:main" -conda-render = "conda_build.cli.main_render:main" -conda-skeleton = "conda_build.cli.main_skeleton:main" -conda-debug = "conda_build.cli.main_debug:main" +conda-build = "conda_build.cli.main_build:execute" +conda-convert = "conda_build.cli.main_convert:execute" +conda-develop = "conda_build.cli.main_develop:execute" +conda-inspect = "conda_build.cli.main_inspect:execute" +conda-metapackage = "conda_build.cli.main_metapackage:execute" +conda-render = "conda_build.cli.main_render:execute" +conda-skeleton = "conda_build.cli.main_skeleton:execute" +conda-debug = "conda_build.cli.main_debug:execute" [project.entry-points."distutils.commands"] bdist_conda = "conda_build.bdist_conda:bdist_conda" diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 77a8034891..fb5088e15e 100644 --- 
a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -13,7 +13,6 @@ build: - conda-convert = conda_build.cli.main_convert:execute - conda-debug = conda_build.cli.main_debug:execute - conda-develop = conda_build.cli.main_develop:execute - - conda-index = conda_build.cli.main_index:main - conda-inspect = conda_build.cli.main_inspect:execute - conda-metapackage = conda_build.cli.main_metapackage:execute - conda-render = conda_build.cli.main_render:execute From 7dbf29e9df65b710e7ab6cebd8726d2d236a7cca Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 31 Jan 2024 23:23:57 +0100 Subject: [PATCH 267/366] Prepare deprecation of full index and action usage (#5152) Co-authored-by: Ken Odegard --- CHANGELOG.md | 35 ++++++++++++++++++ conda_build/build.py | 30 ++++++++-------- conda_build/environ.py | 80 ++++++++++++++++++++++++++++++++---------- conda_build/index.py | 79 +++++++++++++++++++++++++++++------------ conda_build/render.py | 24 ++++++++----- 5 files changed, 184 insertions(+), 64 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5777412269..9377577710 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,41 @@ * Mark `conda_build.conda_interface.linked` as pending deprecation. (#5074) * Mark `conda_build.conda_interface.linked_data` as pending deprecation. (#5074) * Mark `conda_build.utils.linked_data_no_multichannels` as pending deprecation. (#5074) +* Mark `conda_build.environ.get_install_actions` as pending deprecation in favor of `conda_build.environ.get_package_records`. (#5152) +* Mark `conda_build.environ.create_env(specs_or_actions)` as pending deprecation in favor of `conda_build.environ.create_env(specs_or_precs)`. (#5152) +* Mark `conda_build.index.channel_data` as pending deprecation. (#5152) +* Mark `conda_build.index._determine_namespace` as pending deprecation. (#5152) +* Mark `conda_build.index._make_seconds` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_VERSION` as pending deprecation. (#5152) +* Mark `conda_build.index.CHANNELDATA_VERSION` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_JSON_FN` as pending deprecation. (#5152) +* Mark `conda_build.index.REPODATA_FROM_PKGS_JSON_FN` as pending deprecation. (#5152) +* Mark `conda_build.index.CHANNELDATA_FIELDS` as pending deprecation. (#5152) +* Mark `conda_build.index._clear_newline_chars` as pending deprecation. (#5152) +* Mark `conda_build.index._apply_instructions` as pending deprecation. (#5152) +* Mark `conda_build.index._get_jinja2_environment` as pending deprecation. (#5152) +* Mark `conda_build.index._maybe_write` as pending deprecation. (#5152) +* Mark `conda_build.index._maybe_build_string` as pending deprecation. (#5152) +* Mark `conda_build.index._warn_on_missing_dependencies` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_post_install_details` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_recipe` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_run_exports` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_icon` as pending deprecation. (#5152) +* Mark `conda_build.index._make_subdir_index_html` as pending deprecation. (#5152) +* Mark `conda_build.index._make_channeldata_index_html` as pending deprecation. (#5152) +* Mark `conda_build.index._get_source_repo_git_info` as pending deprecation. (#5152) +* Mark `conda_build.index._cache_info_file` as pending deprecation. (#5152) +* Mark `conda_build.index._alternate_file_extension` as pending deprecation. 
(#5152) +* Mark `conda_build.index._get_resolve_object` as pending deprecation. (#5152) +* Mark `conda_build.index._get_newest_versions` as pending deprecation. (#5152) +* Mark `conda_build.index._add_missing_deps` as pending deprecation. (#5152) +* Mark `conda_build.index._add_prev_ver_for_features` as pending deprecation. (#5152) +* Mark `conda_build.index._shard_newest_packages` as pending deprecation. (#5152) +* Mark `conda_build.index._build_current_repodata` as pending deprecation. (#5152) +* Mark `conda_build.index.ChannelIndex` as pending deprecation. (#5152) +* Mark `conda_build.render.actions_to_pins` as pending deprecation. (#5152) +* Mark `conda_build.render.execute_download_actions(actions)` as pending deprecation in favor of `conda_build.render.execute_download_actions(precs)`. (#5152) +* Mark `conda_build.render.get_upstream_pins(actions)` as pending deprecation in favor of `conda_build.render.get_upstream_pins(precs)`. (#5152) * Remove `conda_build.api.update_index`. (#5151) * Remove `conda_build.cli.main_build.main`. (#5151) * Remove `conda_build.cli.main_convert.main`. (#5151) diff --git a/conda_build/build.py b/conda_build/build.py index 45f64995f2..28ffc04a70 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -2308,7 +2308,7 @@ def create_build_envs(m: MetaData, notest): m.config._merge_build_host = m.build_is_host if m.is_cross and not m.build_is_host: - host_actions = environ.get_install_actions( + host_precs = environ.get_package_records( m.config.host_prefix, tuple(host_ms_deps), "host", @@ -2325,7 +2325,7 @@ def create_build_envs(m: MetaData, notest): ) environ.create_env( m.config.host_prefix, - host_actions, + host_precs, env="host", config=m.config, subdir=m.config.host_subdir, @@ -2334,7 +2334,7 @@ def create_build_envs(m: MetaData, notest): ) if m.build_is_host: build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions( + build_precs = environ.get_package_records( m.config.build_prefix, tuple(build_ms_deps), "build", @@ -2360,7 +2360,7 @@ def create_build_envs(m: MetaData, notest): *utils.ensure_list(m.get_value("requirements/run", [])), ] # make sure test deps are available before taking time to create build env - environ.get_install_actions( + environ.get_package_records( m.config.test_prefix, tuple(test_run_ms_deps), "test", @@ -2397,7 +2397,7 @@ def create_build_envs(m: MetaData, notest): ): environ.create_env( m.config.build_prefix, - build_actions, + build_precs, env="build", config=m.config, subdir=m.config.build_subdir, @@ -2435,8 +2435,8 @@ def build( return default_return log = utils.get_logger(__name__) - host_actions = [] - build_actions = [] + host_precs = [] + build_precs = [] output_metas = [] with utils.path_prepended(m.config.build_prefix): @@ -2779,7 +2779,7 @@ def build( host_ms_deps = m.ms_depends("host") sub_build_ms_deps = m.ms_depends("build") if m.is_cross and not m.build_is_host: - host_actions = environ.get_install_actions( + host_precs = environ.get_package_records( m.config.host_prefix, tuple(host_ms_deps), "host", @@ -2796,7 +2796,7 @@ def build( ) environ.create_env( m.config.host_prefix, - host_actions, + host_precs, env="host", config=m.config, subdir=subdir, @@ -2806,7 +2806,7 @@ def build( else: # When not cross-compiling, the build deps aggregate 'build' and 'host'. 
sub_build_ms_deps.extend(host_ms_deps) - build_actions = environ.get_install_actions( + build_precs = environ.get_package_records( m.config.build_prefix, tuple(sub_build_ms_deps), "build", @@ -2823,7 +2823,7 @@ def build( ) environ.create_env( m.config.build_prefix, - build_actions, + build_precs, env="build", config=m.config, subdir=m.config.build_subdir, @@ -3481,7 +3481,7 @@ def test( utils.rm_rf(metadata.config.test_prefix) try: - actions = environ.get_install_actions( + precs = environ.get_package_records( metadata.config.test_prefix, tuple(specs), "host", @@ -3523,7 +3523,7 @@ def test( with env_var("CONDA_PATH_CONFLICT", conflict_verbosity, reset_context): environ.create_env( metadata.config.test_prefix, - actions, + precs, config=metadata.config, env="host", subdir=subdir, @@ -3819,7 +3819,7 @@ def build_tree( with TemporaryDirectory( prefix="_", suffix=r_string ) as tmpdir: - actions = environ.get_install_actions( + precs = environ.get_package_records( tmpdir, specs, env="run", @@ -3839,7 +3839,7 @@ def build_tree( # make sure to download that package to the local cache if not there local_file = execute_download_actions( meta, - actions, + precs, "host", package_subset=[dep], require_files=True, diff --git a/conda_build/environ.py b/conda_build/environ.py index c363588e3f..3026f1bf60 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -43,6 +43,7 @@ reset_context, root_dir, ) +from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index @@ -849,7 +850,7 @@ def package_specs(self): last_index_ts = 0 -def get_install_actions( +def get_package_records( prefix, specs, env, @@ -996,12 +997,49 @@ def get_install_actions( utils.trim_empty_keys(actions) cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() last_index_ts = index_ts - return actions + return actions.get(LINK_ACTION, []) +@deprecated("24.1.0", "24.3.0", addendum="Use `get_package_records` instead.") +def get_install_actions( + prefix, + specs, + env, + retries=0, + subdir=None, + verbose=True, + debug=False, + locking=True, + bldpkgs_dirs=None, + timeout=900, + disable_pip=False, + max_env_retry=3, + output_folder=None, + channel_urls=None, +): + precs = get_package_records( + prefix=prefix, + specs=specs, + env=env, + retries=retries, + subdir=subdir, + verbose=verbose, + debug=debug, + locking=locking, + bldpkgs_dirs=bldpkgs_dirs, + timeout=timeout, + disable_pip=disable_pip, + max_env_retry=max_env_retry, + output_folder=output_folder, + channel_urls=channel_urls, + ) + return {PREFIX_ACTION: prefix, LINK_ACTION: precs} + + +@deprecated.argument("24.1.0", "24.3.0", "specs_or_actions", rename="specs_or_precs") def create_env( prefix, - specs_or_actions, + specs_or_precs, env, config, subdir, @@ -1029,17 +1067,20 @@ def create_env( # if os.path.isdir(prefix): # utils.rm_rf(prefix) - if specs_or_actions: # Don't waste time if there is nothing to do + if specs_or_precs: # Don't waste time if there is nothing to do log.debug("Creating environment in %s", prefix) - log.debug(str(specs_or_actions)) + log.debug(str(specs_or_precs)) if not locks: locks = utils.get_conda_operation_locks(config) try: with utils.try_acquire_locks(locks, timeout=config.timeout): - # input is a list - it's specs in MatchSpec format - if not hasattr(specs_or_actions, "keys"): - specs = list(set(specs_or_actions)) + # input is a list of specs in MatchSpec format + if not ( + hasattr(specs_or_precs, 
"keys") + or isinstance(specs_or_precs[0], PackageRecord) + ): + specs = list(set(specs_or_precs)) actions = get_install_actions( prefix, tuple(specs), @@ -1056,7 +1097,10 @@ def create_env( channel_urls=tuple(config.channel_urls), ) else: - actions = specs_or_actions + if not hasattr(specs_or_precs, "keys"): + actions = {LINK_ACTION: specs_or_precs} + else: + actions = specs_or_precs index, _, _ = get_build_index( subdir=subdir, bldpkgs_dir=config.bldpkgs_dir, @@ -1068,13 +1112,13 @@ def create_env( timeout=config.timeout, ) utils.trim_empty_keys(actions) - _display_actions(actions) + _display_actions(prefix, actions) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) with env_var("CONDA_QUIET", not config.verbose, reset_context): with env_var("CONDA_JSON", not config.verbose, reset_context): - _execute_actions(actions) + _execute_actions(prefix, actions) except ( SystemExit, PaddingError, @@ -1134,7 +1178,7 @@ def create_env( ) create_env( prefix, - specs_or_actions, + specs_or_precs, config=config, subdir=subdir, env=env, @@ -1165,7 +1209,7 @@ def create_env( ) create_env( prefix, - specs_or_actions, + specs_or_precs, config=config, subdir=subdir, env=env, @@ -1203,7 +1247,7 @@ def create_env( ) create_env( prefix, - specs_or_actions, + specs_or_precs, config=config, subdir=subdir, env=env, @@ -1312,12 +1356,11 @@ def install_actions(prefix, index, specs): del install_actions -def _execute_actions(actions): +def _execute_actions(prefix, actions): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 # but reduced to only the functionality actually used within conda-build. - assert PREFIX_ACTION in actions and actions[PREFIX_ACTION] - prefix = actions[PREFIX_ACTION] + assert prefix if LINK_ACTION not in actions: log.debug(f"action {LINK_ACTION} not in actions") @@ -1346,11 +1389,10 @@ def _execute_actions(actions): unlink_link_transaction.execute() -def _display_actions(actions): +def _display_actions(prefix, actions): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 # but reduced to only the functionality actually used within conda-build. - prefix = actions.get(PREFIX_ACTION) builder = ["", "## Package Plan ##\n"] if prefix: builder.append(" environment location: %s" % prefix) diff --git a/conda_build/index.py b/conda_build/index.py index aebc28fe21..229c5e1632 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -61,6 +61,7 @@ human_bytes, url_path, ) +from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, @@ -112,7 +113,8 @@ def map(self, func, *iterables): local_subdir = "" local_output_folder = "" cached_channels = [] -channel_data = {} +_channel_data = {} +deprecated.constant("24.1.0", "24.3.0", "channel_data", _channel_data) # TODO: support for libarchive seems to have broken ability to use multiple threads here. 
@@ -151,7 +153,7 @@ def get_build_index( global local_output_folder global cached_index global cached_channels - global channel_data + global _channel_data mtime = 0 channel_urls = list(utils.ensure_list(channel_urls)) @@ -248,7 +250,7 @@ def get_build_index( while retry < max_retries: try: with open(channeldata_file, "r+") as f: - channel_data[channel.name] = json.load(f) + _channel_data[channel.name] = json.load(f) break except (OSError, JSONDecodeError): time.sleep(0.2) @@ -257,24 +259,24 @@ def get_build_index( # download channeldata.json for url if not context.offline: try: - channel_data[channel.name] = utils.download_channeldata( + _channel_data[channel.name] = utils.download_channeldata( channel.base_url + "/channeldata.json" ) except CondaHTTPError: continue # collapse defaults metachannel back into one superchannel, merging channeldata - if channel.base_url in context.default_channels and channel_data.get( + if channel.base_url in context.default_channels and _channel_data.get( channel.name ): packages = superchannel.get("packages", {}) - packages.update(channel_data[channel.name]) + packages.update(_channel_data[channel.name]) superchannel["packages"] = packages - channel_data["defaults"] = superchannel + _channel_data["defaults"] = superchannel local_index_timestamp = os.path.getmtime(index_file) local_subdir = subdir local_output_folder = output_folder cached_channels = channel_urls - return cached_index, local_index_timestamp, channel_data + return cached_index, local_index_timestamp, _channel_data def _ensure_valid_channel(local_folder, subdir): @@ -328,6 +330,7 @@ def _delegated_update_index( # Everything below is deprecated to maintain API/feature compatibility. +@deprecated("24.1.0", "24.3.0") def _determine_namespace(info): if info.get("namespace"): namespace = info["namespace"] @@ -354,6 +357,7 @@ def _determine_namespace(info): return namespace, info.get("name_in_channel", info["name"]), info["name"] +@deprecated("24.1.0", "24.3.0") def _make_seconds(timestamp): timestamp = int(timestamp) if timestamp > 253402300799: # 9999-12-31 @@ -366,11 +370,11 @@ def _make_seconds(timestamp): # ========================================================================== -REPODATA_VERSION = 1 -CHANNELDATA_VERSION = 1 -REPODATA_JSON_FN = "repodata.json" -REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json" -CHANNELDATA_FIELDS = ( +_REPODATA_VERSION = 1 +_CHANNELDATA_VERSION = 1 +_REPODATA_JSON_FN = "repodata.json" +_REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json" +_CHANNELDATA_FIELDS = ( "description", "dev_url", "doc_url", @@ -401,8 +405,16 @@ def _make_seconds(timestamp): "recipe_origin", "commits", ) +deprecated.constant("24.1.0", "24.3.0", "REPODATA_VERSION", _REPODATA_VERSION) +deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_VERSION", _CHANNELDATA_VERSION) +deprecated.constant("24.1.0", "24.3.0", "REPODATA_JSON_FN", _REPODATA_JSON_FN) +deprecated.constant( + "24.1.0", "24.3.0", "REPODATA_FROM_PKGS_JSON_FN", _REPODATA_FROM_PKGS_JSON_FN +) +deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_FIELDS", _CHANNELDATA_FIELDS) +@deprecated("24.1.0", "24.3.0") def _clear_newline_chars(record, field_name): if field_name in record: try: @@ -412,6 +424,9 @@ def _clear_newline_chars(record, field_name): record[field_name] = record[field_name][0].strip().replace("\n", " ") +@deprecated( + "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." 
+) def _apply_instructions(subdir, repodata, instructions): repodata.setdefault("removed", []) utils.merge_or_update_dict( @@ -460,6 +475,7 @@ def _apply_instructions(subdir, repodata, instructions): return repodata +@deprecated("24.1.0", "24.3.0") def _get_jinja2_environment(): def _filter_strftime(dt, dt_format): if isinstance(dt, Number): @@ -489,6 +505,7 @@ def _filter_add_href(text, link, **kwargs): return environment +@deprecated("24.1.0", "24.3.0") def _maybe_write(path, content, write_newline_end=False, content_is_binary=False): # Create the temp file next "path" so that we can use an atomic move, see # https://github.com/conda/conda-build/issues/3833 @@ -510,6 +527,7 @@ def _maybe_write(path, content, write_newline_end=False, content_is_binary=False return True +@deprecated("24.1.0", "24.3.0") def _make_build_string(build, build_number): build_number_as_string = str(build_number) if build.endswith(build_number_as_string): @@ -519,6 +537,7 @@ def _make_build_string(build, build_number): return build_string +@deprecated("24.1.0", "24.3.0") def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): """ The following dependencies do not exist in the channel and are not declared @@ -553,6 +572,7 @@ def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): log.warn("\n".join(builder)) +@deprecated("24.1.0", "24.3.0") def _cache_post_install_details(paths_cache_path, post_install_cache_path): post_install_details_json = { "binary_prefix": False, @@ -591,6 +611,7 @@ def _cache_post_install_details(paths_cache_path, post_install_cache_path): json.dump(post_install_details_json, fh) +@deprecated("24.1.0", "24.3.0") def _cache_recipe(tmpdir, recipe_cache_path): recipe_path_search_order = ( "info/recipe/meta.yaml.rendered", @@ -620,6 +641,7 @@ def _cache_recipe(tmpdir, recipe_cache_path): return recipe_json +@deprecated("24.1.0", "24.3.0") def _cache_run_exports(tmpdir, run_exports_cache_path): run_exports = {} try: @@ -635,6 +657,7 @@ def _cache_run_exports(tmpdir, run_exports_cache_path): json.dump(run_exports, fh) +@deprecated("24.1.0", "24.3.0") def _cache_icon(tmpdir, recipe_json, icon_cache_path): # If a conda package contains an icon, also extract and cache that in an .icon/ # directory. 
The icon file name is the name of the package, plus the extension @@ -651,6 +674,7 @@ def _cache_icon(tmpdir, recipe_json, icon_cache_path): utils.move_with_fallback(icon_path, icon_cache_path) +@deprecated("24.1.0", "24.3.0") def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths): environment = _get_jinja2_environment() template = environment.get_template("subdir-index.html.j2") @@ -663,6 +687,7 @@ def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths return rendered_html +@deprecated("24.1.0", "24.3.0") def _make_channeldata_index_html(channel_name, channeldata): environment = _get_jinja2_environment() template = environment.get_template("channeldata-index.html.j2") @@ -675,6 +700,7 @@ def _make_channeldata_index_html(channel_name, channeldata): return rendered_html +@deprecated("24.1.0", "24.3.0") def _get_source_repo_git_info(path): is_repo = subprocess.check_output( ["git", "rev-parse", "--is-inside-work-tree"], cwd=path @@ -697,12 +723,14 @@ def _get_source_repo_git_info(path): return commits +@deprecated("24.1.0", "24.3.0") def _cache_info_file(tmpdir, info_fn, cache_path): info_path = os.path.join(tmpdir, "info", info_fn) if os.path.lexists(info_path): utils.move_with_fallback(info_path, cache_path) +@deprecated("24.1.0", "24.3.0") def _alternate_file_extension(fn): cache_fn = fn for ext in CONDA_PACKAGE_EXTENSIONS: @@ -711,6 +739,7 @@ def _alternate_file_extension(fn): return cache_fn + next(iter(other_ext)) +@deprecated("24.1.0", "24.3.0") def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): packages = {} conda_packages = {} @@ -745,6 +774,7 @@ def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): return r +@deprecated("24.1.0", "24.3.0") def _get_newest_versions(r, pins={}): groups = {} for g_name, g_recs in r.groups.items(): @@ -760,6 +790,7 @@ def _get_newest_versions(r, pins={}): return [pkg for group in groups.values() for pkg in group] +@deprecated("24.1.0", "24.3.0") def _add_missing_deps(new_r, original_r): """For each package in new_r, if any deps are not satisfiable, backfill them from original_r.""" @@ -784,6 +815,7 @@ def _add_missing_deps(new_r, original_r): return [pkg for group in expanded_groups.values() for pkg in group] +@deprecated("24.1.0", "24.3.0") def _add_prev_ver_for_features(new_r, orig_r): expanded_groups = copy.deepcopy(new_r.groups) for g_name in new_r.groups: @@ -812,6 +844,7 @@ def _add_prev_ver_for_features(new_r, orig_r): return [pkg for group in expanded_groups.values() for pkg in group] +@deprecated("24.1.0", "24.3.0") def _shard_newest_packages(subdir, r, pins=None): """Captures only the newest versions of software in the resolve object. @@ -844,6 +877,7 @@ def _shard_newest_packages(subdir, r, pins=None): return set(_add_prev_ver_for_features(new_r, r)) +@deprecated("24.1.0", "24.3.0") def _build_current_repodata(subdir, repodata, pins): r = _get_resolve_object(subdir, repodata=repodata) keep_pkgs = _shard_newest_packages(subdir, r, pins) @@ -871,6 +905,7 @@ def _build_current_repodata(subdir, repodata, pins): return new_repodata +@deprecated("24.1.0", "24.3.0") class ChannelIndex: def __init__( self, @@ -951,7 +986,7 @@ def index( self._write_repodata( subdir, repodata_from_packages, - REPODATA_FROM_PKGS_JSON_FN, + _REPODATA_FROM_PKGS_JSON_FN, ) # Step 3. Apply patch instructions. 
@@ -968,7 +1003,7 @@ def index( t2.set_description("Writing patched repodata") t2.update() self._write_repodata( - subdir, patched_repodata, REPODATA_JSON_FN + subdir, patched_repodata, _REPODATA_JSON_FN ) t2.set_description("Building current_repodata subset") t2.update() @@ -1000,7 +1035,7 @@ def index( def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): subdir_path = join(self.channel_root, subdir) self._ensure_dirs(subdir) - repodata_json_path = join(subdir_path, REPODATA_FROM_PKGS_JSON_FN) + repodata_json_path = join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN) if verbose: log.info("Building repodata for %s" % subdir_path) @@ -1158,7 +1193,7 @@ def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): "info": { "subdir": subdir, }, - "repodata_version": REPODATA_VERSION, + "repodata_version": _REPODATA_VERSION, "removed": sorted(list(ignore_set)), } finally: @@ -1465,11 +1500,11 @@ def _add_extra_path(extra_paths, path): } extra_paths = OrderedDict() - _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_JSON_FN + ".bz2")) - _add_extra_path(extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN)) + _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN)) + _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN + ".bz2")) + _add_extra_path(extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN)) _add_extra_path( - extra_paths, join(subdir_path, REPODATA_FROM_PKGS_JSON_FN + ".bz2") + extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN + ".bz2") ) # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json")) _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json")) @@ -1603,7 +1638,7 @@ def _replace_if_newer_and_present(pd, data, erec, data_newer, k): channel_data.update( { - "channeldata_version": CHANNELDATA_VERSION, + "channeldata_version": _CHANNELDATA_VERSION, "subdirs": sorted( list(set(channel_data.get("subdirs", []) + [subdir])) ), diff --git a/conda_build/render.py b/conda_build/render.py index c75838a65b..a46130f4ed 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -35,6 +35,7 @@ pkgs_dirs, specs_from_url, ) +from .deprecations import deprecated from .environ import LINK_ACTION from .exceptions import DependencyNeedsBuildingError from .index import get_build_index @@ -90,6 +91,7 @@ def bldpkg_path(m): return path +@deprecated("24.1.0", "24.3.0") def actions_to_pins(actions): if LINK_ACTION in actions: return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]] @@ -182,7 +184,9 @@ def get_env_dependencies( else: raise - specs = actions_to_pins(actions) + specs = [ + package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) + ] return ( utils.ensure_list( (specs + subpackages + pass_through_deps) @@ -325,7 +329,8 @@ def _read_specs_from_package(pkg_loc, pkg_dist): return specs -def execute_download_actions(m, actions, env, package_subset=None, require_files=False): +@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") +def execute_download_actions(m, precs, env, package_subset=None, require_files=False): subdir = getattr(m.config, f"{env}_subdir") index, _, _ = get_build_index( subdir=subdir, @@ -354,7 +359,8 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files pkg_files = {} - precs = actions.get(LINK_ACTION, []) + if hasattr(precs, "keys"): + precs = precs.get(LINK_ACTION, []) if isinstance(package_subset, 
PackageRecord): package_subset = [package_subset] else: @@ -403,18 +409,20 @@ def execute_download_actions(m, actions, env, package_subset=None, require_files return pkg_files -def get_upstream_pins(m: MetaData, actions, env): +@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") +def get_upstream_pins(m: MetaData, precs, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. Return these additional specs.""" env_specs = m.get_value(f"requirements/{env}", []) explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] - linked_packages = actions.get(LINK_ACTION, []) - linked_packages = [prec for prec in linked_packages if prec.name in explicit_specs] + if hasattr(precs, "keys"): + precs = precs.get(LINK_ACTION, []) + precs = [prec for prec in precs if prec.name in explicit_specs] ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) ignore_list = utils.ensure_list(m.get_value("build/ignore_run_exports")) additional_specs = {} - for prec in linked_packages: + for prec in precs: if any(prec.name in req.split(" ")[0] for req in ignore_pkgs_list): continue run_exports = None @@ -428,7 +436,7 @@ def get_upstream_pins(m: MetaData, actions, env): if run_exports is None: loc, dist = execute_download_actions( m, - actions, + precs, env=env, package_subset=[prec], )[prec] From 15971b7f1b6e624f5d5b41368e353183aa086340 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:39:54 -0600 Subject: [PATCH 268/366] [pre-commit.ci] pre-commit autoupdate (#5168) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.14 → v0.2.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.14...v0.2.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 919c73cdea..7b8b3c1e72 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.14 + rev: v0.2.0 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From e32ff65a8f5ed0be2b62b23e7201d594a6520702 Mon Sep 17 00:00:00 2001 From: Finn Womack Date: Tue, 6 Feb 2024 08:43:45 -0800 Subject: [PATCH 269/366] Make msvc_env_cmd no-op for arm64 (#4867) Co-authored-by: Isuru Fernando --- conda_build/windows.py | 3 +++ news/4867-arm64-msvc-env-cmd-no-op | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 news/4867-arm64-msvc-env-cmd-no-op diff --git a/conda_build/windows.py b/conda_build/windows.py index ba53abf80a..706b499265 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -110,6 +110,9 @@ def msvc_env_cmd(bits, config, override=None): "If this recipe does not use a compiler, this message is safe to ignore. " "Otherwise, use {{compiler('')}} jinja2 in requirements/build." ) + if bits not in ["64", "32"]: + log.warn(f"The legacy MSVC compiler setup does not support {bits} builds. ") + return "" if override: log.warn( "msvc_compiler key in meta.yaml is deprecated. 
Use the new" diff --git a/news/4867-arm64-msvc-env-cmd-no-op b/news/4867-arm64-msvc-env-cmd-no-op new file mode 100644 index 0000000000..134dcd14fd --- /dev/null +++ b/news/4867-arm64-msvc-env-cmd-no-op @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Added a check to print an additional warning and return an empty string when bits is "arm64" in msvc_env_cmd. (#4867) From d65bf503ed4864179e9aecea2cd777bedce29028 Mon Sep 17 00:00:00 2001 From: Travis Hathaway Date: Tue, 6 Feb 2024 13:02:33 -1000 Subject: [PATCH 270/366] bump conda-sphinx-theme version (#5170) --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 81d30818d9..37666a374b 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -conda-sphinx-theme==0.2.0 +conda-sphinx-theme==0.2.1 linkify-it-py==2.0.2 myst-parser==2.0.0 Pillow==10.0.1 From 8055779d76ef0dc4ead06d95feb04622b80ac850 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 7 Feb 2024 03:37:02 +0100 Subject: [PATCH 271/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5165)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index ebfafa82a2..15c69d8793 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -22,7 +22,7 @@ jobs: steps: - uses: actions/checkout@v4 - id: has_local - uses: andstor/file-existence-action@v2.0.0 + uses: andstor/file-existence-action@v3.0.0 with: files: ${{ env.LOCAL }} - name: Global Only From 7e166acdaeaccb860b46518568288b84a0bbe452 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 7 Feb 2024 19:17:28 +0100 Subject: [PATCH 272/366] Fix nonzero exit for conda-build entrypoint (#5169) Add type hints & fix return type for execute functions. Updates tests that previously depended on `conda_build.cli.main_build.execute`'s return value. 
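The contract settled on here is small: every CLI `execute()` now returns an `int` exit status, and each lazily-importing plugin wrapper in `conda_build/plugin.py` returns that value unchanged so conda can propagate it as the process exit code. A minimal sketch of the pattern (illustrative only, not part of this patch; the actual modules are in the diff below):

    from __future__ import annotations

    from typing import Sequence


    def execute(args: Sequence[str] | None = None) -> int:
        # parse arguments and run the subcommand here
        return 0  # zero signals success


    def build(args: Sequence[str]) -> int:
        # the real wrapper imports the CLI module lazily;
        # it simply hands the return value back to conda
        return execute(args)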
--- conda_build/cli/main_build.py | 28 ++++--- conda_build/cli/main_convert.py | 4 +- conda_build/cli/main_debug.py | 4 +- conda_build/cli/main_develop.py | 4 +- conda_build/cli/main_inspect.py | 4 +- conda_build/cli/main_metapackage.py | 4 +- conda_build/cli/main_render.py | 43 +++++------ conda_build/cli/main_skeleton.py | 4 +- conda_build/plugin.py | 36 +++++---- conda_build/utils.py | 2 +- news/5169-fix-nonzero-exitcode | 19 +++++ tests/cli/test_main_build.py | 111 +++++++++++++++++++--------- 12 files changed, 172 insertions(+), 91 deletions(-) create mode 100644 news/5169-fix-nonzero-exitcode diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index f84024df48..51c906ebc8 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -522,7 +522,7 @@ def check_action(recipe, config): return api.check(recipe, config=config) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) config = get_or_merge_config(None, **parsed.__dict__) build.check_external() @@ -535,21 +535,22 @@ def execute(args: Sequence[str] | None = None): if "purge" in parsed.recipe: build.clean_build(config) - return + return 0 if "purge-all" in parsed.recipe: build.clean_build(config) config.clean_pkgs() - return + return 0 - outputs = None if parsed.output: config.verbose = False config.quiet = True config.debug = False - outputs = [output_action(recipe, config) for recipe in parsed.recipe] - elif parsed.test: - outputs = [] + for recipe in parsed.recipe: + output_action(recipe, config) + return 0 + + if parsed.test: failed_recipes = [] recipes = chain.from_iterable( glob(abspath(recipe), recursive=True) if "*" in recipe else [recipe] @@ -571,11 +572,13 @@ def execute(args: Sequence[str] | None = None): else: print("All tests passed") elif parsed.source: - outputs = [source_action(recipe, config) for recipe in parsed.recipe] + for recipe in parsed.recipe: + source_action(recipe, config) elif parsed.check: - outputs = [check_action(recipe, config) for recipe in parsed.recipe] + for recipe in parsed.recipe: + check_action(recipe, config) else: - outputs = api.build( + api.build( parsed.recipe, post=parsed.post, test_run_post=parsed.test_run_post, @@ -588,6 +591,7 @@ def execute(args: Sequence[str] | None = None): cache_dir=parsed.cache_dir, ) - if not parsed.output and len(utils.get_build_folders(config.croot)) > 0: + if utils.get_build_folders(config.croot): build.print_build_intermediate_warning(config) - return outputs + + return 0 diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 14b5b4f9b0..7f50883172 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -120,7 +120,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) files = parsed.files del parsed.__dict__["files"] @@ -128,3 +128,5 @@ def execute(args: Sequence[str] | None = None): for f in files: f = abspath(expanduser(f)) api.convert(f, **parsed.__dict__) + + return 0 diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index b01b8b5e03..0fcdd8ab5c 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -88,7 +88,7 @@ def get_parser() -> ArgumentParser: return p -def execute(args: Sequence[str] | None = None): +def 
execute(args: Sequence[str] | None = None) -> int: parser = get_parser() parsed = parser.parse_args(args) @@ -118,3 +118,5 @@ def execute(args: Sequence[str] | None = None): f"Error: conda-debug encountered the following error:\n{e}", file=sys.stderr ) sys.exit(1) + + return 0 diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 2c81a4edc1..694dde32b8 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -76,7 +76,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) prefix = determine_target_prefix(context, parsed) api.develop( @@ -87,3 +87,5 @@ def execute(args: Sequence[str] | None = None): clean=parsed.clean, uninstall=parsed.uninstall, ) + + return 0 diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 58cba771dc..e71ee03137 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -184,7 +184,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) if not parsed.subcommand: @@ -221,3 +221,5 @@ def execute(args: Sequence[str] | None = None): pprint(api.inspect_hash_inputs(parsed.packages)) else: parser.error(f"Unrecognized subcommand: {parsed.subcommand}.") + + return 0 diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index b4c610aea8..1af55abfbb 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -108,7 +108,9 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: _, args = parse_args(args) channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or () api.create_metapackage(channel_urls=channel_urls, **args.__dict__) + + return 0 diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 155c0e7739..5710bace1c 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -14,6 +14,7 @@ from .. import __version__, api from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config +from ..deprecations import deprecated from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars @@ -189,7 +190,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None, print_results: bool = True): +@deprecated.argument("24.1.1", "24.3.0", "print_results") +def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) config = get_or_merge_config(None, **parsed.__dict__) @@ -221,24 +223,23 @@ def execute(args: Sequence[str] | None = None, print_results: bool = True): f"Only one will be written to the file you specified ({parsed.file})." 
) - if print_results: - if parsed.output: - with LoggingContext(logging.CRITICAL + 1): - paths = api.get_output_file_paths(metadata_tuples, config=config) - print("\n".join(sorted(paths))) - if parsed.file: - m = metadata_tuples[-1][0] - api.output_yaml(m, parsed.file, suppress_outputs=True) - else: - logging.basicConfig(level=logging.INFO) - for m, _, _ in metadata_tuples: - print("--------------") - print("Hash contents:") - print("--------------") - pprint(m.get_hash_contents()) - print("----------") - print("meta.yaml:") - print("----------") - print(api.output_yaml(m, parsed.file, suppress_outputs=True)) + if parsed.output: + with LoggingContext(logging.CRITICAL + 1): + paths = api.get_output_file_paths(metadata_tuples, config=config) + print("\n".join(sorted(paths))) + if parsed.file: + m = metadata_tuples[-1][0] + api.output_yaml(m, parsed.file, suppress_outputs=True) else: - return metadata_tuples + logging.basicConfig(level=logging.INFO) + for m, _, _ in metadata_tuples: + print("--------------") + print("Hash contents:") + print("--------------") + pprint(m.get_hash_contents()) + print("----------") + print("meta.yaml:") + print("----------") + print(api.output_yaml(m, parsed.file, suppress_outputs=True)) + + return 0 diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index ade4b14d0e..d314304e62 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -46,7 +46,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -def execute(args: Sequence[str] | None = None): +def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) config = Config(**parsed.__dict__) @@ -62,3 +62,5 @@ def execute(args: Sequence[str] | None = None): version=parsed.version, config=config, ) + + return 0 diff --git a/conda_build/plugin.py b/conda_build/plugin.py index 16ac40bbb1..e86aa2a7af 100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -1,55 +1,59 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from typing import Sequence + import conda.plugins # lazy-import to avoid nasty import-time side effects when not using conda-build -def build(*args, **kwargs): +def build(args: Sequence[str]) -> int: from .cli.main_build import execute - execute(*args, **kwargs) + return execute(args) -def convert(*args, **kwargs): +def convert(args: Sequence[str]) -> int: from .cli.main_convert import execute - execute(*args, **kwargs) + return execute(args) -def debug(*args, **kwargs): +def debug(args: Sequence[str]) -> int: from .cli.main_debug import execute - execute(*args, **kwargs) + return execute(args) -def develop(*args, **kwargs): +def develop(args: Sequence[str]) -> int: from .cli.main_develop import execute - execute(*args, **kwargs) + return execute(args) -def inspect(*args, **kwargs): +def inspect(args: Sequence[str]) -> int: from .cli.main_inspect import execute - execute(*args, **kwargs) + return execute(args) -def metapackage(*args, **kwargs): +def metapackage(args: Sequence[str]) -> int: from .cli.main_metapackage import execute - execute(*args, **kwargs) + return execute(args) -def render(*args, **kwargs): +def render(args: Sequence[str]) -> int: from .cli.main_render import execute - execute(*args, **kwargs) + return execute(args) -def skeleton(*args, **kwargs): +def skeleton(args: Sequence[str]) -> int: from .cli.main_skeleton import execute - execute(*args, **kwargs) 
+ return execute(args) @conda.plugins.hookimpl diff --git a/conda_build/utils.py b/conda_build/utils.py index bc1c20634c..c1a529082f 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -990,7 +990,7 @@ def get_site_packages(prefix, py_ver): return os.path.join(get_stdlib_dir(prefix, py_ver), "site-packages") -def get_build_folders(croot): +def get_build_folders(croot: str | os.PathLike | Path) -> list[str]: # remember, glob is not a regex. return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*"), recursive=True) diff --git a/news/5169-fix-nonzero-exitcode b/news/5169-fix-nonzero-exitcode new file mode 100644 index 0000000000..e66efe71a1 --- /dev/null +++ b/news/5169-fix-nonzero-exitcode @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix nonzero exitcode on success. (#5167 via #5169) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 90d1c4a629..04c2cda7ba 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -130,15 +130,23 @@ def test_build_output_build_path_multiple_recipes( assert output.rstrip().splitlines() == test_paths, error -def test_slash_in_recipe_arg_keeps_build_id(testing_workdir, testing_config): +def test_slash_in_recipe_arg_keeps_build_id( + testing_workdir: str, testing_config: Config +): args = [ os.path.join(metadata_dir, "has_prefix_files"), "--croot", testing_config.croot, "--no-anaconda-upload", ] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], "binary-has-prefix", refresh_mode="forced") + main_build.execute(args) + + output = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-has-prefix-files-1.0-0.tar.bz2", + ) + data = package_has_file(output, "binary-has-prefix", refresh_mode="forced") assert data if hasattr(data, "decode"): data = data.decode("UTF-8") @@ -157,7 +165,7 @@ def test_build_long_test_prefix_default_enabled(mocker, testing_workdir): main_build.execute(args) -def test_build_no_build_id(testing_workdir, testing_config): +def test_build_no_build_id(testing_workdir: str, testing_config: Config): args = [ os.path.join(metadata_dir, "has_prefix_files"), "--no-build-id", @@ -166,8 +174,14 @@ def test_build_no_build_id(testing_workdir, testing_config): "--no-activate", "--no-anaconda-upload", ] - outputs = main_build.execute(args) - data = package_has_file(outputs[0], "binary-has-prefix", refresh_mode="forced") + main_build.execute(args) + + output = os.path.join( + testing_config.croot, + testing_config.host_subdir, + "conda-build-test-has-prefix-files-1.0-0.tar.bz2", + ) + data = package_has_file(output, "binary-has-prefix", refresh_mode="forced") assert data if hasattr(data, "decode"): data = data.decode("UTF-8") @@ -191,7 +205,7 @@ def test_build_multiple_recipes(testing_metadata, testing_workdir, testing_confi main_build.execute(args) -def test_build_output_folder(testing_workdir: str, testing_metadata): +def test_build_output_folder(testing_workdir: str, testing_metadata: MetaData): api.output_yaml(testing_metadata, "meta.yaml") out = Path(testing_workdir, "out") @@ -207,9 +221,10 @@ def test_build_output_folder(testing_workdir: str, testing_metadata): "--output-folder", str(out), ] - output = main_build.execute(args)[0] + main_build.execute(args) + assert ( - out / testing_metadata.config.host_subdir / os.path.basename(output) + out / testing_metadata.config.host_subdir / testing_metadata.pkg_fn() ).is_file() @@ -375,38 +390,53 @@ def 
test_activate_scripts_not_included(testing_workdir): assert not package_has_file(out, f) -def test_relative_path_croot(conda_build_test_recipe_envvar: str): +def test_relative_path_croot( + conda_build_test_recipe_envvar: str, testing_config: Config +): # this tries to build a package while specifying the croot with a relative path: # conda-build --no-test --croot ./relative/path + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot = Path(".", "relative", "path") - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join(".", "relative", "path") - args = ["--no-anaconda-upload", "--croot", croot_rel, empty_sections] - outputfile = main_build.execute(args) + args = ["--no-anaconda-upload", f"--croot={croot}", str(empty_sections)] + main_build.execute(args) - assert len(outputfile) == 1 - assert os.path.isfile(outputfile[0]) + assert len(list(croot.glob("**/*.tar.bz2"))) == 1 + assert ( + croot / testing_config.subdir / "empty_with_build_script-0.0-0.tar.bz2" + ).is_file() -def test_relative_path_test_artifact(conda_build_test_recipe_envvar: str): +def test_relative_path_test_artifact( + conda_build_test_recipe_envvar: str, testing_config: Config +): # this test builds a package into (cwd)/relative/path and then calls: # conda-build --test ./relative/path/{platform}/{artifact}.tar.bz2 - - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join(".", "relative", "path") - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot_rel = Path(".", "relative", "path") + croot_abs = croot_rel.resolve() # build the package - args = ["--no-anaconda-upload", "--no-test", "--croot", croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 + args = [ + "--no-anaconda-upload", + "--no-test", + f"--croot={croot_abs}", + str(empty_sections), + ] + main_build.execute(args) - output_file_rel = os.path.join( - croot_rel, os.path.relpath(output_file_abs[0], croot_abs) - ) + assert len(list(croot_abs.glob("**/*.tar.bz2"))) == 1 # run the test stage with relative path - args = ["--no-anaconda-upload", "--test", output_file_rel] + args = [ + "--no-anaconda-upload", + "--test", + os.path.join( + croot_rel, + testing_config.subdir, + "empty_with_build_script-0.0-0.tar.bz2", + ), + ] main_build.execute(args) @@ -414,17 +444,28 @@ def test_relative_path_test_recipe(conda_build_test_recipe_envvar: str): # this test builds a package into (cwd)/relative/path and then calls: # conda-build --test --croot ./relative/path/ /abs/path/to/recipe - empty_sections = os.path.join(metadata_dir, "empty_with_build_script") - croot_rel = os.path.join(".", "relative", "path") - croot_abs = os.path.abspath(os.path.normpath(croot_rel)) + empty_sections = Path(metadata_dir, "empty_with_build_script") + croot_rel = Path(".", "relative", "path") + croot_abs = croot_rel.resolve() # build the package - args = ["--no-anaconda-upload", "--no-test", "--croot", croot_abs, empty_sections] - output_file_abs = main_build.execute(args) - assert len(output_file_abs) == 1 + args = [ + "--no-anaconda-upload", + "--no-test", + f"--croot={croot_abs}", + str(empty_sections), + ] + main_build.execute(args) + + assert len(list(croot_abs.glob("**/*.tar.bz2"))) == 1 # run the test stage with relative croot - args = ["--no-anaconda-upload", "--test", "--croot", croot_rel, empty_sections] + args = [ + "--no-anaconda-upload", + "--test", + 
f"--croot={croot_rel}", + str(empty_sections), + ] main_build.execute(args) From f278f71329590f42618c1e7355713526da54c868 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 8 Feb 2024 16:37:02 +0100 Subject: [PATCH 273/366] Release 24.1.1 (#5172) --- .authors.yml | 6 +++--- CHANGELOG.md | 12 ++++++++++++ news/5169-fix-nonzero-exitcode | 19 ------------------- 3 files changed, 15 insertions(+), 22 deletions(-) delete mode 100644 news/5169-fix-nonzero-exitcode diff --git a/.authors.yml b/.authors.yml index 28f364059e..6b71ac9dc7 100644 --- a/.authors.yml +++ b/.authors.yml @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 77 + num_commits: 80 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 164 + num_commits: 167 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1225,7 +1225,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 33 + num_commits: 32 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann diff --git a/CHANGELOG.md b/CHANGELOG.md index 9377577710..dfe35b2345 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ [//]: # (current developments) +## 24.1.1 (2024-02-07) + +### Bug fixes + +* Fix nonzero exitcode on success. (#5167 via #5169) + +### Contributors + +* @kenodegard + + + ## 24.1.0 (2024-01-25) ### Enhancements diff --git a/news/5169-fix-nonzero-exitcode b/news/5169-fix-nonzero-exitcode deleted file mode 100644 index e66efe71a1..0000000000 --- a/news/5169-fix-nonzero-exitcode +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix nonzero exitcode on success. 
(#5167 via #5169) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From fc4ef10beaacb3e9fadc4fb85ef53fcd3f2ca7c5 Mon Sep 17 00:00:00 2001 From: Min RK Date: Fri, 9 Feb 2024 21:33:03 +0100 Subject: [PATCH 274/366] avoid encoding error in ensure_binary (#5175) log repr of path, because it's not a safe assumption that it's a valid str --- conda_build/os_utils/liefldd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index c44a03f864..421990cc85 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -46,7 +46,7 @@ def ensure_binary( try: return lief.parse(str(file)) except BaseException: - print(f"WARNING: liefldd: failed to ensure_binary({file})") + print(f"WARNING: liefldd: failed to ensure_binary({file!r})") return None From 3a4e5766f4ab43a0e16a5e181b2543487ddb7255 Mon Sep 17 00:00:00 2001 From: Daniel Holth Date: Fri, 9 Feb 2024 17:02:05 -0500 Subject: [PATCH 275/366] Document load file regex (#5173) * update load_file_regex documentation * improve load_file_regex documentation * Punctuation and verb tense consistency * Apply suggestions from code review Co-authored-by: Katherine Kinnaman --------- Co-authored-by: Katherine Kinnaman --- docs/source/resources/define-metadata.rst | 27 ++++++++++++++--------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 63cbb3eb86..e0e5bfff2d 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1756,11 +1756,11 @@ retrieve a fully rendered ``meta.yaml``, use the Loading data from other files ----------------------------- -There are several additional functions available to Jinja2 which can be used +There are several additional functions available to Jinja2, which can be used to load data from other files. These are ``load_setup_py_data``, ``load_file_regex``, ``load_file_data``, and ``load_str_data``. -* ``load_setup_py_data``: Loads data from a ``setup.py`` file. This can be useful to +* ``load_setup_py_data``: Load data from a ``setup.py`` file. This can be useful to obtain metadata such as the version from a project's ``setup.py`` file. For example:: {% set data = load_setup_py_data() %} @@ -1769,16 +1769,23 @@ to load data from other files. These are ``load_setup_py_data``, ``load_file_reg name: foo version: {{ version }} -* ``load_file_regex``: Searches a file for a regular expression and returns the - first match as a Python ``re.Match object``. For example:: +* ``load_file_regex``: Search a file for a regular expression returning the + first match as a Python `re.Match + `_ object. + + For example, using ``load_file_regex(load_file, regex_pattern, from_recipe_dir=False) -> re.Match | None``:: + + {% set version_match = load_file_regex( + load_file="conda_package_streaming/__init__.py", + regex_pattern='^__version__ = "(.+)"') %} + {% set version = version_match[1] %} - {% set readme_heading = load_file_regex(load_file='README.rst', regex_pattern=r'^# (\S+)') %} package: - name: {{ readme_heading.string }} + version: {{ version }} -* ``load_file_data``: You can also parse JSON, TOML, or YAML files and load data - from them. For example you can use this to load poetry configurations from - ``pyproject.toml``. This is especially useful as ``setup.py`` is no longer the +* ``load_file_data``: Parse JSON, TOML, or YAML files and load data + from them. 
For example, you can use this to load poetry configurations from + ``pyproject.toml``. This is especially useful, as ``setup.py`` is no longer the only standard way to define project metadata (see `PEP 517 `_ and `PEP 518 `_):: @@ -1789,7 +1796,7 @@ to load data from other files. These are ``load_setup_py_data``, ``load_file_reg name: {{ poetry.get('name') }} version: {{ poetry.get('version') }} -* ``load_str_data``: Loads and parses data from a string. This is similar to +* ``load_str_data``: Load and parse data from a string. This is similar to ``load_file_data``, but it takes a string instead of a file as an argument. This may seem pointless at first, but you can use this to pass more complex data structures by environment variables. For example:: From 343f71585833d19e7bbe835116f56ef79cac84e5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:50:40 -0600 Subject: [PATCH 276/366] [pre-commit.ci] pre-commit autoupdate (#5180) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.2.0 → v0.2.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.0...v0.2.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7b8b3c1e72..7f55024665 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.0 + rev: v0.2.1 hooks: # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff From ec50bdfc5c2f398ce6319a4dbfc06a8214a19190 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 12 Feb 2024 11:09:18 -0600 Subject: [PATCH 277/366] Define `CODEOWNERS` (#5182) --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..eb9895e16e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# Syntax for this file at https://help.github.com/articles/about-codeowners/ + +* @conda/builds-tools From 46dfcf8dac6b456de05144625af796666fba21de Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 12 Feb 2024 15:15:08 -0600 Subject: [PATCH 278/366] Add `TCH`/`T10`/`FA` lints and sync `conda.deprecations` (#5178) --- .pre-commit-config.yaml | 2 +- conda_build/cli/main_build.py | 16 +++--- conda_build/cli/main_convert.py | 7 ++- conda_build/cli/main_debug.py | 7 ++- conda_build/cli/main_develop.py | 7 ++- conda_build/cli/main_inspect.py | 7 ++- conda_build/cli/main_metapackage.py | 7 ++- conda_build/cli/main_render.py | 7 ++- conda_build/cli/main_skeleton.py | 7 ++- conda_build/config.py | 5 +- conda_build/create_test.py | 5 +- conda_build/deprecations.py | 88 +++++++++++++++++++++++------ conda_build/inspect_pkg.py | 5 +- conda_build/jinja_context.py | 10 +++- conda_build/metadata.py | 5 +- conda_build/os_utils/ldd.py | 11 ++-- conda_build/plugin.py | 5 +- conda_build/post.py | 8 ++- conda_build/skeletons/cran.py | 6 +- conda_build/source.py | 5 +- conda_build/utils.py | 6 +- pyproject.toml | 13 +++-- tests/cli/test_main_build.py | 10 +++- tests/cli/test_validators.py | 5 +- tests/os_utils/test_codefile.py | 5 +- tests/test_api_build.py | 10 +++- tests/test_api_skeleton.py | 6 +- tests/test_create_test.py | 5 +- tests/test_deprecations.py | 5 +- tests/test_jinja_context.py | 7 ++- tests/test_metadata.py | 5 +- tests/test_render.py | 8 ++- tests/utils.py | 5 +- 33 files changed, 230 insertions(+), 80 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7f55024665..2cfff523aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,6 +5,7 @@ ci: # ignore patches/diffs since slight reformatting can break them exclude: | (?x)^( + conda_build/version.py | tests/( archives | index_data | @@ -47,7 +48,6 @@ repos: - id: insert-license files: \.py$ args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] - exclude: ^conda_build/version.py - repo: https://github.com/asottile/blacken-docs rev: 1.16.0 hooks: diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 51c906ebc8..de698df22c 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -6,23 +6,17 @@ import logging import sys import warnings -from argparse import Namespace from glob import glob from itertools import chain from os.path import abspath, expanduser, expandvars from pathlib import Path -from typing import Sequence +from typing import TYPE_CHECKING from conda.auxlib.ish import dals from conda.common.io import dashlist from .. 
import api, build, source, utils -from ..conda_interface import ( - ArgumentParser, - add_parser_channels, - binstar_upload, - cc_conda_build, -) +from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build from ..config import ( get_channel_urls, get_or_merge_config, @@ -32,6 +26,12 @@ from .actions import KeyValueAction from .main_render import get_render_parser +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + + from ..conda_interface import ArgumentParser + def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser = get_render_parser() diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 7f50883172..ce92a71ddc 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -3,13 +3,16 @@ from __future__ import annotations import logging -from argparse import Namespace from os.path import abspath, expanduser -from typing import Sequence +from typing import TYPE_CHECKING from .. import api from ..conda_interface import ArgumentParser +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + logging.basicConfig(level=logging.INFO) epilog = """ diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 0fcdd8ab5c..59689bfa05 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -4,14 +4,17 @@ import logging import sys -from argparse import ArgumentParser -from typing import Sequence +from typing import TYPE_CHECKING from .. import api from ..utils import on_win from . import validators as valid from .main_render import get_render_parser +if TYPE_CHECKING: + from argparse import ArgumentParser + from typing import Sequence + logging.basicConfig(level=logging.INFO) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 694dde32b8..cb67c40696 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -3,14 +3,17 @@ from __future__ import annotations import logging -from argparse import Namespace -from typing import Sequence +from typing import TYPE_CHECKING from conda.base.context import context, determine_target_prefix from .. import api from ..conda_interface import ArgumentParser, add_parser_prefix +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + logging.basicConfig(level=logging.INFO) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index e71ee03137..eefbcf97da 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -4,16 +4,19 @@ import logging import sys -from argparse import Namespace from os.path import expanduser from pprint import pprint -from typing import Sequence +from typing import TYPE_CHECKING from conda.base.context import context, determine_target_prefix from .. import api from ..conda_interface import ArgumentParser, add_parser_prefix +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + logging.basicConfig(level=logging.INFO) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 1af55abfbb..b295b4130e 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -4,12 +4,15 @@ import argparse import logging -from argparse import Namespace -from typing import Sequence +from typing import TYPE_CHECKING from .. 
import api from ..conda_interface import ArgumentParser, add_parser_channels, binstar_upload +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + logging.basicConfig(level=logging.INFO) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 5710bace1c..61c46c1c4b 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -4,9 +4,8 @@ import argparse import logging -from argparse import Namespace from pprint import pprint -from typing import Sequence +from typing import TYPE_CHECKING import yaml from yaml.parser import ParserError @@ -18,6 +17,10 @@ from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + log = logging.getLogger(__name__) diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index d314304e62..c33661f986 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -7,13 +7,16 @@ import os import pkgutil import sys -from argparse import Namespace -from typing import Sequence +from typing import TYPE_CHECKING from .. import api from ..conda_interface import ArgumentParser from ..config import Config +if TYPE_CHECKING: + from argparse import Namespace + from typing import Sequence + thisdir = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.INFO) diff --git a/conda_build/config.py b/conda_build/config.py index a631a69593..5d4ba590d3 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -13,7 +13,7 @@ import time from collections import namedtuple from os.path import abspath, expanduser, expandvars, join -from pathlib import Path +from typing import TYPE_CHECKING from .conda_interface import ( binstar_upload, @@ -33,6 +33,9 @@ ) from .variants import get_default_variant +if TYPE_CHECKING: + from pathlib import Path + invocation_time = "" diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 334645dcac..1788bbe97d 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -9,10 +9,13 @@ import os from os.path import basename, exists, isfile, join from pathlib import Path +from typing import TYPE_CHECKING -from .metadata import MetaData from .utils import copy_into, ensure_list, on_win, rm_rf +if TYPE_CHECKING: + from .metadata import MetaData + def create_files(m: MetaData, test_dir: Path) -> bool: """ diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index 372f700876..67ceb59c7f 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -5,12 +5,15 @@ import sys import warnings -from argparse import Action from functools import wraps from types import ModuleType -from typing import Any, Callable +from typing import TYPE_CHECKING -from packaging.version import Version, parse +if TYPE_CHECKING: + from argparse import Action + from typing import Any, Callable + + from packaging.version import Version from . import __version__ @@ -22,17 +25,52 @@ class DeprecatedError(RuntimeError): # inspired by deprecation (https://deprecation.readthedocs.io/en/latest/) and # CPython's warnings._deprecated class DeprecationHandler: - _version: Version + _version: str | None + _version_tuple: tuple[int, ...] | None + _version_object: Version | None - def __init__(self, version: Version | str): + def __init__(self, version: str): """Factory to create a deprecation handle for the specified version. 
:param version: The version to compare against when checking deprecation statuses. """ + self._version = version + # Try to parse the version string as a simple tuple[int, ...] to avoid + # packaging.version import and costlier version comparisons. + self._version_tuple = self._get_version_tuple(version) + self._version_object = None + + @staticmethod + def _get_version_tuple(version: str) -> tuple[int, ...] | None: + """Return version as non-empty tuple of ints if possible, else None. + + :param version: Version string to parse. + """ try: - self._version = parse(version) - except TypeError: - self._version = parse("0.0.0.dev0+placeholder") + return tuple(int(part) for part in version.strip().split(".")) or None + except (AttributeError, ValueError): + return None + + def _version_less_than(self, version: str) -> bool: + """Test whether own version is less than the given version. + + :param version: Version string to compare against. + """ + if self._version_tuple: + if version_tuple := self._get_version_tuple(version): + return self._version_tuple < version_tuple + + # If self._version or version could not be represented by a simple + # tuple[int, ...], do a more elaborate version parsing and comparison. + # Avoid this import otherwise to reduce import time for conda activate. + from packaging.version import parse + + if self._version_object is None: + try: + self._version_object = parse(self._version) + except TypeError: + self._version_object = parse("0.0.0.dev0+placeholder") + return self._version_object < parse(version) def __call__( self, @@ -281,16 +319,33 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: :param stack: The stacklevel increment. :return: The module and module name. """ - import inspect # expensive - try: frame = sys._getframe(2 + stack) - module = inspect.getmodule(frame) - if module is not None: - return (module, module.__name__) except IndexError: # IndexError: 2 + stack is out of range pass + else: + # Shortcut finding the module by manually inspecting loaded modules. + try: + filename = frame.f_code.co_filename + except AttributeError: + # AttributeError: frame.f_code.co_filename is undefined + pass + else: + for module in sys.modules.values(): + if not isinstance(module, ModuleType): + continue + if not hasattr(module, "__file__"): + continue + if module.__file__ == filename: + return (module, module.__name__) + + # If above failed, do an expensive import and costly getmodule call. + import inspect + + module = inspect.getmodule(frame) + if module is not None: + return (module, module.__name__) raise DeprecatedError("unable to determine the calling module") @@ -309,14 +364,11 @@ def _generate_message( :param addendum: Additional messaging. Useful to indicate what to do instead. :return: The warning category (if applicable) and the message. """ - deprecate_version = parse(deprecate_in) - remove_version = parse(remove_in) - category: type[Warning] | None - if self._version < deprecate_version: + if self._version_less_than(deprecate_in): category = PendingDeprecationWarning warning = f"is pending deprecation and will be removed in {remove_in}." - elif self._version < remove_version: + elif self._version_less_than(remove_in): category = DeprecationWarning warning = f"is deprecated and will be removed in {remove_in}." 
else: diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 3f3fba7545..b208aee7db 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -11,7 +11,7 @@ from os.path import abspath, basename, dirname, exists, join, normcase from pathlib import Path from tempfile import TemporaryDirectory -from typing import Iterable, Literal +from typing import TYPE_CHECKING from conda.api import Solver from conda.core.index import get_index @@ -40,6 +40,9 @@ package_has_file, ) +if TYPE_CHECKING: + from typing import Iterable, Literal + log = get_logger(__name__) diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 6933f631ad..2ef9b910d1 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import datetime import json import os @@ -8,7 +10,6 @@ import time from functools import partial from io import StringIO, TextIOBase -from typing import IO, Any, Optional from warnings import warn import jinja2 @@ -19,6 +20,8 @@ except: import tomli as tomllib +from typing import TYPE_CHECKING + from . import _load_setup_py_data from .environ import get_dict as get_environ from .exceptions import CondaBuildException @@ -35,6 +38,9 @@ ) from .variants import DEFAULT_COMPILERS +if TYPE_CHECKING: + from typing import IO, Any + log = get_logger(__name__) @@ -671,7 +677,7 @@ def _load_data(stream: IO, fmt: str, *args, **kwargs) -> Any: def load_file_data( filename: str, - fmt: Optional[str] = None, + fmt: str | None = None, *args, config=None, from_recipe_dir=False, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index e02d09de23..f9f0d55438 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -13,7 +13,7 @@ from collections import OrderedDict from functools import lru_cache from os.path import isfile, join -from typing import Literal, overload +from typing import TYPE_CHECKING, overload from bs4 import UnicodeDammit @@ -33,6 +33,9 @@ on_win, ) +if TYPE_CHECKING: + from typing import Literal + try: import yaml except ImportError: diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 3ab78bc7fd..c3672764ec 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -2,21 +2,24 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import os import re import subprocess from functools import lru_cache from os.path import basename from pathlib import Path -from typing import Iterable - -from conda.models.records import PrefixRecord +from typing import TYPE_CHECKING from ..conda_interface import untracked from ..utils import on_linux, on_mac from .macho import otool from .pyldd import codefile_class, inspect_linkages, machofile +if TYPE_CHECKING: + import os + from typing import Iterable + + from conda.models.records import PrefixRecord + LDD_RE = re.compile(r"\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)") LDD_NOT_FOUND_RE = re.compile(r"\s*(.*?)\s*=>\s*not found") diff --git a/conda_build/plugin.py b/conda_build/plugin.py index e86aa2a7af..17cbcad143 100644 --- a/conda_build/plugin.py +++ b/conda_build/plugin.py @@ -2,10 +2,13 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from typing import Sequence +from typing import TYPE_CHECKING import conda.plugins +if TYPE_CHECKING: + from typing import Sequence + # lazy-import to avoid nasty import-time side effects when not using conda-build def build(args: 
Sequence[str]) -> int: diff --git a/conda_build/post.py b/conda_build/post.py index 5a05eda077..3d22ff3aae 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -32,7 +32,7 @@ ) from pathlib import Path from subprocess import CalledProcessError, call, check_output -from typing import Literal +from typing import TYPE_CHECKING from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord @@ -46,7 +46,6 @@ ) from .exceptions import OverDependingError, OverLinkingError, RunPathError from .inspect_pkg import which_package -from .metadata import MetaData from .os_utils import external, macho from .os_utils.liefldd import ( get_exports_memoized, @@ -65,6 +64,11 @@ ) from .utils import on_mac, on_win, prefix_files +if TYPE_CHECKING: + from typing import Literal + + from .metadata import MetaData + filetypes_for_platform = { "win": (DLLfile, EXEfile), "osx": (machofile,), diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index e1a4406252..423941164e 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -28,7 +28,6 @@ realpath, relpath, ) -from typing import Literal import requests import yaml @@ -39,6 +38,8 @@ except ImportError: from yaml import SafeDumper +from typing import TYPE_CHECKING + from conda.common.io import dashlist from .. import source @@ -49,6 +50,9 @@ from ..utils import ensure_list, rm_rf from ..variants import DEFAULT_VARIANTS, get_package_variants +if TYPE_CHECKING: + from typing import Literal + SOURCE_META = """\ {archive_keys} {git_url_key} {git_url} diff --git a/conda_build/source.py b/conda_build/source.py index 0db306fb75..d4e1ca5b69 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -12,7 +12,7 @@ from os.path import abspath, basename, exists, expanduser, isdir, isfile, join, normpath from pathlib import Path from subprocess import CalledProcessError -from typing import Iterable +from typing import TYPE_CHECKING from urllib.parse import urljoin from .conda_interface import ( @@ -40,6 +40,9 @@ tar_xf, ) +if TYPE_CHECKING: + from typing import Iterable + log = get_logger(__name__) git_submod_re = re.compile(r"(?:.+)\.(.+)\.(?:.+)\s(.+)") diff --git a/conda_build/utils.py b/conda_build/utils.py index c1a529082f..29baa98005 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -41,7 +41,7 @@ ) from pathlib import Path from threading import Thread -from typing import Iterable +from typing import TYPE_CHECKING, Iterable import conda_package_handling.api import filelock @@ -55,7 +55,6 @@ ) from conda.core.prefix_data import PrefixData from conda.models.dist import Dist -from conda.models.records import PrefixRecord from .conda_interface import ( CondaHTTPError, @@ -79,6 +78,9 @@ from .deprecations import deprecated from .exceptions import BuildLockError +if TYPE_CHECKING: + from conda.models.records import PrefixRecord + on_win = sys.platform == "win32" on_mac = sys.platform == "darwin" on_linux = sys.platform == "linux" diff --git a/pyproject.toml b/pyproject.toml index 72f657031e..7b27d57775 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,20 +93,25 @@ skip_covered = true omit = ["conda_build/skeletons/_example_skeleton.py"] [tool.ruff] -pycodestyle = {max-line-length = 120} +target-version = "py38" + +[tool.ruff.lint] # E, W = pycodestyle errors and warnings # F = pyflakes # I = isort # UP = pyupgrade # ISC = flake8-implicit-str-concat +# TCH = flake8-type-checking +# T10 = flake8-debugger +# FA = flake8-future-annotations # see also 
https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "UP", "ISC"] +select = ["E", "W", "F", "I", "UP", "ISC", "TCH", "T10", "FA"] # E402 module level import not at top of file # E722 do not use bare 'except' # E731 do not assign a lambda expression, use a def ignore = ["E402", "E722", "E731"] -target-version = "py38" -pydocstyle = {convention = "pep257"} +pycodestyle = {max-line-length = 120} +flake8-type-checking = {exempt-modules = [], strict = true} [tool.pytest.ini_options] minversion = 3.0 diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 04c2cda7ba..60f24cf7ca 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -5,10 +5,9 @@ import os import re from pathlib import Path +from typing import TYPE_CHECKING import pytest -from pytest import FixtureRequest, MonkeyPatch -from pytest_mock import MockerFixture from conda_build import api from conda_build.cli import main_build, main_render @@ -18,13 +17,18 @@ zstd_compression_level_default, ) from conda_build.exceptions import DependencyNeedsBuildingError -from conda_build.metadata import MetaData from conda_build.os_utils.external import find_executable from conda_build.utils import get_build_folders, on_win, package_has_file from ..utils import metadata_dir from ..utils import reset_config as _reset_config +if TYPE_CHECKING: + from pytest import FixtureRequest, MonkeyPatch + from pytest_mock import MockerFixture + + from conda_build.metadata import MetaData + @pytest.mark.sanity def test_build_empty_sections(conda_build_test_recipe_envvar: str): diff --git a/tests/cli/test_validators.py b/tests/cli/test_validators.py index 5048494c46..0809058a5b 100644 --- a/tests/cli/test_validators.py +++ b/tests/cli/test_validators.py @@ -1,8 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os from argparse import ArgumentError -from typing import Union import pytest @@ -31,7 +32,7 @@ ], ) def test_validate_is_conda_pkg_or_recipe_dir( - file_or_folder: str, expected: Union[str, bool], is_dir: bool, create: bool, tmpdir + file_or_folder: str, expected: str | bool, is_dir: bool, create: bool, tmpdir ): if create: file_or_folder = os.path.join(tmpdir, file_or_folder) diff --git a/tests/os_utils/test_codefile.py b/tests/os_utils/test_codefile.py index a3e38342da..3cdcc0854d 100644 --- a/tests/os_utils/test_codefile.py +++ b/tests/os_utils/test_codefile.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import Callable +from typing import TYPE_CHECKING import pytest @@ -11,6 +11,9 @@ from conda_build.os_utils.pyldd import DLLfile, EXEfile, elffile, machofile from conda_build.os_utils.pyldd import codefile_class as pyldd_codefile_class +if TYPE_CHECKING: + from typing import Callable + LDD = Path(__file__).parent.parent / "data" / "ldd" diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 2e0f2b0224..6ad6577c50 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -18,6 +18,7 @@ from glob import glob from pathlib import Path from shutil import which +from typing import TYPE_CHECKING # for version import conda @@ -28,8 +29,6 @@ from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaMultiError from conda_index.api import update_index -from pytest import FixtureRequest, MonkeyPatch -from pytest_mock import MockerFixture from conda_build import __version__, api, exceptions from 
conda_build.conda_interface import ( @@ -46,7 +45,6 @@ OverDependingError, OverLinkingError, ) -from conda_build.metadata import MetaData from conda_build.os_utils.external import find_executable from conda_build.render import finalize_metadata from conda_build.utils import ( @@ -71,6 +69,12 @@ reset_config, ) +if TYPE_CHECKING: + from pytest import FixtureRequest, MonkeyPatch + from pytest_mock import MockerFixture + + from conda_build.metadata import MetaData + def represent_ordereddict(dumper, data): value = [] diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index b19a2b2dde..a8273492b0 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -6,12 +6,12 @@ import subprocess import sys from pathlib import Path +from typing import TYPE_CHECKING import pytest import ruamel.yaml from conda_build import api -from conda_build.config import Config from conda_build.skeletons.pypi import ( clean_license_name, convert_to_flat_list, @@ -28,6 +28,10 @@ from conda_build.utils import on_win from conda_build.version import _parse as parse_version +if TYPE_CHECKING: + from conda_build.config import Config + + SYMPY_URL = ( "https://files.pythonhosted.org/packages/7d/23/70fa970c07f0960f7543af982d2554be805e1034b9dcee9cb3082ce80f80/sympy-1.10.tar.gz" "#sha256=6cf85a5cfe8fff69553e745b05128de6fc8de8f291965c63871c79701dc6efc9" diff --git a/tests/test_create_test.py b/tests/test_create_test.py index f515391802..c7ea321cc1 100644 --- a/tests/test_create_test.py +++ b/tests/test_create_test.py @@ -3,7 +3,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any +from typing import TYPE_CHECKING import pytest @@ -14,6 +14,9 @@ create_r_files, ) +if TYPE_CHECKING: + from typing import Any + @pytest.mark.parametrize( "name,imports,expected,unexpected", diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 0c6d8b11b4..a4ff2d1ea7 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -261,5 +261,8 @@ def test_topic_remove(deprecated_v3: DeprecationHandler): def test_version_fallback(): """Test that conda_build can run even if deprecations can't parse the version.""" - version = DeprecationHandler(None)._version # type: ignore + deprecated = DeprecationHandler(None) # type: ignore + assert deprecated._version_less_than("0") + assert deprecated._version_tuple is None + version = deprecated._version_object # type: ignore assert version.major == version.minor == version.micro == 0 diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 8393bc6ba5..18ae32f7ab 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -2,14 +2,17 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from pathlib import Path -from typing import Any +from typing import TYPE_CHECKING import pytest from conda_build import jinja_context from conda_build.utils import HashableDict +if TYPE_CHECKING: + from pathlib import Path + from typing import Any + def test_pin_default(testing_metadata, mocker): get_env_dependencies = mocker.patch.object(jinja_context, "get_env_dependencies") diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 05e67b540b..496af2d67b 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -5,12 +5,12 @@ import os import subprocess import sys +from typing import TYPE_CHECKING import pytest from conda import __version__ as conda_version from conda.base.context import context from packaging.version import Version -from pytest 
import MonkeyPatch from conda_build import api from conda_build.config import Config @@ -28,6 +28,9 @@ from .utils import metadata_dir, metadata_path, thisdir +if TYPE_CHECKING: + from pytest import MonkeyPatch + def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): testing_metadata._meta_path = os.path.join(testing_workdir, "meta.yaml") diff --git a/tests/test_render.py b/tests/test_render.py index e400d45e87..6cfd0abeea 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -4,15 +4,19 @@ import json import os -from pathlib import Path +from typing import TYPE_CHECKING from uuid import uuid4 import pytest from conda_build import api, render -from conda_build.metadata import MetaData from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1 +if TYPE_CHECKING: + from pathlib import Path + + from conda_build.metadata import MetaData + @pytest.mark.parametrize( "build", diff --git a/tests/utils.py b/tests/utils.py index 692f852fff..125cda7c91 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,7 +6,7 @@ import shlex import sys from pathlib import Path -from typing import Generator +from typing import TYPE_CHECKING from conda.base.context import context, reset_context from conda.common.compat import on_mac @@ -14,6 +14,9 @@ from conda_build.conda_interface import cc_conda_build from conda_build.metadata import MetaData +if TYPE_CHECKING: + from typing import Generator + tests_path = Path(__file__).parent metadata_path = tests_path / "test-recipes" / "metadata" subpackage_path = tests_path / "test-recipes" / "split-packages" From fc9436dc630acccd5697ea71eadc2de02d891623 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 14 Feb 2024 16:28:36 +0100 Subject: [PATCH 279/366] Fix rpaths patcher being run on symbolic links (#5181) * Add test for rpath patcher not called for symlinks * Use skip_symlinks=True for former is_codefile/codefile_type calls --------- Signed-off-by: Marcel Bargull --- conda_build/inspect_pkg.py | 2 +- conda_build/os_utils/ldd.py | 10 ++++- conda_build/os_utils/liefldd.py | 8 ++-- conda_build/post.py | 18 +++++---- news/5181-fix-no-set-rpath-for-symlink | 19 +++++++++ .../metadata/_rpath_symlink/meta.yaml | 39 +++++++++++++++++++ tests/test_post.py | 24 +++++++++++- 7 files changed, 105 insertions(+), 15 deletions(-) create mode 100644 news/5181-fix-no-set-rpath-for-symlink create mode 100644 tests/test-recipes/metadata/_rpath_symlink/meta.yaml diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 3f3fba7545..c5a1d92ec8 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -330,7 +330,7 @@ def inspect_objects( info = [] for f in obj_files: path = join(prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile == machofile: info.append( { diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 3ab78bc7fd..c45def903e 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -111,11 +111,17 @@ def _get_linkages( def get_package_obj_files( prec: PrefixRecord, prefix: str | os.PathLike | Path ) -> list[str]: - return [file for file in prec["files"] if codefile_class(Path(prefix, file))] + return [ + file + for file in prec["files"] + if codefile_class(Path(prefix, file), skip_symlinks=True) + ] @lru_cache(maxsize=None) def get_untracked_obj_files(prefix: str | os.PathLike | Path) -> list[str]: return [ - file for file in untracked(str(prefix)) if codefile_class(Path(prefix, file)) + file + for file in 
untracked(str(prefix)) + if codefile_class(Path(prefix, file), skip_symlinks=True) ] diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index c44a03f864..cc365decdf 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -504,7 +504,8 @@ def inspect_linkages_lief( while tmp_filename: if ( not parent_exe_dirname - and codefile_class(tmp_filename) == EXEfile + and codefile_class(tmp_filename, skip_symlinks=True) + == EXEfile ): parent_exe_dirname = os.path.dirname(tmp_filename) tmp_filename = parents_by_filename[tmp_filename] @@ -600,7 +601,8 @@ def get_linkages( result_pyldd = [] debug = False if not have_lief or debug: - if codefile_class(filename) not in (DLLfile, EXEfile): + codefile = codefile_class(filename, skip_symlinks=True) + if codefile not in (DLLfile, EXEfile): result_pyldd = inspect_linkages_pyldd( filename, resolve_filenames=resolve_filenames, @@ -612,7 +614,7 @@ def get_linkages( return result_pyldd else: print( - f"WARNING: failed to get_linkages, codefile_class('{filename}')={codefile_class(filename)}" + f"WARNING: failed to get_linkages, codefile_class('{filename}', True)={codefile}" ) return {} result_lief = inspect_linkages_lief( diff --git a/conda_build/post.py b/conda_build/post.py index 5a05eda077..3b9cb6c832 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -74,7 +74,7 @@ def fix_shebang(f, prefix, build_python, osx_is_app=False): path = join(prefix, f) - if codefile_class(path): + if codefile_class(path, skip_symlinks=True): return elif islink(path): return @@ -413,7 +413,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ".. seems to be linking to a compiler runtime, replacing build prefix with " "host prefix and" ) - if not codefile_class(link): + if not codefile_class(link, skip_symlinks=True): sys.exit( "Error: Compiler runtime library in build prefix not found in host prefix %s" % link @@ -653,7 +653,7 @@ def get_dsos(prec: PrefixRecord, prefix: str | os.PathLike | Path) -> set[str]: return { file for file in prec["files"] - if codefile_class(Path(prefix, file)) + if codefile_class(Path(prefix, file), skip_symlinks=True) # codefile_class already filters by extension/binary type, do we need this second filter? 
for ext in (".dylib", ".so", ".dll", ".pyd") if ext in file @@ -836,7 +836,7 @@ def _collect_needed_dsos( sysroots = list(sysroots_files.keys())[0] for f in files: path = join(run_prefix, f) - if not codefile_class(path): + if not codefile_class(path, skip_symlinks=True): continue build_prefix = build_prefix.replace(os.sep, "/") run_prefix = run_prefix.replace(os.sep, "/") @@ -1174,7 +1174,7 @@ def _show_linking_messages( ) for f in files: path = join(run_prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile not in filetypes_for_platform[subdir.split("-")[0]]: continue warn_prelude = "WARNING ({},{})".format(pkg_name, f.replace(os.sep, "/")) @@ -1273,7 +1273,7 @@ def check_overlinking_impl( filesu = [] for file in files: path = join(run_prefix, file) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if codefile in filetypes_for_platform[subdir.split("-")[0]]: files_to_inspect.append(file) filesu.append(file.replace("\\", "/")) @@ -1578,7 +1578,7 @@ def post_process_shared_lib(m, f, files, host_prefix=None): if not host_prefix: host_prefix = m.config.host_prefix path = join(host_prefix, f) - codefile = codefile_class(path) + codefile = codefile_class(path, skip_symlinks=True) if not codefile or path.endswith(".debug"): return rpaths = m.get_value("build/rpaths", ["lib"]) @@ -1737,7 +1737,9 @@ def check_symlinks(files, prefix, croot): # symlinks to binaries outside of the same dir don't work. RPATH stuff gets confused # because ld.so follows symlinks in RPATHS # If condition exists, then copy the file rather than symlink it. - if not dirname(link_path) == dirname(real_link_path) and codefile_class(f): + if not dirname(link_path) == dirname(real_link_path) and codefile_class( + f, skip_symlinks=True + ): os.remove(path) utils.copy_into(real_link_path, path) elif real_link_path.startswith(real_build_prefix): diff --git a/news/5181-fix-no-set-rpath-for-symlink b/news/5181-fix-no-set-rpath-for-symlink new file mode 100644 index 0000000000..35fb7a72c5 --- /dev/null +++ b/news/5181-fix-no-set-rpath-for-symlink @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix rpaths patcher being run on symbolic links. 
(#5179 via #5181) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/metadata/_rpath_symlink/meta.yaml b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml new file mode 100644 index 0000000000..0ef58cdab2 --- /dev/null +++ b/tests/test-recipes/metadata/_rpath_symlink/meta.yaml @@ -0,0 +1,39 @@ +{% set lib_file = "libthing.so.1.0.0" %} # [linux] +{% set lib_file = "libthing.1.0.0.dylib" %} # [osx] + +package: + name: rpath_symlink + version: 1.0.0 + +build: + skip: true # [not (linux or osx)] + rpaths_patcher: {{ rpaths_patcher }} + script: + - mkdir -p "${PREFIX}/lib" + - > + < /dev/null ${CC} ${CPPFLAGS} ${CFLAGS} ${LDFLAGS} + -x c - -nostdlib -s -o "${PREFIX}/lib/{{ lib_file }}" "-Wl,-rpath,${PREFIX}/lib" + -shared -Wl,-soname,libthing.so.1 # [linux] + -dynamiclib -install_name libthing.1.dylib # [osx] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.so.1" # [linux] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/libthing.1.dylib" # [osx] + - mkdir -p "${PREFIX}/lib/subfolder" + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.so" # [linux] + - ln -s "${PREFIX}/lib/{{ lib_file }}" "${PREFIX}/lib/subfolder/libthing-link.dylib" # [osx] + +requirements: + build: + - {{ compiler("c") }} + +test: + requires: + - py-lief + commands: + # Test that we get only a single entry that is the library's own directory. + - | + python -c ' + import os, lief + lib = lief.parse(os.environ["PREFIX"] + "/lib/{{ lib_file }}") + assert {"$ORIGIN/."} == {e.rpath for e in lib.dynamic_entries if e.tag == lief.ELF.DYNAMIC_TAGS.RPATH} # [linux] + assert {"@loader_path/"} == {command.path for command in lib.commands if command.command == lief.MachO.LOAD_COMMAND_TYPES.RPATH} # [osx] + ' diff --git a/tests/test_post.py b/tests/test_post.py index c15fffaf2a..97ef1448fc 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -10,7 +10,13 @@ import pytest from conda_build import api, post -from conda_build.utils import get_site_packages, on_win, package_has_file +from conda_build.utils import ( + get_site_packages, + on_linux, + on_mac, + on_win, + package_has_file, +) from .utils import add_mangling, metadata_dir @@ -148,3 +154,19 @@ def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path): assert "Found 'Menu/*.json' files but couldn't validate:" not in captured_text assert "not a valid menuinst JSON document" in captured_text assert "JSONDecodeError" in captured_text + + +@pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.") +def test_rpath_symlink(mocker, testing_config): + if on_linux: + mk_relative = mocker.spy(post, "mk_relative_linux") + elif on_mac: + mk_relative = mocker.spy(post, "mk_relative_osx") + api.build( + os.path.join(metadata_dir, "_rpath_symlink"), + config=testing_config, + variants={"rpaths_patcher": ["patchelf", "LIEF"]}, + activate=True, + ) + # Should only be called on the actual binary, not its symlinks. (once per variant) + assert mk_relative.call_count == 2 From 355d6dfe23f5e29fcf0cbf4415846309112f07eb Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 14 Feb 2024 17:53:47 +0100 Subject: [PATCH 280/366] Fix corrupted package cache for outputs in subpackage tests (#5184) * Add test recipe outputs_overwrite_base_file * Fix corrupted package cache for outputs in subpackage tests This re-introduces `conda_build.environ.clean_pkg_cache` with slight changes to not use `conda.models.dist.Dist` and handle multiple `pkgs_dirs` better. 
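As a point of reference, a minimal, illustrative sketch of how the re-introduced
helper is meant to be invoked around the test phase. The names used here
(`environ.clean_pkg_cache`, `metadata.dist()`, `metadata.config`) are taken from
the accompanying diff below; treat this as a sketch rather than part of the change
itself:

    # Illustrative sketch: before the test phase runs, any cached copy of the
    # exact dist being tested is dropped so a stale or corrupted extraction
    # cannot mask the freshly built package.
    from conda_build import environ

    def _ensure_fresh_cache(metadata):
        # clean_pkg_cache acquires the relevant locks and removes the cached
        # package from the writable pkgs_dirs before it gets re-extracted.
        environ.clean_pkg_cache(metadata.dist(), metadata.config)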
--------- Signed-off-by: Marcel Bargull --- conda_build/build.py | 20 ++++++++++ conda_build/environ.py | 31 +++++++++++++- news/5184-fix-multi-output-package-corruption | 19 +++++++++ .../outputs_overwrite_base_file/install.bat | 2 + .../outputs_overwrite_base_file/install.sh | 2 + .../outputs_overwrite_base_file/meta.yaml | 40 +++++++++++++++++++ 6 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 news/5184-fix-multi-output-package-corruption create mode 100644 tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat create mode 100644 tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh create mode 100644 tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml diff --git a/conda_build/build.py b/conda_build/build.py index 28ffc04a70..526d665c8a 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -38,6 +38,7 @@ env_path_backup_var_exists, get_conda_channel, get_rc_urls, + pkgs_dirs, prefix_placeholder, reset_context, root_dir, @@ -3394,6 +3395,25 @@ def test( # folder destination _extract_test_files_from_package(metadata) + # Remove any previously cached build from the package cache to ensure we + # really test the requested build and not some clashing or corrupted build. + # (Corruption of the extracted package can happen, e.g., in multi-output + # builds if one of the subpackages overwrites files from the other.) + # Special case: + # If test is requested for .tar.bz2/.conda file from the pkgs dir itself, + # clean_pkg_cache() will remove it; don't call that function in this case. + in_pkg_cache = ( + not hasattr(recipedir_or_package_or_metadata, "config") + and os.path.isfile(recipedir_or_package_or_metadata) + and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) + and any( + os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dir + for pkgs_dir in pkgs_dirs + ) + ) + if not in_pkg_cache: + environ.clean_pkg_cache(metadata.dist(), metadata.config) + copy_test_source_files(metadata, metadata.config.test_dir) # this is also copying tests/source_files from work_dir to testing workdir diff --git a/conda_build/environ.py b/conda_build/environ.py index 3026f1bf60..762b9c7479 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -16,10 +16,15 @@ from logging import getLogger from os.path import join, normpath -from conda.base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + DEFAULTS_CHANNEL_NAME, + UNKNOWN_CHANNEL, +) from conda.common.io import env_vars from conda.core.index import LAST_CHANNEL_URLS from conda.core.link import PrefixSetup, UnlinkLinkTransaction +from conda.core.package_cache_data import PackageCacheData from conda.core.prefix_data import PrefixData from conda.models.channel import prioritize_channels @@ -43,6 +48,7 @@ reset_context, root_dir, ) +from .config import Config from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list @@ -1264,6 +1270,29 @@ def get_pkg_dirs_locks(dirs, config): return [utils.get_lock(folder, timeout=config.timeout) for folder in dirs] +def clean_pkg_cache(dist: str, config: Config) -> None: + with utils.LoggingContext(logging.DEBUG if config.debug else logging.WARN): + locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) + with utils.try_acquire_locks(locks, timeout=config.timeout): + for pkgs_dir in pkgs_dirs: + if any( + os.path.exists(os.path.join(pkgs_dir, f"{dist}{ext}")) 
+ for ext in ("", *CONDA_PACKAGE_EXTENSIONS) + ): + log.debug( + "Conda caching error: %s package remains in cache after removal", + dist, + ) + log.debug("manually removing to compensate") + package_cache = PackageCacheData.first_writable([pkgs_dir]) + for cache_pkg_id in package_cache.query(dist): + package_cache.remove(cache_pkg_id) + + # Note that this call acquires the relevant locks, so this must be called + # outside the lock context above. + remove_existing_packages(pkgs_dirs, [dist], config) + + def remove_existing_packages(dirs, fns, config): locks = get_pkg_dirs_locks(dirs, config) if config.locking else [] diff --git a/news/5184-fix-multi-output-package-corruption b/news/5184-fix-multi-output-package-corruption new file mode 100644 index 0000000000..584a2a1f35 --- /dev/null +++ b/news/5184-fix-multi-output-package-corruption @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix corrupted package cache for outputs in subpackage tests. (#5184) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat new file mode 100644 index 0000000000..b6584f3971 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.bat @@ -0,0 +1,2 @@ +:: Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. +< nul set /p="%PKG_NAME:~0,4%" > "%PREFIX%\file" & call; diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh new file mode 100644 index 0000000000..cb0be8cb2b --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/install.sh @@ -0,0 +1,2 @@ +## Always output 4 characters to properly test even if "SafetyError: ... incorrect size." is not triggered. 
+printf '%.4s' "${PKG_NAME}" > "${PREFIX}/file" diff --git a/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml new file mode 100644 index 0000000000..1c27afc126 --- /dev/null +++ b/tests/test-recipes/metadata/outputs_overwrite_base_file/meta.yaml @@ -0,0 +1,40 @@ +{% set name = "outputs_overwrite_base_file" %} + +package: + name: {{ name }} + version: 1.0 + +outputs: + - name: base-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + + - name: first-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = base # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] + + - name: second-{{ name }} + script: install.sh # [unix] + script: install.bat # [win] + requirements: + host: + - {{ pin_subpackage("base-" + name) }} + run: + - {{ pin_subpackage("base-" + name) }} + test: + commands: + - content="$(cat "${PREFIX}/file")" # [unix] + - test "${content}" = "base" # [unix] + - < "%PREFIX%\file%" set /p content= # [win] + - if not "%content%" == "base" exit 1 # [win] From 7c03955ca3c7231e48ef0e8abc8b40e405b118ec Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 15 Feb 2024 12:09:32 -0600 Subject: [PATCH 281/366] Ignore `TYPE_CHECKING` imports in coverage reports (#5186) --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7b27d57775..9bf9398b96 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,6 +91,9 @@ show_missing = true sort = "Miss" skip_covered = true omit = ["conda_build/skeletons/_example_skeleton.py"] +exclude_lines = [ + "if TYPE_CHECKING:", # ignoring type checking imports +] [tool.ruff] target-version = "py38" From f80caa9bb947dec5ec4ca060ab95b5a95d346030 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 15 Feb 2024 12:12:02 -0600 Subject: [PATCH 282/366] Release 24.1.2 (#5187) --- .authors.yml | 4 ++-- CHANGELOG.md | 13 +++++++++++++ news/5181-fix-no-set-rpath-for-symlink | 19 ------------------- news/5184-fix-multi-output-package-corruption | 19 ------------------- 4 files changed, 15 insertions(+), 40 deletions(-) delete mode 100644 news/5181-fix-no-set-rpath-for-symlink delete mode 100644 news/5184-fix-multi-output-package-corruption diff --git a/.authors.yml b/.authors.yml index 6b71ac9dc7..dc7b1a4258 100644 --- a/.authors.yml +++ b/.authors.yml @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 80 + num_commits: 82 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 167 + num_commits: 168 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard diff --git a/CHANGELOG.md b/CHANGELOG.md index dfe35b2345..840bc6636a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ [//]: # (current developments) +## 24.1.2 (2024-02-15) + +### Bug fixes + +* Fix rpaths patcher being run on symbolic links. (#5179 via #5181) +* Fix corrupted package cache for outputs in subpackage tests. 
(#5184) + +### Contributors + +* @mbargull + + + ## 24.1.1 (2024-02-07) ### Bug fixes diff --git a/news/5181-fix-no-set-rpath-for-symlink b/news/5181-fix-no-set-rpath-for-symlink deleted file mode 100644 index 35fb7a72c5..0000000000 --- a/news/5181-fix-no-set-rpath-for-symlink +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix rpaths patcher being run on symbolic links. (#5179 via #5181) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5184-fix-multi-output-package-corruption b/news/5184-fix-multi-output-package-corruption deleted file mode 100644 index 584a2a1f35..0000000000 --- a/news/5184-fix-multi-output-package-corruption +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix corrupted package cache for outputs in subpackage tests. (#5184) - -### Deprecations - -* - -### Docs - -* - -### Other - -* From cac5efae57d2e3e1e4f1102cebcb4a9f099c77ba Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 15 Feb 2024 18:41:57 -0600 Subject: [PATCH 283/366] Correct `conda-index` dependency (#5189) --- pyproject.toml | 2 +- recipe/meta.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 72f657031e..4ae5105a55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ "beautifulsoup4", "chardet", "conda >=22.11", - "conda-index", + "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", "jinja2", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index fb5088e15e..9b1ec2f3bc 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -31,7 +31,7 @@ requirements: - beautifulsoup4 - chardet - conda >=22.11.0 - - conda-index + - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock - jinja2 From 21866aef4b6229d95536fceae90d08fceb3f7483 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 19 Feb 2024 18:12:12 +0100 Subject: [PATCH 284/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5192)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/labels.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 15c69d8793..99ec60667f 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -26,14 +26,14 @@ jobs: with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@v2.3.2 + uses: EndBug/label-sync@v2.3.3 if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@v2.3.2 + uses: EndBug/label-sync@v2.3.3 if: steps.has_local.outputs.files_exists == 'true' with: config-file: | From 3560e90b6ac1f8acea113de102201876ae671184 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 18:30:37 -0600 Subject: [PATCH 285/366] [pre-commit.ci] pre-commit autoupdate (#5193) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/Lucas-C/pre-commit-hooks: v1.5.4 → v1.5.5](https://github.com/Lucas-C/pre-commit-hooks/compare/v1.5.4...v1.5.5) - [github.com/astral-sh/ruff-pre-commit: v0.2.1 → v0.2.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.1...v0.2.2) 
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2cfff523aa..3277627305 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,7 +42,7 @@ repos: args: [--unique] # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.4 + rev: v1.5.5 hooks: # auto inject license blurb - id: insert-license @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.1 + rev: v0.2.2 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From cc7bb532eff61451853a8195f39688a2101a9548 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Wed, 21 Feb 2024 17:14:17 -0500 Subject: [PATCH 286/366] Deprecate bdist_conda (#5196) Deprecate bdist_conda module --- conda_build/bdist_conda.py | 3 ++- news/5196-deprecate-bdist-conda | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 news/5196-deprecate-bdist-conda diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index b10e4758c4..3f16238d9c 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -12,10 +12,11 @@ from .build import handle_anaconda_upload from .conda_interface import StringIO, configparser, spec_from_line from .config import Config +from .deprecations import deprecated from .metadata import MetaData from .skeletons import pypi -# TODO: Add support for all the options that conda build has +deprecated.module("24.3", "24.5") class GetoptError(BaseError): diff --git a/news/5196-deprecate-bdist-conda b/news/5196-deprecate-bdist-conda new file mode 100644 index 0000000000..3f37838bf0 --- /dev/null +++ b/news/5196-deprecate-bdist-conda @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.bdist_conda` module as pending deprecation. (#5196) + +### Docs + +* + +### Other + +* From 9b04330680695f4f49f70f1d1750f29c76633f5b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 27 Feb 2024 04:01:14 -0600 Subject: [PATCH 287/366] Deprecate unused `conda_build.build.have_prefix_files` (#5199) Co-authored-by: Jannis Leidel Co-authored-by: Bianca Henderson --- conda_build/build.py | 27 ++------------------------- news/5199-deprecate-have_prefix_files | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 25 deletions(-) create mode 100644 news/5199-deprecate-have_prefix_files diff --git a/conda_build/build.py b/conda_build/build.py index 526d665c8a..a24e468eca 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -46,6 +46,7 @@ ) from .config import Config from .create_test import create_all_test_files +from .deprecations import deprecated from .exceptions import CondaBuildException, DependencyNeedsBuildingError from .index import _delegated_update_index, get_build_index from .metadata import FIELDS, MetaData @@ -184,6 +185,7 @@ def prefix_replacement_excluded(path): return False +@deprecated("24.3", "24.5") def have_prefix_files(files, prefix): """ Yields files that contain the current prefix in them, and modifies them @@ -1231,31 +1233,6 @@ def get_files_with_prefix(m, replacements, files_in, prefix): end - start, ) ) - """ - # Keeping this around just for a while. 
- files_with_prefix2 = sorted(have_prefix_files(files_in, prefix)) - end = time.time() - print("INFO :: Time taken to do replacements (prefix only) was: {}".format(end - start)) - - ignore_files = m.ignore_prefix_files() - ignore_types = set() - if not hasattr(ignore_files, "__iter__"): - if ignore_files is True: - ignore_types.update((FileMode.text.name, FileMode.binary.name)) - ignore_files = [] - if (not m.get_value('build/detect_binary_files_with_prefix', True) and - not m.get_value('build/binary_has_prefix_files', None)): - ignore_types.update((FileMode.binary.name,)) - # files_with_prefix is a list of tuples containing (prefix_placeholder, file_type, file_path) - ignore_files.extend( - f[2] for f in files_with_prefix2 if f[1] in ignore_types and f[2] not in ignore_files) - files_with_prefix2 = [f for f in files_with_prefix2 if f[2] not in ignore_files] - end2 = time.time() - print("INFO :: Time taken to do replacements (prefix only) was: {}".format(end2 - start2)) - files1 = set([f for _, _, f in files_with_prefix]) - files2 = set([f for _, _, f in files_with_prefix2]) - assert not (files2 - files1), "New ripgrep prefix search missed the following files:\n{}\n".format(files2 - files1) - """ return sorted(files_with_prefix) diff --git a/news/5199-deprecate-have_prefix_files b/news/5199-deprecate-have_prefix_files new file mode 100644 index 0000000000..eccab010da --- /dev/null +++ b/news/5199-deprecate-have_prefix_files @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.build.have_prefix_files` as deprecated. (#5199) + +### Docs + +* + +### Other + +* From 38fdc15d81bbdc9bf9377385f668ba9d4de5dfcd Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Wed, 28 Feb 2024 17:06:00 +0100 Subject: [PATCH 288/366] Fix stdlib being recognized in variant hash inputs (#5195) * Test stdlib is recognized in variant hash inputs * Fix stdlib being recognized in variant hash inputs * Test c_stdlib* inclusion in Metadata.get_used_vars This function is used downstream in conda-forge's conda-smithy, so let's test against this explicitly, too. 
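As a quick, stand-alone illustration of the behavior this change enables (a minimal sketch only: the helper name `uses_variant` and the sample recipe lines are invented for illustration, while the two regular expressions mirror the ones this patch adds to `find_used_variables_in_text`):

```python
import re


def uses_variant(variant_key: str, recipe_line: str) -> bool:
    """Illustrative only: does a recipe line use this variant key?"""
    match = re.match(r"(.*?)_(compiler|stdlib)(_version)?$", variant_key)
    if not match:
        # plain variant keys are matched by name elsewhere; not shown here
        return variant_key in recipe_line
    lang, kind = match.group(1), match.group(2)
    # same shape as the regex built in find_used_variables_in_text
    pattern = rf"\{{\s*{kind}\([\'\"]{re.escape(lang)}[\"\'][^\{{]*?\}}"
    return re.search(pattern, recipe_line) is not None


# "c_stdlib" is now recognized when the recipe says {{ stdlib("c") }} ...
assert uses_variant("c_stdlib", '    - {{ stdlib("c") }}')
assert uses_variant("c_compiler", '    - {{ compiler("c") }}')
# ... but not when only the compiler is requested.
assert not uses_variant("c_stdlib", '    - {{ compiler("c") }}')
```

Under the previous compiler-only pattern the first assertion would have failed, which is why `stdlib` usage was missing from the variant hash inputs.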
--------- Signed-off-by: Marcel Bargull Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- conda_build/variants.py | 18 ++++++++++-------- news/5195-fix-stdlib-variant | 19 +++++++++++++++++++ tests/test_metadata.py | 19 ++++++++++++------- 3 files changed, 41 insertions(+), 15 deletions(-) create mode 100644 news/5195-fix-stdlib-variant diff --git a/conda_build/variants.py b/conda_build/variants.py index d798a6e79a..2ece5f4bd6 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -727,15 +727,17 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): recipe_lines = recipe_text.splitlines() for v in variant: all_res = [] - compiler_match = re.match(r"(.*?)_compiler(_version)?$", v) - if compiler_match and not selectors_only: - compiler_lang = compiler_match.group(1) - compiler_regex = r"\{\s*compiler\([\'\"]%s[\"\'][^\{]*?\}" % re.escape( - compiler_lang + target_match = re.match(r"(.*?)_(compiler|stdlib)(_version)?$", v) + if target_match and not selectors_only: + target_lang = target_match.group(1) + target_kind = target_match.group(2) + target_lang_regex = re.escape(target_lang) + target_regex = ( + rf"\{{\s*{target_kind}\([\'\"]{target_lang_regex}[\"\'][^\{{]*?\}}" ) - all_res.append(compiler_regex) + all_res.append(target_regex) variant_lines = [ - line for line in recipe_lines if v in line or compiler_lang in line + line for line in recipe_lines if v in line or target_lang in line ] else: variant_lines = [ @@ -760,7 +762,7 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): all_res = r"|".join(all_res) if any(re.search(all_res, line) for line in variant_lines): used_variables.add(v) - if v in ("c_compiler", "cxx_compiler"): + if v in ("c_stdlib", "c_compiler", "cxx_compiler"): if "CONDA_BUILD_SYSROOT" in variant: used_variables.add("CONDA_BUILD_SYSROOT") return used_variables diff --git a/news/5195-fix-stdlib-variant b/news/5195-fix-stdlib-variant new file mode 100644 index 0000000000..526692f286 --- /dev/null +++ b/news/5195-fix-stdlib-variant @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* Fix stdlib being recognized in variant hash inputs. 
(#5190 via #5195) + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 496af2d67b..b176d4103d 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -233,16 +233,16 @@ def test_compiler_metadata_cross_compiler(): @pytest.mark.parametrize( - "platform,arch,stdlibs", + "platform,arch,stdlib,stdlib_version", [ - ("linux", "64", {"sysroot_linux-64 2.12.*"}), - ("linux", "aarch64", {"sysroot_linux-aarch64 2.17.*"}), - ("osx", "64", {"macosx_deployment_target_osx-64 10.13.*"}), - ("osx", "arm64", {"macosx_deployment_target_osx-arm64 11.0.*"}), + ("linux", "64", "sysroot", "2.12"), + ("linux", "aarch64", "sysroot", "2.17"), + ("osx", "64", "macosx_deployment_target", "10.13"), + ("osx", "arm64", "macosx_deployment_target", "11.0"), ], ) def test_native_stdlib_metadata( - platform: str, arch: str, stdlibs: set[str], testing_config + platform: str, arch: str, stdlib: str, stdlib_version: str, testing_config ): testing_config.platform = platform metadata = api.render( @@ -256,7 +256,12 @@ def test_native_stdlib_metadata( bypass_env_check=True, python="3.11", # irrelevant )[0][0] - assert stdlibs <= set(metadata.meta["requirements"]["host"]) + stdlib_req = f"{stdlib}_{platform}-{arch} {stdlib_version}.*" + assert stdlib_req in metadata.meta["requirements"]["host"] + assert {"c_stdlib", "c_stdlib_version"} <= metadata.get_used_vars() + hash_contents = metadata.get_hash_contents() + assert stdlib == hash_contents["c_stdlib"] + assert stdlib_version == hash_contents["c_stdlib_version"] def test_hash_build_id(testing_metadata): From 20b75c155dcdb868ba46ace72d3e91e81a5cff85 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 4 Mar 2024 08:24:26 -0600 Subject: [PATCH 289/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5213)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- .github/workflows/project.yml | 2 +- .github/workflows/stale.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index b823a45165..773cb76c96 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.10.0 + uses: conda/actions/check-cla@v24.2.0 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 2bcc43c6bc..b98940e079 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -14,7 +14,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v0.5.0 + - uses: actions/add-to-project@v0.6.0 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 6284ac0c42..2464e81e4b 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,7 +34,7 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v23.10.0 + - uses: conda/actions/read-yaml@v24.2.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From 5353b38ffd38e06918b64484c3a319c8cfeeeab3 Mon Sep 17 00:00:00 
2001 From: Ken Odegard Date: Fri, 8 Mar 2024 09:57:13 -0600 Subject: [PATCH 290/366] Remove deprecations slated for 24.3 (#5203) --- .github/workflows/tests.yml | 4 +- conda_build/build.py | 2 +- conda_build/cli/main_render.py | 2 - conda_build/conda_interface.py | 51 +- conda_build/environ.py | 240 +++-- conda_build/index.py | 1520 +------------------------------- conda_build/inspect_pkg.py | 4 - conda_build/metadata.py | 7 +- conda_build/noarch_python.py | 22 +- conda_build/post.py | 4 +- conda_build/render.py | 29 +- conda_build/skeletons/cpan.py | 2 +- conda_build/skeletons/pypi.py | 10 +- conda_build/source.py | 7 +- conda_build/utils.py | 60 +- news/5203-remove-deprecations | 82 ++ pyproject.toml | 8 +- recipe/meta.yaml | 2 +- tests/requirements.txt | 4 +- tests/test_source.py | 5 +- tests/test_variants.py | 2 +- 21 files changed, 295 insertions(+), 1772 deletions(-) create mode 100644 news/5203-remove-deprecations diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6bf8249c4c..c0b0e8ff59 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -76,10 +76,10 @@ jobs: include: # minimum Python/conda combo - python-version: '3.8' - conda-version: 22.11.0 + conda-version: 23.5.0 test-type: serial - python-version: '3.8' - conda-version: 22.11.0 + conda-version: 23.5.0 test-type: parallel # maximum Python/conda combo - python-version: '3.12' diff --git a/conda_build/build.py b/conda_build/build.py index a24e468eca..087e932f81 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3501,7 +3501,7 @@ def test( AssertionError, ) as exc: log.warn( - "failed to get install actions, retrying. exception was: %s", str(exc) + "failed to get package records, retrying. exception was: %s", str(exc) ) tests_failed( metadata, diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 61c46c1c4b..933528b114 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -13,7 +13,6 @@ from .. 
import __version__, api from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config -from ..deprecations import deprecated from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars @@ -193,7 +192,6 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -@deprecated.argument("24.1.1", "24.3.0", "print_results") def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 4fa9fb3777..f309b338a0 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -45,13 +45,9 @@ add_parser_channels, add_parser_prefix, download, - handle_proxy_407, - hashsum_file, human_bytes, input, lchmod, - md5_file, - memoized, normalized_version, prefix_placeholder, rm_rf, @@ -65,29 +61,11 @@ walk_prefix, win_path_to_unix, ) -from conda.exports import display_actions as _display_actions -from conda.exports import execute_actions as _execute_actions -from conda.exports import execute_plan as _execute_plan -from conda.exports import get_index as _get_index -from conda.exports import install_actions as _install_actions -from conda.exports import linked as _linked -from conda.exports import linked_data as _linked_data -from conda.exports import package_cache as _package_cache +from conda.gateways.disk.read import compute_sum from conda.models.channel import get_conda_build_local_url # noqa: F401 -from conda.models.dist import Dist as _Dist from .deprecations import deprecated -deprecated.constant("24.1.0", "24.3.0", "Dist", _Dist) -deprecated.constant("24.1.0", "24.3.0", "display_actions", _display_actions) -deprecated.constant("24.1.0", "24.3.0", "execute_actions", _execute_actions) -deprecated.constant("24.1.0", "24.3.0", "execute_plan", _execute_plan) -deprecated.constant("24.1.0", "24.3.0", "get_index", _get_index) -deprecated.constant("24.1.0", "24.3.0", "install_actions", _install_actions) -deprecated.constant("24.1.0", "24.3.0", "linked", _linked) -deprecated.constant("24.1.0", "24.3.0", "linked_data", _linked_data) -deprecated.constant("24.1.0", "24.3.0", "package_cache", _package_cache) - # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python @@ -112,3 +90,30 @@ # When deactivating envs (e.g. switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. 
env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None) + + +@deprecated( + "24.3", + "24.5", + addendum="Handled by `conda.gateways.connection.session.CondaSession`.", +) +def handle_proxy_407(x, y): + pass + + +deprecated.constant( + "24.3", + "24.5", + "hashsum_file", + compute_sum, + addendum="Use `conda.gateways.disk.read.compute_sum` instead.", +) + + +@deprecated( + "24.3", + "24.5", + addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.", +) +def md5_file(path: str | os.PathLike) -> str: + return compute_sum(path, "md5") diff --git a/conda_build/environ.py b/conda_build/environ.py index 762b9c7479..ba57d39314 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import json import logging @@ -15,6 +17,7 @@ from glob import glob from logging import getLogger from os.path import join, normpath +from typing import TYPE_CHECKING from conda.base.constants import ( CONDA_PACKAGE_EXTENSIONS, @@ -26,21 +29,23 @@ from conda.core.link import PrefixSetup, UnlinkLinkTransaction from conda.core.package_cache_data import PackageCacheData from conda.core.prefix_data import PrefixData +from conda.exceptions import ( + CondaError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, +) from conda.models.channel import prioritize_channels +from conda.models.match_spec import MatchSpec from . import utils from .conda_interface import ( Channel, - CondaError, - LinkError, - LockError, - MatchSpec, - NoPackagesFoundError, PackageRecord, - PaddingError, ProgressiveFetchExtract, TemporaryDirectory, - UnsatisfiableError, context, create_default_packages, get_version_from_git_tag, @@ -48,12 +53,10 @@ reset_context, root_dir, ) -from .config import Config from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index -from .metadata import MetaData from .os_utils import external from .utils import ( ensure_list, @@ -65,10 +68,22 @@ ) from .variants import get_default_variant +if TYPE_CHECKING: + from pathlib import Path + from typing import Any, Iterable, TypedDict + + from .config import Config + from .metadata import MetaData + + class InstallActionsType(TypedDict): + PREFIX: str | os.PathLike | Path + LINK: list[PackageRecord] + + log = getLogger(__name__) -PREFIX_ACTION = "PREFIX" -LINK_ACTION = "LINK" +deprecated.constant("24.3", "24.5", "PREFIX_ACTION", _PREFIX_ACTION := "PREFIX") +deprecated.constant("24.3", "24.5", "LINK_ACTION", _LINK_ACTION := "LINK") # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. @@ -852,29 +867,35 @@ def package_specs(self): return specs -cached_actions = {} +cached_precs: dict[ + tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord] +] = {} +deprecated.constant("24.3", "24.5", "cached_actions", cached_precs) last_index_ts = 0 -def get_package_records( - prefix, - specs, - env, - retries=0, +# NOTE: The function has to retain the "get_install_actions" name for now since +# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build +# checks for this name in the call stack explicitly. 
+def get_install_actions( + prefix: str | os.PathLike | Path, + specs: Iterable[str | MatchSpec], + env, # unused + retries: int = 0, subdir=None, - verbose=True, - debug=False, - locking=True, + verbose: bool = True, + debug: bool = False, + locking: bool = True, bldpkgs_dirs=None, timeout=900, - disable_pip=False, - max_env_retry=3, + disable_pip: bool = False, + max_env_retry: int = 3, output_folder=None, channel_urls=None, -): - global cached_actions +) -> list[PackageRecord]: + global cached_precs global last_index_ts - actions = {} + log = utils.get_logger(__name__) conda_log_level = logging.WARN specs = list(specs) @@ -906,16 +927,15 @@ def get_package_records( utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@") ) + precs: list[PackageRecord] = [] if ( specs, env, subdir, channel_urls, disable_pip, - ) in cached_actions and last_index_ts >= index_ts: - actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() - if PREFIX_ACTION in actions: - actions[PREFIX_ACTION] = prefix + ) in cached_precs and last_index_ts >= index_ts: + precs = cached_precs[(specs, env, subdir, channel_urls, disable_pip)].copy() elif specs: # this is hiding output like: # Fetching package metadata ........... @@ -923,7 +943,7 @@ def get_package_records( with utils.LoggingContext(conda_log_level): with capture(): try: - actions = _install_actions(prefix, index, specs) + precs = _install_actions(prefix, index, specs)["LINK"] except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) except ( @@ -937,7 +957,7 @@ def get_package_records( ) as exc: if "lock" in str(exc): log.warn( - "failed to get install actions, retrying. exception was: %s", + "failed to get package records, retrying. exception was: %s", str(exc), ) elif ( @@ -966,12 +986,12 @@ def get_package_records( utils.rm_rf(pkg_dir) if retries < max_env_retry: log.warn( - "failed to get install actions, retrying. exception was: %s", + "failed to get package records, retrying. exception was: %s", str(exc), ) - actions = get_install_actions( + precs = get_package_records( prefix, - tuple(specs), + specs, env, retries=retries + 1, subdir=subdir, @@ -987,7 +1007,7 @@ def get_package_records( ) else: log.error( - "Failed to get install actions, max retries exceeded." + "Failed to get package records, max retries exceeded." 
) raise if disable_pip: @@ -997,64 +1017,28 @@ def get_package_records( if not any( re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs ): - actions[LINK_ACTION] = [ - prec for prec in actions[LINK_ACTION] if prec.name != pkg - ] - utils.trim_empty_keys(actions) - cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() + precs = [prec for prec in precs if prec.name != pkg] + cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() last_index_ts = index_ts - return actions.get(LINK_ACTION, []) + return precs -@deprecated("24.1.0", "24.3.0", addendum="Use `get_package_records` instead.") -def get_install_actions( - prefix, - specs, - env, - retries=0, - subdir=None, - verbose=True, - debug=False, - locking=True, - bldpkgs_dirs=None, - timeout=900, - disable_pip=False, - max_env_retry=3, - output_folder=None, - channel_urls=None, -): - precs = get_package_records( - prefix=prefix, - specs=specs, - env=env, - retries=retries, - subdir=subdir, - verbose=verbose, - debug=debug, - locking=locking, - bldpkgs_dirs=bldpkgs_dirs, - timeout=timeout, - disable_pip=disable_pip, - max_env_retry=max_env_retry, - output_folder=output_folder, - channel_urls=channel_urls, - ) - return {PREFIX_ACTION: prefix, LINK_ACTION: precs} +get_package_records = get_install_actions +del get_install_actions -@deprecated.argument("24.1.0", "24.3.0", "specs_or_actions", rename="specs_or_precs") def create_env( - prefix, - specs_or_precs, + prefix: str | os.PathLike | Path, + specs_or_precs: Iterable[str | MatchSpec] | Iterable[PackageRecord], env, config, subdir, - clear_cache=True, - retry=0, + clear_cache: bool = True, + retry: int = 0, locks=None, - is_cross=False, - is_conda=False, -): + is_cross: bool = False, + is_conda: bool = False, +) -> None: """ Create a conda envrionment for the given prefix and specs. 
""" @@ -1073,6 +1057,7 @@ def create_env( # if os.path.isdir(prefix): # utils.rm_rf(prefix) + specs_or_precs = tuple(ensure_list(specs_or_precs)) if specs_or_precs: # Don't waste time if there is nothing to do log.debug("Creating environment in %s", prefix) log.debug(str(specs_or_precs)) @@ -1082,14 +1067,10 @@ def create_env( try: with utils.try_acquire_locks(locks, timeout=config.timeout): # input is a list of specs in MatchSpec format - if not ( - hasattr(specs_or_precs, "keys") - or isinstance(specs_or_precs[0], PackageRecord) - ): - specs = list(set(specs_or_precs)) - actions = get_install_actions( + if not isinstance(specs_or_precs[0], PackageRecord): + precs = get_package_records( prefix, - tuple(specs), + tuple(set(specs_or_precs)), env, subdir=subdir, verbose=config.verbose, @@ -1103,10 +1084,7 @@ def create_env( channel_urls=tuple(config.channel_urls), ) else: - if not hasattr(specs_or_precs, "keys"): - actions = {LINK_ACTION: specs_or_precs} - else: - actions = specs_or_precs + precs = specs_or_precs index, _, _ = get_build_index( subdir=subdir, bldpkgs_dir=config.bldpkgs_dir, @@ -1117,14 +1095,13 @@ def create_env( locking=config.locking, timeout=config.timeout, ) - utils.trim_empty_keys(actions) - _display_actions(prefix, actions) + _display_actions(prefix, precs) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) with env_var("CONDA_QUIET", not config.verbose, reset_context): with env_var("CONDA_JSON", not config.verbose, reset_context): - _execute_actions(prefix, actions) + _execute_actions(prefix, precs) except ( SystemExit, PaddingError, @@ -1159,15 +1136,13 @@ def create_env( ) config.prefix_length = 80 - host = "_h_env" in prefix - # Set this here and use to create environ - # Setting this here is important because we use it below (symlink) - prefix = config.host_prefix if host else config.build_prefix - actions[PREFIX_ACTION] = prefix - create_env( - prefix, - actions, + ( + config.host_prefix + if "_h_env" in prefix + else config.build_prefix + ), + specs_or_precs, config=config, subdir=subdir, env=env, @@ -1308,7 +1283,7 @@ def remove_existing_packages(dirs, fns, config): def get_pinned_deps(m, section): with TemporaryDirectory(prefix="_") as tmpdir: - actions = get_install_actions( + precs = get_package_records( tmpdir, tuple(m.ms_depends(section)), section, @@ -1323,16 +1298,17 @@ def get_pinned_deps(m, section): output_folder=m.config.output_folder, channel_urls=tuple(m.config.channel_urls), ) - runtime_deps = [ - package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) - ] - return runtime_deps + return [package_record_to_requirement(prec) for prec in precs] # NOTE: The function has to retain the "install_actions" name for now since # conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build # checks for this name in the call stack explicitly. -def install_actions(prefix, index, specs): +def install_actions( + prefix: str | os.PathLike | Path, + index, + specs: Iterable[str | MatchSpec], +) -> InstallActionsType: # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471 # but reduced to only the functionality actually used within conda-build. @@ -1344,6 +1320,8 @@ def install_actions(prefix, index, specs): callback=reset_context, ): # a hack since in conda-build we don't track channel_priority_map + channels: tuple[Channel, ...] | None + subdirs: tuple[str, ...] 
| None if LAST_CHANNEL_URLS: channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) # tuple(dict.fromkeys(...)) removes duplicates while preserving input order. @@ -1353,7 +1331,7 @@ def install_actions(prefix, index, specs): subdirs = ( tuple( dict.fromkeys( - subdir for subdir in (c.subdir for c in channels) if subdir + subdir for channel in channels if (subdir := channel.subdir) ) ) or context.subdirs @@ -1361,12 +1339,12 @@ def install_actions(prefix, index, specs): else: channels = subdirs = None - specs = tuple(MatchSpec(spec) for spec in specs) + mspecs = tuple(MatchSpec(spec) for spec in specs) PrefixData._cache_.clear() solver_backend = context.plugin_manager.get_cached_solver_backend() - solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs) + solver = solver_backend(prefix, channels, subdirs, specs_to_add=mspecs) if index: # Solver can modify the index (e.g., Solver._prepare adds virtual # package) => Copy index (just outer container, not deep copy) @@ -1374,42 +1352,33 @@ def install_actions(prefix, index, specs): solver._index = index.copy() txn = solver.solve_for_transaction(prune=False, ignore_pinned=False) prefix_setup = txn.prefix_setups[prefix] - actions = { - PREFIX_ACTION: prefix, - LINK_ACTION: [prec for prec in prefix_setup.link_precs], + return { + "PREFIX": prefix, + "LINK": [prec for prec in prefix_setup.link_precs], } - return actions _install_actions = install_actions del install_actions -def _execute_actions(prefix, actions): +@deprecated.argument("24.3", "24.5", "actions", rename="precs") +def _execute_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 # but reduced to only the functionality actually used within conda-build. - assert prefix - if LINK_ACTION not in actions: - log.debug(f"action {LINK_ACTION} not in actions") - return - - link_precs = actions[LINK_ACTION] - if not link_precs: - log.debug(f"action {LINK_ACTION} has None value") - return - # Always link menuinst first/last on windows in case a subsequent # package tries to import it to create/remove a shortcut - link_precs = [p for p in link_precs if p.name == "menuinst"] + [ - p for p in link_precs if p.name != "menuinst" + precs = [ + *(prec for prec in precs if prec.name == "menuinst"), + *(prec for prec in precs if prec.name != "menuinst"), ] - progressive_fetch_extract = ProgressiveFetchExtract(link_precs) + progressive_fetch_extract = ProgressiveFetchExtract(precs) progressive_fetch_extract.prepare() - stp = PrefixSetup(prefix, (), link_precs, (), [], ()) + stp = PrefixSetup(prefix, (), precs, (), [], ()) unlink_link_transaction = UnlinkLinkTransaction(stp) log.debug(" %s(%r)", "PROGRESSIVEFETCHEXTRACT", progressive_fetch_extract) @@ -1418,7 +1387,8 @@ def _execute_actions(prefix, actions): unlink_link_transaction.execute() -def _display_actions(prefix, actions): +@deprecated.argument("24.3", "24.5", "actions", rename="precs") +def _display_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 # but reduced to only the functionality actually used within conda-build. 
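Two small idioms used in this hunk and the next, pulled out into a runnable sketch (the `SimpleNamespace` objects and sample data are stand-ins invented for illustration; in the real code they are conda `Channel` and `PackageRecord` objects):

```python
from types import SimpleNamespace

# Order-preserving dedup of channel subdirs, as in install_actions() above;
# the walrus test also filters out falsy (empty) subdirs.
channels = [
    SimpleNamespace(subdir="linux-64"),
    SimpleNamespace(subdir="noarch"),
    SimpleNamespace(subdir="linux-64"),
    SimpleNamespace(subdir=""),
]
subdirs = tuple(
    dict.fromkeys(subdir for channel in channels if (subdir := channel.subdir))
)
assert subdirs == ("linux-64", "noarch")

# Stable partition that links menuinst before everything else, as in _execute_actions().
precs = [SimpleNamespace(name="python"), SimpleNamespace(name="menuinst")]
precs = [
    *(prec for prec in precs if prec.name == "menuinst"),
    *(prec for prec in precs if prec.name != "menuinst"),
]
assert [prec.name for prec in precs] == ["menuinst", "python"]
```

`dict.fromkeys` keeps the first-seen order of subdirs while dropping duplicates, and the star-unpacking partition keeps menuinst at the front so a later package that imports it to create shortcuts finds it already linked.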
@@ -1450,7 +1420,7 @@ def channel_filt(s): features = defaultdict(lambda: "") channels = defaultdict(lambda: "") - for prec in actions.get(LINK_ACTION, []): + for prec in precs: assert isinstance(prec, PackageRecord) pkg = prec["name"] channels[pkg] = channel_filt(channel_str(prec)) diff --git a/conda_build/index.py b/conda_build/index.py index 229c5e1632..c3968d238a 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,81 +1,25 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import bz2 -import copy -import fnmatch -import functools import json import logging import os -import subprocess -import sys import time -from collections import OrderedDict -from concurrent.futures import Executor, ProcessPoolExecutor -from datetime import datetime +from concurrent.futures import Executor from functools import partial -from itertools import groupby -from numbers import Number -from os.path import ( - abspath, - basename, - dirname, - getmtime, - getsize, - isfile, - join, - splitext, -) -from pathlib import Path -from uuid import uuid4 +from os.path import dirname -import conda_package_handling.api -import pytz -import yaml - -# Lots of conda internals here. Should refactor to use exports. -from conda.common.compat import ensure_binary - -# BAD BAD BAD - conda internals from conda.core.index import get_index -from conda.core.subdir_data import SubdirData -from conda.models.channel import Channel from conda_index.index import update_index as _update_index -from conda_package_handling.api import InvalidArchiveError -from jinja2 import Environment, PackageLoader -from tqdm import tqdm -from yaml.constructor import ConstructorError -from yaml.parser import ParserError -from yaml.reader import ReaderError -from yaml.scanner import ScannerError from . 
import conda_interface, utils -from .conda_interface import ( - CondaError, - CondaHTTPError, - MatchSpec, - Resolve, - TemporaryDirectory, - VersionOrder, - context, - human_bytes, - url_path, -) +from .conda_interface import CondaHTTPError, context, url_path from .deprecations import deprecated -from .utils import ( - CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, - JSONDecodeError, - get_logger, - glob, - on_win, -) +from .utils import JSONDecodeError, get_logger, on_win log = get_logger(__name__) -# use this for debugging, because ProcessPoolExecutor isn't pdb/ipdb friendly +@deprecated("24.3", "24.5") class DummyExecutor(Executor): def map(self, func, *iterables): for iterable in iterables: @@ -83,50 +27,24 @@ def map(self, func, *iterables): yield func(thing) -try: - from conda.base.constants import NAMESPACE_PACKAGE_NAMES, NAMESPACES_MAP -except ImportError: - NAMESPACES_MAP = { # base package name, namespace - "python": "python", - "r": "r", - "r-base": "r", - "mro-base": "r", - "mro-base_impl": "r", - "erlang": "erlang", - "java": "java", - "openjdk": "java", - "julia": "julia", - "latex": "latex", - "lua": "lua", - "nodejs": "js", - "perl": "perl", - "php": "php", - "ruby": "ruby", - "m2-base": "m2", - "msys2-conda-epoch": "m2w64", - } - NAMESPACE_PACKAGE_NAMES = frozenset(NAMESPACES_MAP) - NAMESPACES = frozenset(NAMESPACES_MAP.values()) - local_index_timestamp = 0 cached_index = None local_subdir = "" local_output_folder = "" cached_channels = [] _channel_data = {} -deprecated.constant("24.1.0", "24.3.0", "channel_data", _channel_data) +deprecated.constant("24.1", "24.5", "channel_data", _channel_data) # TODO: support for libarchive seems to have broken ability to use multiple threads here. # The new conda format is so much faster that it more than makes up for it. However, it # would be nice to fix this at some point. -MAX_THREADS_DEFAULT = ( - os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 -) +_MAX_THREADS_DEFAULT = os.cpu_count() or 1 if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a - MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT) -LOCK_TIMEOUT_SECS = 3 * 3600 -LOCKFILE_NAME = ".lock" + _MAX_THREADS_DEFAULT = min(48, _MAX_THREADS_DEFAULT) +deprecated.constant("24.3", "24.5", "MAX_THREADS_DEFAULT", _MAX_THREADS_DEFAULT) +deprecated.constant("24.3", "24.5", "LOCK_TIMEOUT_SECS", 3 * 3600) +deprecated.constant("24.3", "24.5", "LOCKFILE_NAME", ".lock") # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" @@ -325,1419 +243,3 @@ def _delegated_update_index( current_index_versions=current_index_versions, debug=debug, ) - - -# Everything below is deprecated to maintain API/feature compatibility. 
- - -@deprecated("24.1.0", "24.3.0") -def _determine_namespace(info): - if info.get("namespace"): - namespace = info["namespace"] - else: - depends_names = set() - for spec in info.get("depends", []): - try: - depends_names.add(MatchSpec(spec).name) - except CondaError: - pass - spaces = depends_names & NAMESPACE_PACKAGE_NAMES - if len(spaces) == 1: - namespace = NAMESPACES_MAP[spaces.pop()] - else: - namespace = "global" - info["namespace"] = namespace - - if not info.get("namespace_in_name") and "-" in info["name"]: - namespace_prefix, reduced_name = info["name"].split("-", 1) - if namespace_prefix == namespace: - info["name_in_channel"] = info["name"] - info["name"] = reduced_name - - return namespace, info.get("name_in_channel", info["name"]), info["name"] - - -@deprecated("24.1.0", "24.3.0") -def _make_seconds(timestamp): - timestamp = int(timestamp) - if timestamp > 253402300799: # 9999-12-31 - timestamp //= ( - 1000 # convert milliseconds to seconds; see conda/conda-build#1988 - ) - return timestamp - - -# ========================================================================== - - -_REPODATA_VERSION = 1 -_CHANNELDATA_VERSION = 1 -_REPODATA_JSON_FN = "repodata.json" -_REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json" -_CHANNELDATA_FIELDS = ( - "description", - "dev_url", - "doc_url", - "doc_source_url", - "home", - "license", - "reference_package", - "source_url", - "source_git_url", - "source_git_tag", - "source_git_rev", - "summary", - "version", - "subdirs", - "icon_url", - "icon_hash", # "md5:abc123:12" - "run_exports", - "binary_prefix", - "text_prefix", - "activate.d", - "deactivate.d", - "pre_link", - "post_link", - "pre_unlink", - "tags", - "identifiers", - "keywords", - "recipe_origin", - "commits", -) -deprecated.constant("24.1.0", "24.3.0", "REPODATA_VERSION", _REPODATA_VERSION) -deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_VERSION", _CHANNELDATA_VERSION) -deprecated.constant("24.1.0", "24.3.0", "REPODATA_JSON_FN", _REPODATA_JSON_FN) -deprecated.constant( - "24.1.0", "24.3.0", "REPODATA_FROM_PKGS_JSON_FN", _REPODATA_FROM_PKGS_JSON_FN -) -deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_FIELDS", _CHANNELDATA_FIELDS) - - -@deprecated("24.1.0", "24.3.0") -def _clear_newline_chars(record, field_name): - if field_name in record: - try: - record[field_name] = record[field_name].strip().replace("\n", " ") - except AttributeError: - # sometimes description gets added as a list instead of just a string - record[field_name] = record[field_name][0].strip().replace("\n", " ") - - -@deprecated( - "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." 
-) -def _apply_instructions(subdir, repodata, instructions): - repodata.setdefault("removed", []) - utils.merge_or_update_dict( - repodata.get("packages", {}), - instructions.get("packages", {}), - merge=False, - add_missing_keys=False, - ) - # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume - # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes - new_pkg_fixes = { - k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get("packages", {}).items() - } - - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - new_pkg_fixes, - merge=False, - add_missing_keys=False, - ) - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - instructions.get("packages.conda", {}), - merge=False, - add_missing_keys=False, - ) - - for fn in instructions.get("revoke", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - if fn in repodata[key]: - repodata[key][fn]["revoked"] = True - repodata[key][fn]["depends"].append("package_has_been_revoked") - - for fn in instructions.get("remove", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - popped = repodata[key].pop(fn, None) - if popped: - repodata["removed"].append(fn) - repodata["removed"].sort() - - return repodata - - -@deprecated("24.1.0", "24.3.0") -def _get_jinja2_environment(): - def _filter_strftime(dt, dt_format): - if isinstance(dt, Number): - if dt > 253402300799: # 9999-12-31 - dt //= 1000 # convert milliseconds to seconds; see #1988 - dt = datetime.utcfromtimestamp(dt).replace(tzinfo=pytz.timezone("UTC")) - return dt.strftime(dt_format) - - def _filter_add_href(text, link, **kwargs): - if link: - kwargs_list = [f'href="{link}"'] - kwargs_list.append(f'alt="{text}"') - kwargs_list += [f'{k}="{v}"' for k, v in kwargs.items()] - return "{}".format(" ".join(kwargs_list), text) - else: - return text - - environment = Environment( - loader=PackageLoader("conda_build", "templates"), - ) - environment.filters["human_bytes"] = human_bytes - environment.filters["strftime"] = _filter_strftime - environment.filters["add_href"] = _filter_add_href - environment.trim_blocks = True - environment.lstrip_blocks = True - - return environment - - -@deprecated("24.1.0", "24.3.0") -def _maybe_write(path, content, write_newline_end=False, content_is_binary=False): - # Create the temp file next "path" so that we can use an atomic move, see - # https://github.com/conda/conda-build/issues/3833 - temp_path = f"{path}.{uuid4()}" - - if not content_is_binary: - content = ensure_binary(content) - with open(temp_path, "wb") as fh: - fh.write(content) - if write_newline_end: - fh.write(b"\n") - if isfile(path): - if utils.md5_file(temp_path) == utils.md5_file(path): - # No need to change mtimes. The contents already match. 
- os.unlink(temp_path) - return False - # log.info("writing %s", path) - utils.move_with_fallback(temp_path, path) - return True - - -@deprecated("24.1.0", "24.3.0") -def _make_build_string(build, build_number): - build_number_as_string = str(build_number) - if build.endswith(build_number_as_string): - build = build[: -len(build_number_as_string)] - build = build.rstrip("_") - build_string = build - return build_string - - -@deprecated("24.1.0", "24.3.0") -def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): - """ - The following dependencies do not exist in the channel and are not declared - as external dependencies: - - dependency1: - - subdir/fn1.tar.bz2 - - subdir/fn2.tar.bz2 - dependency2: - - subdir/fn3.tar.bz2 - - subdir/fn4.tar.bz2 - - The associated packages are being removed from the index. - """ - - if missing_dependencies: - builder = [ - "WARNING: The following dependencies do not exist in the channel", - " and are not declared as external dependencies:", - ] - for dep_name in sorted(missing_dependencies): - builder.append(" %s" % dep_name) - for subdir_fn in sorted(missing_dependencies[dep_name]): - builder.append(" - %s" % subdir_fn) - subdir, fn = subdir_fn.split("/") - popped = patched_repodata["packages"].pop(fn, None) - if popped: - patched_repodata["removed"].append(fn) - - builder.append("The associated packages are being removed from the index.") - builder.append("") - log.warn("\n".join(builder)) - - -@deprecated("24.1.0", "24.3.0") -def _cache_post_install_details(paths_cache_path, post_install_cache_path): - post_install_details_json = { - "binary_prefix": False, - "text_prefix": False, - "activate.d": False, - "deactivate.d": False, - "pre_link": False, - "post_link": False, - "pre_unlink": False, - } - if os.path.lexists(paths_cache_path): - with open(paths_cache_path) as f: - paths = json.load(f).get("paths", []) - - # get embedded prefix data from paths.json - for f in paths: - if f.get("prefix_placeholder"): - if f.get("file_mode") == "binary": - post_install_details_json["binary_prefix"] = True - elif f.get("file_mode") == "text": - post_install_details_json["text_prefix"] = True - # check for any activate.d/deactivate.d scripts - for k in ("activate.d", "deactivate.d"): - if not post_install_details_json.get(k) and f["_path"].startswith( - "etc/conda/%s" % k - ): - post_install_details_json[k] = True - # check for any link scripts - for pat in ("pre-link", "post-link", "pre-unlink"): - if not post_install_details_json.get(pat) and fnmatch.fnmatch( - f["_path"], "*/.*-%s.*" % pat - ): - post_install_details_json[pat.replace("-", "_")] = True - - with open(post_install_cache_path, "w") as fh: - json.dump(post_install_details_json, fh) - - -@deprecated("24.1.0", "24.3.0") -def _cache_recipe(tmpdir, recipe_cache_path): - recipe_path_search_order = ( - "info/recipe/meta.yaml.rendered", - "info/recipe/meta.yaml", - "info/meta.yaml", - ) - for path in recipe_path_search_order: - recipe_path = os.path.join(tmpdir, path) - if os.path.lexists(recipe_path): - break - recipe_path = None - - recipe_json = {} - if recipe_path: - with open(recipe_path) as f: - try: - recipe_json = yaml.safe_load(f) - except (ConstructorError, ParserError, ScannerError, ReaderError): - pass - try: - recipe_json_str = json.dumps(recipe_json) - except TypeError: - recipe_json.get("requirements", {}).pop("build") - recipe_json_str = json.dumps(recipe_json) - with open(recipe_cache_path, "w") as fh: - fh.write(recipe_json_str) - return recipe_json - - 
-@deprecated("24.1.0", "24.3.0") -def _cache_run_exports(tmpdir, run_exports_cache_path): - run_exports = {} - try: - with open(os.path.join(tmpdir, "info", "run_exports.json")) as f: - run_exports = json.load(f) - except (OSError, FileNotFoundError): - try: - with open(os.path.join(tmpdir, "info", "run_exports.yaml")) as f: - run_exports = yaml.safe_load(f) - except (OSError, FileNotFoundError): - log.debug("%s has no run_exports file (this is OK)" % tmpdir) - with open(run_exports_cache_path, "w") as fh: - json.dump(run_exports, fh) - - -@deprecated("24.1.0", "24.3.0") -def _cache_icon(tmpdir, recipe_json, icon_cache_path): - # If a conda package contains an icon, also extract and cache that in an .icon/ - # directory. The icon file name is the name of the package, plus the extension - # of the icon file as indicated by the meta.yaml `app/icon` key. - # apparently right now conda-build renames all icons to 'icon.png' - # What happens if it's an ico file, or a svg file, instead of a png? Not sure! - app_icon_path = recipe_json.get("app", {}).get("icon") - if app_icon_path: - icon_path = os.path.join(tmpdir, "info", "recipe", app_icon_path) - if not os.path.lexists(icon_path): - icon_path = os.path.join(tmpdir, "info", "icon.png") - if os.path.lexists(icon_path): - icon_cache_path += splitext(app_icon_path)[-1] - utils.move_with_fallback(icon_path, icon_cache_path) - - -@deprecated("24.1.0", "24.3.0") -def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths): - environment = _get_jinja2_environment() - template = environment.get_template("subdir-index.html.j2") - rendered_html = template.render( - title="{}/{}".format(channel_name or "", subdir), - packages=repodata_packages, - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - extra_paths=extra_paths, - ) - return rendered_html - - -@deprecated("24.1.0", "24.3.0") -def _make_channeldata_index_html(channel_name, channeldata): - environment = _get_jinja2_environment() - template = environment.get_template("channeldata-index.html.j2") - rendered_html = template.render( - title=channel_name, - packages=channeldata["packages"], - subdirs=channeldata["subdirs"], - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - ) - return rendered_html - - -@deprecated("24.1.0", "24.3.0") -def _get_source_repo_git_info(path): - is_repo = subprocess.check_output( - ["git", "rev-parse", "--is-inside-work-tree"], cwd=path - ) - if is_repo.strip().decode("utf-8") == "true": - output = subprocess.check_output( - ["git", "log", "--pretty=format:'%h|%ad|%an|%s'", "--date=unix"], cwd=path - ) - commits = [] - for line in output.decode("utf-8").strip().splitlines(): - _hash, _time, _author, _desc = line.split("|") - commits.append( - { - "hash": _hash, - "timestamp": int(_time), - "author": _author, - "description": _desc, - } - ) - return commits - - -@deprecated("24.1.0", "24.3.0") -def _cache_info_file(tmpdir, info_fn, cache_path): - info_path = os.path.join(tmpdir, "info", info_fn) - if os.path.lexists(info_path): - utils.move_with_fallback(info_path, cache_path) - - -@deprecated("24.1.0", "24.3.0") -def _alternate_file_extension(fn): - cache_fn = fn - for ext in CONDA_PACKAGE_EXTENSIONS: - cache_fn = cache_fn.replace(ext, "") - other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, "")} - return cache_fn + next(iter(other_ext)) - - -@deprecated("24.1.0", "24.3.0") -def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): - packages = {} - conda_packages = {} - if 
file_path: - with open(file_path) as fi: - packages = json.load(fi) - recs = json.load(fi) - for k, v in recs.items(): - if k.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[k] = v - elif k.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[k] = v - if not repodata: - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - "packages.conda": conda_packages, - } - - channel = Channel("https://conda.anaconda.org/dummy-channel/%s" % subdir) - sd = SubdirData(channel) - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in precs or sd._package_records} - r = Resolve(index, channels=(channel,)) - return r - - -@deprecated("24.1.0", "24.3.0") -def _get_newest_versions(r, pins={}): - groups = {} - for g_name, g_recs in r.groups.items(): - if g_name in pins: - matches = [] - for pin in pins[g_name]: - version = r.find_matches(MatchSpec(f"{g_name}={pin}"))[0].version - matches.extend(r.find_matches(MatchSpec(f"{g_name}={version}"))) - else: - version = r.groups[g_name][0].version - matches = r.find_matches(MatchSpec(f"{g_name}={version}")) - groups[g_name] = matches - return [pkg for group in groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _add_missing_deps(new_r, original_r): - """For each package in new_r, if any deps are not satisfiable, backfill them from original_r.""" - - expanded_groups = copy.deepcopy(new_r.groups) - seen_specs = set() - for g_name, g_recs in new_r.groups.items(): - for g_rec in g_recs: - for dep_spec in g_rec.depends: - if dep_spec in seen_specs: - continue - ms = MatchSpec(dep_spec) - if not new_r.find_matches(ms): - matches = original_r.find_matches(ms) - if matches: - version = matches[0].version - expanded_groups[ms.name] = set( - expanded_groups.get(ms.name, []) - ) | set( - original_r.find_matches(MatchSpec(f"{ms.name}={version}")) - ) - seen_specs.add(dep_spec) - return [pkg for group in expanded_groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _add_prev_ver_for_features(new_r, orig_r): - expanded_groups = copy.deepcopy(new_r.groups) - for g_name in new_r.groups: - if not any(m.track_features or m.features for m in new_r.groups[g_name]): - # no features so skip - continue - - # versions are sorted here so this is the latest - latest_version = VersionOrder(str(new_r.groups[g_name][0].version)) - if g_name in orig_r.groups: - # now we iterate through the list to find the next to latest - # without a feature - keep_m = None - for i in range(len(orig_r.groups[g_name])): - _m = orig_r.groups[g_name][i] - if VersionOrder(str(_m.version)) <= latest_version and not ( - _m.track_features or _m.features - ): - keep_m = _m - break - if keep_m is not None: - expanded_groups[g_name] = {keep_m} | set( - expanded_groups.get(g_name, []) - ) - - return [pkg for group in expanded_groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _shard_newest_packages(subdir, r, pins=None): - """Captures only the newest versions of software in the resolve object. - - For things where more than one version is supported simultaneously (like Python), - pass pins as a dictionary, with the key being the package name, and the value being - a list of supported versions. 
For example: - - {'python': ["2.7", "3.6"]} - """ - groups = {} - pins = pins or {} - for g_name, g_recs in r.groups.items(): - # always do the latest implicitly - version = r.groups[g_name][0].version - matches = set(r.find_matches(MatchSpec(f"{g_name}={version}"))) - if g_name in pins: - for pin_value in pins[g_name]: - version = r.find_matches(MatchSpec(f"{g_name}={pin_value}"))[0].version - matches.update(r.find_matches(MatchSpec(f"{g_name}={version}"))) - groups[g_name] = matches - - # add the deps of the stuff in the index - new_r = _get_resolve_object( - subdir, precs=[pkg for group in groups.values() for pkg in group] - ) - new_r = _get_resolve_object(subdir, precs=_add_missing_deps(new_r, r)) - - # now for any pkg with features, add at least one previous version - # also return - return set(_add_prev_ver_for_features(new_r, r)) - - -@deprecated("24.1.0", "24.3.0") -def _build_current_repodata(subdir, repodata, pins): - r = _get_resolve_object(subdir, repodata=repodata) - keep_pkgs = _shard_newest_packages(subdir, r, pins) - new_repodata = { - k: repodata[k] for k in set(repodata.keys()) - {"packages", "packages.conda"} - } - packages = {} - conda_packages = {} - for keep_pkg in keep_pkgs: - if keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[keep_pkg.fn] = repodata["packages.conda"][keep_pkg.fn] - # in order to prevent package churn we consider the md5 for the .tar.bz2 that matches the .conda file - # This holds when .conda files contain the same files as .tar.bz2, which is an assumption we'll make - # until it becomes more prevalent that people provide only .conda files and just skip .tar.bz2 - counterpart = keep_pkg.fn.replace( - CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1 - ) - conda_packages[keep_pkg.fn]["legacy_bz2_md5"] = ( - repodata["packages"].get(counterpart, {}).get("md5") - ) - elif keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[keep_pkg.fn] = repodata["packages"][keep_pkg.fn] - new_repodata["packages"] = packages - new_repodata["packages.conda"] = conda_packages - return new_repodata - - -@deprecated("24.1.0", "24.3.0") -class ChannelIndex: - def __init__( - self, - channel_root, - channel_name, - subdirs=None, - threads=MAX_THREADS_DEFAULT, - deep_integrity_check=False, - debug=False, - ): - self.channel_root = abspath(channel_root) - self.channel_name = channel_name or basename(channel_root.rstrip("/")) - self._subdirs = subdirs - self.thread_executor = ( - DummyExecutor() - if debug or sys.version_info.major == 2 or threads == 1 - else ProcessPoolExecutor(threads) - ) - self.deep_integrity_check = deep_integrity_check - - def index( - self, - patch_generator, - hotfix_source_repo=None, - verbose=False, - progress=False, - current_index_versions=None, - index_file=None, - ): - if verbose: - level = logging.DEBUG - else: - level = logging.ERROR - - with utils.LoggingContext(level, loggers=[__name__]): - if not self._subdirs: - detected_subdirs = { - subdir.name - for subdir in os.scandir(self.channel_root) - if subdir.name in utils.DEFAULT_SUBDIRS and subdir.is_dir() - } - log.debug("found subdirs %s" % detected_subdirs) - self.subdirs = subdirs = sorted(detected_subdirs | {"noarch"}) - else: - self.subdirs = subdirs = sorted(set(self._subdirs) | {"noarch"}) - - # Step 1. Lock local channel. 
- with utils.try_acquire_locks( - [utils.get_lock(self.channel_root)], timeout=900 - ): - channel_data = {} - channeldata_file = os.path.join(self.channel_root, "channeldata.json") - if os.path.isfile(channeldata_file): - with open(channeldata_file) as f: - channel_data = json.load(f) - # Step 2. Collect repodata from packages, save to pkg_repodata.json file - with tqdm( - total=len(subdirs), disable=(verbose or not progress), leave=False - ) as t: - for subdir in subdirs: - t.set_description("Subdir: %s" % subdir) - t.update() - with tqdm( - total=8, disable=(verbose or not progress), leave=False - ) as t2: - t2.set_description("Gathering repodata") - t2.update() - _ensure_valid_channel(self.channel_root, subdir) - repodata_from_packages = self.index_subdir( - subdir, - verbose=verbose, - progress=progress, - index_file=index_file, - ) - - t2.set_description("Writing pre-patch repodata") - t2.update() - self._write_repodata( - subdir, - repodata_from_packages, - _REPODATA_FROM_PKGS_JSON_FN, - ) - - # Step 3. Apply patch instructions. - t2.set_description("Applying patch instructions") - t2.update() - patched_repodata, patch_instructions = self._patch_repodata( - subdir, repodata_from_packages, patch_generator - ) - - # Step 4. Save patched and augmented repodata. - # If the contents of repodata have changed, write a new repodata.json file. - # Also create associated index.html. - - t2.set_description("Writing patched repodata") - t2.update() - self._write_repodata( - subdir, patched_repodata, _REPODATA_JSON_FN - ) - t2.set_description("Building current_repodata subset") - t2.update() - current_repodata = _build_current_repodata( - subdir, patched_repodata, pins=current_index_versions - ) - t2.set_description("Writing current_repodata subset") - t2.update() - self._write_repodata( - subdir, - current_repodata, - json_filename="current_repodata.json", - ) - - t2.set_description("Writing subdir index HTML") - t2.update() - self._write_subdir_index_html(subdir, patched_repodata) - - t2.set_description("Updating channeldata") - t2.update() - self._update_channeldata( - channel_data, patched_repodata, subdir - ) - - # Step 7. Create and write channeldata. - self._write_channeldata_index_html(channel_data) - self._write_channeldata(channel_data) - - def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): - subdir_path = join(self.channel_root, subdir) - self._ensure_dirs(subdir) - repodata_json_path = join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN) - - if verbose: - log.info("Building repodata for %s" % subdir_path) - - # gather conda package filenames in subdir - # we'll process these first, because reading their metadata is much faster - fns_in_subdir = { - fn - for fn in os.listdir(subdir_path) - if fn.endswith(".conda") or fn.endswith(".tar.bz2") - } - - # load current/old repodata - try: - with open(repodata_json_path) as fh: - old_repodata = json.load(fh) or {} - except (OSError, JSONDecodeError): - # log.info("no repodata found at %s", repodata_json_path) - old_repodata = {} - - old_repodata_packages = old_repodata.get("packages", {}) - old_repodata_conda_packages = old_repodata.get("packages.conda", {}) - old_repodata_fns = set(old_repodata_packages) | set(old_repodata_conda_packages) - - # Load stat cache. 
The stat cache has the form - # { - # 'package_name.tar.bz2': { - # 'mtime': 123456, - # 'md5': 'abd123', - # }, - # } - stat_cache_path = join(subdir_path, ".cache", "stat.json") - try: - with open(stat_cache_path) as fh: - stat_cache = json.load(fh) or {} - except: - stat_cache = {} - - stat_cache_original = stat_cache.copy() - - remove_set = old_repodata_fns - fns_in_subdir - ignore_set = set(old_repodata.get("removed", [])) - try: - # calculate all the paths and figure out what we're going to do with them - # add_set: filenames that aren't in the current/old repodata, but exist in the subdir - if index_file: - with open(index_file) as fin: - add_set = set() - for line in fin: - fn_subdir, fn = line.strip().split("/") - if fn_subdir != subdir: - continue - if fn.endswith(".conda") or fn.endswith(".tar.bz2"): - add_set.add(fn) - else: - add_set = fns_in_subdir - old_repodata_fns - - add_set -= ignore_set - - # update_set: Filenames that are in both old repodata and new repodata, - # and whose contents have changed based on file size or mtime. We're - # not using md5 here because it takes too long. If needing to do full md5 checks, - # use the --deep-integrity-check flag / self.deep_integrity_check option. - update_set = self._calculate_update_set( - subdir, - fns_in_subdir, - old_repodata_fns, - stat_cache, - verbose=verbose, - progress=progress, - ) - # unchanged_set: packages in old repodata whose information can carry straight - # across to new repodata - unchanged_set = set(old_repodata_fns - update_set - remove_set - ignore_set) - - assert isinstance(unchanged_set, set) # faster `in` queries - - # clean up removed files - removed_set = old_repodata_fns - fns_in_subdir - for fn in removed_set: - if fn in stat_cache: - del stat_cache[fn] - - new_repodata_packages = { - k: v - for k, v in old_repodata.get("packages", {}).items() - if k in unchanged_set - } - new_repodata_conda_packages = { - k: v - for k, v in old_repodata.get("packages.conda", {}).items() - if k in unchanged_set - } - - for k in sorted(unchanged_set): - if not (k in new_repodata_packages or k in new_repodata_conda_packages): - fn, rec = ChannelIndex._load_index_from_cache( - self.channel_root, subdir, fn, stat_cache - ) - # this is how we pass an exception through. When fn == rec, there's been a problem, - # and we need to reload this file - if fn == rec: - update_set.add(fn) - else: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - new_repodata_packages[fn] = rec - else: - new_repodata_conda_packages[fn] = rec - - # Invalidate cached files for update_set. - # Extract and cache update_set and add_set, then add to new_repodata_packages. - # This is also where we update the contents of the stat_cache for successfully - # extracted packages. - # Sorting here prioritizes .conda files ('c') over .tar.bz2 files ('b') - hash_extract_set = (*add_set, *update_set) - - extract_func = functools.partial( - ChannelIndex._extract_to_cache, self.channel_root, subdir - ) - # split up the set by .conda packages first, then .tar.bz2. This avoids race conditions - # with execution in parallel that would end up in the same place. 
- for conda_format in tqdm( - CONDA_PACKAGE_EXTENSIONS, - desc="File format", - disable=(verbose or not progress), - leave=False, - ): - for fn, mtime, size, index_json in tqdm( - self.thread_executor.map( - extract_func, - (fn for fn in hash_extract_set if fn.endswith(conda_format)), - ), - desc="hash & extract packages for %s" % subdir, - disable=(verbose or not progress), - leave=False, - ): - # fn can be None if the file was corrupt or no longer there - if fn and mtime: - stat_cache[fn] = {"mtime": int(mtime), "size": size} - if index_json: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - new_repodata_conda_packages[fn] = index_json - else: - new_repodata_packages[fn] = index_json - else: - log.error( - "Package at %s did not contain valid index.json data. Please" - " check the file and remove/redownload if necessary to obtain " - "a valid package." % os.path.join(subdir_path, fn) - ) - - new_repodata = { - "packages": new_repodata_packages, - "packages.conda": new_repodata_conda_packages, - "info": { - "subdir": subdir, - }, - "repodata_version": _REPODATA_VERSION, - "removed": sorted(list(ignore_set)), - } - finally: - if stat_cache != stat_cache_original: - # log.info("writing stat cache to %s", stat_cache_path) - with open(stat_cache_path, "w") as fh: - json.dump(stat_cache, fh) - return new_repodata - - def _ensure_dirs(self, subdir: str): - """Create cache directories within a subdir. - - Args: - subdir (str): name of the subdirectory - """ - # Create all cache directories in the subdir. - cache_path = Path(self.channel_root, subdir, ".cache") - cache_path.mkdir(parents=True, exist_ok=True) - (cache_path / "index").mkdir(exist_ok=True) - (cache_path / "about").mkdir(exist_ok=True) - (cache_path / "paths").mkdir(exist_ok=True) - (cache_path / "recipe").mkdir(exist_ok=True) - (cache_path / "run_exports").mkdir(exist_ok=True) - (cache_path / "post_install").mkdir(exist_ok=True) - (cache_path / "icon").mkdir(exist_ok=True) - (cache_path / "recipe_log").mkdir(exist_ok=True) - Path(self.channel_root, "icons").mkdir(exist_ok=True) - - def _calculate_update_set( - self, - subdir, - fns_in_subdir, - old_repodata_fns, - stat_cache, - verbose=False, - progress=True, - ): - # Determine the packages that already exist in repodata, but need to be updated. - # We're not using md5 here because it takes too long. - candidate_fns = fns_in_subdir & old_repodata_fns - subdir_path = join(self.channel_root, subdir) - - update_set = set() - for fn in tqdm( - iter(candidate_fns), - desc="Finding updated files", - disable=(verbose or not progress), - leave=False, - ): - if fn not in stat_cache: - update_set.add(fn) - else: - stat_result = os.stat(join(subdir_path, fn)) - if ( - int(stat_result.st_mtime) != int(stat_cache[fn]["mtime"]) - or stat_result.st_size != stat_cache[fn]["size"] - ): - update_set.add(fn) - return update_set - - @staticmethod - def _extract_to_cache(channel_root, subdir, fn, second_try=False): - # This method WILL reread the tarball. Probably need another one to exit early if - # there are cases where it's fine not to reread. Like if we just rebuild repodata - # from the cached files, but don't use the existing repodata.json as a starting point. - subdir_path = join(channel_root, subdir) - - # allow .conda files to reuse cache from .tar.bz2 and vice-versa. - # Assumes that .tar.bz2 and .conda files have exactly the same - # contents. This is convention, but not guaranteed, nor checked. 
- alternate_cache_fn = _alternate_file_extension(fn) - cache_fn = fn - - abs_fn = os.path.join(subdir_path, fn) - - stat_result = os.stat(abs_fn) - size = stat_result.st_size - mtime = stat_result.st_mtime - retval = fn, mtime, size, None - - index_cache_path = join(subdir_path, ".cache", "index", cache_fn + ".json") - about_cache_path = join(subdir_path, ".cache", "about", cache_fn + ".json") - paths_cache_path = join(subdir_path, ".cache", "paths", cache_fn + ".json") - recipe_cache_path = join(subdir_path, ".cache", "recipe", cache_fn + ".json") - run_exports_cache_path = join( - subdir_path, ".cache", "run_exports", cache_fn + ".json" - ) - post_install_cache_path = join( - subdir_path, ".cache", "post_install", cache_fn + ".json" - ) - icon_cache_path = join(subdir_path, ".cache", "icon", cache_fn) - - log.debug("hashing, extracting, and caching %s" % fn) - - alternate_cache = False - if not os.path.exists(index_cache_path) and os.path.exists( - index_cache_path.replace(fn, alternate_cache_fn) - ): - alternate_cache = True - - try: - # allow .tar.bz2 files to use the .conda cache, but not vice-versa. - # .conda readup is very fast (essentially free), but .conda files come from - # converting .tar.bz2 files, which can go wrong. Forcing extraction for - # .conda files gives us a check on the validity of that conversion. - if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile( - index_cache_path - ): - with open(index_cache_path) as f: - index_json = json.load(f) - elif not alternate_cache and ( - second_try or not os.path.exists(index_cache_path) - ): - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract( - abs_fn, dest_dir=tmpdir, components="info" - ) - index_file = os.path.join(tmpdir, "info", "index.json") - if not os.path.exists(index_file): - return retval - with open(index_file) as f: - index_json = json.load(f) - - _cache_info_file(tmpdir, "about.json", about_cache_path) - _cache_info_file(tmpdir, "paths.json", paths_cache_path) - _cache_info_file(tmpdir, "recipe_log.json", paths_cache_path) - _cache_run_exports(tmpdir, run_exports_cache_path) - _cache_post_install_details( - paths_cache_path, post_install_cache_path - ) - recipe_json = _cache_recipe(tmpdir, recipe_cache_path) - _cache_icon(tmpdir, recipe_json, icon_cache_path) - - # decide what fields to filter out, like has_prefix - filter_fields = { - "arch", - "has_prefix", - "mtime", - "platform", - "ucs", - "requires_features", - "binstar", - "target-triplet", - "machine", - "operatingsystem", - } - for field_name in filter_fields & set(index_json): - del index_json[field_name] - elif alternate_cache: - # we hit the cache of the other file type. Copy files to this name, and replace - # the size, md5, and sha256 values - paths = [ - index_cache_path, - about_cache_path, - paths_cache_path, - recipe_cache_path, - run_exports_cache_path, - post_install_cache_path, - icon_cache_path, - ] - bizarro_paths = [_.replace(fn, alternate_cache_fn) for _ in paths] - for src, dest in zip(bizarro_paths, paths): - if os.path.exists(src): - try: - os.makedirs(os.path.dirname(dest)) - except: - pass - utils.copy_into(src, dest) - - with open(index_cache_path) as f: - index_json = json.load(f) - else: - with open(index_cache_path) as f: - index_json = json.load(f) - - # calculate extra stuff to add to index.json cache, size, md5, sha256 - # This is done always for all files, whether the cache is loaded or not, - # because the cache may be from the other file type. 
We don't store this - # info in the cache to avoid confusion. - index_json.update(conda_package_handling.api.get_pkg_details(abs_fn)) - - with open(index_cache_path, "w") as fh: - json.dump(index_json, fh) - retval = fn, mtime, size, index_json - except (InvalidArchiveError, KeyError, EOFError, JSONDecodeError): - if not second_try: - return ChannelIndex._extract_to_cache( - channel_root, subdir, fn, second_try=True - ) - return retval - - @staticmethod - def _load_index_from_cache(channel_root, subdir, fn, stat_cache): - index_cache_path = join(channel_root, subdir, ".cache", "index", fn + ".json") - try: - with open(index_cache_path) as fh: - index_json = json.load(fh) - except (OSError, JSONDecodeError): - index_json = fn - - return fn, index_json - - @staticmethod - def _load_all_from_cache(channel_root, subdir, fn): - subdir_path = join(channel_root, subdir) - try: - mtime = getmtime(join(subdir_path, fn)) - except FileNotFoundError: - return {} - # In contrast to self._load_index_from_cache(), this method reads up pretty much - # all of the cached metadata, except for paths. It all gets dumped into a single map. - index_cache_path = join(subdir_path, ".cache", "index", fn + ".json") - about_cache_path = join(subdir_path, ".cache", "about", fn + ".json") - recipe_cache_path = join(subdir_path, ".cache", "recipe", fn + ".json") - run_exports_cache_path = join( - subdir_path, ".cache", "run_exports", fn + ".json" - ) - post_install_cache_path = join( - subdir_path, ".cache", "post_install", fn + ".json" - ) - icon_cache_path_glob = join(subdir_path, ".cache", "icon", fn + ".*") - recipe_log_path = join(subdir_path, ".cache", "recipe_log", fn + ".json") - - data = {} - for path in ( - recipe_cache_path, - about_cache_path, - index_cache_path, - post_install_cache_path, - recipe_log_path, - ): - try: - if os.path.getsize(path) != 0: - with open(path) as fh: - data.update(json.load(fh)) - except (OSError, EOFError): - pass - - try: - icon_cache_paths = glob(icon_cache_path_glob) - if icon_cache_paths: - icon_cache_path = sorted(icon_cache_paths)[-1] - icon_ext = icon_cache_path.rsplit(".", 1)[-1] - channel_icon_fn = "{}.{}".format(data["name"], icon_ext) - icon_url = "icons/" + channel_icon_fn - icon_channel_path = join(channel_root, "icons", channel_icon_fn) - icon_md5 = utils.md5_file(icon_cache_path) - icon_hash = f"md5:{icon_md5}:{getsize(icon_cache_path)}" - data.update(icon_hash=icon_hash, icon_url=icon_url) - # log.info("writing icon from %s to %s", icon_cache_path, icon_channel_path) - utils.move_with_fallback(icon_cache_path, icon_channel_path) - except: - pass - - # have to stat again, because we don't have access to the stat cache here - data["mtime"] = mtime - - source = data.get("source", {}) - try: - data.update({"source_" + k: v for k, v in source.items()}) - except AttributeError: - # sometimes source is a list instead of a dict - pass - _clear_newline_chars(data, "description") - _clear_newline_chars(data, "summary") - try: - with open(run_exports_cache_path) as fh: - data["run_exports"] = json.load(fh) - except (OSError, EOFError): - data["run_exports"] = {} - return data - - def _write_repodata(self, subdir, repodata, json_filename): - repodata_json_path = join(self.channel_root, subdir, json_filename) - new_repodata_binary = ( - json.dumps( - repodata, - indent=2, - sort_keys=True, - ) - .replace("':'", "': '") - .encode("utf-8") - ) - write_result = _maybe_write( - repodata_json_path, new_repodata_binary, write_newline_end=True - ) - if write_result: - 
repodata_bz2_path = repodata_json_path + ".bz2" - bz2_content = bz2.compress(new_repodata_binary) - _maybe_write(repodata_bz2_path, bz2_content, content_is_binary=True) - return write_result - - def _write_subdir_index_html(self, subdir, repodata): - repodata_packages = repodata["packages"] - subdir_path = join(self.channel_root, subdir) - - def _add_extra_path(extra_paths, path): - if isfile(join(self.channel_root, path)): - extra_paths[basename(path)] = { - "size": getsize(path), - "timestamp": int(getmtime(path)), - "sha256": utils.sha256_checksum(path), - "md5": utils.md5_file(path), - } - - extra_paths = OrderedDict() - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN + ".bz2")) - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN)) - _add_extra_path( - extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN + ".bz2") - ) - # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json")) - _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json")) - rendered_html = _make_subdir_index_html( - self.channel_name, subdir, repodata_packages, extra_paths - ) - index_path = join(subdir_path, "index.html") - return _maybe_write(index_path, rendered_html) - - def _write_channeldata_index_html(self, channeldata): - rendered_html = _make_channeldata_index_html(self.channel_name, channeldata) - index_path = join(self.channel_root, "index.html") - _maybe_write(index_path, rendered_html) - - def _update_channeldata(self, channel_data, repodata, subdir): - legacy_packages = repodata["packages"] - conda_packages = repodata["packages.conda"] - - use_these_legacy_keys = set(legacy_packages.keys()) - { - k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys() - } - all_packages = conda_packages.copy() - all_packages.update({k: legacy_packages[k] for k in use_these_legacy_keys}) - package_data = channel_data.get("packages", {}) - - def _append_group(groups, candidates): - candidate = sorted(candidates, key=lambda x: x[1].get("timestamp", 0))[-1] - pkg_dict = candidate[1] - pkg_name = pkg_dict["name"] - - run_exports = package_data.get(pkg_name, {}).get("run_exports", {}) - if ( - pkg_name not in package_data - or subdir not in package_data.get(pkg_name, {}).get("subdirs", []) - or package_data.get(pkg_name, {}).get("timestamp", 0) - < _make_seconds(pkg_dict.get("timestamp", 0)) - or run_exports - and pkg_dict["version"] not in run_exports - ): - groups.append(candidate) - - groups = [] - for name, group in groupby(all_packages.items(), lambda x: x[1]["name"]): - if name not in package_data or package_data[name].get("run_exports"): - # pay special attention to groups that have run_exports - we need to process each version - # group by version; take newest per version group. 
We handle groups that are not - # in the index t all yet similarly, because we can't check if they have any run_exports - for _, vgroup in groupby(group, lambda x: x[1]["version"]): - _append_group(groups, vgroup) - else: - # take newest per group - _append_group(groups, group) - - def _replace_if_newer_and_present(pd, data, erec, data_newer, k): - if data.get(k) and (data_newer or not erec.get(k)): - pd[k] = data[k] - else: - pd[k] = erec.get(k) - - # unzipping - fns, fn_dicts = [], [] - if groups: - fns, fn_dicts = zip(*groups) - - load_func = functools.partial( - ChannelIndex._load_all_from_cache, - self.channel_root, - subdir, - ) - for fn_dict, data in zip(fn_dicts, self.thread_executor.map(load_func, fns)): - if data: - data.update(fn_dict) - name = data["name"] - # existing record - erec = package_data.get(name, {}) - data_v = data.get("version", "0") - erec_v = erec.get("version", "0") - data_newer = VersionOrder(data_v) > VersionOrder(erec_v) - - package_data[name] = package_data.get(name, {}) - # keep newer value for these - for k in ( - "description", - "dev_url", - "doc_url", - "doc_source_url", - "home", - "license", - "source_url", - "source_git_url", - "summary", - "icon_url", - "icon_hash", - "tags", - "identifiers", - "keywords", - "recipe_origin", - "version", - ): - _replace_if_newer_and_present( - package_data[name], data, erec, data_newer, k - ) - - # keep any true value for these, since we don't distinguish subdirs - for k in ( - "binary_prefix", - "text_prefix", - "activate.d", - "deactivate.d", - "pre_link", - "post_link", - "pre_unlink", - ): - package_data[name][k] = any((data.get(k), erec.get(k))) - - package_data[name]["subdirs"] = sorted( - list(set(erec.get("subdirs", []) + [subdir])) - ) - # keep one run_exports entry per version of the package, since these vary by version - run_exports = erec.get("run_exports", {}) - exports_from_this_version = data.get("run_exports") - if exports_from_this_version: - run_exports[data_v] = data.get("run_exports") - package_data[name]["run_exports"] = run_exports - package_data[name]["timestamp"] = _make_seconds( - max( - data.get("timestamp", 0), - channel_data.get(name, {}).get("timestamp", 0), - ) - ) - - channel_data.update( - { - "channeldata_version": _CHANNELDATA_VERSION, - "subdirs": sorted( - list(set(channel_data.get("subdirs", []) + [subdir])) - ), - "packages": package_data, - } - ) - - def _write_channeldata(self, channeldata): - # trim out commits, as they can take up a ton of space. They're really only for the RSS feed. 
- for _pkg, pkg_dict in channeldata.get("packages", {}).items(): - if "commits" in pkg_dict: - del pkg_dict["commits"] - channeldata_path = join(self.channel_root, "channeldata.json") - content = json.dumps(channeldata, indent=2, sort_keys=True).replace( - "':'", "': '" - ) - _maybe_write(channeldata_path, content, True) - - def _load_patch_instructions_tarball(self, subdir, patch_generator): - instructions = {} - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(patch_generator, dest_dir=tmpdir) - instructions_file = os.path.join(tmpdir, subdir, "patch_instructions.json") - if os.path.isfile(instructions_file): - with open(instructions_file) as f: - instructions = json.load(f) - return instructions - - def _create_patch_instructions(self, subdir, repodata, patch_generator=None): - gen_patch_path = patch_generator or join(self.channel_root, "gen_patch.py") - if isfile(gen_patch_path): - log.debug(f"using patch generator {gen_patch_path} for {subdir}") - - # https://stackoverflow.com/a/41595552/2127762 - try: - from importlib.util import module_from_spec, spec_from_file_location - - spec = spec_from_file_location("a_b", gen_patch_path) - mod = module_from_spec(spec) - - spec.loader.exec_module(mod) - # older pythons - except ImportError: - import imp - - mod = imp.load_source("a_b", gen_patch_path) - - instructions = mod._patch_repodata(repodata, subdir) - - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return instructions - else: - if patch_generator: - raise ValueError( - f"Specified metadata patch file '{patch_generator}' does not exist. Please try an absolute " - "path, or examine your relative path carefully with respect to your cwd." - ) - return {} - - def _write_patch_instructions(self, subdir, instructions): - new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace( - "':'", "': '" - ) - patch_instructions_path = join( - self.channel_root, subdir, "patch_instructions.json" - ) - _maybe_write(patch_instructions_path, new_patch, True) - - def _load_instructions(self, subdir): - patch_instructions_path = join( - self.channel_root, subdir, "patch_instructions.json" - ) - if isfile(patch_instructions_path): - log.debug("using patch instructions %s" % patch_instructions_path) - with open(patch_instructions_path) as fh: - instructions = json.load(fh) - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - return instructions - return {} - - def _patch_repodata(self, subdir, repodata, patch_generator=None): - if patch_generator and any( - patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS - ): - instructions = self._load_patch_instructions_tarball( - subdir, patch_generator - ) - else: - instructions = self._create_patch_instructions( - subdir, repodata, patch_generator - ) - if instructions: - self._write_patch_instructions(subdir, instructions) - else: - instructions = self._load_instructions(subdir) - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return _apply_instructions(subdir, repodata, instructions), instructions diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 1b50a076c6..7a9985fc8a 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -22,7 +22,6 @@ from .conda_interface import ( specs_from_args, ) -from .deprecations import deprecated from .os_utils.ldd 
import ( get_linkages, get_package_obj_files, @@ -96,9 +95,6 @@ def __str__(self): untracked_package = _untracked_package() -@deprecated.argument("24.1.0", "24.3.0", "platform", rename="subdir") -@deprecated.argument("24.1.0", "24.3.0", "prepend") -@deprecated.argument("24.1.0", "24.3.0", "minimal_hint") def check_install( packages: Iterable[str], subdir: str | None = None, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index f9f0d55438..633b6de8fc 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -16,9 +16,10 @@ from typing import TYPE_CHECKING, overload from bs4 import UnicodeDammit +from conda.gateways.disk.read import compute_sum from . import exceptions, utils, variants -from .conda_interface import MatchSpec, envs_dirs, md5_file +from .conda_interface import MatchSpec, envs_dirs from .config import Config, get_or_merge_config from .features import feature_list from .license_family import ensure_valid_license_family @@ -1704,7 +1705,9 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % md5_file(join(self.path, self.get_value("app/icon"))) + d["icon"] = "%s.png" % compute_sum( + join(self.path, self.get_value("app/icon")), "md5" + ) for field, key in [ ("app/entry", "app_entry"), diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index daaf163490..fb81565b3d 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -6,30 +6,10 @@ import os import shutil import sys -from os.path import basename, dirname, isdir, isfile, join +from os.path import basename, dirname, isfile, join -from .deprecations import deprecated from .utils import on_win -deprecated.constant( - "24.1", - "24.3", - "ISWIN", - on_win, - addendum="Use `conda_build.utils.on_win` instead.", -) - - -@deprecated("24.1", "24.3", addendum="Use `os.makedirs(exist_ok=True)` instead.") -def _force_dir(dirname): - if not isdir(dirname): - os.makedirs(dirname) - - -@deprecated("24.1", "24.3") -def _error_exit(exit_message): - sys.exit("[noarch_python] %s" % exit_message) - def rewrite_script(fn, prefix): """Take a file from the bin directory and rewrite it into the python-scripts diff --git a/conda_build/post.py b/conda_build/post.py index 17edda3d6e..4512c9e508 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -35,13 +35,13 @@ from typing import TYPE_CHECKING from conda.core.prefix_data import PrefixData +from conda.gateways.disk.read import compute_sum from conda.models.records import PrefixRecord from . import utils from .conda_interface import ( TemporaryDirectory, lchmod, - md5_file, walk_prefix, ) from .exceptions import OverDependingError, OverLinkingError, RunPathError @@ -393,7 +393,7 @@ def find_lib(link, prefix, files, path=None): # multiple places. 
md5s = set() for f in file_names[link]: - md5s.add(md5_file(join(prefix, f))) + md5s.add(compute_sum(join(prefix, f), "md5")) if len(md5s) > 1: sys.exit( f"Error: Found multiple instances of {link}: {file_names[link]}" diff --git a/conda_build/render.py b/conda_build/render.py index a46130f4ed..9ba417bf23 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -35,8 +35,6 @@ pkgs_dirs, specs_from_url, ) -from .deprecations import deprecated -from .environ import LINK_ACTION from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output @@ -91,13 +89,6 @@ def bldpkg_path(m): return path -@deprecated("24.1.0", "24.3.0") -def actions_to_pins(actions): - if LINK_ACTION in actions: - return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]] - return [] - - def _categorize_deps(m, specs, exclude_pattern, variant): subpackages = [] dependencies = [] @@ -158,7 +149,7 @@ def get_env_dependencies( ) with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir: try: - actions = environ.get_install_actions( + precs = environ.get_package_records( tmpdir, tuple(dependencies), env, @@ -180,19 +171,17 @@ def get_env_dependencies( else: unsat = e.message if permit_unsatisfiable_variants: - actions = {} + precs = [] else: raise - specs = [ - package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) - ] + specs = [package_record_to_requirement(prec) for prec in precs] return ( utils.ensure_list( (specs + subpackages + pass_through_deps) or m.get_value(f"requirements/{env}", []) ), - actions, + precs, unsat, ) @@ -329,7 +318,6 @@ def _read_specs_from_package(pkg_loc, pkg_dist): return specs -@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") def execute_download_actions(m, precs, env, package_subset=None, require_files=False): subdir = getattr(m.config, f"{env}_subdir") index, _, _ = get_build_index( @@ -359,8 +347,6 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F pkg_files = {} - if hasattr(precs, "keys"): - precs = precs.get(LINK_ACTION, []) if isinstance(package_subset, PackageRecord): package_subset = [package_subset] else: @@ -409,14 +395,11 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F return pkg_files -@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") def get_upstream_pins(m: MetaData, precs, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. Return these additional specs.""" env_specs = m.get_value(f"requirements/{env}", []) explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] - if hasattr(precs, "keys"): - precs = precs.get(LINK_ACTION, []) precs = [prec for prec in precs if prec.name in explicit_specs] ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) @@ -453,7 +436,7 @@ def _read_upstream_pin_files( permit_unsatisfiable_variants, exclude_pattern, ): - deps, actions, unsat = get_env_dependencies( + deps, precs, unsat = get_env_dependencies( m, env, m.config.variant, @@ -462,7 +445,7 @@ def _read_upstream_pin_files( ) # extend host deps with strong build run exports. This is important for things like # vc feature activation to work correctly in the host env. 
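The render.py hunks above swap the legacy "actions" dictionary (keyed by the link instruction) for a flat list of `PackageRecord` objects returned by `environ.get_package_records`. A minimal sketch of that shape change, using a stand-in record and a stand-in for `package_record_to_requirement` (the real helper lives in `conda_build.render`; the "LINK" key and the exact spec string are illustrative assumptions, not taken from this diff):

```python
from types import SimpleNamespace

def record_to_spec(prec):
    # stand-in for conda_build.render.package_record_to_requirement
    return f"{prec.name} {prec.version} {prec.build}"

prec = SimpleNamespace(name="zlib", version="1.2.13", build="h166bdaf_4")

# old shape: an "actions" dict, with link targets under the LINK_ACTION key
actions = {"LINK": [prec]}
old_specs = [record_to_spec(p) for p in actions.get("LINK", [])]

# new shape: get_package_records() hands back the records directly
precs = [prec]
new_specs = [record_to_spec(p) for p in precs]

assert old_specs == new_specs
```

This is also why the `hasattr(precs, "keys")` compatibility shims removed above are no longer needed: callers now only ever receive a list.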
- extra_run_specs = get_upstream_pins(m, actions, env) + extra_run_specs = get_upstream_pins(m, precs, env) return ( list(set(deps)) or m.get_value(f"requirements/{env}", []), unsat, diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index e1c061bf73..891f62f3cb 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -18,6 +18,7 @@ from os.path import basename, dirname, exists, join import requests +from conda.core.index import get_index from .. import environ from ..conda_interface import ( @@ -28,7 +29,6 @@ TemporaryDirectory, TmpDownload, download, - get_index, ) from ..config import Config, get_or_merge_config from ..utils import check_call_env, on_linux, on_win diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index fbe59199b3..92e2ff9efd 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -19,6 +19,7 @@ import pkginfo import requests import yaml +from conda.gateways.disk.read import compute_sum from requests.packages.urllib3.util.url import parse_url from ..conda_interface import ( @@ -26,7 +27,6 @@ configparser, default_python, download, - hashsum_file, human_bytes, input, normalized_version, @@ -1276,10 +1276,10 @@ def get_pkginfo( download_path = join(config.src_cache, filename) if ( not isfile(download_path) - or hashsum_file(download_path, hash_type) != hash_value + or compute_sum(download_path, hash_type) != hash_value ): download(pypiurl, join(config.src_cache, filename)) - if hashsum_file(download_path, hash_type) != hash_value: + if compute_sum(download_path, hash_type) != hash_value: raise RuntimeError( f" Download of {package} failed" f" checksum type {hash_type} expected value {hash_value}. Please" @@ -1291,7 +1291,7 @@ def get_pkginfo( # Needs to be done in this block because this is where we have # access to the source file. if hash_type != "sha256": - new_hash_value = hashsum_file(download_path, "sha256") + new_hash_value = compute_sum(download_path, "sha256") else: new_hash_value = "" @@ -1356,7 +1356,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op create_env( config.host_prefix, - specs_or_actions=specs, + specs_or_precs=specs, env="host", subdir=subdir, clear_cache=False, diff --git a/conda_build/source.py b/conda_build/source.py index d4e1ca5b69..436a4137b2 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -15,11 +15,12 @@ from typing import TYPE_CHECKING from urllib.parse import urljoin +from conda.gateways.disk.read import compute_sum + from .conda_interface import ( CondaHTTPError, TemporaryDirectory, download, - hashsum_file, url_path, ) from .exceptions import MissingDependency @@ -120,7 +121,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): for tp in ("md5", "sha1", "sha256"): if tp in source_dict: expected_hash = source_dict[tp] - hashed = hashsum_file(path, tp) + hashed = compute_sum(path, tp) if expected_hash != hashed: rm_rf(path) raise RuntimeError( @@ -132,7 +133,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): # collisions in our source cache, but the end user will get no benefit from the cache. 
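Several hunks in this patch (metadata.py, post.py, the pypi skeleton, source.py, utils.py) replace the old `hashsum_file`/`md5_file` helpers with `conda.gateways.disk.read.compute_sum`. A minimal equivalence sketch, assuming conda >=23.5.0 as required by the dependency bumps later in this patch:

```python
import tempfile

from conda.gateways.disk.read import compute_sum

# a throwaway file to hash, purely for illustration
with tempfile.NamedTemporaryFile(suffix=".tar.bz2", delete=False) as f:
    f.write(b"example payload")
    path = f.name

# old call                         ->  new call
# md5_file(path)                   ->  compute_sum(path, "md5")
# hashsum_file(path, "sha256")     ->  compute_sum(path, "sha256")
print(compute_sum(path, "md5"))
print(compute_sum(path, "sha256"))
```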
if not hash_added: if not hashed: - hashed = hashsum_file(path, "sha256") + hashed = compute_sum(path, "sha256") dest_path = append_hash_to_fn(path, hashed) if not os.path.isfile(dest_path): shutil.move(path, dest_path) diff --git a/conda_build/utils.py b/conda_build/utils.py index 29baa98005..7635c45a6f 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -41,7 +41,7 @@ ) from pathlib import Path from threading import Thread -from typing import TYPE_CHECKING, Iterable +from typing import TYPE_CHECKING, Iterable, overload import conda_package_handling.api import filelock @@ -53,12 +53,11 @@ CONDA_PACKAGE_EXTENSIONS, KNOWN_SUBDIRS, ) -from conda.core.prefix_data import PrefixData -from conda.models.dist import Dist +from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec from .conda_interface import ( CondaHTTPError, - MatchSpec, PackageRecord, StringIO, TemporaryDirectory, @@ -67,19 +66,20 @@ context, download, get_conda_channel, - hashsum_file, - md5_file, pkgs_dirs, root_dir, unix_path_to_win, win_path_to_unix, ) from .conda_interface import rm_rf as _rm_rf -from .deprecations import deprecated from .exceptions import BuildLockError if TYPE_CHECKING: - from conda.models.records import PrefixRecord + from typing import Mapping, TypeVar + + T = TypeVar("T") + K = TypeVar("K") + V = TypeVar("V") on_win = sys.platform == "win32" on_mac = sys.platform == "darwin" @@ -876,8 +876,8 @@ def tar_xf(tarball, dir_path): def file_info(path): return { "size": getsize(path), - "md5": md5_file(path), - "sha256": hashsum_file(path, "sha256"), + "md5": compute_sum(path, "md5"), + "sha256": compute_sum(path, "sha256"), "mtime": getmtime(path), } @@ -1162,7 +1162,7 @@ def package_has_file(package_path, file_path, refresh_mode="modified"): return content -def ensure_list(arg, include_dict=True): +def ensure_list(arg: T | Iterable[T] | None, include_dict: bool = True) -> list[T]: """ Ensure the object is a list. If not return it in a list. @@ -1181,7 +1181,11 @@ def ensure_list(arg, include_dict=True): return [arg] -def islist(arg, uniform=False, include_dict=True): +def islist( + arg: T | Iterable[T], + uniform: bool = False, + include_dict: bool = True, +) -> bool: """ Check whether `arg` is a `list`. Optionally determine whether the list elements are all uniform. @@ -1767,7 +1771,10 @@ def merge_or_update_dict( return base -def merge_dicts_of_lists(dol1, dol2): +def merge_dicts_of_lists( + dol1: Mapping[K, Iterable[V]], + dol2: Mapping[K, Iterable[V]], +) -> dict[K, list[V]]: """ From Alex Martelli: https://stackoverflow.com/a/1495821/3257826 """ @@ -1889,7 +1896,17 @@ def sort_list_in_nested_structure(dictionary, omissions=""): spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$") -def ensure_valid_spec(spec, warn=False): +@overload +def ensure_valid_spec(spec: str, warn: bool = False) -> str: + ... + + +@overload +def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec: + ... + + +def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchSpec: if isinstance(spec, MatchSpec): if ( hasattr(spec, "version") @@ -2112,21 +2129,6 @@ def download_channeldata(channel_url): return data -@deprecated("24.1.0", "24.3.0") -def linked_data_no_multichannels( - prefix: str | os.PathLike | Path, -) -> dict[Dist, PrefixRecord]: - """ - Return a dictionary of the linked packages in prefix, with correct channels, hopefully. - cc @kalefranz. 
- """ - prefix = Path(prefix) - return { - Dist.from_string(prec.fn, channel_override=prec.channel.name): prec - for prec in PrefixData(str(prefix)).iter_records() - } - - def shutil_move_more_retrying(src, dest, debug_name): log = get_logger(__name__) log.info(f"Renaming {debug_name} directory '{src}' to '{dest}'") diff --git a/news/5203-remove-deprecations b/news/5203-remove-deprecations new file mode 100644 index 0000000000..fb77c3b149 --- /dev/null +++ b/news/5203-remove-deprecations @@ -0,0 +1,82 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. (#5203) +* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203) +* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203) +* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.cache_actions` as deprecated. (#5203) +* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203) +* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203) +* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203) +* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203) +* Postpone `conda_build.index.channel_data` deprecation. (#5203) +* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203) +* Rename `conda_build.environ._execute_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.environ._display_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203) +* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203) +* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203) +* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203) +* Remove `conda_build.conda_interface.Dist`. (#5203) +* Remove `conda_build.conda_interface.display_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_plan`. (#5203) +* Remove `conda_build.conda_interface.get_index`. (#5203) +* Remove `conda_build.conda_interface.install_actions`. (#5203) +* Remove `conda_build.conda_interface.linked`. (#5203) +* Remove `conda_build.conda_interface.linked_data`. (#5203) +* Remove `conda_build.conda_interface.package_cache`. (#5203) +* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203) +* Remove `conda_build.index._determine_namespace`. (#5203) +* Remove `conda_build.index._make_seconds`. (#5203) +* Remove `conda_build.index.REPODATA_VERSION`. (#5203) +* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203) +* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203) +* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) +* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) +* Remove `conda_build.index._clear_newline_chars`. (#5203) +* Remove `conda_build.index._apply_instructions`. (#5203) +* Remove `conda_build.index._get_jinja2_environment`. (#5203) +* Remove `conda_build.index._maybe_write`. (#5203) +* Remove `conda_build.index._make_build_string`. (#5203) +* Remove `conda_build.index._warn_on_missing_dependencies`. 
(#5203) +* Remove `conda_build.index._cache_post_install_details`. (#5203) +* Remove `conda_build.index._cache_recipe`. (#5203) +* Remove `conda_build.index._cache_run_exports`. (#5203) +* Remove `conda_build.index._cache_icon`. (#5203) +* Remove `conda_build.index._make_subdir_index_html`. (#5203) +* Remove `conda_build.index._make_channeldata_index_html`. (#5203) +* Remove `conda_build.index._get_source_repo_git_info`. (#5203) +* Remove `conda_build.index._cache_info_file`. (#5203) +* Remove `conda_build.index._alternate_file_extension`. (#5203) +* Remove `conda_build.index._get_resolve_object`. (#5203) +* Remove `conda_build.index._get_newest_versions`. (#5203) +* Remove `conda_build.index._add_missing_deps`. (#5203) +* Remove `conda_build.index._add_prev_ver_for_features`. (#5203) +* Remove `conda_build.index._shard_newest_packages`. (#5203) +* Remove `conda_build.index._build_current_repodata`. (#5203) +* Remove `conda_build.index.ChannelIndex`. (#5203) +* Remove `conda_build.inspect.check_install('prepend')`. (#5203) +* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203) +* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203) +* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203) +* Remove `conda_build.noarch_python._error_exit`. (#5203) +* Remove `conda_build.render.actions_to_pins`. (#5203) +* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 21d787c86d..e8cfc5e011 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", - "conda >=22.11", + "conda >=23.5.0", "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", @@ -125,9 +125,9 @@ addopts = [ # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) "--cov-append", "--cov-branch", - "--cov-report=term-missing", - "--cov-report=xml", - "--durations=16", + "--cov-report=term", # print summary table to screen + "--cov-report=xml", # for codecov/codecov-action upload + "--durations=16", # show 16 slowest tests "--junitxml=junit.xml", # "--splitting-algorithm=least_duration", # not available yet # "--store-durations", # not available yet diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 9b1ec2f3bc..a9062803cb 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -30,7 +30,7 @@ requirements: run: - beautifulsoup4 - chardet - - conda >=22.11.0 + - conda >=23.5.0 - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock diff --git a/tests/requirements.txt b/tests/requirements.txt index a4ecdd07a8..5f96c8fd66 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,8 +1,8 @@ beautifulsoup4 chardet -conda >=22.11.0 +conda >=23.5.0 conda-forge::anaconda-client -conda-index +conda-index >=0.4.0 conda-package-handling >=1.3 conda-verify contextlib2 diff --git a/tests/test_source.py b/tests/test_source.py index e32a133b84..711407d153 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,9 +5,10 @@ import tarfile import pytest +from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import TemporaryDirectory, hashsum_file +from conda_build.conda_interface import TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator @@ -142,7 +143,7 @@ def test_source_user_expand(): "url": 
os.path.join( prefix, os.path.basename(tmp), "cb-test.tar.bz2" ), - "sha256": hashsum_file(tbz_name, "sha256"), + "sha256": compute_sum(tbz_name, "sha256"), } with TemporaryDirectory() as tmp2: download_to_cache(tmp2, "", source_dict) diff --git a/tests/test_variants.py b/tests/test_variants.py index 89ebb67999..50e9cea4f2 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -429,7 +429,7 @@ def test_build_run_exports_act_on_host(caplog): platform="win", arch="64", ) - assert "failed to get install actions, retrying" not in caplog.text + assert "failed to get package records, retrying" not in caplog.text def test_detect_variables_in_build_and_output_scripts(): From f6403dc16687c5d6227551cc974207198628edd6 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 8 Mar 2024 14:54:36 -0600 Subject: [PATCH 291/366] Port deprecated `conda.auxlib.packaging.get_version_from_git_tag` to `conda_build` (#5221) --- conda_build/conda_interface.py | 14 +++++++++++--- conda_build/environ.py | 19 ++++++++++++++++++- news/5221-deprecate-get_version_from_git_tag | 19 +++++++++++++++++++ 3 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 news/5221-deprecate-get_version_from_git_tag diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index f309b338a0..3f25e89591 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -8,9 +8,6 @@ from importlib import import_module # noqa: F401 from conda import __version__ as CONDA_VERSION # noqa: F401 -from conda.auxlib.packaging import ( # noqa: F401 - _get_version_from_git_tag as get_version_from_git_tag, -) from conda.base.context import context, determine_target_prefix, reset_context from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 @@ -117,3 +114,14 @@ def handle_proxy_407(x, y): ) def md5_file(path: str | os.PathLike) -> str: return compute_sum(path, "md5") + + +@deprecated( + "24.3", + "24.5", + addendum="Use `conda_build.environ.get_version_from_git_tag` instead.", +) +def get_version_from_git_tag(tag): + from .environ import get_version_from_git_tag + + return get_version_from_git_tag(tag) diff --git a/conda_build/environ.py b/conda_build/environ.py index ba57d39314..4fe68add36 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -48,7 +48,6 @@ TemporaryDirectory, context, create_default_packages, - get_version_from_git_tag, pkgs_dirs, reset_context, root_dir, @@ -223,6 +222,24 @@ def verify_git_repo( return OK +GIT_DESCRIBE_REGEX = re.compile( + r"(?:[_-a-zA-Z]*)" + r"(?P[a-zA-Z0-9.]+)" + r"(?:-(?P\d+)-g(?P[0-9a-f]{7,}))$" +) + + +def get_version_from_git_tag(tag): + """Return a PEP440-compliant version derived from the git status. + If that fails for any reason, return the changeset hash. + """ + m = GIT_DESCRIBE_REGEX.match(tag) + if m is None: + return None + version, post_commit, hash = m.groups() + return version if post_commit == "0" else f"{version}.post{post_commit}+{hash}" + + def get_git_info(git_exe, repo, debug): """ Given a repo to a git repo, return a dictionary of: diff --git a/news/5221-deprecate-get_version_from_git_tag b/news/5221-deprecate-get_version_from_git_tag new file mode 100644 index 0000000000..2c1e811a54 --- /dev/null +++ b/news/5221-deprecate-get_version_from_git_tag @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.conda_interface.get_version_from_git_tag` as deprecated. 
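The helper ported into environ.py above turns `git describe --tags`-style strings into PEP 440 versions. A self-contained re-creation for illustration; the regex group names are spelled out here and are assumed to be `version`, `post`, and `hash`, matching the positional unpacking in the ported code:

```python
import re

GIT_DESCRIBE_REGEX = re.compile(
    r"(?:[_-a-zA-Z]*)"
    r"(?P<version>[a-zA-Z0-9.]+)"
    r"(?:-(?P<post>\d+)-g(?P<hash>[0-9a-f]{7,}))$"
)

def get_version_from_git_tag(tag):
    m = GIT_DESCRIBE_REGEX.match(tag)
    if m is None:
        return None
    version, post_commit, hash = m.groups()
    return version if post_commit == "0" else f"{version}.post{post_commit}+{hash}"

assert get_version_from_git_tag("1.2.3-0-g1234abc") == "1.2.3"
assert get_version_from_git_tag("1.2.3-7-gabcdef0") == "1.2.3.post7+abcdef0"
assert get_version_from_git_tag("1.2.3") is None
```

Note that the describe suffix (`-N-g<hash>`) is mandatory in this regex, so a bare tag yields `None`.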
Use `conda_build.environ.get_version_from_git_tag` instead. (#5221) + +### Docs + +* + +### Other + +* From 6b983ae466af3da77f7948e9a55c3f73a64fbc9e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 8 Mar 2024 16:14:40 -0600 Subject: [PATCH 292/366] Deprecate `Environment` in favor of `PrefixData` (#5219) --- conda_build/build.py | 7 +++++-- conda_build/environ.py | 4 ++++ news/5219-deprecate-Environment | 19 +++++++++++++++++++ tests/test_environ.py | 12 ++++++++++++ 4 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 news/5219-deprecate-Environment diff --git a/conda_build/build.py b/conda_build/build.py index 087e932f81..9e62fc7293 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -22,6 +22,7 @@ import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version +from conda.core.prefix_data import PrefixData from . import __version__ as conda_build_version from . import environ, noarch_python, source, tarcheck, utils @@ -1421,8 +1422,10 @@ def write_about_json(m): m.config.extra_meta, ) extra.update(m.config.extra_meta) - env = environ.Environment(root_dir) - d["root_pkgs"] = env.package_specs() + d["root_pkgs"] = [ + f"{prec.name} {prec.version} {prec.build}" + for prec in PrefixData(root_dir).iter_records() + ] # Include the extra section of the metadata in the about.json d["extra"] = extra json.dump(d, fo, indent=2, sort_keys=True) diff --git a/conda_build/environ.py b/conda_build/environ.py index 4fe68add36..f7260ac92e 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -827,18 +827,21 @@ def os_vars(m, prefix): return d +@deprecated("24.3", "24.5") class InvalidEnvironment(Exception): pass # Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools ) # Vendored here to avoid the whole dependency for just this bit. +@deprecated("24.3", "24.5") def _load_json(path): with open(path) as fin: x = json.load(fin) return x +@deprecated("24.3", "24.5") def _load_all_json(path): """ Load all json files in a directory. Return dictionary with filenames mapped to json @@ -852,6 +855,7 @@ def _load_all_json(path): return result +@deprecated("24.3", "24.5", addendum="Use `conda.core.prefix_data.PrefixData` instead.") class Environment: def __init__(self, path): """ diff --git a/news/5219-deprecate-Environment b/news/5219-deprecate-Environment new file mode 100644 index 0000000000..95780c6be3 --- /dev/null +++ b/news/5219-deprecate-Environment @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.environ.Environment` as deprecated. Use `conda.core.prefix_data.PrefixData` instead. 
(#5219) + +### Docs + +* + +### Other + +* diff --git a/tests/test_environ.py b/tests/test_environ.py index 93311ab81b..d45fc8ed7f 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -1,6 +1,9 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os +import sys + +from conda.core.prefix_data import PrefixData from conda_build import environ @@ -15,3 +18,12 @@ def test_environment_creation_preserves_PATH(testing_workdir, testing_config): subdir=testing_config.build_subdir, ) assert os.environ["PATH"] == ref_path + + +def test_environment(): + """Asserting PrefixData can accomplish the same thing as Environment.""" + assert (specs := environ.Environment(sys.prefix).package_specs()) + assert specs == [ + f"{prec.name} {prec.version} {prec.build}" + for prec in PrefixData(sys.prefix).iter_records() + ] From 558f4da3e07ea0c2294cc52a9d79b4732d7d9b2e Mon Sep 17 00:00:00 2001 From: Ryan Date: Fri, 8 Mar 2024 16:03:43 -0700 Subject: [PATCH 293/366] Deprecate `conda_build.api.get_output_file_path` (#5208) --- conda_build/api.py | 9 ++------- docs/source/resources/variants.rst | 2 +- news/5208-deprecate-get_output_file_path | 19 +++++++++++++++++++ tests/cli/test_main_build.py | 2 +- tests/test_api_build.py | 8 ++++---- tests/test_api_consistency.py | 2 +- tests/test_api_render.py | 2 +- tests/test_render.py | 2 +- tests/test_subpackages.py | 6 +++--- 9 files changed, 33 insertions(+), 19 deletions(-) create mode 100644 news/5208-deprecate-get_output_file_path diff --git a/conda_build/api.py b/conda_build/api.py index 8a1298bbe9..a8fc525e66 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -20,13 +20,13 @@ # make the Config class available in the api namespace from .config import DEFAULT_PREFIX_LENGTH as _prefix_length from .config import Config, get_channel_urls, get_or_merge_config +from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSIONS, LoggingContext, ensure_list, expand_globs, find_recipe, - get_logger, get_skip_message, on_win, ) @@ -168,6 +168,7 @@ def get_output_file_paths( return sorted(list(set(outs))) +@deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.") def get_output_file_path( recipe_path_or_metadata, no_download_source=False, @@ -180,12 +181,6 @@ def get_output_file_path( Both split packages (recipes with more than one output) and build matrices, created with variants, contribute to the list of file paths here. """ - log = get_logger(__name__) - log.warn( - "deprecation warning: this function has been renamed to get_output_file_paths, " - "to reflect that potentially multiple paths are returned. This function will be " - "removed in the conda-build 4.0 release." - ) return get_output_file_paths( recipe_path_or_metadata, no_download_source=no_download_source, diff --git a/docs/source/resources/variants.rst b/docs/source/resources/variants.rst index 3209fd3620..90953126ee 100644 --- a/docs/source/resources/variants.rst +++ b/docs/source/resources/variants.rst @@ -323,7 +323,7 @@ your Jinja2 templates. There are two ways that you can feed this information into the API: 1. Pass the ``variants`` keyword argument to API functions. Currently, the - ``build``, ``render``, ``get_output_file_path``, and ``check`` functions + ``build``, ``render``, ``get_output_file_paths``, and ``check`` functions accept this argument. ``variants`` should be a dictionary where each value is a list of versions to iterate over. 
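For callers of the now-deprecated singular API above, the plural replacement is a drop-in change; a minimal sketch (the recipe path is a hypothetical placeholder):

```python
from conda_build import api

# was: api.get_output_file_path("path/to/recipe")   (deprecated 24.3.0, removal 24.5.0)
paths = api.get_output_file_paths("path/to/recipe")  # hypothetical recipe directory
for path in paths:
    print(path)  # one path per output and per variant the recipe expands to
```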
These are aggregated as detailed in the `Aggregation of multiple variants`_ section below. diff --git a/news/5208-deprecate-get_output_file_path b/news/5208-deprecate-get_output_file_path new file mode 100644 index 0000000000..33244e8bf5 --- /dev/null +++ b/news/5208-deprecate-get_output_file_path @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.api.get_output_file_path` as deprecated. Use `conda_build.api.get_output_file_paths` instead. (#5208) + +### Docs + +* + +### Other + +* diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 60f24cf7ca..9da5b48418 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -296,7 +296,7 @@ def test_no_force_upload( # render recipe api.output_yaml(testing_metadata, "meta.yaml") - pkg = api.get_output_file_path(testing_metadata) + pkg = api.get_output_file_paths(testing_metadata) # mock Config.set_keys to always set anaconda_upload to True # conda's Context + conda_build's MetaData & Config objects interact in such an diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 6ad6577c50..0d2bd3b5f0 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -242,7 +242,7 @@ def test_offline( def test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") m = api.render(recipe_path, config=testing_config)[0][0] - output = api.get_output_file_path(m)[0] + output = api.get_output_file_paths(m)[0] # missing hash because we set custom build string in meta.yaml test_path = os.path.join( testing_config.croot, @@ -625,7 +625,7 @@ def test_numpy_setup_py_data(testing_config): m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] _hash = m.hash_dependencies() assert ( - os.path.basename(api.get_output_file_path(m)[0]) + os.path.basename(api.get_output_file_paths(m)[0]) == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2" ) @@ -795,7 +795,7 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke # This will (after one spin round the loop) install and run 'git' with the # build env prepended to os.environ[] metadata = api.render(testing_workdir, config=testing_config)[0][0] - output = api.get_output_file_path(metadata, config=testing_config)[0] + output = api.get_output_file_paths(metadata, config=testing_config)[0] assert f"relative_submodules-{tag}-" in output api.build(metadata, config=testing_config) @@ -811,7 +811,7 @@ def test_noarch(testing_workdir): ) with open(filename, "w") as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) - output = api.get_output_file_path(testing_workdir)[0] + output = api.get_output_file_paths(testing_workdir)[0] assert os.path.sep + "noarch" + os.path.sep in output or not noarch assert os.path.sep + "noarch" + os.path.sep not in output or noarch diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 56685f66d1..9d88b60eee 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -43,7 +43,7 @@ def test_api_output_yaml(): def test_api_get_output_file_path(): - argspec = getargspec(api.get_output_file_path) + argspec = getargspec(api.get_output_file_paths) assert argspec.args == [ "recipe_path_or_metadata", "no_download_source", diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 878617e78d..868053876b 100644 --- a/tests/test_api_render.py +++ 
b/tests/test_api_render.py @@ -105,7 +105,7 @@ def test_get_output_file_path_jinja2(testing_config): def test_output_without_jinja_does_not_download(mocker, testing_config): mock = mocker.patch("conda_build.source") - api.get_output_file_path( + api.get_output_file_paths( os.path.join(metadata_dir, "source_git"), config=testing_config ) mock.assert_not_called() diff --git a/tests/test_render.py b/tests/test_render.py index 6cfd0abeea..aef9d0e928 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -27,7 +27,7 @@ ) def test_noarch_output(build, testing_metadata): testing_metadata.meta["build"].update(build) - output = api.get_output_file_path(testing_metadata) + output = api.get_output_file_paths(testing_metadata) assert os.path.sep + "noarch" + os.path.sep in output[0] diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 3937036d14..3c3b011c58 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -55,7 +55,7 @@ def test_rm_rf_does_not_remove_relative_source_package_files( def test_output_pkg_path_shows_all_subpackages(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path( + outputs = api.get_output_file_paths( [(m, None, None) for (_, m) in out_dicts_and_metadata] ) assert len(outputs) == 2 @@ -64,7 +64,7 @@ def test_output_pkg_path_shows_all_subpackages(testing_metadata): def test_subpackage_version_provided(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() - outputs = api.get_output_file_path( + outputs = api.get_output_file_paths( [(m, None, None) for (_, m) in out_dicts_and_metadata] ) assert len(outputs) == 1 @@ -78,7 +78,7 @@ def test_subpackage_independent_hash(testing_metadata): testing_metadata.meta["requirements"]["run"] = ["a"] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 - outputs = api.get_output_file_path( + outputs = api.get_output_file_paths( [(m, None, None) for (_, m) in out_dicts_and_metadata] ) assert len(outputs) == 2 From cb67840c8d474cefedc875767661bff4b03c3bcf Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Mon, 11 Mar 2024 17:49:14 +0100 Subject: [PATCH 294/366] Defer `.index._apply_instructions` and `.conda_interface.get_index` deprecations (#5226) * Re-add .index._apply_instructions 24.5 deprecation Signed-off-by: Marcel Bargull * Defer .conda_interface.get_index deprecation 24.5 Signed-off-by: Marcel Bargull --------- Signed-off-by: Marcel Bargull --- conda_build/conda_interface.py | 2 ++ conda_build/index.py | 59 +++++++++++++++++++++++++++++++++- news/5203-remove-deprecations | 2 -- 3 files changed, 60 insertions(+), 3 deletions(-) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 3f25e89591..5d5c455d07 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -58,11 +58,13 @@ walk_prefix, win_path_to_unix, ) +from conda.exports import get_index as _get_index from conda.gateways.disk.read import compute_sum from conda.models.channel import get_conda_build_local_url # noqa: F401 from .deprecations import deprecated +deprecated.constant("24.1.0", "24.5.0", "get_index", _get_index) # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python diff 
--git a/conda_build/index.py b/conda_build/index.py index c3968d238a..cd36cc9cac 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -14,7 +14,13 @@ from . import conda_interface, utils from .conda_interface import CondaHTTPError, context, url_path from .deprecations import deprecated -from .utils import JSONDecodeError, get_logger, on_win +from .utils import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + JSONDecodeError, + get_logger, + on_win, +) log = get_logger(__name__) @@ -243,3 +249,54 @@ def _delegated_update_index( current_index_versions=current_index_versions, debug=debug, ) + + +@deprecated( + "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." +) +def _apply_instructions(subdir, repodata, instructions): + repodata.setdefault("removed", []) + utils.merge_or_update_dict( + repodata.get("packages", {}), + instructions.get("packages", {}), + merge=False, + add_missing_keys=False, + ) + # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume + # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes + new_pkg_fixes = { + k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v + for k, v in instructions.get("packages", {}).items() + } + + utils.merge_or_update_dict( + repodata.get("packages.conda", {}), + new_pkg_fixes, + merge=False, + add_missing_keys=False, + ) + utils.merge_or_update_dict( + repodata.get("packages.conda", {}), + instructions.get("packages.conda", {}), + merge=False, + add_missing_keys=False, + ) + + for fn in instructions.get("revoke", ()): + for key in ("packages", "packages.conda"): + if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": + fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) + if fn in repodata[key]: + repodata[key][fn]["revoked"] = True + repodata[key][fn]["depends"].append("package_has_been_revoked") + + for fn in instructions.get("remove", ()): + for key in ("packages", "packages.conda"): + if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": + fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) + popped = repodata[key].pop(fn, None) + if popped: + repodata["removed"].append(fn) + repodata["removed"].sort() + + return repodata diff --git a/news/5203-remove-deprecations b/news/5203-remove-deprecations index fb77c3b149..5021c12907 100644 --- a/news/5203-remove-deprecations +++ b/news/5203-remove-deprecations @@ -30,7 +30,6 @@ * Remove `conda_build.conda_interface.display_actions`. (#5203) * Remove `conda_build.conda_interface.execute_actions`. (#5203) * Remove `conda_build.conda_interface.execute_plan`. (#5203) -* Remove `conda_build.conda_interface.get_index`. (#5203) * Remove `conda_build.conda_interface.install_actions`. (#5203) * Remove `conda_build.conda_interface.linked`. (#5203) * Remove `conda_build.conda_interface.linked_data`. (#5203) @@ -44,7 +43,6 @@ * Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) * Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) * Remove `conda_build.index._clear_newline_chars`. (#5203) -* Remove `conda_build.index._apply_instructions`. (#5203) * Remove `conda_build.index._get_jinja2_environment`. (#5203) * Remove `conda_build.index._maybe_write`. (#5203) * Remove `conda_build.index._make_build_string`. 
(#5203) From 4bc2fd821aafb27561f96cb91e54eb5e83c27979 Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Mon, 11 Mar 2024 22:19:02 +0100 Subject: [PATCH 295/366] Add compatibility for LIEF=0.14 (#5228) * Add compatibility for LIEF=0.14 Signed-off-by: Marcel Bargull * Add news/5228-lief-0.14-compat Signed-off-by: Marcel Bargull * TMP: Test with conda-forge/label/lief_rc::py-lief >=0.14 Signed-off-by: Marcel Bargull * Replace lief.Binary.name (lief<0.14) by str(file) Signed-off-by: Marcel Bargull * Revert "TMP: Test with conda-forge/label/lief_rc::py-lief >=0.14" This reverts commit cb5df71c9afd743262a9fd1476f6f9c3cc98b126. Signed-off-by: Marcel Bargull --------- Signed-off-by: Marcel Bargull --- conda_build/os_utils/liefldd.py | 72 +++++++++++++++++++-------------- news/5228-lief-0.14-compat | 19 +++++++++ 2 files changed, 60 insertions(+), 31 deletions(-) create mode 100644 news/5228-lief-0.14-compat diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9f358f619a..9b14454c4f 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -27,6 +27,16 @@ lief.logging.disable() have_lief = True + try: + PE_HEADER_CHARACTERISTICS = lief.PE.Header.CHARACTERISTICS + except AttributeError: + # Fallback for lief<0.14. + PE_HEADER_CHARACTERISTICS = lief.PE.HEADER_CHARACTERISTICS + try: + EXE_FORMATS = lief.Binary.FORMATS + except AttributeError: + # Fallback for lief<0.14. + EXE_FORMATS = lief.EXE_FORMATS except ImportError: have_lief = False @@ -78,15 +88,15 @@ def codefile_class( if not (binary := ensure_binary(path)): return None elif ( - binary.format == lief.EXE_FORMATS.PE - and lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list + binary.format == EXE_FORMATS.PE + and PE_HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list ): return DLLfile - elif binary.format == lief.EXE_FORMATS.PE: + elif binary.format == EXE_FORMATS.PE: return EXEfile - elif binary.format == lief.EXE_FORMATS.MACHO: + elif binary.format == EXE_FORMATS.MACHO: return machofile - elif binary.format == lief.EXE_FORMATS.ELF: + elif binary.format == EXE_FORMATS.ELF: return elffile else: return None @@ -105,7 +115,7 @@ def get_libraries(file): result = [] binary = ensure_binary(file) if binary: - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: result = binary.libraries else: result = [ @@ -113,7 +123,7 @@ def get_libraries(file): ] # LIEF returns LC_ID_DYLIB name @rpath/libbz2.dylib in binary.libraries. Strip that. 
binary_name = None - if binary.format == lief.EXE_FORMATS.MACHO: + if binary.format == EXE_FORMATS.MACHO: binary_name = [ command.name for command in binary.commands @@ -174,7 +184,7 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): rpaths = [] if binary: binary_format = binary.format - if binary_format == lief.EXE_FORMATS.ELF: + if binary_format == EXE_FORMATS.ELF: binary_type = binary.type if ( binary_type == lief.ELF.ELF_CLASS.CLASS32 @@ -182,7 +192,7 @@ def get_rpathy_thing_raw_partial(file, elf_attribute, elf_dyn_tag): ): rpaths = _get_elf_rpathy_thing(binary, elf_attribute, elf_dyn_tag) elif ( - binary_format == lief.EXE_FORMATS.MACHO + binary_format == EXE_FORMATS.MACHO and binary.has_rpath and elf_dyn_tag == lief.ELF.DYNAMIC_TAGS.RPATH ): @@ -232,7 +242,7 @@ def set_rpath(old_matching, new_rpath, file): binary = ensure_binary(file) if not binary: return - if binary.format == lief.EXE_FORMATS.ELF and ( + if binary.format == EXE_FORMATS.ELF and ( binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64 ): @@ -244,7 +254,7 @@ def set_rpath(old_matching, new_rpath, file): def get_rpaths(file, exe_dirname, envroot, windows_root=""): rpaths, rpaths_type, binary_format, binary_type = get_runpaths_or_rpaths_raw(file) - if binary_format == lief.EXE_FORMATS.PE: + if binary_format == EXE_FORMATS.PE: # To allow the unix-y rpath code to work we consider # exes as having rpaths of env + CONDA_WINDOWS_PATHS # and consider DLLs as having no rpaths. @@ -259,9 +269,9 @@ def get_rpaths(file, exe_dirname, envroot, windows_root=""): rpaths.append("/".join((windows_root, "System32", "downlevel"))) rpaths.append(windows_root) if envroot: - # and not lief.PE.HEADER_CHARACTERISTICS.DLL in binary.header.characteristics_list: + # and not .DLL in binary.header.characteristics_list: rpaths.extend(list(_get_path_dirs(envroot))) - elif binary_format == lief.EXE_FORMATS.MACHO: + elif binary_format == EXE_FORMATS.MACHO: rpaths = [rpath.rstrip("/") for rpath in rpaths] return [from_os_varnames(binary_format, binary_type, rpath) for rpath in rpaths] @@ -299,13 +309,13 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): def to_os_varnames(binary, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - if binary.format == lief.EXE_FORMATS.MACHO: + if binary.format == EXE_FORMATS.MACHO: return ( input_.replace("$SELFDIR", "@loader_path") .replace("$EXEDIR", "@executable_path") .replace("$RPATH", "@rpath") ) - elif binary.format == lief.EXE_FORMATS.ELF: + elif binary.format == EXE_FORMATS.ELF: if binary.ehdr.sz_ptr == 8: libdir = "/lib64" else: @@ -315,19 +325,19 @@ def to_os_varnames(binary, input_): def from_os_varnames(binary_format, binary_type, input_): """Don't make these functions - they are methods to match the API for elffiles.""" - if binary_format == lief.EXE_FORMATS.MACHO: + if binary_format == EXE_FORMATS.MACHO: return ( input_.replace("@loader_path", "$SELFDIR") .replace("@executable_path", "$EXEDIR") .replace("@rpath", "$RPATH") ) - elif binary_format == lief.EXE_FORMATS.ELF: + elif binary_format == EXE_FORMATS.ELF: if binary_type == lief.ELF.ELF_CLASS.CLASS64: libdir = "/lib64" else: libdir = "/lib" return input_.replace("$ORIGIN", "$SELFDIR").replace("$LIB", libdir) - elif binary_format == lief.EXE_FORMATS.PE: + elif binary_format == EXE_FORMATS.PE: return input_ @@ -344,10 +354,10 @@ def _get_path_dirs(prefix): def get_uniqueness_key(file): binary = ensure_binary(file) if not binary: - return 
lief.EXE_FORMATS.UNKNOWN - elif binary.format == lief.EXE_FORMATS.MACHO: - return binary.name - elif binary.format == lief.EXE_FORMATS.ELF and ( # noqa + return EXE_FORMATS.UNKNOWN + elif binary.format == EXE_FORMATS.MACHO: + return str(file) + elif binary.format == EXE_FORMATS.ELF and ( # noqa binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64 ): @@ -357,8 +367,8 @@ def get_uniqueness_key(file): ] if result: return result[0] - return binary.name - return binary.name + return str(file) + return str(file) def _get_resolved_location( @@ -467,7 +477,7 @@ def inspect_linkages_lief( default_paths = [] if not binary: default_paths = [] - elif binary.format == lief.EXE_FORMATS.ELF: + elif binary.format == EXE_FORMATS.ELF: if binary.type == lief.ELF.ELF_CLASS.CLASS64: default_paths = [ "$SYSROOT/lib64", @@ -477,9 +487,9 @@ def inspect_linkages_lief( ] else: default_paths = ["$SYSROOT/lib", "$SYSROOT/usr/lib"] - elif binary.format == lief.EXE_FORMATS.MACHO: + elif binary.format == EXE_FORMATS.MACHO: default_paths = ["$SYSROOT/usr/lib"] - elif binary.format == lief.EXE_FORMATS.PE: + elif binary.format == EXE_FORMATS.PE: # We do not include C:\Windows nor C:\Windows\System32 in this list. They are added in # get_rpaths() instead since we need to carefully control the order. default_paths = [ @@ -499,7 +509,7 @@ def inspect_linkages_lief( uniqueness_key = get_uniqueness_key(binary) if uniqueness_key not in already_seen: parent_exe_dirname = None - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: tmp_filename = filename2 while tmp_filename: if ( @@ -519,7 +529,7 @@ def inspect_linkages_lief( ) tmp_filename = filename2 rpaths_transitive = [] - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: rpaths_transitive = rpaths_by_binary[tmp_filename] else: while tmp_filename: @@ -534,7 +544,7 @@ def inspect_linkages_lief( "$RPATH/" + lib if not lib.startswith("/") and not lib.startswith("$") - and binary.format != lief.EXE_FORMATS.MACHO # noqa + and binary.format != EXE_FORMATS.MACHO # noqa else lib ) for lib in libraries @@ -556,7 +566,7 @@ def inspect_linkages_lief( # can be run case-sensitively if the user wishes. # """ - if binary.format == lief.EXE_FORMATS.PE: + if binary.format == EXE_FORMATS.PE: import random path_fixed = ( os.path.dirname(path_fixed) diff --git a/news/5228-lief-0.14-compat b/news/5228-lief-0.14-compat new file mode 100644 index 0000000000..602242c7d4 --- /dev/null +++ b/news/5228-lief-0.14-compat @@ -0,0 +1,19 @@ +### Enhancements + +* Add compatibility for LIEF=0.14. (#5227 via #5228) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* From d384ef09dfb59ae90c07021a71ea76ed26675eae Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Mon, 11 Mar 2024 17:02:17 -0500 Subject: [PATCH 296/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5230)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- HOW_WE_USE_GITHUB.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index d0a4f4266f..46a13ecd98 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -225,7 +225,7 @@ This is a duplicate of [link to primary issue]; please feel free to conti
 
 Please uninstall your current version of `conda` and reinstall the latest version.
-Feel free to use either the [miniconda](https://docs.conda.io/en/latest/miniconda.html)
+Feel free to use either the [miniconda](https://docs.anaconda.com/free/miniconda/)
 or [anaconda](https://www.anaconda.com/products/individual) installer,
 whichever is more appropriate for your needs.
 
From 17c51bdf3faeae9a8b609909f1acd1fef1594a1f Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 12 Mar 2024 08:35:14 -0500 Subject: [PATCH 297/366] Sync CI with conda (#5146) --- ci/github/.condarc => .github/condarc | 2 - .github/workflows/tests.yml | 359 +++++++++--------- pyproject.toml | 8 + ...ments-linux.txt => requirements-Linux.txt} | 0 ...s-windows.txt => requirements-Windows.txt} | 0 tests/requirements-ci.txt | 19 + ...ments-macos.txt => requirements-macOS.txt} | 0 tests/requirements.txt | 23 +- 8 files changed, 211 insertions(+), 200 deletions(-) rename ci/github/.condarc => .github/condarc (73%) rename tests/{requirements-linux.txt => requirements-Linux.txt} (100%) rename tests/{requirements-windows.txt => requirements-Windows.txt} (100%) create mode 100644 tests/requirements-ci.txt rename tests/{requirements-macos.txt => requirements-macOS.txt} (100%) diff --git a/ci/github/.condarc b/.github/condarc similarity index 73% rename from ci/github/.condarc rename to .github/condarc index 44a36fcc35..a76e773f8f 100644 --- a/ci/github/.condarc +++ b/.github/condarc @@ -2,7 +2,5 @@ auto_update_conda: False auto_activate_base: True notify_outdated_conda: False changeps1: False -pkgs_dirs: -- /usr/share/miniconda/envs/test/pkgs always_yes: True local_repodata_ttl: 7200 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c0b0e8ff59..090c389a6b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,6 +1,3 @@ -# this is the sibling workflow to tests-skip.yml, it is required to work around -# the skipped but required checks issue: -# https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks name: Tests on: @@ -32,20 +29,29 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +env: + # https://conda.github.io/conda-libmamba-solver/user-guide/configuration/#advanced-options + CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED: true + jobs: # detect whether any code changes are included in this PR changes: runs-on: ubuntu-latest permissions: + # necessary to detect changes + # https://github.com/dorny/paths-filter#supported-workflows pull-requests: read outputs: code: ${{ steps.filter.outputs.code }} steps: - - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@v4 # dorny/paths-filter needs git clone for non-PR events - # https://github.com/marketplace/actions/paths-changes-filter#supported-workflows + # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' - - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 + + - name: Filter Changes + uses: dorny/paths-filter@v3 id: filter with: filters: | @@ -65,6 +71,7 @@ jobs: runs-on: ubuntu-latest defaults: run: + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell shell: bash -el {0} strategy: fail-fast: false @@ -89,75 +96,70 @@ jobs: conda-version: canary test-type: parallel env: - CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} - CONDA_VERSION: ${{ contains('canary,release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }} - REPLAY_NAME: Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }} - REPLAY_DIR: ${{ github.workspace }}/pytest-replay + CONDA_CHANNEL_LABEL: ${{ 
matrix.conda-version == 'canary' && 'conda-canary/label/dev::' || '' }} + CONDA_VERSION: ${{ contains('canary|release', matrix.conda-version) && 'conda' || format('conda={0}', matrix.conda-version) }} PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} - PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Timestamp - run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV - shell: bash + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - - name: Cache conda - uses: actions/cache@v3 + - name: Cache Conda + uses: actions/cache@v4 with: path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} + key: cache-${{ env.HASH }} - - name: Setup miniconda - uses: conda-incubator/setup-miniconda@v2 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 with: - condarc-file: ./ci/github/.condarc - python-version: ${{ matrix.python-version }} + condarc-file: .github/condarc run-post: false # skip post cleanup - - name: Setup environment - run: | - conda install -q -y -c defaults \ - --file ./tests/requirements.txt \ - --file ./tests/requirements-linux.txt \ - ${{ env.CONDA_CHANNEL_LABEL }}::${{ env.CONDA_VERSION }} - pip install -e . --no-deps - - - name: Show info - run: | - conda info -a - conda list --show-channel-urls - - - name: Run tests - run: | - pytest \ - --color=yes \ - -v \ - -n "${{ env.PYTEST_NUMPROCESSES }}" \ - --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ - --cov conda_build \ - --cov-append \ - --cov-branch \ - --cov-report xml \ - --replay-record-dir="${{ env.REPLAY_DIR }}" \ - --replay-base-name="${{ env.REPLAY_NAME }}" \ - -m "${{ env.PYTEST_MARKER }}" \ - ./tests - - - uses: codecov/codecov-action@v3 + - name: Conda Install + run: conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}${{ env.CONDA_VERSION }} + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . 
+ + - name: Conda Info + run: conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + run: pytest + --cov=conda_build + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@v4 with: - flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64 + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Upload Pytest Replay + - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} - path: ${{ env.REPLAY_DIR }} + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below # windows test suite windows: @@ -181,77 +183,77 @@ jobs: conda-version: canary test-type: parallel env: + ErrorActionPreference: Stop # powershell exit on first error CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} - REPLAY_NAME: Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }} - REPLAY_DIR: ${{ github.workspace }}\pytest-replay PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial and not slow' }} - PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Timestamp - run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV - shell: bash + - name: Hash + Timestamp + shell: bash # use bash to run date command + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - - name: Cache conda - uses: actions/cache@v3 + - name: Cache Conda + uses: actions/cache@v4 with: path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} + key: cache-${{ env.HASH }} - - name: Setup miniconda - uses: conda-incubator/setup-miniconda@v2 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 with: - condarc-file: .\ci\github\.condarc - python-version: ${{ matrix.python-version }} + condarc-file: .github\condarc run-post: false # skip post cleanup - - name: Setup environment - shell: cmd /C CALL {0} - run: | - @echo on - CALL choco install visualstudio2017-workload-vctools || exit 1 - CALL conda install -q -y -c defaults ^ - --file .\tests\requirements.txt ^ - --file .\tests\requirements-windows.txt ^ - ${{ env.CONDA_CHANNEL_LABEL }}::conda || exit 1 - CALL pip install -e . 
--no-deps || exit 1 - - - name: Show info - run: | - conda info -a - conda list --show-channel-urls - - - name: Run tests - run: | - pytest ` - --color=yes ` - -v ` - -n "${{ env.PYTEST_NUMPROCESSES }}" ` - --basetemp "${{ runner.temp }}\${{ matrix.test-type}}" ` - --cov conda_build ` - --cov-append ` - --cov-branch ` - --cov-report xml ` - --replay-record-dir="${{ env.REPLAY_DIR }}" ` - --replay-base-name="${{ env.REPLAY_NAME }}" ` - -m "${{ env.PYTEST_MARKER }}" ` - .\tests - - - uses: codecov/codecov-action@v3 + - name: Choco Install + run: choco install visualstudio2017-workload-vctools + + - name: Conda Install + run: conda install + --yes + --file tests\requirements.txt + --file tests\requirements-${{ runner.os }}.txt + --file tests\requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}::conda + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . + + - name: Conda Info + run: conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + # Windows is sensitive to long paths, using `--basetemp=${{ runner.temp }} to + # keep the test directories shorter + run: pytest + --cov=conda_build + --basetemp=${{ runner.temp }} + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@v4 with: - flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64 + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Upload Pytest Replay + - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - path: ${{ env.REPLAY_DIR }} - name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below # macos test suite macos: @@ -262,6 +264,7 @@ jobs: runs-on: macos-11 defaults: run: + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell shell: bash -el {0} strategy: fail-fast: false @@ -279,74 +282,71 @@ jobs: test-type: parallel env: CONDA_CHANNEL_LABEL: ${{ matrix.conda-version == 'canary' && 'conda-canary/label/dev' || 'defaults' }} - REPLAY_NAME: macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }} - REPLAY_DIR: ${{ github.workspace }}/pytest-replay PYTEST_MARKER: ${{ matrix.test-type == 'serial' && 'serial' || 'not serial' }} - PYTEST_NUMPROCESSES: ${{ matrix.test-type == 'serial' && 0 || 'auto' }} steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Timestamp - run: echo "TIMESTAMP=$(date -u "+%Y%m")" >> $GITHUB_ENV - shell: bash + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - - name: Cache conda - uses: actions/cache@v3 + - name: Cache Conda + uses: actions/cache@v4 with: path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.TIMESTAMP }} + key: cache-${{ env.HASH }} - - name: Setup miniconda - uses: conda-incubator/setup-miniconda@v2 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 with: - condarc-file: ./ci/github/.condarc - python-version: ${{ matrix.python-version }} + condarc-file: .github/condarc run-post: false # skip post cleanup - - name: Setup environment - run: | - sudo xcode-select --switch 
/Applications/Xcode_11.7.app - conda install -q -y -c defaults \ - --file ./tests/requirements.txt \ - --file ./tests/requirements-macos.txt \ - ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install -e . --no-deps - - - name: Show info - run: | - conda info -a - conda list --show-channel-urls + - name: Xcode Install + run: sudo xcode-select --switch /Applications/Xcode_11.7.app - - name: Run tests - run: | - pytest \ - --color=yes \ - -v \ - -n "${{ env.PYTEST_NUMPROCESSES }}" \ - --basetemp "${{ runner.temp }}/${{ matrix.test-type }}" \ - --cov conda_build \ - --cov-append \ - --cov-branch \ - --cov-report xml \ - --replay-record-dir="${{ env.REPLAY_DIR }}" \ - --replay-base-name="${{ env.REPLAY_NAME }}" \ - -m "${{ env.PYTEST_MARKER }}" \ - ./tests - - - uses: codecov/codecov-action@v3 + - name: Conda Install + run: conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}::conda + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . + + - name: Conda Info + run: conda info --verbose + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Tests + run: pytest + --cov=conda_build + -n auto + -m "${{ env.PYTEST_MARKER }}" + + - name: Upload Coverage + uses: codecov/codecov-action@v4 with: - flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64 + flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - - name: Upload Pytest Replay + - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} - path: ${{ env.REPLAY_DIR }} + name: test-results-${{ env.HASH }} + path: | + .coverage + test-report.xml + retention-days: 1 # temporary, combined in aggregate below # aggregate and upload aggregate: @@ -361,40 +361,37 @@ jobs: runs-on: ubuntu-latest steps: - - name: Download test results - uses: actions/download-artifact@v3 + - name: Download Artifacts + uses: actions/download-artifact@v4 - - name: Upload combined test results - # provides one downloadable archive of all .coverage/test-report.xml files - # of all matrix runs for further analysis. 
- uses: actions/upload-artifact@v3 + - name: Upload Combined Test Results + # provides one downloadable archive of all matrix run test results for further analysis + uses: actions/upload-artifact@v4 with: name: test-results-${{ github.sha }}-all - path: test-results-${{ github.sha }}-* - retention-days: 90 # default: 90 + path: test-results-* - name: Test Summary uses: test-summary/action@v2 with: - paths: ./test-results-${{ github.sha }}-**/test-report*.xml + paths: test-results-*/test-report.xml # required check analyze: - name: Analyze results needs: [linux, windows, macos, aggregate] if: '!cancelled()' runs-on: ubuntu-latest steps: - - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe + - name: Determine Success + uses: re-actors/alls-green@v1.2.2 with: + # permit jobs to be skipped if there are no code changes (see changes job) allowed-skips: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }} # canary builds build: - name: Canary Build needs: [analyze] # only build canary build if # - prior steps succeeded, @@ -415,24 +412,28 @@ jobs: subdir: linux-64 - runner: macos-latest subdir: osx-64 + - runner: macos-14 + subdir: osx-arm64 - runner: windows-latest subdir: win-64 runs-on: ${{ matrix.runner }} steps: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - name: Checkout Source + uses: actions/checkout@v4 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 # Explicitly use Python 3.12 since each of the OSes has a different default Python - - uses: actions/setup-python@v4 + - name: Setup Python + uses: actions/setup-python@v4 with: python-version: '3.12' - - name: Detect label + - name: Detect Label shell: python run: | from pathlib import Path @@ -453,8 +454,8 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - - name: Create and upload canary build - uses: conda/actions/canary-release@v23.7.0 + - name: Create & Upload + uses: conda/actions/canary-release@v24.2.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/pyproject.toml b/pyproject.toml index e8cfc5e011..dd3e95dd56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,8 +133,16 @@ addopts = [ # "--store-durations", # not available yet "--strict-markers", "--tb=native", + "--xdoctest-modules", + "--xdoctest-style=google", "-vv", ] +doctest_optionflags = [ + "NORMALIZE_WHITESPACE", + "IGNORE_EXCEPTION_DETAIL", + "ALLOW_UNICODE", + "ELLIPSIS", +] markers = [ "serial: execute test serially (to avoid race conditions)", "slow: execute the slow tests if active", diff --git a/tests/requirements-linux.txt b/tests/requirements-Linux.txt similarity index 100% rename from tests/requirements-linux.txt rename to tests/requirements-Linux.txt diff --git a/tests/requirements-windows.txt b/tests/requirements-Windows.txt similarity index 100% rename from tests/requirements-windows.txt rename to tests/requirements-Windows.txt diff --git a/tests/requirements-ci.txt b/tests/requirements-ci.txt new file mode 100644 index 0000000000..23d78bb0b2 --- /dev/null +++ b/tests/requirements-ci.txt @@ -0,0 +1,19 @@ +anaconda-client +conda-forge::xdoctest +conda-verify +contextlib2 +coverage +cytoolz +git +numpy +perl +pip +pyflakes +pytest +pytest-cov +pytest-forked +pytest-mock +pytest-rerunfailures +pytest-xdist +ruamel.yaml +tomli # [py<3.11] for coverage pyproject.toml diff --git 
a/tests/requirements-macos.txt b/tests/requirements-macOS.txt similarity index 100% rename from tests/requirements-macos.txt rename to tests/requirements-macOS.txt diff --git a/tests/requirements.txt b/tests/requirements.txt index 5f96c8fd66..5e94d4111a 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,37 +1,22 @@ beautifulsoup4 chardet conda >=23.5.0 -conda-forge::anaconda-client conda-index >=0.4.0 +conda-libmamba-solver # ensure we use libmamba conda-package-handling >=1.3 -conda-verify -contextlib2 -cytoolz filelock -git jinja2 jsonschema >=4.19 menuinst >=2 -numpy packaging -perl -pip pkginfo psutil py-lief -pyflakes -pytest -pytest-cov -pytest-forked -pytest-mock -pytest-replay -pytest-rerunfailures -pytest-xdist +python >=3.8 python-libarchive-c pytz +pyyaml requests -ripgrep -ruamel.yaml +ripgrep # for faster grep setuptools_scm # needed for devenv version detection -tomli # [py<3.11] for coverage pyproject.toml tqdm From 1b3352e37e887221f24ba94598908edbe7265de0 Mon Sep 17 00:00:00 2001 From: Katherine Kinnaman Date: Tue, 12 Mar 2024 09:00:25 -0500 Subject: [PATCH 298/366] Update advice on installing conda-build (#5223) --- docs/source/user-guide/getting-started.rst | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/source/user-guide/getting-started.rst b/docs/source/user-guide/getting-started.rst index 113632fe73..64c82e292e 100644 --- a/docs/source/user-guide/getting-started.rst +++ b/docs/source/user-guide/getting-started.rst @@ -16,16 +16,17 @@ Prerequisites Before starting the tutorials, you need to install: -- `Miniconda or Anaconda `_ +- `Miniconda `_ or `Anaconda `_ - conda-build - Git The most straightforward way to do this is to install Miniconda or Anaconda, which contain conda, and then use conda to install conda-build -and Git. Make sure you install these packages into a new environment -and not your base environment.:: +and Git. Make sure you install these packages into your base environment.:: - conda create -n my-conda-build-environment conda-build git + conda install -n base conda-build git + +For more information on installing and updating conda-build, see :doc:`Installing and updating conda-build <../install-conda-build>`. .. _submissions: From 67b90dd789f12317961ea72e4c5a31e1bb0fe428 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 15 Mar 2024 11:34:39 -0500 Subject: [PATCH 299/366] Ensure we are testing development conda-build (#5236) --- .github/workflows/tests.yml | 9 ++++++--- tests/conftest.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 090c389a6b..201ef3e0d6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -135,7 +135,8 @@ jobs: run: pip install -e . - name: Conda Info - run: conda info --verbose + # view test env info (not base) + run: python -m conda info --verbose - name: Conda List run: conda list --show-channel-urls @@ -226,7 +227,8 @@ jobs: run: pip install -e . - name: Conda Info - run: conda info --verbose + # view test env info (not base) + run: python -m conda info --verbose - name: Conda List run: conda list --show-channel-urls @@ -322,7 +324,8 @@ jobs: run: pip install -e . 
- name: Conda Info - run: conda info --verbose + # view test env info (not base) + run: python -m conda info --verbose - name: Conda List run: conda list --show-channel-urls diff --git a/tests/conftest.py b/tests/conftest.py index f347317d90..f055b05d80 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,6 +12,7 @@ from conda.common.compat import on_mac, on_win from pytest import MonkeyPatch +import conda_build import conda_build.config from conda_build.config import ( Config, @@ -32,6 +33,14 @@ from conda_build.variants import get_default_variant +@pytest.hookimpl +def pytest_report_header(config: pytest.Config): + # ensuring the expected development conda is being run + expected = Path(__file__).parent.parent / "conda_build" / "__init__.py" + assert expected.samefile(conda_build.__file__) + return f"conda_build.__file__: {conda_build.__file__}" + + @pytest.fixture(scope="function") def testing_workdir(monkeypatch: MonkeyPatch, tmp_path: Path) -> Iterator[str]: """Create a workdir in a safe temporary folder; cd into dir above before test, cd out after From 3584ec9d4be9ff8f49d2c3473ffbd4bff13407a1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 18:55:09 +0000 Subject: [PATCH 300/366] [pre-commit.ci] pre-commit autoupdate (#5232) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.2.2 → v0.3.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.2...v0.3.2) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Ruff corrections --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- .pre-commit-config.yaml | 2 +- conda_build/__version__.py | 1 + conda_build/_link.py | 1 + conda_build/api.py | 1 + conda_build/build.py | 1 + conda_build/config.py | 1 + conda_build/convert.py | 1 + conda_build/create_test.py | 1 + conda_build/deprecations.py | 1 + conda_build/jinja_context.py | 36 +++---------------- conda_build/metadata.py | 12 +++---- conda_build/os_utils/macho.py | 8 ++--- conda_build/post.py | 6 ++-- conda_build/skeletons/cpan.py | 3 +- conda_build/skeletons/cran.py | 1 + conda_build/skeletons/pypi.py | 1 + conda_build/source.py | 5 ++- conda_build/utils.py | 11 +++--- conda_build/variants.py | 29 +++++---------- .../bdist-recipe/conda_build_test/__init__.py | 1 + tests/test_api_build.py | 17 +++++---- tests/test_api_debug.py | 1 + tests/test_api_render.py | 1 + tests/test_api_skeleton_cpan.py | 1 - tests/test_api_skeleton_cran.py | 1 + tests/test_api_test.py | 1 + tests/test_build.py | 1 + tests/test_cpan_skeleton.py | 1 - tests/test_cran_skeleton.py | 1 + tests/test_develop.py | 1 + 30 files changed, 61 insertions(+), 88 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3277627305..0941dd3f5c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.2 + rev: v0.3.2 hooks: # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff diff --git a/conda_build/__version__.py b/conda_build/__version__.py index e835e1be9d..e664582d94 100644 --- a/conda_build/__version__.py +++ b/conda_build/__version__.py @@ -6,6 +6,7 @@ Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: YY.MM.MICRO.devN+gHASH[.dirty] """ + try: from setuptools_scm import get_version diff --git a/conda_build/_link.py b/conda_build/_link.py index 21ea66aaed..af841c0275 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -4,6 +4,7 @@ This is code that is added to noarch Python packages. See conda_build/noarch_python.py. """ + from __future__ import annotations import os diff --git a/conda_build/api.py b/conda_build/api.py index a8fc525e66..f83e235354 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -8,6 +8,7 @@ Design philosophy: put variability into config. Make each function here accept kwargs, but only use those kwargs in config. Config must change to support new features elsewhere. """ + from __future__ import annotations # imports are done locally to keep the api clean and limited strictly diff --git a/conda_build/build.py b/conda_build/build.py index 9e62fc7293..f2ffb06cdf 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3,6 +3,7 @@ """ Module that does most of the heavy lifting for the ``conda build`` command. """ + import fnmatch import json import os diff --git a/conda_build/config.py b/conda_build/config.py index 5d4ba590d3..9a33dac858 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -3,6 +3,7 @@ """ Module to store conda build settings. """ + from __future__ import annotations import copy diff --git a/conda_build/convert.py b/conda_build/convert.py index c2882d1508..793f0dc93c 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -3,6 +3,7 @@ """ Tools for converting conda packages """ + import glob import hashlib import json diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 1788bbe97d..1a8a0f1c34 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -3,6 +3,7 @@ """ Module to handle generating test files. """ + from __future__ import annotations import json diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index 67ceb59c7f..494f0f85f1 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -1,6 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """Tools to aid in deprecating code.""" + from __future__ import annotations import sys diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 2ef9b910d1..cc5c3b24c7 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -73,39 +73,13 @@ def __init__( # Using any of these methods on an Undefined variable # results in another Undefined variable. 
- __add__ = ( - __radd__ - ) = ( - __mul__ - ) = ( - __rmul__ - ) = ( - __div__ - ) = ( - __rdiv__ - ) = ( - __truediv__ - ) = ( + __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = __truediv__ = ( __rtruediv__ - ) = ( - __floordiv__ - ) = ( - __rfloordiv__ - ) = ( - __mod__ - ) = ( - __rmod__ - ) = ( - __pos__ - ) = ( - __neg__ - ) = ( + ) = __floordiv__ = __rfloordiv__ = __mod__ = __rmod__ = __pos__ = __neg__ = ( __call__ - ) = ( - __getitem__ - ) = __lt__ = __le__ = __gt__ = __ge__ = __complex__ = __pow__ = __rpow__ = ( - lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) - ) + ) = __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __complex__ = __pow__ = ( + __rpow__ + ) = lambda self, *args, **kwargs: self._return_undefined(self._undefined_name) # Accessing an attribute of an Undefined variable # results in another Undefined variable. diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 633b6de8fc..e3b814d8a7 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1341,8 +1341,7 @@ def fromdict(cls, metadata, config=None, variant=None): return m @overload - def get_section(self, section: Literal["source", "outputs"]) -> list[dict]: - ... + def get_section(self, section: Literal["source", "outputs"]) -> list[dict]: ... @overload def get_section( @@ -1356,8 +1355,7 @@ def get_section( "about", "extra", ], - ) -> dict: - ... + ) -> dict: ... def get_section(self, name): section = self.meta.get(name) @@ -2553,9 +2551,9 @@ def get_output_metadata_set( ) ] = (out, out_metadata) out_metadata_map[HashableDict(out)] = out_metadata - ref_metadata.other_outputs = ( - out_metadata.other_outputs - ) = all_output_metadata + ref_metadata.other_outputs = out_metadata.other_outputs = ( + all_output_metadata + ) except SystemExit: if not permit_undefined_jinja: raise diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 950ebd6d57..516df7a0a6 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -191,12 +191,10 @@ def find_apple_cctools_executable(name, build_prefix, nofail=False): tool = tool_xcr if os.path.exists(tool): return tool - except Exception as _: # noqa + except Exception: # noqa print( - "ERROR :: Failed to run `{}`. Please use `conda` to install `cctools` into your base environment.\n" - " An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`.".format( - tool - ) + f"ERROR :: Failed to run `{tool}`. Use `conda` to install `cctools` into your base environment.\n" + f" An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`." 
) sys.exit(1) return tool diff --git a/conda_build/post.py b/conda_build/post.py index 4512c9e508..eea8a584b6 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -1402,9 +1402,11 @@ def check_overlinking_impl( if diffs: log = utils.get_logger(__name__) log.warning( - "Partially parsed some '.tbd' files in sysroot {}, pretending .tbds are their install-names\n" + "Partially parsed some '.tbd' files in sysroot %s, pretending .tbds are their install-names\n" "Adding support to 'conda-build' for parsing these in 'liefldd.py' would be easy and useful:\n" - "{} ...".format(sysroot, list(diffs)[1:3]) + "%s...", + sysroot, + list(diffs)[1:3], ) sysroots_files[srs] = sysroot_files sysroots_files = OrderedDict( diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 891f62f3cb..8d1d996e1b 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -3,6 +3,7 @@ """ Tools for converting CPAN packages to conda recipes. """ + import codecs import gzip import hashlib @@ -149,7 +150,6 @@ class InvalidReleaseError(RuntimeError): - """ An exception that is raised when a release is not available on MetaCPAN. """ @@ -158,7 +158,6 @@ class InvalidReleaseError(RuntimeError): class PerlTmpDownload(TmpDownload): - """ Subclass Conda's TmpDownload to replace : in download filenames. Critical on win. diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 423941164e..7140c9a89f 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -3,6 +3,7 @@ """ Tools for converting Cran packages to conda recipes. """ + from __future__ import annotations import argparse diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 92e2ff9efd..abf1a173bc 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -3,6 +3,7 @@ """ Tools for converting PyPI packages to conda recipes. """ + import keyword import logging import os diff --git a/conda_build/source.py b/conda_build/source.py index 436a4137b2..3583f0b8ce 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -379,9 +379,8 @@ def git_mirror_checkout_recursive( ) if verbose: print( - "Relative submodule {} found: url is {}, submod_mirror_dir is {}".format( - submod_name, submod_url, submod_mirror_dir - ) + f"Relative submodule {submod_name} found: url is {submod_url}, " + f"submod_mirror_dir is {submod_mirror_dir}" ) with TemporaryDirectory() as temp_checkout_dir: git_mirror_checkout_recursive( diff --git a/conda_build/utils.py b/conda_build/utils.py index 7635c45a6f..36a65ead4e 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1132,8 +1132,9 @@ def convert_path_for_cygwin_or_msys2(exe, path): def get_skip_message(m): - return "Skipped: {} from {} defines build/skip for this configuration ({}).".format( - m.name(), m.path, {k: m.config.variant[k] for k in m.get_used_vars()} + return ( + f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration " + f"({({k: m.config.variant[k] for k in m.get_used_vars()})})." ) @@ -1897,13 +1898,11 @@ def sort_list_in_nested_structure(dictionary, omissions=""): @overload -def ensure_valid_spec(spec: str, warn: bool = False) -> str: - ... +def ensure_valid_spec(spec: str, warn: bool = False) -> str: ... @overload -def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec: - ... +def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec: ... 
def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchSpec: diff --git a/conda_build/variants.py b/conda_build/variants.py index 2ece5f4bd6..e4c541ed96 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: BSD-3-Clause """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" + import os.path import re import sys @@ -303,15 +304,10 @@ def _combine_spec_dictionaries( ensure_list(v) ): raise ValueError( - "All entries associated by a zip_key " - "field must be the same length. In {}, {} and {} are " - "different ({} and {})".format( - spec_source, - k, - group_item, - len(ensure_list(v)), - len(ensure_list(spec[group_item])), - ) + f"All entries associated by a zip_key " + f"field must be the same length. In {spec_source}, {k} and {group_item} " + f"are different ({len(ensure_list(v))} and " + f"{len(ensure_list(spec[group_item]))})" ) values[group_item] = ensure_list(spec[group_item]) elif k in values: @@ -338,17 +334,10 @@ def _combine_spec_dictionaries( ] if len(missing_subvalues): raise ValueError( - "variant config in {} is ambiguous because it\n" - "does not fully implement all zipped keys (To be clear: missing {})\n" - "or specifies a subspace that is not fully implemented (To be clear:\n" - ".. we did not find {} from {} in {}:{}).".format( - spec_source, - missing_group_items, - missing_subvalues, - spec, - k, - values[k], - ) + f"variant config in {spec_source} is ambiguous because it does not fully " + f"implement all zipped keys (missing {missing_group_items}) or specifies a " + f"subspace that is not fully implemented (we did not find {missing_subvalues} " + f"from {spec} in {k}:{values[k]})." ) return values diff --git a/tests/bdist-recipe/conda_build_test/__init__.py b/tests/bdist-recipe/conda_build_test/__init__.py index 3574c4128a..1f22b11325 100644 --- a/tests/bdist-recipe/conda_build_test/__init__.py +++ b/tests/bdist-recipe/conda_build_test/__init__.py @@ -3,4 +3,5 @@ """ conda build test package """ + print("conda_build_test has been imported") diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 0d2bd3b5f0..fab08891c2 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -3,6 +3,7 @@ """ This module tests the build API. These are high-level integration tests. 
""" + from __future__ import annotations import json @@ -306,9 +307,7 @@ def test_output_build_path_git_source(testing_config): test_path = os.path.join( testing_config.croot, testing_config.host_subdir, - "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( - sys.version_info.major, sys.version_info.minor, _hash - ), + f"conda-build-test-source-git-jinja2-1.20.2-py{sys.version_info.major}{sys.version_info.minor}{_hash}_0_g262d444.tar.bz2", ) assert output == test_path @@ -819,15 +818,15 @@ def test_noarch(testing_workdir): def test_disable_pip(testing_metadata): testing_metadata.config.disable_pip = True testing_metadata.meta["requirements"] = {"host": ["python"], "run": ["python"]} - testing_metadata.meta["build"][ - "script" - ] = 'python -c "import pip; print(pip.__version__)"' + testing_metadata.meta["build"]["script"] = ( + 'python -c "import pip; print(pip.__version__)"' + ) with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) - testing_metadata.meta["build"][ - "script" - ] = 'python -c "import setuptools; print(setuptools.__version__)"' + testing_metadata.meta["build"]["script"] = ( + 'python -c "import setuptools; print(setuptools.__version__)"' + ) with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) diff --git a/tests/test_api_debug.py b/tests/test_api_debug.py index 42fa1275fe..af24d8acfb 100644 --- a/tests/test_api_debug.py +++ b/tests/test_api_debug.py @@ -4,6 +4,7 @@ This module tests the test API. These are high-level integration tests. Lower level unit tests should go in test_render.py """ + from __future__ import annotations import subprocess diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 868053876b..5a4c722836 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -4,6 +4,7 @@ This module tests the test API. These are high-level integration tests. Lower level unit tests should go in test_render.py """ + import os import re diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py index 238635ba50..9f08ccbae6 100644 --- a/tests/test_api_skeleton_cpan.py +++ b/tests/test_api_skeleton_cpan.py @@ -5,7 +5,6 @@ conda_build.api.skeletonize and check the output files """ - import pytest from conda_build import api diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 9b62b4ac30..912b2bee0c 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -4,6 +4,7 @@ Integrative tests of the CRAN skeleton that start from conda_build.api.skeletonize and check the output files """ + from pathlib import Path from typing import Sequence diff --git a/tests/test_api_test.py b/tests/test_api_test.py index a258bbba0d..2bb76838aa 100644 --- a/tests/test_api_test.py +++ b/tests/test_api_test.py @@ -3,6 +3,7 @@ """ This module tests the test API. These are high-level integration tests. """ + import os import pytest diff --git a/tests/test_build.py b/tests/test_build.py index f11be7727c..16bffa648f 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -4,6 +4,7 @@ This file tests the build.py module. It sits lower in the stack than the API tests, and is more unit-test oriented. 
""" + import json import os import sys diff --git a/tests/test_cpan_skeleton.py b/tests/test_cpan_skeleton.py index 2b726f6c88..1b02331f4b 100644 --- a/tests/test_cpan_skeleton.py +++ b/tests/test_cpan_skeleton.py @@ -4,7 +4,6 @@ Unit tests of the CPAN skeleton utility functions """ - from pathlib import Path import pytest diff --git a/tests/test_cran_skeleton.py b/tests/test_cran_skeleton.py index a0f5575114..0db839a8f4 100644 --- a/tests/test_cran_skeleton.py +++ b/tests/test_cran_skeleton.py @@ -3,6 +3,7 @@ """ Unit tests of the CRAN skeleton utility functions """ + import os import pytest diff --git a/tests/test_develop.py b/tests/test_develop.py index add9f65c03..d72bb247d3 100644 --- a/tests/test_develop.py +++ b/tests/test_develop.py @@ -3,6 +3,7 @@ """ Simple tests for testing functions in develop module - lower level than going through API. """ + from pathlib import Path from typing import Generator From 1a7e07cc6aa2170060e38a600389789d0ef47d68 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 15 Mar 2024 14:51:24 -0500 Subject: [PATCH 301/366] Enable CodSpeed and add select lines benchmark (#5233) * Add CodSpeed * Indent test_select_lines * Add test_select_lines_battery * Fix & limit test_which_package_battery --- .github/workflows/tests.yml | 73 ++++++++++++++++++- news/5233-enable-codspeed | 19 +++++ tests/test_inspect_pkg.py | 10 ++- tests/test_metadata.py | 139 +++++++++++++++++++++++++----------- 4 files changed, 197 insertions(+), 44 deletions(-) create mode 100644 news/5233-enable-codspeed diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 201ef3e0d6..ee71e1a826 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -162,6 +162,75 @@ jobs: test-report.xml retention-days: 1 # temporary, combined in aggregate below + # linux benchmarks + linux-benchmarks: + # only run test suite if there are code changes + needs: changes + if: needs.changes.outputs.code == 'true' + + runs-on: ubuntu-latest + defaults: + run: + # https://github.com/conda-incubator/setup-miniconda#use-a-default-shell + shell: bash -el {0} # bash exit immediately on error + login shell + strategy: + fail-fast: false + matrix: + python-version: ['3.12'] + + steps: + - name: Checkout Source + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Hash + Timestamp + run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-benchmark-$(date -u "+%Y%m")" >> $GITHUB_ENV + + - name: Cache Conda + uses: actions/cache@v4 + with: + path: ~/conda_pkgs_dir + key: cache-${{ env.HASH }} + + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 + with: + condarc-file: .github/condarc + run-post: false # skip post cleanup + + - name: Conda Install + run: conda install + --yes + --file tests/requirements.txt + --file tests/requirements-${{ runner.os }}.txt + --file tests/requirements-ci.txt + python=${{ matrix.python-version }} + ${{ env.CONDA_CHANNEL_LABEL }}${{ env.CONDA_VERSION }} + + - name: Install CodSpeed + run: pip install git+https://github.com/kenodegard/pytest-codspeed.git@fix-outerr-redirects#egg=pytest-codspeed + + # TODO: how can we remove this step? + - name: Install Self + run: pip install -e . 
+ + - name: Conda Info + # view test env info (not base) + run: python -m conda info --verbose + + - name: Conda Config + run: conda config --show-sources + + - name: Conda List + run: conda list --show-channel-urls + + - name: Run Benchmarks + uses: CodSpeedHQ/action@v2 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: $CONDA/envs/test/bin/pytest --codspeed + # windows test suite windows: # only run test suite if there are code changes @@ -354,7 +423,7 @@ jobs: # aggregate and upload aggregate: # only aggregate test suite if there are code changes - needs: [changes, linux, windows, macos] + needs: [changes, linux, linux-benchmarks, windows, macos] if: >- !cancelled() && ( @@ -381,7 +450,7 @@ jobs: # required check analyze: - needs: [linux, windows, macos, aggregate] + needs: [linux, linux-benchmarks, windows, macos, aggregate] if: '!cancelled()' runs-on: ubuntu-latest diff --git a/news/5233-enable-codspeed b/news/5233-enable-codspeed new file mode 100644 index 0000000000..efb32df4d1 --- /dev/null +++ b/news/5233-enable-codspeed @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Enable CodSpeed benchmarks for select tests. (#5233) diff --git a/tests/test_inspect_pkg.py b/tests/test_inspect_pkg.py index 2f35bd3b0e..dae6d7f6ca 100644 --- a/tests/test_inspect_pkg.py +++ b/tests/test_inspect_pkg.py @@ -207,6 +207,14 @@ def test_which_package(tmp_path: Path): @pytest.mark.benchmark def test_which_package_battery(tmp_path: Path): # regression: https://github.com/conda/conda-build/issues/5126 + + # NOTE: CodSpeed on Python 3.12+ activates the stack profiler trampoline backend + # and thus runs the test twice (once without profiling and once with profiling), + # unfortunately this means that on the second iteration tmp_path is no longer empty + # so we create a randomized unique directory to compensate + tmp_path = tmp_path / uuid4().hex + tmp_path.mkdir() + # create a dummy environment (tmp_path / "conda-meta").mkdir() (tmp_path / "conda-meta" / "history").touch() @@ -214,7 +222,7 @@ def test_which_package_battery(tmp_path: Path): # dummy packages with files removed = [] - for _ in range(100): + for _ in range(10): name = f"package_{uuid4().hex}" # mock a package with 100 files diff --git a/tests/test_metadata.py b/tests/test_metadata.py index b176d4103d..0f6da9b089 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -5,6 +5,7 @@ import os import subprocess import sys +from itertools import product from typing import TYPE_CHECKING import pytest @@ -54,52 +55,108 @@ def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): def test_select_lines(): - lines = """ -test -test [abc] no -test [abc] # no - -test [abc] - 'quoted # [abc] ' - "quoted # [abc] yes " -test # stuff [abc] yes -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} # stuff [abc] yes -test {{ JINJA_VAR[:2] }} # stuff yes [abc] -test {{ JINJA_VAR[:2] }} # [abc] stuff yes -{{ environ["test"] }} # [abc] -""" + lines = "\n".join( + ( + "", + "test", + "test [abc] no", + "test [abc] # no", + " ' test ' ", + ' " test " ', + "", + "# comment line", + "test [abc]", + " 'quoted # [abc] '", + ' "quoted # [abc] yes "', + "test # stuff [abc] yes", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }} # stuff [abc] yes", + "test {{ JINJA_VAR[:2] }} # stuff yes [abc]", + "test {{ JINJA_VAR[:2] }} # [abc] stuff yes", + '{{ environ["test"] }} # [abc]', + "", # trailing newline + ) + ) - assert ( - select_lines(lines, {"abc": True}, 
variants_in_place=True) - == """ -test -test [abc] no -test [abc] # no - -test - 'quoted' - "quoted" -test -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -test {{ JINJA_VAR[:2] }} -{{ environ["test"] }} -""" + assert select_lines(lines, {"abc": True}, variants_in_place=True) == "\n".join( + ( + "", + "test", + "test [abc] no", + "test [abc] # no", + " ' test '", + ' " test "', + "", + "test", + " 'quoted'", + ' "quoted"', + "test", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + "test {{ JINJA_VAR[:2] }}", + '{{ environ["test"] }}', + "", # trailing newline + ) ) - assert ( - select_lines(lines, {"abc": False}, variants_in_place=True) - == """ -test -test [abc] no -test [abc] # no - -test {{ JINJA_VAR[:2] }} -""" + assert select_lines(lines, {"abc": False}, variants_in_place=True) == "\n".join( + ( + "", + "test", + "test [abc] no", + "test [abc] # no", + " ' test '", + ' " test "', + "", + "test {{ JINJA_VAR[:2] }}", + "", # trailing newline + ) ) +@pytest.mark.benchmark +def test_select_lines_battery(): + test_foo = "test [foo]" + test_bar = "test [bar]" + test_baz = "test [baz]" + test_foo_and_bar = "test [foo and bar]" + test_foo_and_baz = "test [foo and baz]" + test_foo_or_bar = "test [foo or bar]" + test_foo_or_baz = "test [foo or baz]" + + lines = "\n".join( + ( + test_foo, + test_bar, + test_baz, + test_foo_and_bar, + test_foo_and_baz, + test_foo_or_bar, + test_foo_or_baz, + ) + * 10 + ) + + for _ in range(10): + for foo, bar, baz in product((True, False), repeat=3): + namespace = {"foo": foo, "bar": bar, "baz": baz} + selection = ( + ["test"] + * ( + foo + + bar + + baz + + (foo and bar) + + (foo and baz) + + (foo or bar) + + (foo or baz) + ) + * 10 + ) + selection = "\n".join(selection) + "\n" # trailing newline + assert select_lines(lines, namespace, variants_in_place=True) == selection + + def test_disallow_leading_period_in_version(testing_metadata): testing_metadata.meta["package"]["version"] = ".ste.ve" testing_metadata.final = True From 1ceaf108955eae953fb8dfbb6e455c11976682ed Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Mon, 18 Mar 2024 10:19:00 -0400 Subject: [PATCH 302/366] Release 24.3.0 (#5240) * Update .authors.yml * Update .mailmap * Updated authorship for 24.3.0 * Updated CHANGELOG for 24.3.0 * Add first-time contributions --- .authors.yml | 25 +++-- .mailmap | 1 + AUTHORS.md | 1 + CHANGELOG.md | 104 +++++++++++++++++++ news/4867-arm64-msvc-env-cmd-no-op | 19 ---- news/5195-fix-stdlib-variant | 19 ---- news/5196-deprecate-bdist-conda | 19 ---- news/5199-deprecate-have_prefix_files | 19 ---- news/5203-remove-deprecations | 80 -------------- news/5208-deprecate-get_output_file_path | 19 ---- news/5219-deprecate-Environment | 19 ---- news/5221-deprecate-get_version_from_git_tag | 19 ---- news/5228-lief-0.14-compat | 19 ---- 13 files changed, 121 insertions(+), 242 deletions(-) delete mode 100644 news/4867-arm64-msvc-env-cmd-no-op delete mode 100644 news/5195-fix-stdlib-variant delete mode 100644 news/5196-deprecate-bdist-conda delete mode 100644 news/5199-deprecate-have_prefix_files delete mode 100644 news/5203-remove-deprecations delete mode 100644 news/5208-deprecate-get_output_file_path delete mode 100644 news/5219-deprecate-Environment delete mode 100644 news/5221-deprecate-get_version_from_git_tag delete mode 100644 news/5228-lief-0.14-compat diff --git a/.authors.yml b/.authors.yml index dc7b1a4258..89d9a7ae00 100644 --- a/.authors.yml +++ b/.authors.yml @@ -162,7 +162,7 @@ 
aliases: - MinRK github: minrk - num_commits: 15 + num_commits: 16 first_commit: 2014-02-13 19:43:59 - name: Matty G email: meawoppl@gmail.com @@ -612,7 +612,7 @@ first_commit: 2015-08-30 06:44:37 - name: Marcel Bargull email: marcel.bargull@udo.edu - num_commits: 82 + num_commits: 85 first_commit: 2016-09-26 11:45:54 github: mbargull alternate_emails: @@ -1202,7 +1202,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 168 + num_commits: 178 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1240,7 +1240,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 61 + num_commits: 64 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1251,7 +1251,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 20 + num_commits: 23 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1262,7 +1262,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 44 + num_commits: 96 first_commit: 2022-01-17 18:09:22 - name: Uwe L. Korn email: xhochy@users.noreply.github.com @@ -1271,7 +1271,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 14 + num_commits: 15 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1281,7 +1281,7 @@ - name: Travis Hathaway email: travis.j.hathaway@gmail.com github: travishathaway - num_commits: 6 + num_commits: 7 first_commit: 2022-05-12 05:53:02 - name: Kyle Leaders email: remkade@users.noreply.github.com @@ -1305,7 +1305,7 @@ - name: Katherine Kinnaman email: kkinnaman@anaconda.com github: kathatherine - num_commits: 2 + num_commits: 3 first_commit: 2022-07-07 10:56:31 - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com @@ -1376,7 +1376,7 @@ aliases: - Ryan github: ryanskeith - num_commits: 5 + num_commits: 6 first_commit: 2023-03-22 03:11:02 - name: Rishabh Singh email: 67859818+rishabh11336@users.noreply.github.com @@ -1434,3 +1434,8 @@ - h-vetinari num_commits: 1 first_commit: 2023-10-25 09:33:34 +- name: Finn Womack + email: flan313@gmail.com + num_commits: 1 + first_commit: 2024-02-06 11:43:45 + github: finnagin diff --git a/.mailmap b/.mailmap index 17e816d480..02df1bf754 100644 --- a/.mailmap +++ b/.mailmap @@ -92,6 +92,7 @@ Evan Klitzke Felix Kühnl Ferry Firmansjah <103191403+ffirmanff@users.noreply.github.com> Filipe Fernandes ocefpaf +Finn Womack Floris Bruynooghe Gabriel Reis Gaëtan de Menten diff --git a/AUTHORS.md b/AUTHORS.md index 7667f98c40..969994f016 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -75,6 +75,7 @@ Authors are sorted alphabetically. * Felix Kühnl * Ferry Firmansjah * Filipe Fernandes +* Finn Womack * Floris Bruynooghe * Gabriel Reis * Gaëtan de Menten diff --git a/CHANGELOG.md b/CHANGELOG.md index 840bc6636a..42d745f874 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,109 @@ [//]: # (current developments) +## 24.3.0 (2024-03-15) + +### Enhancements + +* Add compatibility for `LIEF=0.14`. (#5227 via #5228) + +### Bug fixes + +* Fix `stdlib` being recognized in variant hash inputs. (#5190 via #5195) + +### Deprecations + +* Mark `conda_build.bdist_conda` module as pending deprecation. (#5196) +* Mark `conda_build.build.have_prefix_files` as deprecated. (#5199) +* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. 
(#5203) +* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203) +* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203) +* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.cache_actions` as deprecated. (#5203) +* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203) +* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203) +* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203) +* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203) +* Postpone `conda_build.index.channel_data` deprecation. (#5203) +* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203) +* Rename `conda_build.environ._execute_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.environ._display_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203) +* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203) +* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203) +* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203) +* Remove `conda_build.conda_interface.Dist`. (#5203) +* Remove `conda_build.conda_interface.display_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_plan`. (#5203) +* Remove `conda_build.conda_interface.install_actions`. (#5203) +* Remove `conda_build.conda_interface.linked`. (#5203) +* Remove `conda_build.conda_interface.linked_data`. (#5203) +* Remove `conda_build.conda_interface.package_cache`. (#5203) +* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203) +* Remove `conda_build.index._determine_namespace`. (#5203) +* Remove `conda_build.index._make_seconds`. (#5203) +* Remove `conda_build.index.REPODATA_VERSION`. (#5203) +* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203) +* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203) +* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) +* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) +* Remove `conda_build.index._clear_newline_chars`. (#5203) +* Remove `conda_build.index._get_jinja2_environment`. (#5203) +* Remove `conda_build.index._maybe_write`. (#5203) +* Remove `conda_build.index._make_build_string`. (#5203) +* Remove `conda_build.index._warn_on_missing_dependencies`. (#5203) +* Remove `conda_build.index._cache_post_install_details`. (#5203) +* Remove `conda_build.index._cache_recipe`. (#5203) +* Remove `conda_build.index._cache_run_exports`. (#5203) +* Remove `conda_build.index._cache_icon`. (#5203) +* Remove `conda_build.index._make_subdir_index_html`. (#5203) +* Remove `conda_build.index._make_channeldata_index_html`. (#5203) +* Remove `conda_build.index._get_source_repo_git_info`. (#5203) +* Remove `conda_build.index._cache_info_file`. (#5203) +* Remove `conda_build.index._alternate_file_extension`. (#5203) +* Remove `conda_build.index._get_resolve_object`. (#5203) +* Remove `conda_build.index._get_newest_versions`. (#5203) +* Remove `conda_build.index._add_missing_deps`. (#5203) +* Remove `conda_build.index._add_prev_ver_for_features`. (#5203) +* Remove `conda_build.index._shard_newest_packages`. 
(#5203) +* Remove `conda_build.index._build_current_repodata`. (#5203) +* Remove `conda_build.index.ChannelIndex`. (#5203) +* Remove `conda_build.inspect.check_install('prepend')`. (#5203) +* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203) +* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203) +* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203) +* Remove `conda_build.noarch_python._error_exit`. (#5203) +* Remove `conda_build.render.actions_to_pins`. (#5203) +* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203) +* Mark `conda_build.api.get_output_file_path` as deprecated. Use `conda_build.api.get_output_file_paths` instead. (#5208) +* Mark `conda_build.environ.Environment` as deprecated. Use `conda.core.prefix_data.PrefixData` instead. (#5219) +* Mark `conda_build.conda_interface.get_version_from_git_tag` as deprecated. Use `conda_build.environ.get_version_from_git_tag` instead. (#5221) + +### Docs + +* Update advice for installing conda-build into base environment. (#5223) + +### Other + +* Add a check to print an additional warning and return an empty string when bits is "arm64" in `msvc_env_cmd`. (#4867) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @finnagin made their first contribution in https://github.com/conda/conda-build/pull/4867 +* @kathatherine +* @kenodegard +* @mbargull +* @minrk +* @ryanskeith +* @travishathaway +* @pre-commit-ci[bot] + + + ## 24.1.2 (2024-02-15) ### Bug fixes diff --git a/news/4867-arm64-msvc-env-cmd-no-op b/news/4867-arm64-msvc-env-cmd-no-op deleted file mode 100644 index 134dcd14fd..0000000000 --- a/news/4867-arm64-msvc-env-cmd-no-op +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Added a check to print an additional warning and return an empty string when bits is "arm64" in msvc_env_cmd. (#4867) diff --git a/news/5195-fix-stdlib-variant b/news/5195-fix-stdlib-variant deleted file mode 100644 index 526692f286..0000000000 --- a/news/5195-fix-stdlib-variant +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Fix stdlib being recognized in variant hash inputs. (#5190 via #5195) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/5196-deprecate-bdist-conda b/news/5196-deprecate-bdist-conda deleted file mode 100644 index 3f37838bf0..0000000000 --- a/news/5196-deprecate-bdist-conda +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.bdist_conda` module as pending deprecation. (#5196) - -### Docs - -* - -### Other - -* diff --git a/news/5199-deprecate-have_prefix_files b/news/5199-deprecate-have_prefix_files deleted file mode 100644 index eccab010da..0000000000 --- a/news/5199-deprecate-have_prefix_files +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.build.have_prefix_files` as deprecated. (#5199) - -### Docs - -* - -### Other - -* diff --git a/news/5203-remove-deprecations b/news/5203-remove-deprecations deleted file mode 100644 index 5021c12907..0000000000 --- a/news/5203-remove-deprecations +++ /dev/null @@ -1,80 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. 
(#5203) -* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203) -* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203) -* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203) -* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203) -* Mark `conda_build.environ.cache_actions` as deprecated. (#5203) -* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203) -* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203) -* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203) -* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203) -* Postpone `conda_build.index.channel_data` deprecation. (#5203) -* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203) -* Rename `conda_build.environ._execute_actions('actions' -> 'precs'). (#5203) -* Rename `conda_build.environ._display_actions('actions' -> 'precs'). (#5203) -* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203) -* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203) -* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203) -* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203) -* Remove `conda_build.conda_interface.Dist`. (#5203) -* Remove `conda_build.conda_interface.display_actions`. (#5203) -* Remove `conda_build.conda_interface.execute_actions`. (#5203) -* Remove `conda_build.conda_interface.execute_plan`. (#5203) -* Remove `conda_build.conda_interface.install_actions`. (#5203) -* Remove `conda_build.conda_interface.linked`. (#5203) -* Remove `conda_build.conda_interface.linked_data`. (#5203) -* Remove `conda_build.conda_interface.package_cache`. (#5203) -* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203) -* Remove `conda_build.index._determine_namespace`. (#5203) -* Remove `conda_build.index._make_seconds`. (#5203) -* Remove `conda_build.index.REPODATA_VERSION`. (#5203) -* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203) -* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203) -* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) -* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) -* Remove `conda_build.index._clear_newline_chars`. (#5203) -* Remove `conda_build.index._get_jinja2_environment`. (#5203) -* Remove `conda_build.index._maybe_write`. (#5203) -* Remove `conda_build.index._make_build_string`. (#5203) -* Remove `conda_build.index._warn_on_missing_dependencies`. (#5203) -* Remove `conda_build.index._cache_post_install_details`. (#5203) -* Remove `conda_build.index._cache_recipe`. (#5203) -* Remove `conda_build.index._cache_run_exports`. (#5203) -* Remove `conda_build.index._cache_icon`. (#5203) -* Remove `conda_build.index._make_subdir_index_html`. (#5203) -* Remove `conda_build.index._make_channeldata_index_html`. (#5203) -* Remove `conda_build.index._get_source_repo_git_info`. (#5203) -* Remove `conda_build.index._cache_info_file`. (#5203) -* Remove `conda_build.index._alternate_file_extension`. (#5203) -* Remove `conda_build.index._get_resolve_object`. (#5203) -* Remove `conda_build.index._get_newest_versions`. (#5203) -* Remove `conda_build.index._add_missing_deps`. (#5203) -* Remove `conda_build.index._add_prev_ver_for_features`. (#5203) -* Remove `conda_build.index._shard_newest_packages`. 
(#5203) -* Remove `conda_build.index._build_current_repodata`. (#5203) -* Remove `conda_build.index.ChannelIndex`. (#5203) -* Remove `conda_build.inspect.check_install('prepend')`. (#5203) -* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203) -* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203) -* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203) -* Remove `conda_build.noarch_python._error_exit`. (#5203) -* Remove `conda_build.render.actions_to_pins`. (#5203) -* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203) - -### Docs - -* - -### Other - -* diff --git a/news/5208-deprecate-get_output_file_path b/news/5208-deprecate-get_output_file_path deleted file mode 100644 index 33244e8bf5..0000000000 --- a/news/5208-deprecate-get_output_file_path +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.api.get_output_file_path` as deprecated. Use `conda_build.api.get_output_file_paths` instead. (#5208) - -### Docs - -* - -### Other - -* diff --git a/news/5219-deprecate-Environment b/news/5219-deprecate-Environment deleted file mode 100644 index 95780c6be3..0000000000 --- a/news/5219-deprecate-Environment +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.environ.Environment` as deprecated. Use `conda.core.prefix_data.PrefixData` instead. (#5219) - -### Docs - -* - -### Other - -* diff --git a/news/5221-deprecate-get_version_from_git_tag b/news/5221-deprecate-get_version_from_git_tag deleted file mode 100644 index 2c1e811a54..0000000000 --- a/news/5221-deprecate-get_version_from_git_tag +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.conda_interface.get_version_from_git_tag` as deprecated. Use `conda_build.environ.get_version_from_git_tag` instead. (#5221) - -### Docs - -* - -### Other - -* diff --git a/news/5228-lief-0.14-compat b/news/5228-lief-0.14-compat deleted file mode 100644 index 602242c7d4..0000000000 --- a/news/5228-lief-0.14-compat +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add compatibility for LIEF=0.14. (#5227 via #5228) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* From f3615dc8cced0a1ed396a05e436d68ac3f441d91 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:27:08 -0400 Subject: [PATCH 303/366] [pre-commit.ci] pre-commit autoupdate (#5243) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.2 → v0.3.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.2...v0.3.3) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0941dd3f5c..9335532d1f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.2 + rev: v0.3.3 hooks: # lint & attempt to correct failures (e.g. 
pyupgrade) - id: ruff From 1bfee9f6fa221f1aeeec8db226f19915fed7c96f Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Tue, 19 Mar 2024 12:45:59 -0400 Subject: [PATCH 304/366] Deprecating `conda_build.conda_interface` (#5222) * Deprecate CONDA_VERSION and binstar_upload constants * Bump deprecation versions * Deprecating default_python * Deprecating envs_dirs * Deprecating pkgs_dirs * Deprecating cc_platform * Deprecate root_dir * Deprecate root_writable * Add news file * Deprecate subdir constant * Deprecate create_default_packages * Deprecate get_rc_urls * Deprecate get_prefix * Deprecate get_conda_channel * Move CONDA_ALLOW_SOFTLINKS * Update reset_context() imports * Deprecate conda_build.conda_interface.reset_context function * Wrap CONDA_ALLOW_SOFTLINKS in try-except for docs * Update conda_build/__init__.py --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- conda_build/__init__.py | 15 ++++ conda_build/build.py | 22 +++-- conda_build/cli/main_build.py | 7 +- conda_build/cli/main_metapackage.py | 8 +- conda_build/conda_interface.py | 124 +++++++++++++++++++++----- conda_build/config.py | 28 +++--- conda_build/environ.py | 25 +++--- conda_build/index.py | 7 +- conda_build/inspect_pkg.py | 8 +- conda_build/metadata.py | 5 +- conda_build/os_utils/external.py | 13 +-- conda_build/render.py | 7 +- conda_build/skeletons/pypi.py | 7 +- conda_build/utils.py | 16 ++-- conda_build/variants.py | 5 +- news/5222-deprecating-conda_interface | 32 +++++++ tests/test_api_build.py | 2 +- tests/test_api_render.py | 7 +- tests/test_subpackages.py | 4 +- 19 files changed, 235 insertions(+), 107 deletions(-) create mode 100644 news/5222-deprecating-conda_interface diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 91367d0d86..6b43ca6180 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -1,5 +1,6 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause + from .__version__ import __version__ __all__ = ["__version__"] @@ -15,3 +16,17 @@ "render", "skeleton", ] + +# Skip context logic for doc generation since we don't install all dependencies in the CI doc build environment, +# see .readthedocs.yml file +try: + import os + + from conda.base.context import reset_context + + # Disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. + os.environ["CONDA_ALLOW_SOFTLINKS"] = "false" + reset_context() + +except ImportError: + pass diff --git a/conda_build/build.py b/conda_build/build.py index f2ffb06cdf..09643c8f18 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -23,7 +23,9 @@ import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version +from conda.base.context import context, reset_context from conda.core.prefix_data import PrefixData +from conda.models.channel import Channel from . import __version__ as conda_build_version from . 
import environ, noarch_python, source, tarcheck, utils @@ -36,14 +38,8 @@ PathType, TemporaryDirectory, UnsatisfiableError, - context, env_path_backup_var_exists, - get_conda_channel, - get_rc_urls, - pkgs_dirs, prefix_placeholder, - reset_context, - root_dir, url_path, ) from .config import Config @@ -1335,7 +1331,7 @@ def record_prefix_files(m, files_with_prefix): def sanitize_channel(channel): - return get_conda_channel(channel).urls(with_credentials=False, subdirs=[""])[0] + return Channel.from_value(channel).urls(with_credentials=False, subdirs=[""])[0] def write_info_files_file(m, files): @@ -1407,7 +1403,7 @@ def write_about_json(m): # conda env will be in most, but not necessarily all installations. # Don't die if we don't see it. stripped_channels = [] - for channel in get_rc_urls() + list(m.config.channel_urls): + for channel in (*context.channels, *m.config.channel_urls): stripped_channels.append(sanitize_channel(channel)) d["channels"] = stripped_channels evars = ["CIO_TEST"] @@ -1425,7 +1421,7 @@ def write_about_json(m): extra.update(m.config.extra_meta) d["root_pkgs"] = [ f"{prec.name} {prec.version} {prec.build}" - for prec in PrefixData(root_dir).iter_records() + for prec in PrefixData(context.root_dir).iter_records() ] # Include the extra section of the metadata in the about.json d["extra"] = extra @@ -3389,7 +3385,7 @@ def test( and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS) and any( os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dir - for pkgs_dir in pkgs_dirs + for pkgs_dir in context.pkgs_dirs ) ) if not in_pkg_cache: @@ -4161,8 +4157,10 @@ def is_package_built(metadata, env, include_local=True): _delegated_update_index(d, verbose=metadata.config.debug, warn=False, threads=1) subdir = getattr(metadata.config, f"{env}_subdir") - urls = [url_path(metadata.config.output_folder), "local"] if include_local else [] - urls += get_rc_urls() + urls = [ + *([url_path(metadata.config.output_folder), "local"] if include_local else []), + *context.channels, + ] if metadata.config.channel_urls: urls.extend(metadata.config.channel_urls) diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index de698df22c..bdcaaa25d6 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -13,10 +13,11 @@ from typing import TYPE_CHECKING from conda.auxlib.ish import dals +from conda.base.context import context from conda.common.io import dashlist from .. import api, build, source, utils -from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build +from ..conda_interface import add_parser_channels, cc_conda_build from ..config import ( get_channel_urls, get_or_merge_config, @@ -55,14 +56,14 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: action="store_false", help="Do not ask to upload the package to anaconda.org.", dest="anaconda_upload", - default=binstar_upload, + default=context.binstar_upload, ) parser.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, dest="anaconda_upload", - default=binstar_upload, + default=context.binstar_upload, ) parser.add_argument( "--no-include-recipe", diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index b295b4130e..a11c581702 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -6,8 +6,10 @@ import logging from typing import TYPE_CHECKING +from conda.base.context import context + from .. 
import api -from ..conda_interface import ArgumentParser, add_parser_channels, binstar_upload +from ..conda_interface import ArgumentParser, add_parser_channels if TYPE_CHECKING: from argparse import Namespace @@ -35,14 +37,14 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: action="store_false", help="Do not ask to upload the package to anaconda.org.", dest="anaconda_upload", - default=binstar_upload, + default=context.binstar_upload, ) parser.add_argument( "--no-binstar-upload", action="store_false", help=argparse.SUPPRESS, dest="anaconda_upload", - default=binstar_upload, + default=context.binstar_upload, ) parser.add_argument("--token", help="Token to pass through to anaconda upload") parser.add_argument( diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 5d5c455d07..bb92f6b8b3 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -7,9 +7,10 @@ from functools import partial from importlib import import_module # noqa: F401 -from conda import __version__ as CONDA_VERSION # noqa: F401 -from conda.base.context import context, determine_target_prefix, reset_context +from conda import __version__ +from conda.base.context import context, determine_target_prefix from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 +from conda.base.context import reset_context as _reset_context from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 from conda.exceptions import ( # noqa: F401 CondaError, @@ -65,26 +66,100 @@ from .deprecations import deprecated deprecated.constant("24.1.0", "24.5.0", "get_index", _get_index) -# TODO: Go to references of all properties below and import them from `context` instead -binstar_upload = context.binstar_upload -default_python = context.default_python -envs_dirs = context.envs_dirs -pkgs_dirs = list(context.pkgs_dirs) -cc_platform = context.platform -root_dir = context.root_dir -root_writable = context.root_writable -subdir = context.subdir -create_default_packages = context.create_default_packages - -get_rc_urls = lambda: list(context.channels) -get_prefix = partial(determine_target_prefix, context) -cc_conda_build = context.conda_build if hasattr(context, "conda_build") else {} +deprecated.constant( + "24.5", + "24.7", + "reset_context", + _reset_context, + addendum="Use `conda.base.context.reset_context` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "binstar_upload", + context.binstar_upload, + addendum="Use `conda.base.context.context.binstar_upload` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "default_python", + context.default_python, + addendum="Use `conda.base.context.context.default_python` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "envs_dirs", + context.envs_dirs, + addendum="Use `conda.base.context.context.envs_dirs` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "pkgs_dirs", + list(context.pkgs_dirs), + addendum="Use `conda.base.context.context.pkgs_dirs` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "cc_platform", + context.platform, + addendum="Use `conda.base.context.context.platform` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "root_dir", + context.root_dir, + addendum="Use `conda.base.context.context.root_dir` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "root_writable", + context.root_writable, + addendum="Use `conda.base.context.context.root_writable` instead.", +) +deprecated.constant( + "24.5", + "24.7", + 
"subdir", + context.subdir, + addendum="Use `conda.base.context.context.subdir` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "create_default_packages", + context.create_default_packages, + addendum="Use `conda.base.context.context.create_default_packages` instead.", +) -get_conda_channel = Channel.from_value +deprecated.constant( + "24.5", + "24.7", + "get_rc_urls", + lambda: list(context.channels), + addendum="Use `conda.base.context.context.channels` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "get_prefix", + partial(determine_target_prefix, context), + addendum="Use `conda.base.context.context.target_prefix` instead.", +) +cc_conda_build = context.conda_build if hasattr(context, "conda_build") else {} -# Disallow softlinks. This avoids a lot of dumb issues, at the potential cost of disk space. -os.environ["CONDA_ALLOW_SOFTLINKS"] = "false" -reset_context() +deprecated.constant( + "24.5", + "24.7", + "get_conda_channel", + Channel.from_value, + addendum="Use `conda.models.channel.Channel.from_value` instead.", +) # When deactivating envs (e.g. switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. @@ -118,6 +193,15 @@ def md5_file(path: str | os.PathLike) -> str: return compute_sum(path, "md5") +deprecated.constant( + "24.5", + "24.7", + "CONDA_VERSION", + __version__, + addendum="Use `conda.__version__` instead.", +) + + @deprecated( "24.3", "24.5", diff --git a/conda_build/config.py b/conda_build/config.py index 9a33dac858..6e6cc8e28c 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -16,15 +16,9 @@ from os.path import abspath, expanduser, expandvars, join from typing import TYPE_CHECKING -from .conda_interface import ( - binstar_upload, - cc_conda_build, - cc_platform, - root_dir, - root_writable, - subdir, - url_path, -) +from conda.base.context import context + +from .conda_interface import cc_conda_build, url_path from .utils import ( get_build_folders, get_conda_operation_locks, @@ -88,7 +82,7 @@ def set_invocation_time(): def _get_default_settings(): return [ Setting("activate", True), - Setting("anaconda_upload", binstar_upload), + Setting("anaconda_upload", context.binstar_upload), Setting("force_upload", True), Setting("channel_urls", []), Setting("dirty", False), @@ -322,7 +316,7 @@ def set_lang(variant, lang): def arch(self): """Always the native (build system) arch, except when pretending to be some other platform""" - return self._arch or subdir.rsplit("-", 1)[1] + return self._arch or context.subdir.rsplit("-", 1)[1] @arch.setter def arch(self, value): @@ -338,7 +332,7 @@ def arch(self, value): def platform(self): """Always the native (build system) OS, except when pretending to be some other platform""" - return self._platform or subdir.rsplit("-", 1)[0] + return self._platform or context.subdir.rsplit("-", 1)[0] @platform.setter def platform(self, value): @@ -381,8 +375,8 @@ def noarch(self): return self.host_platform == "noarch" def reset_platform(self): - if not self.platform == cc_platform: - self.platform = cc_platform + if not self.platform == context.platform: + self.platform = context.platform @property def subdir(self): @@ -460,8 +454,8 @@ def croot(self) -> str: self._croot = abspath(expanduser(_bld_root_env)) elif _bld_root_rc: self._croot = abspath(expanduser(expandvars(_bld_root_rc))) - elif root_writable: - self._croot = join(root_dir, "conda-bld") + elif context.root_writable: + self._croot = join(context.root_dir, 
"conda-bld") else: self._croot = abspath(expanduser("~/conda-bld")) return self._croot @@ -718,7 +712,7 @@ def bldpkgs_dirs(self): # subdir should be the native platform, while self.subdir would be the host platform. return { join(self.croot, self.host_subdir), - join(self.croot, subdir), + join(self.croot, context.subdir), join(self.croot, "noarch"), } diff --git a/conda_build/environ.py b/conda_build/environ.py index f7260ac92e..a6fca61837 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -24,6 +24,7 @@ DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL, ) +from conda.base.context import context, reset_context from conda.common.io import env_vars from conda.core.index import LAST_CHANNEL_URLS from conda.core.link import PrefixSetup, UnlinkLinkTransaction @@ -46,11 +47,6 @@ PackageRecord, ProgressiveFetchExtract, TemporaryDirectory, - context, - create_default_packages, - pkgs_dirs, - reset_context, - root_dir, ) from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError @@ -425,7 +421,7 @@ def conda_build_vars(prefix, config): "HTTP_PROXY": os.getenv("HTTP_PROXY", ""), "REQUESTS_CA_BUNDLE": os.getenv("REQUESTS_CA_BUNDLE", ""), "DIRTY": "1" if config.dirty else "", - "ROOT": root_dir, + "ROOT": context.root_dir, } @@ -921,7 +917,7 @@ def get_install_actions( conda_log_level = logging.WARN specs = list(specs) if specs: - specs.extend(create_default_packages) + specs.extend(context.create_default_packages) if verbose or debug: capture = contextlib.nullcontext if debug: @@ -993,7 +989,7 @@ def get_install_actions( pkg_dir = str(exc) folder = 0 while ( - os.path.dirname(pkg_dir) not in pkgs_dirs + os.path.dirname(pkg_dir) not in context.pkgs_dirs and folder < 20 ): pkg_dir = os.path.dirname(pkg_dir) @@ -1003,7 +999,7 @@ def get_install_actions( "Removing the folder and retrying", pkg_dir, ) - if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir): + if pkg_dir in context.pkgs_dirs and os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retries < max_env_retry: log.warn( @@ -1194,7 +1190,10 @@ def create_env( with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = str(exc) folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + while ( + os.path.dirname(pkg_dir) not in context.pkgs_dirs + and folder < 20 + ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 log.warn( @@ -1268,9 +1267,9 @@ def get_pkg_dirs_locks(dirs, config): def clean_pkg_cache(dist: str, config: Config) -> None: with utils.LoggingContext(logging.DEBUG if config.debug else logging.WARN): - locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) + locks = get_pkg_dirs_locks((config.bldpkgs_dir, *context.pkgs_dirs), config) with utils.try_acquire_locks(locks, timeout=config.timeout): - for pkgs_dir in pkgs_dirs: + for pkgs_dir in context.pkgs_dirs: if any( os.path.exists(os.path.join(pkgs_dir, f"{dist}{ext}")) for ext in ("", *CONDA_PACKAGE_EXTENSIONS) @@ -1286,7 +1285,7 @@ def clean_pkg_cache(dist: str, config: Config) -> None: # Note that this call acquires the relevant locks, so this must be called # outside the lock context above. 
- remove_existing_packages(pkgs_dirs, [dist], config) + remove_existing_packages(context.pkgs_dirs, [dist], config) def remove_existing_packages(dirs, fns, config): diff --git a/conda_build/index.py b/conda_build/index.py index cd36cc9cac..e4d07a52ff 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -8,11 +8,12 @@ from functools import partial from os.path import dirname +from conda.base.context import context from conda.core.index import get_index from conda_index.index import update_index as _update_index -from . import conda_interface, utils -from .conda_interface import CondaHTTPError, context, url_path +from . import utils +from .conda_interface import CondaHTTPError, url_path from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, @@ -128,7 +129,7 @@ def get_build_index( # native content and the noarch content. if subdir == "noarch": - subdir = conda_interface.subdir + subdir = context.subdir try: # get_index() is like conda reading the index, not conda_index # creating a new index. diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7a9985fc8a..7d7c61f8f9 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -14,11 +14,11 @@ from typing import TYPE_CHECKING from conda.api import Solver +from conda.base.context import context from conda.core.index import get_index from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord -from . import conda_interface from .conda_interface import ( specs_from_args, ) @@ -104,14 +104,14 @@ def check_install( Solver( prefix, channel_urls, - [subdir or conda_interface.subdir], + [subdir or context.subdir], specs_from_args(packages), ).solve_for_transaction(ignore_pinned=True).print_transaction_summary() def print_linkages( depmap: dict[ - PrefixRecord | Literal["not found" | "system" | "untracked"], + PrefixRecord | Literal["not found", "system", "untracked"], list[tuple[str, str, str]], ], show_files: bool = False, @@ -217,7 +217,7 @@ def inspect_linkages( untracked: bool = False, all_packages: bool = False, show_files: bool = False, - groupby: Literal["package" | "dependency"] = "package", + groupby: Literal["package", "dependency"] = "package", sysroot="", ): if not packages and not untracked and not all_packages: diff --git a/conda_build/metadata.py b/conda_build/metadata.py index e3b814d8a7..01f3367d03 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -16,10 +16,11 @@ from typing import TYPE_CHECKING, overload from bs4 import UnicodeDammit +from conda.base.context import context from conda.gateways.disk.read import compute_sum from . import exceptions, utils, variants -from .conda_interface import MatchSpec, envs_dirs +from .conda_interface import MatchSpec from .config import Config, get_or_merge_config from .features import feature_list from .license_family import ensure_valid_license_family @@ -781,7 +782,7 @@ def build_string_from_metadata(metadata): # but we don't presently have an API there. 
def _get_env_path(env_name_or_path): if not os.path.isdir(env_name_or_path): - for envs_dir in list(envs_dirs) + [os.getcwd()]: + for envs_dir in list(context.envs_dirs) + [os.getcwd()]: path = os.path.join(envs_dir, env_name_or_path) if os.path.isdir(path): env_name_or_path = path diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 8b84833c00..f1d91d098b 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -5,7 +5,8 @@ from glob import glob from os.path import expanduser, isfile, join -from ..conda_interface import root_dir +from conda.base.context import context + from ..utils import on_win @@ -16,10 +17,10 @@ def find_executable(executable, prefix=None, all_matches=False): result = None if on_win: dir_paths = [ - join(root_dir, "Scripts"), - join(root_dir, "Library\\mingw-w64\\bin"), - join(root_dir, "Library\\usr\\bin"), - join(root_dir, "Library\\bin"), + join(context.root_dir, "Scripts"), + join(context.root_dir, "Library\\mingw-w64\\bin"), + join(context.root_dir, "Library\\usr\\bin"), + join(context.root_dir, "Library\\bin"), ] if prefix: dir_paths[0:0] = [ @@ -30,7 +31,7 @@ def find_executable(executable, prefix=None, all_matches=False): ] else: dir_paths = [ - join(root_dir, "bin"), + join(context.root_dir, "bin"), ] if prefix: dir_paths.insert(0, join(prefix, "bin")) diff --git a/conda_build/render.py b/conda_build/render.py index 9ba417bf23..7e64256dda 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -25,6 +25,7 @@ from pathlib import Path import yaml +from conda.base.context import context from . import environ, exceptions, source, utils from .conda_interface import ( @@ -32,7 +33,6 @@ ProgressiveFetchExtract, TemporaryDirectory, UnsatisfiableError, - pkgs_dirs, specs_from_url, ) from .exceptions import DependencyNeedsBuildingError @@ -247,7 +247,7 @@ def _filter_run_exports(specs, ignore_list): def find_pkg_dir_or_file_in_pkgs_dirs( distribution: str, m: MetaData, files_only: bool = False ) -> str | None: - for cache in map(Path, (*pkgs_dirs, *m.config.bldpkgs_dirs)): + for cache in map(Path, (*context.pkgs_dirs, *m.config.bldpkgs_dirs)): package = cache / (distribution + CONDA_PACKAGE_EXTENSION_V1) if package.is_file(): return str(package) @@ -274,6 +274,7 @@ def find_pkg_dir_or_file_in_pkgs_dirs( archive.add(entry, arcname=entry.name) return str(package) + return None @lru_cache(maxsize=None) @@ -385,7 +386,7 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F pfe = ProgressiveFetchExtract(link_prefs=(link_prec,)) with utils.LoggingContext(): pfe.execute() - for pkg_dir in pkgs_dirs: + for pkg_dir in context.pkgs_dirs: _loc = join(pkg_dir, prec.fn) if isfile(_loc): pkg_loc = _loc diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index abf1a173bc..f39a5e2318 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -20,13 +20,13 @@ import pkginfo import requests import yaml +from conda.base.context import context from conda.gateways.disk.read import compute_sum from requests.packages.urllib3.util.url import parse_url from ..conda_interface import ( StringIO, configparser, - default_python, download, human_bytes, input, @@ -282,7 +282,8 @@ def skeletonize( if not config: config = Config() - python_version = python_version or config.variant.get("python", default_python) + if not python_version: + python_version = config.variant.get("python", context.default_python) created_recipes = [] while packages: @@ -557,7 
+558,7 @@ def add_parser(repos): pypi.add_argument( "--python-version", action="store", - default=default_python, + default=context.default_python, help="""Version of Python to use to run setup.py. Default is %(default)s.""", choices=["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], ) diff --git a/conda_build/utils.py b/conda_build/utils.py index 36a65ead4e..24303d9ba5 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -53,7 +53,9 @@ CONDA_PACKAGE_EXTENSIONS, KNOWN_SUBDIRS, ) +from conda.base.context import context from conda.gateways.disk.read import compute_sum +from conda.models.channel import Channel from conda.models.match_spec import MatchSpec from .conda_interface import ( @@ -63,11 +65,7 @@ TemporaryDirectory, VersionOrder, cc_conda_build, - context, download, - get_conda_channel, - pkgs_dirs, - root_dir, unix_path_to_win, win_path_to_unix, ) @@ -86,7 +84,7 @@ on_linux = sys.platform == "linux" codec = getpreferredencoding() or "utf-8" -root_script_dir = os.path.join(root_dir, "Scripts" if on_win else "bin") +root_script_dir = os.path.join(context.root_dir, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE @@ -710,7 +708,7 @@ def merge_tree( # at any time, but the lock within this process should all be tied to the same tracking # mechanism. _lock_folders = ( - os.path.join(root_dir, "locks"), + os.path.join(context.root_dir, "locks"), os.path.expanduser(os.path.join("~", ".conda_build_locks")), ) @@ -754,9 +752,7 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): bldpkgs_dirs = ensure_list(bldpkgs_dirs) # locks enabled by default if locking: - _pkgs_dirs = pkgs_dirs[:1] - locked_folders = _pkgs_dirs + list(bldpkgs_dirs) - for folder in locked_folders: + for folder in (*context.pkgs_dirs[:1], *bldpkgs_dirs): if not os.path.isdir(folder): os.makedirs(folder) lock = get_lock(folder, timeout=timeout) @@ -2109,7 +2105,7 @@ def write_bat_activation_text(file_handle, m): def download_channeldata(channel_url): global channeldata_cache if channel_url.startswith("file://") or channel_url not in channeldata_cache: - urls = get_conda_channel(channel_url).urls() + urls = Channel.from_value(channel_url).urls() urls = {url.rsplit("/", 1)[0] for url in urls} data = {} for url in urls: diff --git a/conda_build/variants.py b/conda_build/variants.py index e4c541ed96..c5bbe9a41e 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -12,8 +12,9 @@ from itertools import product import yaml +from conda.base.context import context -from .conda_interface import cc_conda_build, subdir +from .conda_interface import cc_conda_build from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version @@ -85,7 +86,7 @@ }, } -arch_name = subdir.rsplit("-", 1)[-1] +arch_name = context.subdir.rsplit("-", 1)[-1] SUFFIX_MAP = { "PY": "python", diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface new file mode 100644 index 0000000000..288f24474b --- /dev/null +++ b/news/5222-deprecating-conda_interface @@ -0,0 +1,32 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface.CONDA_VERSION` constant. Use `conda.__version__` instead. (#5222) +* Deprecate `conda_build.conda_interface.binstar_upload` constant. Use `conda.base.context.context.binstar_upload` instead. 
(#5222) +* Deprecate `conda_build.conda_interface.default_python` constant. Use `conda.base.context.context.default_python` instead. (#5222) +* Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) +* Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_dir` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. (#5222) +* Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) +* Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_rc_urls` function. Use `conda.base.context.context.channels` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_prefix` function. Use `conda.base.context.context.target_prefix` instead. (#5222) +* Deprecate `conda_build.conda_interface.get_conda_channel` function. Use `conda.models.channel.Channel.from_value` instead. (#5222) +* Deprecate `conda_build.conda_interface.reset_context` function. Use `conda.base.context.reset_context` instead. (#5222) + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index fab08891c2..7bedf3e215 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -27,6 +27,7 @@ import yaml from binstar_client.commands import remove, show from binstar_client.errors import NotFound +from conda.base.context import reset_context from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaMultiError from conda_index.api import update_index @@ -36,7 +37,6 @@ CondaError, LinkError, context, - reset_context, url_path, ) from conda_build.config import Config diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 5a4c722836..6d733d9c1d 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -10,10 +10,11 @@ import pytest import yaml +from conda.base.context import context from conda.common.compat import on_win from conda_build import api, render -from conda_build.conda_interface import cc_conda_build, subdir +from conda_build.conda_interface import cc_conda_build from .utils import metadata_dir, variants_dir @@ -167,7 +168,7 @@ def test_pin_depends(testing_config): def test_cross_recipe_with_only_build_section(testing_config): recipe = os.path.join(metadata_dir, "_cross_prefix_elision_compiler_used") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] - assert metadata.config.host_subdir != subdir + assert metadata.config.host_subdir != context.subdir assert metadata.config.build_prefix != metadata.config.host_prefix assert not metadata.build_is_host @@ -176,7 +177,7 @@ def test_cross_info_index_platform(testing_config): recipe = os.path.join(metadata_dir, "_cross_build_unix_windows") metadata = api.render(recipe, config=testing_config, bypass_env_check=True)[0][0] info_index = metadata.info_index() - assert metadata.config.host_subdir != subdir + assert metadata.config.host_subdir != context.subdir assert metadata.config.host_subdir == 
info_index["subdir"] assert metadata.config.host_platform != metadata.config.platform assert metadata.config.host_platform == info_index["platform"] diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 3c3b011c58..4fe966c054 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -8,9 +8,9 @@ from pathlib import Path import pytest +from conda.base.context import context from conda_build import api, utils -from conda_build.conda_interface import subdir from conda_build.render import finalize_metadata from .utils import get_valid_recipes, subpackage_dir @@ -145,7 +145,7 @@ def test_output_specific_subdir(testing_config): assert len(metadata) == 3 for m, _, _ in metadata: if m.name() in ("default_subdir", "default_subdir_2"): - assert m.config.target_subdir == subdir + assert m.config.target_subdir == context.subdir elif m.name() == "custom_subdir": assert m.config.target_subdir == "linux-aarch64" else: From c01f352bcdb7cc3e462791d7e410656596e058a4 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Tue, 19 Mar 2024 23:09:38 +0100 Subject: [PATCH 305/366] Clarify run_exports/weak (#5214) Co-authored-by: Bianca Henderson --- docs/source/resources/define-metadata.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index e0e5bfff2d..720f4f4624 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -734,7 +734,7 @@ implicitly added by host requirements (e.g. libpng exports libpng), and with - libpng Here, because no specific kind of ``run_exports`` is specified, libpng's ``run_exports`` -are considered "weak." This means they will only apply when libpng is in the +are considered "weak". This means they will only apply when libpng is in the host section, when they will add their export to the run section. If libpng were listed in the build section, the ``run_exports`` would not apply to the run section. @@ -746,6 +746,9 @@ listed in the build section, the ``run_exports`` would not apply to the run sect strong: - libgcc +There is also ``run_exports/weak`` which is equivalent to an unspecific kind of +``run_exports`` but useful if you want to define both strong and weak run exports. + Strong ``run_exports`` are used for things like runtimes, where the same runtime needs to be present in the host and the run environment, and exactly which runtime that should be is determined by what's present in the build section. From cd42a498db70062159c24a3248576992ef70c443 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Tue, 19 Mar 2024 23:09:58 +0100 Subject: [PATCH 306/366] Add undocumented about section fields (#5215) Co-authored-by: Bianca Henderson --- docs/source/resources/define-metadata.rst | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 720f4f4624..29b523fcde 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1557,9 +1557,16 @@ information displays in the Anaconda.org channel. 
about: home: https://github.com/ilanschnell/bsdiff4 - license: BSD + license: BSD 3-Clause license_file: LICENSE - summary: binary diff and patch using the BSDIFF4-format + license_family: BSD + summary: binary diff and patch using the BSDIFF4 format + description: | + This module provides an interface to the BSDIFF4 format, command line interfaces + (bsdiff4, bspatch4) and tests. + dev_url: https://github.com/ilanschnell/bsdiff4 + doc_url: https://bsdiff4.readthedocs.io + doc_source_url: https://github.com/ilanschnell/bsdiff4/blob/main/README.rst License file From 3b1f4f1b4c1ed3bc4a64b432ab0223c1c6ff03f5 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Tue, 19 Mar 2024 23:10:11 +0100 Subject: [PATCH 307/366] docs: source.git_rev defaults to HEAD (#5217) Co-authored-by: Bianca Henderson --- docs/source/resources/define-metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 29b523fcde..8654e8a5ae 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -125,7 +125,7 @@ The git_url can also be a relative path to the recipe directory. source: git_url: https://github.com/ilanschnell/bsdiff4.git - git_rev: 1.1.4 + git_rev: 1.1.4 # (Defaults to "HEAD") git_depth: 1 # (Defaults to -1/not shallow) The depth argument relates to the ability to perform a shallow clone. From 127ce33de3c2b99a8dccedb5ba575f119919456a Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Thu, 21 Mar 2024 22:04:24 +0100 Subject: [PATCH 308/366] Add benchmark for high `pin_subpackage` count recipe (#5246) * Add benchmark for high pin_subpackage count recipe This is a performance regression benchmark for https://github.com/conda/conda-build/pull/5224 * Reduce size of test_pin_subpackage_benchmark * Use render(..., variants=...) not Config.variant --------- Signed-off-by: Marcel Bargull --- .../_pin_subpackage_benchmark/meta.yaml | 148 ++++++++++++++++++ tests/test_api_render.py | 34 ++++ 2 files changed, 182 insertions(+) create mode 100644 tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml diff --git a/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml new file mode 100644 index 0000000000..2fde86c598 --- /dev/null +++ b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml @@ -0,0 +1,148 @@ +# Performance regression test for https://github.com/conda/conda-build/pull/5224 +# This is a reduced version of +# https://github.com/conda-forge/arrow-cpp-feedstock/blob/e6f573674c5f9c35c6a614a1563b2fe3eeb3e72b/recipe/meta.yaml +# stripped of everything apart from the large number of inter-output +# pin_subpackage dependencies/run_exports. +# Addendum: Omit libarrow-all, pyarrow, pyarrow-tests to reduce benchmark duration. 
+ +package: + name: apache-arrow + version: 15.0.2 + +outputs: +# - name: libarrow-all +# build: +# run_exports: +# - {{ pin_subpackage("libarrow", max_pin="x") }} +# - {{ pin_subpackage("libarrow-acero", max_pin="x") }} +# - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} +# - {{ pin_subpackage("libarrow-flight", max_pin="x") }} +# - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} +# - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} +# - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} +# - {{ pin_subpackage("libparquet", max_pin="x") }} +# requirements: +# host: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} +# run: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} + + - name: libarrow + build: + run_exports: + - {{ pin_subpackage("libarrow", max_pin="x") }} + + - name: libarrow-acero + build: + run_exports: + - {{ pin_subpackage("libarrow-acero", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-dataset + build: + run_exports: + - {{ pin_subpackage("libarrow-dataset", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libparquet", exact=True) }} + + - name: libarrow-flight + build: + run_exports: + - {{ pin_subpackage("libarrow-flight", max_pin="x") }} + requirements: + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-flight-sql + build: + run_exports: + - {{ pin_subpackage("libarrow-flight-sql", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-flight", exact=True) }} + + - name: libarrow-gandiva + build: + run_exports: + - {{ pin_subpackage("libarrow-gandiva", max_pin="x") }} + requirements: + build: + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + + - name: libarrow-substrait + build: + run_exports: + - {{ pin_subpackage("libarrow-substrait", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + - {{ pin_subpackage("libarrow-acero", exact=True) }} + - {{ pin_subpackage("libarrow-dataset", exact=True) }} + + 
- name: libparquet + build: + run_exports: + - {{ pin_subpackage("libparquet", max_pin="x") }} + requirements: + host: + - {{ pin_subpackage("libarrow", max_pin="x") }} + run: + - {{ pin_subpackage("libarrow", exact=True) }} + +# - name: pyarrow +# requirements: +# host: +# - {{ pin_subpackage("libarrow-all", exact=True) }} +# run: +# - {{ pin_subpackage("libarrow", exact=True) }} +# - {{ pin_subpackage("libarrow-acero", exact=True) }} +# - {{ pin_subpackage("libarrow-dataset", exact=True) }} +# - {{ pin_subpackage("libarrow-flight", exact=True) }} +# - {{ pin_subpackage("libarrow-flight-sql", exact=True) }} +# - {{ pin_subpackage("libarrow-gandiva", exact=True) }} +# - {{ pin_subpackage("libarrow-substrait", exact=True) }} +# - {{ pin_subpackage("libparquet", exact=True) }} +# +# - name: pyarrow-tests +# requirements: +# host: +# - {{ pin_subpackage("libarrow-all", exact=True) }} +# - {{ pin_subpackage('pyarrow', exact=True) }} +# run: +# - {{ pin_subpackage('pyarrow', exact=True) }} diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 6d733d9c1d..7849daa01c 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -7,6 +7,7 @@ import os import re +from itertools import count, islice import pytest import yaml @@ -15,6 +16,7 @@ from conda_build import api, render from conda_build.conda_interface import cc_conda_build +from conda_build.variants import validate_spec from .utils import metadata_dir, variants_dir @@ -299,3 +301,35 @@ def test_pin_expression_works_with_python_prereleases(testing_config): assert len(ms) == 2 m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") assert "python >=3.10.0rc1,<3.11.0a0" in m.meta["requirements"]["run"] + + +@pytest.mark.benchmark +def test_pin_subpackage_benchmark(testing_config): + # Performance regression test for https://github.com/conda/conda-build/pull/5224 + recipe = os.path.join(metadata_dir, "_pin_subpackage_benchmark") + + # Create variant config of size comparable (for subdir linux-64) to + # https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/3c7d60f56a8cb7d1b8f5a8da0b02ae1f1f0982d7/recipe/conda_build_config.yaml + # Addendum: Changed number of single-value keys from 327 to 33 to reduce benchmark duration. + def create_variants(): + # ("pkg_1, ("1.1", "1.2", ...)), ("pkg_2", ("2.1", "2.2", ...)), ... 
+ packages = ((f"pkg_{i}", (f"{i}.{j}" for j in count(1))) for i in count(1)) + variant = {} + variant["zip_keys"] = [] + for version_count, package_count in [(1, 4), (4, 3), (4, 3)]: + zipped = [] + for package, versions in islice(packages, package_count): + zipped.append(package) + variant[package] = list(islice(versions, version_count)) + variant["zip_keys"].append(zipped) + # for version_count, package_count in [(3, 1), (2, 4), (1, 327)]: + for version_count, package_count in [(3, 1), (2, 4), (1, 33)]: + for package, versions in islice(packages, package_count): + variant[package] = list(islice(versions, version_count)) + validate_spec("", variant) + return variant + + ms = api.render( + recipe, config=testing_config, channels=[], variants=create_variants() + ) + assert len(ms) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests From 6bd03828964327c29f88d6739d1b7e1c488a40db Mon Sep 17 00:00:00 2001 From: Marcel Bargull Date: Fri, 22 Mar 2024 01:55:10 +0100 Subject: [PATCH 309/366] Add selectors to pin_subpackage_benchmark recipe (#5249) Signed-off-by: Marcel Bargull --- .../_pin_subpackage_benchmark/meta.yaml | 104 ++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml index 2fde86c598..311b5a95d2 100644 --- a/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml +++ b/tests/test-recipes/metadata/_pin_subpackage_benchmark/meta.yaml @@ -146,3 +146,107 @@ outputs: # - {{ pin_subpackage('pyarrow', exact=True) }} # run: # - {{ pin_subpackage('pyarrow', exact=True) }} + +# The original recipe had 173 selector lines; adding placeholders for these here: +about: + description: > + 00 # [x86_64] + 01 # [not x86_64] + 02 # [unix] + 03 # [not unix] + 04 # [linux] + 05 # [not linux] + 06 # [osx] + 07 # [not osx] + 08 # [win] + 09 # [not win] + 10 # [x86_64] + 11 # [not x86_64] + 12 # [unix] + 13 # [not unix] + 14 # [linux] + 15 # [not linux] + 16 # [osx] + 17 # [not osx] + 18 # [win] + 19 # [not win] + 20 # [x86_64] + 21 # [not x86_64] + 22 # [unix] + 23 # [not unix] + 24 # [linux] + 25 # [not linux] + 26 # [osx] + 27 # [not osx] + 28 # [win] + 29 # [not win] + 30 # [x86_64] + 31 # [not x86_64] + 32 # [unix] + 33 # [not unix] + 34 # [linux] + 35 # [not linux] + 36 # [osx] + 37 # [not osx] + 38 # [win] + 39 # [not win] + 40 # [x86_64] + 41 # [not x86_64] + 42 # [unix] + 43 # [not unix] + 44 # [linux] + 45 # [not linux] + 46 # [osx] + 47 # [not osx] + 48 # [win] + 49 # [not win] + 50 # [x86_64] + 51 # [not x86_64] + 52 # [unix] + 53 # [not unix] + 54 # [linux] + 55 # [not linux] + 56 # [osx] + 57 # [not osx] + 58 # [win] + 59 # [not win] + 60 # [x86_64] + 61 # [not x86_64] + 62 # [unix] + 63 # [not unix] + 64 # [linux] + 65 # [not linux] + 66 # [osx] + 67 # [not osx] + 68 # [win] + 69 # [not win] + 70 # [x86_64] + 71 # [not x86_64] + 72 # [unix] + 73 # [not unix] + 74 # [linux] + 75 # [not linux] + 76 # [osx] + 77 # [not osx] + 78 # [win] + 79 # [not win] + 80 # [x86_64] + 81 # [not x86_64] + 82 # [unix] + 83 # [not unix] + 84 # [linux] + 85 # [not linux] + 86 # [osx] + 87 # [not osx] + 88 # [win] + 89 # [not win] + 90 # [x86_64] + 91 # [not x86_64] + 92 # [unix] + 93 # [not unix] + 94 # [linux] + 95 # [not linux] + 96 # [osx] + 97 # [not osx] + 98 # [win] + 99 # [not win] From be0c74c9025efe578dc6dff1fc591a379dfc49c1 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Fri, 22 Mar 2024 16:09:28 +0100 Subject: [PATCH 310/366] add license_url field to 
metadata docs (#5250) --- docs/source/resources/define-metadata.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 8654e8a5ae..ecb4515c17 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -1560,6 +1560,7 @@ information displays in the Anaconda.org channel. license: BSD 3-Clause license_file: LICENSE license_family: BSD + license_url: https://github.com/bacchusrx/bsdiff4/blob/master/LICENSE summary: binary diff and patch using the BSDIFF4 format description: | This module provides an interface to the BSDIFF4 format, command line interfaces From ab137d23304fca69bd188a0ad11cf9601dea8f54 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Fri, 22 Mar 2024 17:48:26 +0100 Subject: [PATCH 311/366] docs: source URL can be a list (meta.yaml) (#5218) * meta.yaml: source URL can be a list * Update docs/source/resources/define-metadata.rst Co-authored-by: jaimergp --------- Co-authored-by: Bianca Henderson Co-authored-by: jaimergp --- docs/source/resources/define-metadata.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index ecb4515c17..e4b599c438 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -116,6 +116,18 @@ If an extracted archive contains only 1 folder at its top level, its contents will be moved 1 level up, so that the extracted package contents sit in the root of the work folder. +You can also specify multiple URLs for the same source archive. +They will be attempted in order, should one fail. + +.. code-block:: yaml + + source: + url: + - https://archive.linux.duke.edu/cran/src/contrib/ggblanket_6.0.0.tar.gz + - https://archive.linux.duke.edu/cran/src/contrib/Archive/ggblanket/ggblanket_6.0.0.tar.gz + sha256: cd2181fe3d3365eaf36ff8bbbc90ea9d76c56d40e63386b4eefa0e3120ec6665 + + Source from git --------------- From 51f81cea7c89c0f509544d09f0dac037da838233 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 25 Mar 2024 10:21:08 -0500 Subject: [PATCH 312/366] Add `open_recipe` context to simplify recipe handling (#5238) --- conda_build/render.py | 149 +++++++++++++++++++++--------------------- news/5238-open_recipe | 19 ++++++ tests/test_render.py | 39 ++++++++--- 3 files changed, 124 insertions(+), 83 deletions(-) create mode 100644 news/5238-open_recipe diff --git a/conda_build/render.py b/conda_build/render.py index 7e64256dda..90061126cf 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -12,9 +12,9 @@ import tarfile import tempfile from collections import OrderedDict, defaultdict +from contextlib import contextmanager from functools import lru_cache from os.path import ( - abspath, dirname, isabs, isdir, @@ -23,6 +23,7 @@ normpath, ) from pathlib import Path +from typing import TYPE_CHECKING import yaml from conda.base.context import context @@ -49,6 +50,11 @@ list_of_dicts_to_dict_of_lists, ) +if TYPE_CHECKING: + from typing import Iterator + + from .config import Config + def odict_representer(dumper, data): return dumper.represent_dict(data.items()) @@ -929,15 +935,37 @@ def expand_outputs(metadata_tuples): return list(expanded_outputs.values()) +@contextmanager +def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]: + """Open the recipe from a file (meta.yaml), directory (recipe), or tarball (package).""" + recipe = Path(recipe) + + if not 
recipe.exists(): + sys.exit(f"Error: non-existent: {recipe}") + elif recipe.is_dir(): + # read the recipe from the current directory + yield recipe + elif recipe.suffixes in [[".tar"], [".tar", ".gz"], [".tgz"], [".tar", ".bz2"]]: + # extract the recipe to a temporary directory + with tempfile.TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: + tar.extractall(path=tmp) + yield Path(tmp) + elif recipe.suffix == ".yaml": + # read the recipe from the parent directory + yield recipe.parent + else: + sys.exit(f"Error: non-recipe: {recipe}") + + def render_recipe( - recipe_path, - config, - no_download_source=False, - variants=None, - permit_unsatisfiable_variants=True, - reset_build_id=True, - bypass_env_check=False, -): + recipe_dir: str | os.PathLike | Path, + config: Config, + no_download_source: bool = False, + variants: dict | None = None, + permit_unsatisfiable_variants: bool = True, + reset_build_id: bool = True, + bypass_env_check: bool = False, +) -> list[tuple[MetaData, bool, bool]]: """Returns a list of tuples, each consisting of (metadata-object, needs_download, needs_render_in_env) @@ -945,74 +973,45 @@ def render_recipe( You get one tuple per variant. Outputs are not factored in here (subpackages won't affect these results returned here.) """ - arg = recipe_path - if isfile(arg): - if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")): - recipe_dir = tempfile.mkdtemp() - t = tarfile.open(arg, "r:*") - t.extractall(path=recipe_dir) - t.close() - need_cleanup = True - elif arg.endswith(".yaml"): - recipe_dir = dirname(arg) - need_cleanup = False + with open_recipe(recipe_dir) as recipe: + try: + m = MetaData(str(recipe), config=config) + except exceptions.YamlParsingError as e: + sys.exit(e.error_msg()) + + # important: set build id *before* downloading source. Otherwise source goes into a different + # build folder. + if config.set_build_id: + m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id) + + # this source may go into a folder that doesn't match the eventual build folder. + # There's no way around it AFAICT. We must download the source to be able to render + # the recipe (from anything like GIT_FULL_HASH), but we can't know the final build + # folder until rendering is complete, because package names can have variant jinja2 in them. + if m.needs_source_for_render and not m.source_provided: + try_download(m, no_download_source=no_download_source) + + if m.final: + if not getattr(m.config, "variants", None): + m.config.ignore_system_variants = True + if isfile(cbc_yaml := join(m.path, "conda_build_config.yaml")): + m.config.variant_config_files = [cbc_yaml] + m.config.variants = get_package_variants(m, variants=variants) + m.config.variant = m.config.variants[0] + return [(m, False, False)] else: - print("Ignoring non-recipe: %s" % arg) - return None, None - else: - recipe_dir = abspath(arg) - need_cleanup = False + # merge any passed-in variants with any files found + variants = get_package_variants(m, variants=variants) - if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) - - try: - m = MetaData(recipe_dir, config=config) - except exceptions.YamlParsingError as e: - sys.stderr.write(e.error_msg()) - sys.exit(1) - - rendered_metadata = {} - - # important: set build id *before* downloading source. Otherwise source goes into a different - # build folder. 
- if config.set_build_id: - m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id) - - # this source may go into a folder that doesn't match the eventual build folder. - # There's no way around it AFAICT. We must download the source to be able to render - # the recipe (from anything like GIT_FULL_HASH), but we can't know the final build - # folder until rendering is complete, because package names can have variant jinja2 in them. - if m.needs_source_for_render and not m.source_provided: - try_download(m, no_download_source=no_download_source) - if m.final: - if not hasattr(m.config, "variants") or not m.config.variant: - m.config.ignore_system_variants = True - if isfile(join(m.path, "conda_build_config.yaml")): - m.config.variant_config_files = [ - join(m.path, "conda_build_config.yaml") - ] - m.config.variants = get_package_variants(m, variants=variants) - m.config.variant = m.config.variants[0] - rendered_metadata = [ - (m, False, False), - ] - else: - # merge any passed-in variants with any files found - variants = get_package_variants(m, variants=variants) - - # when building, we don't want to fully expand all outputs into metadata, only expand - # whatever variants we have (i.e. expand top-level variants, not output-only variants) - rendered_metadata = distribute_variants( - m, - variants, - permit_unsatisfiable_variants=permit_unsatisfiable_variants, - allow_no_other_outputs=True, - bypass_env_check=bypass_env_check, - ) - if need_cleanup: - utils.rm_rf(recipe_dir) - return rendered_metadata + # when building, we don't want to fully expand all outputs into metadata, only expand + # whatever variants we have (i.e. expand top-level variants, not output-only variants) + return distribute_variants( + m, + variants, + permit_unsatisfiable_variants=permit_unsatisfiable_variants, + allow_no_other_outputs=True, + bypass_env_check=bypass_env_check, + ) # Keep this out of the function below so it can be imported by other modules. diff --git a/news/5238-open_recipe b/news/5238-open_recipe new file mode 100644 index 0000000000..9d5d42c4c5 --- /dev/null +++ b/news/5238-open_recipe @@ -0,0 +1,19 @@ +### Enhancements + +* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handling any exit/close behavior. 
(#5238) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_render.py b/tests/test_render.py index aef9d0e928..940d090781 100644 --- a/tests/test_render.py +++ b/tests/test_render.py @@ -4,12 +4,19 @@ import json import os +import re from typing import TYPE_CHECKING from uuid import uuid4 import pytest -from conda_build import api, render +from conda_build.api import get_output_file_paths +from conda_build.render import ( + _simplify_to_exact_constraints, + find_pkg_dir_or_file_in_pkgs_dirs, + get_pin_from_build, + open_recipe, +) from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1 if TYPE_CHECKING: @@ -27,7 +34,7 @@ ) def test_noarch_output(build, testing_metadata): testing_metadata.meta["build"].update(build) - output = api.get_output_file_paths(testing_metadata) + output = get_output_file_paths(testing_metadata) assert os.path.sep + "noarch" + os.path.sep in output[0] @@ -36,7 +43,7 @@ def test_reduce_duplicate_specs(testing_metadata): "build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"], "host": ["exact", "exact 1.2.3 1"], } - render._simplify_to_exact_constraints(testing_metadata) + _simplify_to_exact_constraints(testing_metadata) simplified = testing_metadata.meta["requirements"] assert simplified["build"] == simplified["host"] @@ -47,9 +54,7 @@ def test_reduce_duplicate_specs(testing_metadata): def test_pin_run_as_build_preserve_string(testing_metadata): m = testing_metadata m.config.variant["pin_run_as_build"]["pkg"] = {"max_pin": "x.x"} - dep = render.get_pin_from_build( - m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"} - ) + dep = get_pin_from_build(m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"}) assert dep == "pkg >=1.2.3,<1.3.0a0 somestring*" @@ -74,7 +79,7 @@ def test_find_package( """ Testing our ability to find the package directory or archive. - The render.find_pkg_dir_or_file_in_pkgs_dirs function will scan the various + The find_pkg_dir_or_file_in_pkgs_dirs function will scan the various locations where packages may exist locally and returns the full package path if found. 
""" @@ -105,9 +110,27 @@ def test_find_package( package = other_cache / distribution # attempt to find the package and check we found the expected path - found = render.find_pkg_dir_or_file_in_pkgs_dirs( + found = find_pkg_dir_or_file_in_pkgs_dirs( distribution, testing_metadata, files_only=files_only, ) assert package is found is None or package.samefile(found) + + +def test_open_recipe(tmp_path: Path): + path = tmp_path / "missing" + with pytest.raises( + SystemExit, + match=rf"Error: non-existent: {re.escape(str(path))}", + ): + with open_recipe(path): + pass + + (path := tmp_path / "bad.ext").touch() + with pytest.raises( + SystemExit, + match=rf"Error: non-recipe: {re.escape(str(path))}", + ): + with open_recipe(path): + pass From 15f33236b438d9b1a5c371ecc9cdbdac87e5bad9 Mon Sep 17 00:00:00 2001 From: Tobias Fischer Date: Tue, 26 Mar 2024 03:13:16 +1000 Subject: [PATCH 313/366] Only fetch `lfs` files for specific `git_ref` (#5202) --- conda_build/source.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/conda_build/source.py b/conda_build/source.py index 3583f0b8ce..f1cfdb2613 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -202,19 +202,19 @@ def unpack( shutil.move(os.path.join(tmpdir, f), os.path.join(src_dir, f)) -def check_git_lfs(git, cwd): +def check_git_lfs(git, cwd, git_ref): try: - lfs_list_output = check_output_env([git, "lfs", "ls-files", "--all"], cwd=cwd) + lfs_list_output = check_output_env([git, "lfs", "ls-files", git_ref], cwd=cwd) return lfs_list_output and lfs_list_output.strip() except CalledProcessError: return False -def git_lfs_fetch(git, cwd, stdout, stderr): +def git_lfs_fetch(git, cwd, git_ref, stdout, stderr): lfs_version = check_output_env([git, "lfs", "version"], cwd=cwd) log.info(lfs_version) check_call_env( - [git, "lfs", "fetch", "origin", "--all"], cwd=cwd, stdout=stdout, stderr=stderr + [git, "lfs", "fetch", "origin", git_ref], cwd=cwd, stdout=stdout, stderr=stderr ) @@ -273,8 +273,8 @@ def git_mirror_checkout_recursive( check_call_env( [git, "fetch"], cwd=mirror_dir, stdout=stdout, stderr=stderr ) - if check_git_lfs(git, mirror_dir): - git_lfs_fetch(git, mirror_dir, stdout, stderr) + if check_git_lfs(git, mirror_dir, git_ref): + git_lfs_fetch(git, mirror_dir, git_ref, stdout, stderr) else: # Unlike 'git clone', fetch doesn't automatically update the cache's HEAD, # So here we explicitly store the remote HEAD in the cache's local refs/heads, @@ -318,8 +318,8 @@ def git_mirror_checkout_recursive( check_call_env( args + [git_url, git_mirror_dir], stdout=stdout, stderr=stderr ) - if check_git_lfs(git, mirror_dir): - git_lfs_fetch(git, mirror_dir, stdout, stderr) + if check_git_lfs(git, mirror_dir, git_ref): + git_lfs_fetch(git, mirror_dir, git_ref, stdout, stderr) except CalledProcessError: # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. 
From 054309a6978373ae9650d55f02eb5f89b3427c83 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 17:05:43 -0500 Subject: [PATCH 314/366] [pre-commit.ci] pre-commit autoupdate (#5254) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.3 → v0.3.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.3...v0.3.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9335532d1f..2db1b692b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.3 + rev: v0.3.4 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From 9267daefdec73cb5554b4d10f4c4c9575bd31748 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 26 Mar 2024 15:58:29 -0500 Subject: [PATCH 315/366] Treat deprecation warnings as errors (#5244) * Treat deprecation warnings as errors * Fix package_cache import * Use root_prefix instead of root_dir * Ignore numpy.distutils warning --- conda_build/build.py | 2 +- conda_build/conda_interface.py | 6 +++--- conda_build/config.py | 2 +- conda_build/environ.py | 2 +- conda_build/os_utils/external.py | 10 +++++----- conda_build/utils.py | 6 +++--- news/5222-deprecating-conda_interface | 2 +- pyproject.toml | 10 ++++++++++ tests/test_environ.py | 14 +++++++++++--- 9 files changed, 36 insertions(+), 18 deletions(-) diff --git a/conda_build/build.py b/conda_build/build.py index 09643c8f18..1abf843cd9 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -1421,7 +1421,7 @@ def write_about_json(m): extra.update(m.config.extra_meta) d["root_pkgs"] = [ f"{prec.name} {prec.version} {prec.build}" - for prec in PrefixData(context.root_dir).iter_records() + for prec in PrefixData(context.root_prefix).iter_records() ] # Include the extra section of the metadata in the about.json d["extra"] = extra diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index bb92f6b8b3..5e56bdbb89 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -11,7 +11,7 @@ from conda.base.context import context, determine_target_prefix from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 from conda.base.context import reset_context as _reset_context -from conda.core.package_cache import ProgressiveFetchExtract # noqa: F401 +from conda.core.package_cache_data import ProgressiveFetchExtract # noqa: F401 from conda.exceptions import ( # noqa: F401 CondaError, CondaHTTPError, @@ -112,8 +112,8 @@ "24.5", "24.7", "root_dir", - context.root_dir, - addendum="Use `conda.base.context.context.root_dir` instead.", + context.root_prefix, + addendum="Use `conda.base.context.context.root_prefix` instead.", ) deprecated.constant( "24.5", diff --git a/conda_build/config.py b/conda_build/config.py index 6e6cc8e28c..fd599506bc 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -455,7 +455,7 @@ def croot(self) -> str: elif _bld_root_rc: self._croot = abspath(expanduser(expandvars(_bld_root_rc))) elif context.root_writable: - self._croot = join(context.root_dir, "conda-bld") + self._croot = 
join(context.root_prefix, "conda-bld") else: self._croot = abspath(expanduser("~/conda-bld")) return self._croot diff --git a/conda_build/environ.py b/conda_build/environ.py index a6fca61837..b3b02ce70b 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -421,7 +421,7 @@ def conda_build_vars(prefix, config): "HTTP_PROXY": os.getenv("HTTP_PROXY", ""), "REQUESTS_CA_BUNDLE": os.getenv("REQUESTS_CA_BUNDLE", ""), "DIRTY": "1" if config.dirty else "", - "ROOT": context.root_dir, + "ROOT": context.root_prefix, } diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index f1d91d098b..3ea8216a53 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -17,10 +17,10 @@ def find_executable(executable, prefix=None, all_matches=False): result = None if on_win: dir_paths = [ - join(context.root_dir, "Scripts"), - join(context.root_dir, "Library\\mingw-w64\\bin"), - join(context.root_dir, "Library\\usr\\bin"), - join(context.root_dir, "Library\\bin"), + join(context.root_prefix, "Scripts"), + join(context.root_prefix, "Library\\mingw-w64\\bin"), + join(context.root_prefix, "Library\\usr\\bin"), + join(context.root_prefix, "Library\\bin"), ] if prefix: dir_paths[0:0] = [ @@ -31,7 +31,7 @@ def find_executable(executable, prefix=None, all_matches=False): ] else: dir_paths = [ - join(context.root_dir, "bin"), + join(context.root_prefix, "bin"), ] if prefix: dir_paths.insert(0, join(prefix, "bin")) diff --git a/conda_build/utils.py b/conda_build/utils.py index 24303d9ba5..d6498da043 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -84,7 +84,7 @@ on_linux = sys.platform == "linux" codec = getpreferredencoding() or "utf-8" -root_script_dir = os.path.join(context.root_dir, "Scripts" if on_win else "bin") +root_script_dir = os.path.join(context.root_prefix, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE @@ -708,7 +708,7 @@ def merge_tree( # at any time, but the lock within this process should all be tied to the same tracking # mechanism. _lock_folders = ( - os.path.join(context.root_dir, "locks"), + os.path.join(context.root_prefix, "locks"), os.path.expanduser(os.path.join("~", ".conda_build_locks")), ) @@ -1343,7 +1343,7 @@ class LoggingContext: "dotupdate", "stdoutlog", "requests", - "conda.core.package_cache", + "conda.core.package_cache_data", "conda.plan", "conda.gateways.disk.delete", "conda_build", diff --git a/news/5222-deprecating-conda_interface b/news/5222-deprecating-conda_interface index 288f24474b..d7737f9368 100644 --- a/news/5222-deprecating-conda_interface +++ b/news/5222-deprecating-conda_interface @@ -14,7 +14,7 @@ * Deprecate `conda_build.conda_interface.envs_dirs` constant. Use `conda.base.context.context.envs_dirs` instead. (#5222) * Deprecate `conda_build.conda_interface.pkgs_dirs` constant. Use `conda.base.context.context.pkgs_dirs` instead. (#5222) * Deprecate `conda_build.conda_interface.cc_platform` constant. Use `conda.base.context.context.platform` instead. (#5222) -* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_dir` instead. (#5222) +* Deprecate `conda_build.conda_interface.root_dir` constant. Use `conda.base.context.context.root_prefix` instead. (#5222) * Deprecate `conda_build.conda_interface.root_writable` constant. Use `conda.base.context.context.root_writable` instead. 
(#5222) * Deprecate `conda_build.conda_interface.subdir` constant. Use `conda.base.context.context.subdir` instead. (#5222) * Deprecate `conda_build.conda_interface.create_default_packages` constant. Use `conda.base.context.context.create_default_packages` instead. (#5222) diff --git a/pyproject.toml b/pyproject.toml index dd3e95dd56..71210db6f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -150,3 +150,13 @@ markers = [ "no_default_testing_config: used internally to disable monkeypatching for testing_config", "benchmark: execute the benchmark tests", ] +filterwarnings = [ + # elevate conda's deprecated warning to an error + "error::PendingDeprecationWarning:conda", + "error::DeprecationWarning:conda", + # elevate conda-build's deprecated warning to an error + "error::PendingDeprecationWarning:conda_build", + "error::DeprecationWarning:conda_build", + # ignore numpy.distutils error + 'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data', +] diff --git a/tests/test_environ.py b/tests/test_environ.py index d45fc8ed7f..327accaeea 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -3,14 +3,17 @@ import os import sys +import pytest from conda.core.prefix_data import PrefixData +from packaging.version import parse -from conda_build import environ +import conda_build +from conda_build.environ import Environment, create_env def test_environment_creation_preserves_PATH(testing_workdir, testing_config): ref_path = os.environ["PATH"] - environ.create_env( + create_env( testing_workdir, ["python"], env="host", @@ -22,7 +25,12 @@ def test_environment_creation_preserves_PATH(testing_workdir, testing_config): def test_environment(): """Asserting PrefixData can accomplish the same thing as Environment.""" - assert (specs := environ.Environment(sys.prefix).package_specs()) + with pytest.warns( + PendingDeprecationWarning + if parse(conda_build.__version__) < parse("24.3") + else DeprecationWarning, + ): + assert (specs := Environment(sys.prefix).package_specs()) assert specs == [ f"{prec.name} {prec.version} {prec.build}" for prec in PrefixData(sys.prefix).iter_records() From 5fc1f8ff6c19702da17af6a5a95eaf1fdc386ca8 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Tue, 2 Apr 2024 09:08:41 -0500 Subject: [PATCH 316/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5266)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index b98940e079..94143662d3 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -14,7 +14,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v0.6.0 + - uses: actions/add-to-project@v1.0.0 with: # issues are added to the Planning project # PRs are added to the Review project From 805d41f7bdea6dd899018e8c523cc738cef99975 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Tue, 2 Apr 2024 20:16:29 -0500 Subject: [PATCH 317/366] More `conda_build.conda_interface` deprecations (#5251) --- conda_build/api.py | 3 +- conda_build/bdist_conda.py | 3 +- conda_build/build.py | 4 +- conda_build/cli/main_skeleton.py | 4 +- conda_build/conda_interface.py | 181 +++++++++++++++++++++----- conda_build/environ.py | 9 +- conda_build/index.py | 3 +- 
conda_build/os_utils/pyldd.py | 11 +- conda_build/render.py | 10 +- conda_build/skeletons/cpan.py | 3 +- conda_build/skeletons/pypi.py | 2 +- conda_build/source.py | 8 +- conda_build/utils.py | 2 +- news/5251-deprecating-conda_interface | 34 +++++ tests/test_api_build.py | 11 +- 15 files changed, 208 insertions(+), 80 deletions(-) create mode 100644 news/5251-deprecating-conda_interface diff --git a/conda_build/api.py b/conda_build/api.py index f83e235354..2d4e3ef567 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -48,7 +48,8 @@ def render( Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" from collections import OrderedDict - from .conda_interface import NoPackagesFoundError + from conda.exceptions import NoPackagesFoundError + from .exceptions import DependencyNeedsBuildingError from .render import finalize_metadata, render_recipe diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 3f16238d9c..6e965c409d 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -1,5 +1,6 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import configparser import sys import time from collections import defaultdict @@ -10,7 +11,7 @@ from . import api from .build import handle_anaconda_upload -from .conda_interface import StringIO, configparser, spec_from_line +from .conda_interface import StringIO, spec_from_line from .config import Config from .deprecations import deprecated from .metadata import MetaData diff --git a/conda_build/build.py b/conda_build/build.py index 1abf843cd9..7310ca5e61 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -25,19 +25,17 @@ from conda import __version__ as conda_version from conda.base.context import context, reset_context from conda.core.prefix_data import PrefixData +from conda.exceptions import CondaError, NoPackagesFoundError, UnsatisfiableError from conda.models.channel import Channel from . import __version__ as conda_build_version from . import environ, noarch_python, source, tarcheck, utils from .conda_interface import ( - CondaError, EntityEncoder, FileMode, MatchSpec, - NoPackagesFoundError, PathType, TemporaryDirectory, - UnsatisfiableError, env_path_backup_var_exists, prefix_placeholder, url_path, diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index c33661f986..1a87487e26 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import importlib import logging import os import pkgutil import sys +from importlib import import_module from typing import TYPE_CHECKING from .. import api @@ -43,7 +43,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: for skeleton in skeletons: if skeleton.startswith("_"): continue - module = importlib.import_module("conda_build.skeletons." + skeleton) + module = import_module("conda_build.skeletons." 
+ skeleton) module.add_parser(repos) return parser, parser.parse_args(args) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 5e56bdbb89..c6e31b24af 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -2,25 +2,26 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import configparser # noqa: F401 -import os -from functools import partial -from importlib import import_module # noqa: F401 +import configparser as _configparser +import os as _os +from functools import partial as _partial +from importlib import import_module as _import_module from conda import __version__ -from conda.base.context import context, determine_target_prefix -from conda.base.context import non_x86_machines as non_x86_linux_machines # noqa: F401 +from conda.base.context import context as _context +from conda.base.context import determine_target_prefix as _determine_target_prefix +from conda.base.context import non_x86_machines as _non_x86_linux_machines from conda.base.context import reset_context as _reset_context -from conda.core.package_cache_data import ProgressiveFetchExtract # noqa: F401 -from conda.exceptions import ( # noqa: F401 - CondaError, - CondaHTTPError, - LinkError, - LockError, - NoPackagesFoundError, - PaddingError, - UnsatisfiableError, +from conda.core.package_cache_data import ( + ProgressiveFetchExtract as _ProgressiveFetchExtract, ) +from conda.exceptions import CondaError as _CondaError +from conda.exceptions import CondaHTTPError as _CondaHTTPError +from conda.exceptions import LinkError as _LinkError +from conda.exceptions import LockError as _LockError +from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError +from conda.exceptions import PaddingError as _PaddingError +from conda.exceptions import UnsatisfiableError as _UnsatisfiableError from conda.exports import ( # noqa: F401 ArgumentParser, Channel, @@ -60,12 +61,124 @@ win_path_to_unix, ) from conda.exports import get_index as _get_index -from conda.gateways.disk.read import compute_sum -from conda.models.channel import get_conda_build_local_url # noqa: F401 +from conda.gateways.disk.read import compute_sum as _compute_sum +from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url from .deprecations import deprecated -deprecated.constant("24.1.0", "24.5.0", "get_index", _get_index) +deprecated.constant( + "24.5", + "24.7", + "configparser", + _configparser, + addendum="Use `configparser` instead.", +) +deprecated.constant("24.5", "24.7", "os", _os, addendum="Use `os` instead.") +deprecated.constant( + "24.5", + "24.7", + "partial", + _partial, + addendum="Use `functools.partial` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "import_module", + _import_module, + addendum="Use `importlib.import_module` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "context", + _context, + addendum="Use `conda.base.context.context` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "determine_target_prefix", + _determine_target_prefix, + addendum="Use `conda.base.context.determine_target_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "non_x86_linux_machines", + _non_x86_linux_machines, + addendum="Use `conda.base.context.non_x86_machines` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "ProgressiveFetchExtract", + _ProgressiveFetchExtract, + addendum="Use `conda.core.package_cache_data.ProgressiveFetchExtract` instead.", +) +deprecated.constant( + "24.5", + 
"24.7", + "CondaError", + _CondaError, + addendum="Use `conda.exceptions.CondaError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaHTTPError", + _CondaHTTPError, + addendum="Use `conda.exceptions.CondaHTTPError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "LinkError", + _LinkError, + addendum="Use `conda.exceptions.LinkError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "LockError", + _LockError, + addendum="Use `conda.exceptions.LockError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "NoPackagesFoundError", + _NoPackagesFoundError, + addendum="Use `conda.exceptions.NoPackagesFoundError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PaddingError", + _PaddingError, + addendum="Use `conda.exceptions.PaddingError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "UnsatisfiableError", + _UnsatisfiableError, + addendum="Use `conda.exceptions.UnsatisfiableError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "get_conda_build_local_url", + _get_conda_build_local_url, + addendum="Use `conda.models.channel.get_conda_build_local_url` instead.", +) +deprecated.constant( + "24.1.0", + "24.5.0", + "get_index", + _get_index, + addendum="Use `conda.core.index.get_index` instead.", +) deprecated.constant( "24.5", "24.7", @@ -77,63 +190,63 @@ "24.5", "24.7", "binstar_upload", - context.binstar_upload, + _context.binstar_upload, addendum="Use `conda.base.context.context.binstar_upload` instead.", ) deprecated.constant( "24.5", "24.7", "default_python", - context.default_python, + _context.default_python, addendum="Use `conda.base.context.context.default_python` instead.", ) deprecated.constant( "24.5", "24.7", "envs_dirs", - context.envs_dirs, + _context.envs_dirs, addendum="Use `conda.base.context.context.envs_dirs` instead.", ) deprecated.constant( "24.5", "24.7", "pkgs_dirs", - list(context.pkgs_dirs), + list(_context.pkgs_dirs), addendum="Use `conda.base.context.context.pkgs_dirs` instead.", ) deprecated.constant( "24.5", "24.7", "cc_platform", - context.platform, + _context.platform, addendum="Use `conda.base.context.context.platform` instead.", ) deprecated.constant( "24.5", "24.7", "root_dir", - context.root_prefix, + _context.root_prefix, addendum="Use `conda.base.context.context.root_prefix` instead.", ) deprecated.constant( "24.5", "24.7", "root_writable", - context.root_writable, + _context.root_writable, addendum="Use `conda.base.context.context.root_writable` instead.", ) deprecated.constant( "24.5", "24.7", "subdir", - context.subdir, + _context.subdir, addendum="Use `conda.base.context.context.subdir` instead.", ) deprecated.constant( "24.5", "24.7", "create_default_packages", - context.create_default_packages, + _context.create_default_packages, addendum="Use `conda.base.context.context.create_default_packages` instead.", ) @@ -141,17 +254,17 @@ "24.5", "24.7", "get_rc_urls", - lambda: list(context.channels), + lambda: list(_context.channels), addendum="Use `conda.base.context.context.channels` instead.", ) deprecated.constant( "24.5", "24.7", "get_prefix", - partial(determine_target_prefix, context), + _partial(_determine_target_prefix, _context), addendum="Use `conda.base.context.context.target_prefix` instead.", ) -cc_conda_build = context.conda_build if hasattr(context, "conda_build") else {} +cc_conda_build = _context.conda_build if hasattr(_context, "conda_build") else {} deprecated.constant( "24.5", @@ -163,7 +276,7 @@ # When deactivating envs (e.g. 
switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. -env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None) +env_path_backup_var_exists = _os.getenv("CONDA_PATH_BACKUP") @deprecated( @@ -179,7 +292,7 @@ def handle_proxy_407(x, y): "24.3", "24.5", "hashsum_file", - compute_sum, + _compute_sum, addendum="Use `conda.gateways.disk.read.compute_sum` instead.", ) @@ -189,8 +302,8 @@ def handle_proxy_407(x, y): "24.5", addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.", ) -def md5_file(path: str | os.PathLike) -> str: - return compute_sum(path, "md5") +def md5_file(path: str | _os.PathLike) -> str: + return _compute_sum(path, "md5") deprecated.constant( diff --git a/conda_build/environ.py b/conda_build/environ.py index b3b02ce70b..36f6b78171 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -28,7 +28,7 @@ from conda.common.io import env_vars from conda.core.index import LAST_CHANNEL_URLS from conda.core.link import PrefixSetup, UnlinkLinkTransaction -from conda.core.package_cache_data import PackageCacheData +from conda.core.package_cache_data import PackageCacheData, ProgressiveFetchExtract from conda.core.prefix_data import PrefixData from conda.exceptions import ( CondaError, @@ -42,12 +42,7 @@ from conda.models.match_spec import MatchSpec from . import utils -from .conda_interface import ( - Channel, - PackageRecord, - ProgressiveFetchExtract, - TemporaryDirectory, -) +from .conda_interface import Channel, PackageRecord, TemporaryDirectory from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list diff --git a/conda_build/index.py b/conda_build/index.py index e4d07a52ff..28a470f5c4 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -10,10 +10,11 @@ from conda.base.context import context from conda.core.index import get_index +from conda.exceptions import CondaHTTPError from conda_index.index import update_index as _update_index from . 
import utils -from .conda_interface import CondaHTTPError, url_path +from .conda_interface import url_path from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index a3f0d609d0..ceffb1dbc6 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -9,6 +9,7 @@ import re import struct import sys +from functools import partial from pathlib import Path from ..utils import ensure_list, get_logger, on_linux, on_mac, on_win @@ -1208,8 +1209,6 @@ def main(argv): def main_maybe_test(): if sys.argv[1] == "test": - import functools - tool = sys.argv[2] if tool != "otool" and tool != "ldd": if on_mac: @@ -1228,21 +1227,21 @@ def main_maybe_test(): else: sysroot = "" if tool == "otool": - test_this = functools.partial( + test_this = partial( inspect_linkages, sysroot=sysroot, resolve_filenames=False, recurse=False, ) if on_mac: - test_that = functools.partial(inspect_linkages_otool) + test_that = partial(inspect_linkages_otool) SOEXT = "dylib" elif tool == "ldd": - test_this = functools.partial( + test_this = partial( inspect_linkages, sysroot=sysroot, resolve_filenames=True, recurse=True ) if on_linux: - test_that = functools.partial(inspect_linkages_ldd) + test_that = partial(inspect_linkages_ldd) SOEXT = "so" # Find a load of dylibs or elfs and compare # the output against 'otool -L' or 'ldd' diff --git a/conda_build/render.py b/conda_build/render.py index 90061126cf..be17eaa461 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -27,15 +27,11 @@ import yaml from conda.base.context import context +from conda.core.package_cache_data import ProgressiveFetchExtract +from conda.exceptions import UnsatisfiableError from . import environ, exceptions, source, utils -from .conda_interface import ( - PackageRecord, - ProgressiveFetchExtract, - TemporaryDirectory, - UnsatisfiableError, - specs_from_url, -) +from .conda_interface import PackageRecord, TemporaryDirectory, specs_from_url from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 8d1d996e1b..507086e4fe 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -20,11 +20,10 @@ import requests from conda.core.index import get_index +from conda.exceptions import CondaError, CondaHTTPError from .. import environ from ..conda_interface import ( - CondaError, - CondaHTTPError, MatchSpec, Resolve, TemporaryDirectory, diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index f39a5e2318..8dc6719f63 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -4,6 +4,7 @@ Tools for converting PyPI packages to conda recipes. 
""" +import configparser import keyword import logging import os @@ -26,7 +27,6 @@ from ..conda_interface import ( StringIO, - configparser, download, human_bytes, input, diff --git a/conda_build/source.py b/conda_build/source.py index f1cfdb2613..984fb239e8 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -15,14 +15,10 @@ from typing import TYPE_CHECKING from urllib.parse import urljoin +from conda.exceptions import CondaHTTPError from conda.gateways.disk.read import compute_sum -from .conda_interface import ( - CondaHTTPError, - TemporaryDirectory, - download, - url_path, -) +from .conda_interface import TemporaryDirectory, download, url_path from .exceptions import MissingDependency from .os_utils import external from .utils import ( diff --git a/conda_build/utils.py b/conda_build/utils.py index d6498da043..05b0d827ff 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -54,12 +54,12 @@ KNOWN_SUBDIRS, ) from conda.base.context import context +from conda.exceptions import CondaHTTPError from conda.gateways.disk.read import compute_sum from conda.models.channel import Channel from conda.models.match_spec import MatchSpec from .conda_interface import ( - CondaHTTPError, PackageRecord, StringIO, TemporaryDirectory, diff --git a/news/5251-deprecating-conda_interface b/news/5251-deprecating-conda_interface new file mode 100644 index 0000000000..9f5e48d6cd --- /dev/null +++ b/news/5251-deprecating-conda_interface @@ -0,0 +1,34 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface.context` singleton. Use `conda.base.context.context` instead. (#5251) +* Deprecate `conda_build.conda_interface.configparser` module. Use `configparser` instead. (#5251) +* Deprecate `conda_build.conda_interface.os` module. Use `os` instead. (#5251) +* Deprecate `conda_build.conda_interface.partial` function. Use `functools.partial` instead. (#5251) +* Deprecate `conda_build.conda_interface.import_module` function. Use `importlib.import_module` instead. (#5251) +* Deprecate `conda_build.conda_interface.determine_target_prefix` function. Use `conda.base.context.determine_target_prefix` instead. (#5251) +* Deprecate `conda_build.conda_interface.non_x86_linux_machines` constant. Use `conda.base.context.non_x86_machines` instead. (#5251) +* Deprecate `conda_build.conda_interface.ProgressiveFetchExtract` class. Use `conda.core.package_cache.ProgressiveFetchExtract` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaError` class. Use `conda.exceptions.CondaError` instead. (#5251) +* Deprecate `conda_build.conda_interface.CondaHTTPError` class. Use `conda.exceptions.CondaHTTPError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LinkError` class. Use `conda.exceptions.LinkError` instead. (#5251) +* Deprecate `conda_build.conda_interface.LockError` class. Use `conda.exceptions.LockError` instead. (#5251) +* Deprecate `conda_build.conda_interface.NoPackagesFoundError` class. Use `conda.exceptions.NoPackagesFoundError` instead. (#5251) +* Deprecate `conda_build.conda_interface.PaddingError` class. Use `conda.exceptions.PaddingError` instead. (#5251) +* Deprecate `conda_build.conda_interface.UnsatisfiableError` class. Use `conda.exceptions.UnsatisfiableError` instead. (#5251) +* Deprecate `conda_build.conda_interface.get_conda_build_local_url` class. Use `conda.models.channel.get_conda_build_local_url` instead. 
(#5251) + +### Docs + +* + +### Other + +* diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 7bedf3e215..5932bf4f1a 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -27,18 +27,13 @@ import yaml from binstar_client.commands import remove, show from binstar_client.errors import NotFound -from conda.base.context import reset_context +from conda.base.context import context, reset_context from conda.common.compat import on_linux, on_mac, on_win -from conda.exceptions import ClobberError, CondaMultiError +from conda.exceptions import ClobberError, CondaError, CondaMultiError, LinkError from conda_index.api import update_index from conda_build import __version__, api, exceptions -from conda_build.conda_interface import ( - CondaError, - LinkError, - context, - url_path, -) +from conda_build.conda_interface import url_path from conda_build.config import Config from conda_build.exceptions import ( CondaBuildException, From 19997b923201150d87c07e3cc2f28e6a53929c87 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 3 Apr 2024 07:18:13 -0500 Subject: [PATCH 318/366] Add `test_guess_interpreter` (#5265) --- conda_build/build.py | 71 ++++++++++++++++++++++---------------------- tests/test_build.py | 42 ++++++++++++++++++++++++++ 2 files changed, 78 insertions(+), 35 deletions(-) diff --git a/conda_build/build.py b/conda_build/build.py index 7310ca5e61..d0c939d9e8 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -4,6 +4,8 @@ Module that does most of the heavy lifting for the ``conda build`` command. """ +from __future__ import annotations + import fnmatch import json import os @@ -18,6 +20,7 @@ import warnings from collections import OrderedDict, deque from os.path import dirname, isdir, isfile, islink, join +from pathlib import Path import conda_package_handling.api import yaml @@ -1844,20 +1847,15 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): interpreter = output.get("script_interpreter") if not interpreter: - interpreter_and_args = guess_interpreter(output["script"]) - interpreter_and_args[0] = external.find_executable( - interpreter_and_args[0], metadata.config.build_prefix - ) - if not interpreter_and_args[0]: + args = list(guess_interpreter(output["script"])) + args[0] = external.find_executable(args[0], metadata.config.build_prefix) + if not args[0]: log.error( - "Did not find an interpreter to run {}, looked for {}".format( - output["script"], interpreter_and_args[0] - ) + "Did not find an interpreter to run %s, looked for %s", + output["script"], + args[0], ) - if ( - "system32" in interpreter_and_args[0] - and "bash" in interpreter_and_args[0] - ): + if "system32" in args[0] and "bash" in args[0]: print( "ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n" " use MSYS2 packages. 
Add `m2-base` and more (depending on what your" @@ -1865,7 +1863,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): ) sys.exit(1) else: - interpreter_and_args = interpreter.split(" ") + args = interpreter.split(" ") initial_files = utils.prefix_files(metadata.config.host_prefix) env_output = env.copy() @@ -1901,7 +1899,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): bundle_stats = {} utils.check_call_env( - interpreter_and_args + [dest_file], + [*args, dest_file], cwd=metadata.config.work_dir, env=env_output, stats=bundle_stats, @@ -2108,11 +2106,11 @@ def bundle_wheel(output, metadata: MetaData, env, stats): env["TOP_PKG_VERSION"] = env["PKG_VERSION"] env["PKG_VERSION"] = metadata.version() env["PKG_NAME"] = metadata.name() - interpreter_and_args = guess_interpreter(dest_file) + args = guess_interpreter(dest_file) bundle_stats = {} utils.check_call_env( - interpreter_and_args + [dest_file], + [*args, dest_file], cwd=metadata.config.work_dir, env=env, stats=bundle_stats, @@ -2892,28 +2890,31 @@ def build( return new_pkgs -def guess_interpreter(script_filename): - # -l is needed for MSYS2 as the login scripts set some env. vars (TMP, TEMP) - # Since the MSYS2 installation is probably a set of conda packages we do not - # need to worry about system environmental pollution here. For that reason I - # do not pass -l on other OSes. - extensions_to_run_commands = { - ".sh": ["bash.exe", "-el"] if utils.on_win else ["bash", "-e"], - ".bat": [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c"], - ".ps1": ["powershell", "-executionpolicy", "bypass", "-File"], - ".py": ["python"], - } - file_ext = os.path.splitext(script_filename)[1] - for ext, command in extensions_to_run_commands.items(): - if file_ext.lower().startswith(ext): - interpreter_command = command - break - else: +# -l is needed for MSYS2 as the login scripts set some env. vars (TMP, TEMP) +# Since the MSYS2 installation is probably a set of conda packages we do not +# need to worry about system environmental pollution here. For that reason I +# do not pass -l on other OSes. +INTERPRETER_BASH = ("bash.exe", "-el") if on_win else ("bash", "-e") +INTERPRETER_BAT = (os.getenv("COMSPEC", "cmd.exe"), "/d", "/c") +INTERPRETER_POWERSHELL = ("powershell", "-ExecutionPolicy", "ByPass", "-File") +INTERPRETER_PYTHON = ("python",) + + +def guess_interpreter(script_filename: str | os.PathLike | Path) -> tuple[str, ...]: + suffix = Path(script_filename).suffix + try: + return { + ".sh": INTERPRETER_BASH, + ".bat": INTERPRETER_BAT, + ".ps1": INTERPRETER_POWERSHELL, + ".py": INTERPRETER_PYTHON, + }[suffix] + except KeyError: + # KeyError: unknown suffix raise NotImplementedError( - f"Don't know how to run {file_ext} file. Please specify " + f"Don't know how to run {suffix} file. Please specify " f"script_interpreter for {script_filename} output" ) - return interpreter_command def warn_on_use_of_SRC_DIR(metadata): diff --git a/tests/test_build.py b/tests/test_build.py index 16bffa648f..eca9441af8 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -5,11 +5,15 @@ and is more unit-test oriented. 
""" +from __future__ import annotations + import json import os import sys +from contextlib import nullcontext from pathlib import Path +import pytest from conda.common.compat import on_win from conda_build import api, build @@ -302,3 +306,41 @@ def test_rewrite_output(testing_config, capsys): assert "LIBDIR=$PREFIX/lib" in stdout assert "PWD=$SRC_DIR" in stdout assert "BUILD_PREFIX=$BUILD_PREFIX" in stdout + + +@pytest.mark.parametrize( + "script,error,interpreter", + [ + # known interpreter + ("foo.sh", None, build.INTERPRETER_BASH), + ("foo.bat", None, build.INTERPRETER_BAT), + ("foo.ps1", None, build.INTERPRETER_POWERSHELL), + ("foo.py", None, build.INTERPRETER_PYTHON), + ("foo.bar.sh", None, build.INTERPRETER_BASH), + ("foo.bar.bat", None, build.INTERPRETER_BAT), + ("foo.bar.ps1", None, build.INTERPRETER_POWERSHELL), + ("foo.bar.py", None, build.INTERPRETER_PYTHON), + # unknown interpreter + ("foo", NotImplementedError, None), + ("foo.unknown", NotImplementedError, None), + ("foo.zsh", NotImplementedError, None), + ("foo.csh", NotImplementedError, None), + ("foo.exe", NotImplementedError, None), + ("foo.exe", NotImplementedError, None), + ("foo.sh.other", NotImplementedError, None), + ("foo.bat.other", NotImplementedError, None), + ("foo.ps1.other", NotImplementedError, None), + ("foo.py.other", NotImplementedError, None), + ("foo.sh_what", NotImplementedError, None), + ("foo.bat_what", NotImplementedError, None), + ("foo.ps1_what", NotImplementedError, None), + ("foo.py_what", NotImplementedError, None), + ], +) +def test_guess_interpreter( + script: str, + error: type[Exception] | None, + interpreter: list[str], +): + with pytest.raises(error) if error else nullcontext(): + assert build.guess_interpreter(script) == interpreter From 07c363d5c5586ee0074972c10950ec2d7adc2f5d Mon Sep 17 00:00:00 2001 From: jaimergp Date: Thu, 4 Apr 2024 18:18:14 +0200 Subject: [PATCH 319/366] Add devcontainer configuration (#5261) --- .devcontainer/apt-deps.txt | 5 +++++ .devcontainer/devcontainer.json | 38 +++++++++++++++++++++++++++++++++ .devcontainer/post_create.sh | 27 +++++++++++++++++++++++ .devcontainer/post_start.sh | 23 ++++++++++++++++++++ 4 files changed, 93 insertions(+) create mode 100644 .devcontainer/apt-deps.txt create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/post_create.sh create mode 100644 .devcontainer/post_start.sh diff --git a/.devcontainer/apt-deps.txt b/.devcontainer/apt-deps.txt new file mode 100644 index 0000000000..130c90c515 --- /dev/null +++ b/.devcontainer/apt-deps.txt @@ -0,0 +1,5 @@ +git +less +htop +nano +ssh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..9f5533a660 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,38 @@ +// For format details, see https://aka.ms/devcontainer.json +{ + "name": "Miniconda (default-channel=defaults)", + "image": "continuumio/miniconda3:latest", + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "bash /workspaces/conda-build/.devcontainer/post_create.sh", + // Use 'postStartCommand' to run commands after the container is started. + "postStartCommand": "bash /workspaces/conda-build/.devcontainer/post_start.sh", + + // Configure tool-specific properties. 
+ "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/opt/conda/bin/python", + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true + }, + "extensions": [ + "charliermarsh.ruff", + "eamodio.gitlens", + "ms-toolsai.jupyter" + ] + } + } + + // Adjust to connect as non-root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root", + +} diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh new file mode 100644 index 0000000000..73ea60380c --- /dev/null +++ b/.devcontainer/post_create.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -euo pipefail + +BASE_CONDA=${BASE_CONDA:-/opt/conda} +SRC_CONDA_BUILD=${SRC_CONDA_BUILD:-/workspaces/conda-build} + +if which apt-get > /dev/null; then + HERE=$(dirname $0) + echo "Installing system dependencies" + apt-get update + DEBIAN_FRONTEND=noninteractive xargs -a "$HERE/apt-deps.txt" apt-get install -y +fi + +# Clear history to avoid unneeded conflicts +echo "Clearing base history..." +echo '' > "$BASE_CONDA/conda-meta/history" + +echo "Installing dev dependencies" +"$BASE_CONDA/bin/conda" install \ + -n base \ + --yes \ + --quiet \ + --file "$SRC_CONDA_BUILD/tests/requirements.txt" \ + --file "$SRC_CONDA_BUILD/tests/requirements-Linux.txt" \ + --file "$SRC_CONDA_BUILD/tests/requirements-ci.txt" \ + "conda>=23.5.0" diff --git a/.devcontainer/post_start.sh b/.devcontainer/post_start.sh new file mode 100644 index 0000000000..804a44a84f --- /dev/null +++ b/.devcontainer/post_start.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +# This script assumes we are running in a Miniconda container where: +# - /opt/conda is the Miniconda or Miniforge installation directory +# - https://github.com/conda/conda is mounted at /workspaces/conda +# - https://github.com/conda/conda-libmamba-solver is mounted at +# /workspaces/conda-libmamba-solver +# - https://github.com/mamba-org/mamba is (optionally) mounted at +# /workspaces/mamba + +set -euo pipefail + +BASE_CONDA=${BASE_CONDA:-/opt/conda} +SRC_CONDA_BUILD=${SRC_CONDA_BUILD:-/workspaces/conda-build} + +echo "Installing conda-build in dev mode..." 
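# The editable install below uses `pip install -e ... --no-deps`; runtime
# dependencies are expected to come from the conda environment prepared by
# post_create.sh. As a rough sanity check (illustrative only, not part of the
# devcontainer scripts), the dev install should report a setuptools-scm style
# dev version, e.g.:
#   "$BASE_CONDA/bin/python" -c "import conda_build; print(conda_build.__version__)"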
+"$BASE_CONDA/bin/python" -m pip install -e "$SRC_CONDA_BUILD" --no-deps + +set -x +conda list -p "$BASE_CONDA" +conda info +conda config --show-sources +set +x From 0a0986b27f4023c63679b353e9450324a2f2fe72 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Thu, 4 Apr 2024 18:25:25 +0200 Subject: [PATCH 320/366] Change hatch-vcs `version-file` to support cleaner pip editable installs (#5262) --- .gitignore | 3 +++ conda_build/__init__.py | 15 ++++++++++++++- conda_build/__version__.py | 17 ----------------- pyproject.toml | 2 +- 4 files changed, 18 insertions(+), 19 deletions(-) delete mode 100644 conda_build/__version__.py diff --git a/.gitignore b/.gitignore index af26fb7888..0e1abb2d0b 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,6 @@ conda-build-testing rever/ **/.vscode **/_build + +# setuptools-scm +conda_build/_version.py diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 6b43ca6180..23802d5135 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -1,7 +1,20 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from .__version__ import __version__ +try: + from ._version import __version__ +except ImportError: + # _version.py is only created after running `pip install` + try: + from setuptools_scm import get_version + + __version__ = get_version(root="..", relative_to=__file__) + except (ImportError, OSError): + # ImportError: setuptools_scm isn't installed + # OSError: git isn't installed + # Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: + # YY.MM.MICRO.devN+gHASH[.dirty] + __version__ = "0.0.0.dev0+placeholder" __all__ = ["__version__"] diff --git a/conda_build/__version__.py b/conda_build/__version__.py deleted file mode 100644 index e664582d94..0000000000 --- a/conda_build/__version__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -"""Placeholder for the actual version code injected by hatch-vcs. - -The logic here is used during development installs only so keep it simple. 
-Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: - YY.MM.MICRO.devN+gHASH[.dirty] -""" - -try: - from setuptools_scm import get_version - - __version__ = get_version(root="..", relative_to=__file__) -except (ImportError, OSError): - # ImportError: setuptools_scm isn't installed - # OSError: git isn't installed - __version__ = "0.0.0.dev0+placeholder" diff --git a/pyproject.toml b/pyproject.toml index 71210db6f1..0360aaad4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ local_scheme = "dirty-tag" include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] [tool.hatch.build.hooks.vcs] -version-file = "conda_build/__version__.py" +version-file = "conda_build/_version.py" [tool.coverage.run] # store relative paths in coverage information From 6d118c25dc8931618ec77131620f662c7bd1408c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 5 Apr 2024 08:39:08 -0500 Subject: [PATCH 321/366] Catch LookupError (#5269) --- conda_build/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 23802d5135..f110bb23b5 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -9,9 +9,10 @@ from setuptools_scm import get_version __version__ = get_version(root="..", relative_to=__file__) - except (ImportError, OSError): + except (ImportError, OSError, LookupError): # ImportError: setuptools_scm isn't installed # OSError: git isn't installed + # LookupError: setuptools_scm unable to detect version # Conda-build abides by CEP-8 which specifies using CalVer, so the dev version is: # YY.MM.MICRO.devN+gHASH[.dirty] __version__ = "0.0.0.dev0+placeholder" From 6982cbdfe73bf9a190be75337755e90a90e11df5 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 5 Apr 2024 13:22:45 -0500 Subject: [PATCH 322/366] Sync with `conda.deprecations` (#5270) * Sync with conda.deprecations * Avoid RuntimeError in module detection * Update test_deprecations.py --- conda_build/deprecations.py | 140 +++++++------ tests/test_deprecations.py | 396 +++++++++++++++--------------------- 2 files changed, 247 insertions(+), 289 deletions(-) diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py index 494f0f85f1..f691b5192d 100644 --- a/conda_build/deprecations.py +++ b/conda_build/deprecations.py @@ -6,16 +6,22 @@ import sys import warnings +from argparse import Action from functools import wraps from types import ModuleType from typing import TYPE_CHECKING if TYPE_CHECKING: - from argparse import Action - from typing import Any, Callable + from argparse import ArgumentParser, Namespace + from typing import Any, Callable, ParamSpec, Self, TypeVar from packaging.version import Version + T = TypeVar("T") + P = ParamSpec("P") + + ActionType = TypeVar("ActionType", bound=type[Action]) + from . import __version__ @@ -30,7 +36,7 @@ class DeprecationHandler: _version_tuple: tuple[int, ...] | None _version_object: Version | None - def __init__(self, version: str): + def __init__(self: Self, version: str) -> None: """Factory to create a deprecation handle for the specified version. :param version: The version to compare against when checking deprecation statuses. @@ -52,14 +58,13 @@ def _get_version_tuple(version: str) -> tuple[int, ...] | None: except (AttributeError, ValueError): return None - def _version_less_than(self, version: str) -> bool: + def _version_less_than(self: Self, version: str) -> bool: """Test whether own version is less than the given version. 
:param version: Version string to compare against. """ - if self._version_tuple: - if version_tuple := self._get_version_tuple(version): - return self._version_tuple < version_tuple + if self._version_tuple and (version_tuple := self._get_version_tuple(version)): + return self._version_tuple < version_tuple # If self._version or version could not be represented by a simple # tuple[int, ...], do a more elaborate version parsing and comparison. @@ -68,19 +73,20 @@ def _version_less_than(self, version: str) -> bool: if self._version_object is None: try: - self._version_object = parse(self._version) + self._version_object = parse(self._version) # type: ignore[arg-type] except TypeError: + # TypeError: self._version could not be parsed self._version_object = parse("0.0.0.dev0+placeholder") return self._version_object < parse(version) def __call__( - self, + self: Self, deprecate_in: str, remove_in: str, *, addendum: str | None = None, stack: int = 0, - ) -> Callable[[Callable], Callable]: + ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Deprecation decorator for functions, methods, & classes. :param deprecate_in: Version in which code will be marked as deprecated. @@ -89,12 +95,12 @@ def __call__( :param stack: Optional stacklevel increment. """ - def deprecated_decorator(func: Callable) -> Callable: + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{func.__module__}.{func.__qualname__}", + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}", addendum=addendum, ) @@ -104,7 +110,7 @@ def deprecated_decorator(func: Callable) -> Callable: # alert user that it's time to remove something @wraps(func) - def inner(*args, **kwargs): + def inner(*args: P.args, **kwargs: P.kwargs) -> T: warnings.warn(message, category, stacklevel=2 + stack) return func(*args, **kwargs) @@ -114,7 +120,7 @@ def inner(*args, **kwargs): return deprecated_decorator def argument( - self, + self: Self, deprecate_in: str, remove_in: str, argument: str, @@ -122,7 +128,7 @@ def argument( rename: str | None = None, addendum: str | None = None, stack: int = 0, - ) -> Callable[[Callable], Callable]: + ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Deprecation decorator for keyword arguments. :param deprecate_in: Version in which code will be marked as deprecated. @@ -133,16 +139,16 @@ def argument( :param stack: Optional stacklevel increment. """ - def deprecated_decorator(func: Callable) -> Callable: + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{func.__module__}.{func.__qualname__}({argument})", + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}({argument})", # provide a default addendum if renaming and no addendum is provided - addendum=f"Use '{rename}' instead." - if rename and not addendum - else addendum, + addendum=( + f"Use '{rename}' instead." 
if rename and not addendum else addendum + ), ) # alert developer that it's time to remove something @@ -151,7 +157,7 @@ def deprecated_decorator(func: Callable) -> Callable: # alert user that it's time to remove something @wraps(func) - def inner(*args, **kwargs): + def inner(*args: P.args, **kwargs: P.kwargs) -> T: # only warn about argument deprecations if the argument is used if argument in kwargs: warnings.warn(message, category, stacklevel=2 + stack) @@ -168,22 +174,27 @@ def inner(*args, **kwargs): return deprecated_decorator def action( - self, + self: Self, deprecate_in: str, remove_in: str, - action: type[Action], + action: ActionType, *, addendum: str | None = None, stack: int = 0, - ): - class DeprecationMixin: - def __init__(inner_self, *args, **kwargs): + ) -> ActionType: + """Wraps any argparse.Action to issue a deprecation warning.""" + + class DeprecationMixin(Action): + category: type[Warning] + help: str # override argparse.Action's help type annotation + + def __init__(inner_self: Self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) category, message = self._generate_message( - deprecate_in, - remove_in, - ( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=( # option_string are ordered shortest to longest, # use the longest as it's the most descriptive f"`{inner_self.option_strings[-1]}`" @@ -192,6 +203,7 @@ def __init__(inner_self, *args, **kwargs): else f"`{inner_self.dest}`" ), addendum=addendum, + deprecation_type=FutureWarning, ) # alert developer that it's time to remove something @@ -201,18 +213,26 @@ def __init__(inner_self, *args, **kwargs): inner_self.category = category inner_self.help = message - def __call__(inner_self, parser, namespace, values, option_string=None): + def __call__( + inner_self: Self, + parser: ArgumentParser, + namespace: Namespace, + values: Any, + option_string: str | None = None, + ) -> None: # alert user that it's time to remove something warnings.warn( - inner_self.help, inner_self.category, stacklevel=7 + stack + inner_self.help, + inner_self.category, + stacklevel=7 + stack, ) super().__call__(parser, namespace, values, option_string) - return type(action.__name__, (DeprecationMixin, action), {}) + return type(action.__name__, (DeprecationMixin, action), {}) # type: ignore[return-value] def module( - self, + self: Self, deprecate_in: str, remove_in: str, *, @@ -235,7 +255,7 @@ def module( ) def constant( - self, + self: Self, deprecate_in: str, remove_in: str, constant: str, @@ -257,10 +277,10 @@ def constant( module, fullname = self._get_module(stack) # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - f"{fullname}.{constant}", - addendum, + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{fullname}.{constant}", + addendum=addendum, ) # alert developer that it's time to remove something @@ -280,10 +300,10 @@ def __getattr__(name: str) -> Any: raise AttributeError(f"module '{fullname}' has no attribute '{name}'") - module.__getattr__ = __getattr__ + module.__getattr__ = __getattr__ # type: ignore[method-assign] def topic( - self, + self: Self, deprecate_in: str, remove_in: str, *, @@ -301,10 +321,10 @@ def topic( """ # detect function name and generate message category, message = self._generate_message( - deprecate_in, - remove_in, - topic, - addendum, + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=topic, + addendum=addendum, ) # alert developer that it's time to remove something @@ -314,7 +334,7 @@ def 
topic( # alert user that it's time to remove something warnings.warn(message, category, stacklevel=2 + stack) - def _get_module(self, stack: int) -> tuple[ModuleType, str]: + def _get_module(self: Self, stack: int) -> tuple[ModuleType, str]: """Detect the module from which we are being called. :param stack: The stacklevel increment. @@ -333,13 +353,15 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: # AttributeError: frame.f_code.co_filename is undefined pass else: - for module in sys.modules.values(): - if not isinstance(module, ModuleType): + # use a copy of sys.modules to avoid RuntimeError during iteration + # see https://github.com/conda/conda/issues/13754 + for loaded in tuple(sys.modules.values()): + if not isinstance(loaded, ModuleType): continue - if not hasattr(module, "__file__"): + if not hasattr(loaded, "__file__"): continue - if module.__file__ == filename: - return (module, module.__name__) + if loaded.__file__ == filename: + return (loaded, loaded.__name__) # If above failed, do an expensive import and costly getmodule call. import inspect @@ -351,18 +373,22 @@ def _get_module(self, stack: int) -> tuple[ModuleType, str]: raise DeprecatedError("unable to determine the calling module") def _generate_message( - self, + self: Self, deprecate_in: str, remove_in: str, prefix: str, addendum: str | None, + *, + deprecation_type: type[Warning] = DeprecationWarning, ) -> tuple[type[Warning] | None, str]: - """Deprecation decorator for functions, methods, & classes. + """Generate the standardized deprecation message and determine whether the + deprecation is pending, active, or past. :param deprecate_in: Version in which code will be marked as deprecated. :param remove_in: Version in which code is expected to be removed. :param prefix: The message prefix, usually the function name. :param addendum: Additional messaging. Useful to indicate what to do instead. + :param deprecation_type: The warning type to use for active deprecations. :return: The warning category (if applicable) and the message. """ category: type[Warning] | None @@ -370,7 +396,7 @@ def _generate_message( category = PendingDeprecationWarning warning = f"is pending deprecation and will be removed in {remove_in}." elif self._version_less_than(remove_in): - category = DeprecationWarning + category = deprecation_type warning = f"is deprecated and will be removed in {remove_in}." 
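        # A minimal sketch of how these branches resolve, assuming a handler
        # constructed as DeprecationHandler("2.0") (versions here are
        # illustrative, not taken from the patch):
        #   deprecate_in="3.0", remove_in="4.0" -> pending (PendingDeprecationWarning)
        #   deprecate_in="2.0", remove_in="3.0" -> active (deprecation_type, i.e.
        #       DeprecationWarning by default, FutureWarning for CLI actions)
        #   deprecate_in="1.0", remove_in="2.0" -> past removal; category stays None
        #       and the calling decorator raises DeprecatedError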
else: category = None diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index a4ff2d1ea7..35383913fb 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -1,268 +1,200 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import sys from argparse import ArgumentParser, _StoreTrueAction +from contextlib import nullcontext +from typing import TYPE_CHECKING import pytest from conda_build.deprecations import DeprecatedError, DeprecationHandler - -@pytest.fixture(scope="module") -def deprecated_v1() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=1.0`.""" - return DeprecationHandler("1.0") - - -@pytest.fixture(scope="module") -def deprecated_v2() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=2.0`.""" - return DeprecationHandler("2.0") - - -@pytest.fixture(scope="module") -def deprecated_v3() -> DeprecationHandler: - """Fixture mocking the conda_build.deprecations.deprecated object with `version=3.0`.""" - return DeprecationHandler("3.0") - - -def test_function_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation function displays associated warning.""" - - @deprecated_v1("2.0", "3.0") - def foo(): - return True - - with pytest.deprecated_call(match="pending deprecation"): - assert foo() - - -def test_function_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated function displays associated warning.""" - - @deprecated_v2("2.0", "3.0") - def foo(): - return True - - with pytest.deprecated_call(match="deprecated"): - assert foo() - - -def test_function_remove(deprecated_v3: DeprecationHandler): - """A function existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - @deprecated_v3("2.0", "3.0") +if TYPE_CHECKING: + from packaging.version import Version + + from conda_build.deprecations import DevDeprecationType, UserDeprecationType + +PENDING = pytest.param( + DeprecationHandler("1.0"), # deprecated + PendingDeprecationWarning, # warning + "pending deprecation", # message + id="pending", +) +FUTURE = pytest.param( + DeprecationHandler("2.0"), # deprecated + FutureWarning, # warning + "deprecated", # message + id="future", +) +DEPRECATED = pytest.param( + DeprecationHandler("2.0"), # deprecated + DeprecationWarning, # warning + "deprecated", # message + id="deprecated", +) +REMOVE = pytest.param( + DeprecationHandler("3.0"), # deprecated + None, # warning + None, # message + id="remove", +) + +parametrize_user = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, FUTURE, REMOVE], +) +parametrize_dev = pytest.mark.parametrize( + "deprecated,warning,message", + [PENDING, DEPRECATED, REMOVE], +) + + +@parametrize_dev +def test_function( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated function displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + + @deprecated("2.0", "3.0") def foo(): return True - -def test_method_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation method displays associated warning.""" - - class Bar: - @deprecated_v1("2.0", "3.0") - def foo(self): - return True - - with pytest.deprecated_call(match="pending deprecation"): - assert Bar().foo() + with pytest.warns(warning, match=message): + assert 
foo() -def test_method_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated method displays associated warning.""" - - class Bar: - @deprecated_v2("2.0", "3.0") - def foo(self): - return True - - with pytest.deprecated_call(match="deprecated"): - assert Bar().foo() - - -def test_method_remove(deprecated_v3: DeprecationHandler): - """A method existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): +@parametrize_dev +def test_method( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated method displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): class Bar: - @deprecated_v3("2.0", "3.0") + @deprecated("2.0", "3.0") def foo(self): return True + with pytest.warns(warning, match=message): + assert Bar().foo() -def test_class_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation class displays associated warning.""" - - @deprecated_v1("2.0", "3.0") - class Foo: - pass - with pytest.deprecated_call(match="pending deprecation"): - assert Foo() +@parametrize_dev +def test_class( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated class displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): - -def test_class_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated class displays associated warning.""" - - @deprecated_v2("2.0", "3.0") - class Foo: - pass - - with pytest.deprecated_call(match="deprecated"): - assert Foo() - - -def test_class_remove(deprecated_v3: DeprecationHandler): - """A class existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - @deprecated_v3("2.0", "3.0") + @deprecated("2.0", "3.0") class Foo: pass + with pytest.warns(warning, match=message): + assert Foo() -def test_arguments_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation argument displays associated warning.""" - - @deprecated_v1.argument("2.0", "3.0", "three") - def foo(one, two): - return True - - # too many arguments, can only deprecate keyword arguments - with pytest.raises(TypeError): - assert foo(1, 2, 3) - - # alerting user to pending deprecation - with pytest.deprecated_call(match="pending deprecation"): - assert foo(1, 2, three=3) - # normal usage not needing deprecation - assert foo(1, 2) +@parametrize_dev +def test_arguments( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argument displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): - -def test_arguments_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated argument displays associated warning.""" - - @deprecated_v2.argument("2.0", "3.0", "three") - def foo(one, two): - return True - - # too many arguments, can only deprecate keyword arguments - with pytest.raises(TypeError): - assert foo(1, 2, 3) - - # alerting user to pending deprecation - with pytest.deprecated_call(match="deprecated"): - assert foo(1, 2, three=3) - - # normal usage not needing deprecation - assert foo(1, 2) - - -def test_arguments_remove(deprecated_v3: DeprecationHandler): - """An argument existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - - 
@deprecated_v3.argument("2.0", "3.0", "three") + @deprecated.argument("2.0", "3.0", "three") def foo(one, two): return True - -def test_action_pending(deprecated_v1: DeprecationHandler): - """Calling a pending deprecation argparse.Action displays associated warning.""" - parser = ArgumentParser() - parser.add_argument( - "--foo", action=deprecated_v1.action("2.0", "3.0", _StoreTrueAction) - ) - - with pytest.deprecated_call(match="pending deprecation"): - parser.parse_args(["--foo"]) - - -def test_action_deprecated(deprecated_v2: DeprecationHandler): - """Calling a deprecated argparse.Action displays associated warning.""" - parser = ArgumentParser() - parser.add_argument( - "--foo", action=deprecated_v2.action("2.0", "3.0", _StoreTrueAction) - ) - - with pytest.deprecated_call(match="deprecated"): - parser.parse_args(["--foo"]) - - -def test_action_remove(deprecated_v3: DeprecationHandler): - """An argparse.Action existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - ArgumentParser().add_argument( - "--foo", action=deprecated_v3.action("2.0", "3.0", _StoreTrueAction) + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.warns(warning, match=message): + assert foo(1, 2, three=3) + + # normal usage not needing deprecation + assert foo(1, 2) + + +@parametrize_user +def test_action( + deprecated: DeprecationHandler, + warning: UserDeprecationType | None, + message: str | None, +) -> None: + """Calling a deprecated argparse.Action displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + parser = ArgumentParser() + parser.add_argument( + "--foo", + action=deprecated.action("2.0", "3.0", _StoreTrueAction), ) - -def test_module_pending(deprecated_v1: DeprecationHandler): - """Importing a pending deprecation module displays associated warning.""" - with pytest.deprecated_call(match="pending deprecation"): - deprecated_v1.module("2.0", "3.0") - - -def test_module_deprecated(deprecated_v2: DeprecationHandler): - """Importing a deprecated module displays associated warning.""" - with pytest.deprecated_call(match="deprecated"): - deprecated_v2.module("2.0", "3.0") - - -def test_module_remove(deprecated_v3: DeprecationHandler): - """A module existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.module("2.0", "3.0") - - -def test_constant_pending(deprecated_v1: DeprecationHandler): - """Using a pending deprecation constant displays associated warning.""" - deprecated_v1.constant("2.0", "3.0", "SOME_CONSTANT", 42) - module = sys.modules[__name__] - - with pytest.deprecated_call(match="pending deprecation"): - module.SOME_CONSTANT - - -def test_constant_deprecated(deprecated_v2: DeprecationHandler): - """Using a deprecated constant displays associated warning.""" - deprecated_v2.constant("2.0", "3.0", "SOME_CONSTANT", 42) - module = sys.modules[__name__] - - with pytest.deprecated_call(match="deprecated"): - module.SOME_CONSTANT - - -def test_constant_remove(deprecated_v3: DeprecationHandler): - """A constant existing past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.constant("2.0", "3.0", "SOME_CONSTANT", 42) - - -def test_topic_pending(deprecated_v1: DeprecationHandler): - """Reaching a pending deprecation topic displays associated warning.""" - with pytest.deprecated_call(match="pending 
deprecation"): - deprecated_v1.topic("2.0", "3.0", topic="Some special topic") - - -def test_topic_deprecated(deprecated_v2: DeprecationHandler): - """Reaching a deprecated topic displays associated warning.""" - with pytest.deprecated_call(match="deprecated"): - deprecated_v2.topic("2.0", "3.0", topic="Some special topic") - - -def test_topic_remove(deprecated_v3: DeprecationHandler): - """A topic reached past its removal version raises an error.""" - with pytest.raises(DeprecatedError): - deprecated_v3.topic("2.0", "3.0", topic="Some special topic") - - -def test_version_fallback(): - """Test that conda_build can run even if deprecations can't parse the version.""" - deprecated = DeprecationHandler(None) # type: ignore + with pytest.warns(warning, match=message): + parser.parse_args(["--foo"]) + + +@parametrize_dev +def test_module( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Importing a deprecated module displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.module("2.0", "3.0") + + +@parametrize_dev +def test_constant( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Using a deprecated constant displays associated warning (or error).""" + with nullcontext() if warning else pytest.raises(DeprecatedError): + deprecated.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + with pytest.warns(warning, match=message): + module.SOME_CONSTANT + + +@parametrize_dev +def test_topic( + deprecated: DeprecationHandler, + warning: DevDeprecationType | None, + message: str | None, +) -> None: + """Reaching a deprecated topic displays associated warning (or error).""" + with ( + pytest.warns(warning, match=message) + if warning + else pytest.raises(DeprecatedError) + ): + deprecated.topic("2.0", "3.0", topic="Some special topic") + + +def test_version_fallback() -> None: + """Test that conda can run even if deprecations can't parse the version.""" + deprecated = DeprecationHandler(None) # type: ignore[arg-type] assert deprecated._version_less_than("0") assert deprecated._version_tuple is None - version = deprecated._version_object # type: ignore + version: Version = deprecated._version_object # type: ignore[assignment] assert version.major == version.minor == version.micro == 0 From 268a3e8193daa7073143ecd7858817624aa8b153 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 13:43:12 -0500 Subject: [PATCH 323/366] [pre-commit.ci] pre-commit autoupdate (#5272) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) - [github.com/astral-sh/ruff-pre-commit: v0.3.4 → v0.3.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.4...v0.3.5) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2db1b692b1..40c5acfabb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ exclude: | repos: # generic verification and formatting - repo: https://github.com/pre-commit/pre-commit-hooks - 
rev: v4.5.0 + rev: v4.6.0 hooks: # standard end of line/end of file cleanup - id: mixed-line-ending @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.4 + rev: v0.3.5 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From 024e61d9786ad79694a6e77b85a58ba11655cff9 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Thu, 11 Apr 2024 13:44:04 -0500 Subject: [PATCH 324/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5268)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/ISSUE_TEMPLATE/epic.yml | 77 +++++++++++++++++++++++++++++---- .github/workflows/cla.yml | 2 +- .github/workflows/issues.yml | 4 +- .github/workflows/labels.yml | 8 ++-- .github/workflows/lock.yml | 2 +- .github/workflows/project.yml | 2 +- .github/workflows/stale.yml | 4 +- 7 files changed, 79 insertions(+), 20 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index f9c412b177..eca723f4b6 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -25,23 +25,82 @@ body: options: - label: I added a descriptive title required: true - - label: I searched open reports and couldn't find a duplicate + - label: I searched open issues and couldn't find a duplicate required: true + - type: textarea - id: summary + id: what attributes: - label: Summary + label: What? description: >- - Define the highlevel objectives to be accomplished in this epic. Include the - bigger picture of what is changing and/or the user story for why the - changes are desired/necessary. + What feature or problem will be addressed in this epic? + placeholder: Please describe here. validations: required: true + - type: textarea + id: why attributes: - label: Linked Issues & PRs - description: List all issues related to this epic. + label: Why? + description: >- + Why is the reported issue(s) a problem, or why is the proposed feature needed? + (Research and spike issues can be linked here.) value: | - - [ ] # + - [ ] + placeholder: Please describe here and/or link to relevant supporting issues. + validations: + required: true + + - type: textarea + id: user_impact + attributes: + label: User impact + description: >- + In what specific way(s) will users benefit from this change? (e.g. use cases or performance improvements) + placeholder: Please describe here. validations: required: true + + - type: textarea + id: goals + attributes: + label: Goals + description: >- + What goal(s) should this epic accomplish? + value: | + - [ ] + validations: + required: true + + - type: textarea + id: tasks + attributes: + label: Tasks + description: >- + What needs to be done to implement this change? + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocked_by + attributes: + label: 'This epic is blocked by:' + description: >- + Epics and issues that block this epic. + value: | + - [ ] + validations: + required: false + + - type: textarea + id: blocks + attributes: + label: 'This epic blocks:' + description: >- + Epics and issues that are blocked by this epic. 
+ value: | + - [ ] + validations: + required: false diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 773cb76c96..0f793ef038 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v24.2.0 + uses: conda/actions/check-cla@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 52b1cfee9e..8391b0ee68 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -24,12 +24,12 @@ jobs: runs-on: ubuntu-latest steps: # remove [pending::feedback] - - uses: actions-ecosystem/action-remove-labels@v1.3.0 + - uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 with: labels: ${{ env.FEEDBACK_LBL }} github_token: ${{ secrets.PROJECT_TOKEN }} # add [pending::support], if still open - - uses: actions-ecosystem/action-add-labels@v1.1.3 + - uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf if: github.event.issue.state == 'open' with: labels: ${{ env.SUPPORT_LBL }} diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 99ec60667f..45cb754d33 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -20,20 +20,20 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 - id: has_local - uses: andstor/file-existence-action@v3.0.0 + uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: files: ${{ env.LOCAL }} - name: Global Only - uses: EndBug/label-sync@v2.3.3 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'false' with: config-file: ${{ env.GLOBAL }} delete-other-labels: true dry-run: ${{ github.event.inputs.dryrun }} - name: Global & Local - uses: EndBug/label-sync@v2.3.3 + uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a if: steps.has_local.outputs.files_exists == 'true' with: config-file: | diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index 7fd6b91347..c4294bea31 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -18,7 +18,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v5 + - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 with: # Number of days of inactivity before a closed issue is locked issue-inactive-days: 365 diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 94143662d3..35a4fcfec0 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -14,7 +14,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@v1.0.0 + - uses: actions/add-to-project@2e5cc851ca7162e9eb510e6da6a5c64022e606a7 with: # issues are added to the Planning project # PRs are added to the Review project diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 2464e81e4b..cfc9528718 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -34,12 +34,12 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v24.2.0 + - uses: 
conda/actions/read-yaml@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e id: stale with: # Only issues with these labels are checked whether they are stale From b7ea4057ea2df0e65c2c62ef0c9580d33fc05d68 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 12 Apr 2024 16:02:57 -0500 Subject: [PATCH 325/366] Remove duplicate `extract_package_and_build_text` call (#5253) --- conda_build/metadata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 01f3367d03..d3eeb93d80 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1665,7 +1665,6 @@ def build_id(self): raise RuntimeError( f"Couldn't extract raw recipe text for {self.name()} output" ) - raw_recipe_text = self.extract_package_and_build_text() raw_manual_build_string = re.search(r"\s*string:", raw_recipe_text) # user setting their own build string. Don't modify it. if manual_build_string and not ( From 1b2f1e03e247956b6f0d0a6ee213396315992c06 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 14:32:29 -0400 Subject: [PATCH 326/366] [pre-commit.ci] pre-commit autoupdate (#5286) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.5 → v0.3.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.5...v0.3.7) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 40c5acfabb..89681e1206 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,7 +54,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.5 + rev: v0.3.7 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff From b1310ca2a7b555a82c6b3f57964695a030d7088b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Mon, 15 Apr 2024 23:45:04 +0200 Subject: [PATCH 327/366] Deprecate `HashableDict` in favor of `frozendict >=2.4.2` (#5284) --- conda_build/jinja_context.py | 17 ++++---- conda_build/metadata.py | 68 +++++++++++--------------------- conda_build/utils.py | 4 ++ news/5284-deprecate-HashableDict | 21 ++++++++++ pyproject.toml | 1 + recipe/meta.yaml | 1 + tests/requirements.txt | 1 + tests/test_jinja_context.py | 6 +-- 8 files changed, 61 insertions(+), 58 deletions(-) create mode 100644 news/5284-deprecate-HashableDict diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index cc5c3b24c7..6ec2195eb0 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -10,24 +10,18 @@ import time from functools import partial from io import StringIO, TextIOBase +from typing import TYPE_CHECKING from warnings import warn import jinja2 import yaml - -try: - import tomllib # Python 3.11 -except: - import tomli as tomllib - -from typing import TYPE_CHECKING +from frozendict import deepfreeze from . 
import _load_setup_py_data from .environ import get_dict as get_environ from .exceptions import CondaBuildException from .render import get_env_dependencies from .utils import ( - HashableDict, apply_pin_expressions, check_call_env, copy_into, @@ -38,6 +32,11 @@ ) from .variants import DEFAULT_COMPILERS +try: + import tomllib # Python 3.11 +except: + import tomli as tomllib + if TYPE_CHECKING: from typing import IO, Any @@ -298,7 +297,7 @@ def pin_compatible( # There are two cases considered here (so far): # 1. Good packages that follow semver style (if not philosophy). For example, 1.2.3 # 2. Evil packages that cram everything alongside a single major version. For example, 9b - key = (m.name(), HashableDict(m.config.variant)) + key = (m.name(), deepfreeze(m.config.variant)) if key in cached_env_dependencies: pins = cached_env_dependencies[key] else: diff --git a/conda_build/metadata.py b/conda_build/metadata.py index d3eeb93d80..07425e404e 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -18,6 +18,7 @@ from bs4 import UnicodeDammit from conda.base.context import context from conda.gateways.disk.read import compute_sum +from frozendict import deepfreeze from . import exceptions, utils, variants from .conda_interface import MatchSpec @@ -26,7 +27,6 @@ from .license_family import ensure_valid_license_family from .utils import ( DEFAULT_SUBDIRS, - HashableDict, ensure_list, expand_globs, find_recipe, @@ -956,15 +956,8 @@ def finalize_outputs_pass( fm = om if not output_d.get("type") or output_d.get("type").startswith("conda"): outputs[ - ( - fm.name(), - HashableDict( - { - k: copy.deepcopy(fm.config.variant[k]) - for k in fm.get_used_vars() - } - ), - ) + fm.name(), + deepfreeze({k: fm.config.variant[k] for k in fm.get_used_vars()}), ] = (output_d, fm) except exceptions.DependencyNeedsBuildingError as e: if not permit_unsatisfiable_variants: @@ -976,15 +969,13 @@ def finalize_outputs_pass( f"{e.packages}" ) outputs[ - ( - metadata.name(), - HashableDict( - { - k: copy.deepcopy(metadata.config.variant[k]) - for k in metadata.get_used_vars() - } - ), - ) + metadata.name(), + deepfreeze( + { + k: metadata.config.variant[k] + for k in metadata.get_used_vars() + } + ), ] = (output_d, metadata) # in-place modification base_metadata.other_outputs = outputs @@ -992,12 +983,8 @@ def finalize_outputs_pass( final_outputs = OrderedDict() for k, (out_d, m) in outputs.items(): final_outputs[ - ( - m.name(), - HashableDict( - {k: copy.deepcopy(m.config.variant[k]) for k in m.get_used_vars()} - ), - ) + m.name(), + deepfreeze({k: m.config.variant[k] for k in m.get_used_vars()}), ] = (out_d, m) return final_outputs @@ -2540,17 +2527,15 @@ def get_output_metadata_set( # also refine this collection as each output metadata object is # finalized - see the finalize_outputs_pass function all_output_metadata[ - ( - out_metadata.name(), - HashableDict( - { - k: copy.deepcopy(out_metadata.config.variant[k]) - for k in out_metadata.get_used_vars() - } - ), - ) + out_metadata.name(), + deepfreeze( + { + k: out_metadata.config.variant[k] + for k in out_metadata.get_used_vars() + } + ), ] = (out, out_metadata) - out_metadata_map[HashableDict(out)] = out_metadata + out_metadata_map[deepfreeze(out)] = out_metadata ref_metadata.other_outputs = out_metadata.other_outputs = ( all_output_metadata ) @@ -2577,12 +2562,7 @@ def get_output_metadata_set( ): conda_packages[ m.name(), - HashableDict( - { - k: copy.deepcopy(m.config.variant[k]) - for k in m.get_used_vars() - } - ), + deepfreeze({k: 
m.config.variant[k] for k in m.get_used_vars()}), ] = (output_d, m) elif output_d.get("type") == "wheel": if not output_d.get("requirements", {}).get("build") or not any( @@ -2719,11 +2699,7 @@ def get_used_vars(self, force_top_level=False, force_global=False): global used_vars_cache recipe_dir = self.path - # `HashableDict` does not handle lists of other dictionaries correctly. Also it - # is constructed inplace, taking references to sub-elements of the input dict - # and thus corrupting it. Also, this was being called in 3 places in this function - # so caching it is probably a good thing. - hashed_variants = HashableDict(copy.deepcopy(self.config.variant)) + hashed_variants = deepfreeze(self.config.variant) if hasattr(self.config, "used_vars"): used_vars = self.config.used_vars elif ( diff --git a/conda_build/utils.py b/conda_build/utils.py index 05b0d827ff..970073f4e2 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -70,6 +70,7 @@ win_path_to_unix, ) from .conda_interface import rm_rf as _rm_rf +from .deprecations import deprecated from .exceptions import BuildLockError if TYPE_CHECKING: @@ -1407,6 +1408,7 @@ def get_installed_packages(path): return installed +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def _convert_lists_to_sets(_dict): for k, v in _dict.items(): if hasattr(v, "keys"): @@ -1419,6 +1421,7 @@ def _convert_lists_to_sets(_dict): return _dict +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") class HashableDict(dict): """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" @@ -1430,6 +1433,7 @@ def __hash__(self): return hash(json.dumps(self, sort_keys=True)) +@deprecated("24.5", "24.7", addendum="Use `frozendict.deepfreeze` instead.") def represent_hashabledict(dumper, data): value = [] diff --git a/news/5284-deprecate-HashableDict b/news/5284-deprecate-HashableDict new file mode 100644 index 0000000000..c411443395 --- /dev/null +++ b/news/5284-deprecate-HashableDict @@ -0,0 +1,21 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.utils.HashableDict`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils._convert_lists_to_sets`. Use `frozendict.deepfreeze` instead. (#5284) +* Deprecate `conda_build.utils.represent_hashabledict`. Use `frozendict.deepfreeze` instead. 
(#5284) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 0360aaad4b..2096038835 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dependencies = [ "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", + "frozendict >=2.4.2", "jinja2", "jsonschema >=4.19", "libarchive-c", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index a9062803cb..8171f8167d 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -34,6 +34,7 @@ requirements: - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock + - frozendict >=2.4.2 - jinja2 - jsonschema >=4.19 - m2-patch >=2.6 # [win] diff --git a/tests/requirements.txt b/tests/requirements.txt index 5e94d4111a..e005250f59 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -5,6 +5,7 @@ conda-index >=0.4.0 conda-libmamba-solver # ensure we use libmamba conda-package-handling >=1.3 filelock +frozendict >=2.4.2 jinja2 jsonschema >=4.19 menuinst >=2 diff --git a/tests/test_jinja_context.py b/tests/test_jinja_context.py index 18ae32f7ab..f19ea31997 100644 --- a/tests/test_jinja_context.py +++ b/tests/test_jinja_context.py @@ -5,9 +5,9 @@ from typing import TYPE_CHECKING import pytest +from frozendict import deepfreeze from conda_build import jinja_context -from conda_build.utils import HashableDict if TYPE_CHECKING: from pathlib import Path @@ -99,7 +99,7 @@ def test_pin_subpackage_exact(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True) assert len(pin.split()) == 3 @@ -111,7 +111,7 @@ def test_pin_subpackage_expression(testing_metadata): testing_metadata.meta["outputs"] = [output_dict] fm = testing_metadata.get_output_metadata(output_dict) testing_metadata.other_outputs = { - (name, HashableDict(testing_metadata.config.variant)): (output_dict, fm) + (name, deepfreeze(testing_metadata.config.variant)): (output_dict, fm) } pin = jinja_context.pin_subpackage(testing_metadata, name) assert len(pin.split()) == 2 From 1cf2f1a4997224e9d8dc4e93fc315b7a2e210b23 Mon Sep 17 00:00:00 2001 From: Bianca Henderson Date: Tue, 16 Apr 2024 14:34:52 -0400 Subject: [PATCH 328/366] Update pre-commit & dependabot config (#5287) --- .github/dependabot.yml | 4 ++ .github/workflows/builds-review.yaml | 6 +-- .github/workflows/docs.yml | 2 +- .github/workflows/tests.yml | 56 ++++++++++++++-------------- .pre-commit-config.yaml | 6 +++ 5 files changed, 42 insertions(+), 32 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 07210519aa..139f804e5a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,3 +13,7 @@ updates: # Allow only production updates for Sphinx - dependency-name: "sphinx" dependency-type: "production" + - package-ecosystem: github-actions + directory: /.github/workflows + schedule: + interval: "weekly" diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index c10129b56f..8c57d87859 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -24,7 +24,7 @@ jobs: runs-on: ${{ matrix.runner }} steps: - name: Remove build label - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 with: github-token: 
${{ secrets.CANARY_ACTION_TOKEN }} script: | @@ -46,14 +46,14 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@v3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 with: ref: ${{ github.ref }} clean: true fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@v23.7.0 + uses: conda/actions/canary-release@cc126c36f1f75dd8f5138655dedbabf3fbfc45d7 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 18ea421b87..8f0a834341 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ee71e1a826..e80a02f323 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,13 +45,13 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' - name: Filter Changes - uses: dorny/paths-filter@v3 + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 id: filter with: filters: | @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 @@ -110,13 +110,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -148,13 +148,13 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -180,7 +180,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 @@ -188,13 +188,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-benchmark-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 
with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -226,7 +226,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@v2 + uses: CodSpeedHQ/action@1dbf41f0ae41cebfe61e084e535aebe533409b4d with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed @@ -259,7 +259,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 @@ -268,13 +268,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github\condarc run-post: false # skip post cleanup @@ -312,13 +312,13 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -357,7 +357,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 @@ -365,13 +365,13 @@ jobs: run: echo "HASH=${{ runner.os }}-${{ runner.arch }}-Py${{ matrix.python-version }}-${{ matrix.conda-version }}-${{ matrix.test-type }}-$(date -u "+%Y%m")" >> $GITHUB_ENV - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 with: path: ~/conda_pkgs_dir key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v3 + uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -406,13 +406,13 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ env.HASH }} path: | @@ -434,17 +434,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: test-results-${{ github.sha }}-all path: test-results-* - name: Test Summary - uses: test-summary/action@v2 + uses: test-summary/action@032c8a9cec6aaa3c20228112cae6ca10a3b29336 with: paths: test-results-*/test-report.xml @@ -456,7 +456,7 @@ jobs: runs-on: ubuntu-latest steps: 
- name: Determine Success - uses: re-actors/alls-green@v1.2.2 + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe with: # permit jobs to be skipped if there are no code changes (see changes job) allowed-skips: ${{ toJSON(needs) }} @@ -493,7 +493,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: ref: ${{ github.ref }} clean: true @@ -501,7 +501,7 @@ jobs: # Explicitly use Python 3.12 since each of the OSes has a different default Python - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: '3.12' @@ -527,7 +527,7 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create & Upload - uses: conda/actions/canary-release@v24.2.0 + uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89681e1206..55275ac4ee 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -61,6 +61,12 @@ repos: args: [--fix] # compatible replacement for black - id: ruff-format + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.1 + hooks: + # verify github syntaxes + - id: check-github-workflows + - id: check-dependabot - repo: meta # see https://pre-commit.com/#meta-hooks hooks: From fd4798f0261027593d179e5c474626b28e699fde Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 15:06:36 -0400 Subject: [PATCH 329/366] Bump actions/add-to-project from 1.0.0 to 1.0.1 in /.github/workflows (#5288) Bumps [actions/add-to-project](https://github.com/actions/add-to-project) from 1.0.0 to 1.0.1. - [Release notes](https://github.com/actions/add-to-project/releases) - [Commits](https://github.com/actions/add-to-project/compare/2e5cc851ca7162e9eb510e6da6a5c64022e606a7...9bfe908f2eaa7ba10340b31e314148fcfe6a2458) --- updated-dependencies: - dependency-name: actions/add-to-project dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 35a4fcfec0..e1d5d4de39 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -14,7 +14,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/add-to-project@2e5cc851ca7162e9eb510e6da6a5c64022e606a7 + - uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 with: # issues are added to the Planning project # PRs are added to the Review project From 579c66ded800b74c7d837c772419a77f10485166 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 15:06:44 -0400 Subject: [PATCH 330/366] Bump conda/actions from 23.7.0 to 24.2.0 in /.github/workflows (#5291) Bumps [conda/actions](https://github.com/conda/actions) from 23.7.0 to 24.2.0. 
- [Release notes](https://github.com/conda/actions/releases) - [Commits](https://github.com/conda/actions/compare/v23.7.0...f46142e89fa703cc69f0421ca1d313ab2d5bfff6) --- updated-dependencies: - dependency-name: conda/actions dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builds-review.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 8c57d87859..503a761d86 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -53,7 +53,7 @@ jobs: fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@cc126c36f1f75dd8f5138655dedbabf3fbfc45d7 + uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} From ec7923620ec7434dabbe10d746f9d9444d24ae8f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 15:06:54 -0400 Subject: [PATCH 331/366] Bump actions/github-script from 6.4.1 to 7.0.1 in /.github/workflows (#5292) Bumps [actions/github-script](https://github.com/actions/github-script) from 6.4.1 to 7.0.1. - [Release notes](https://github.com/actions/github-script/releases) - [Commits](https://github.com/actions/github-script/compare/d7906e4ad0b1822421a7e6a35d5ca353c962f410...60a0d83039c74a4aee543508d2ffcb1c3799cdea) --- updated-dependencies: - dependency-name: actions/github-script dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builds-review.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 503a761d86..c3923030a6 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -24,7 +24,7 @@ jobs: runs-on: ${{ matrix.runner }} steps: - name: Remove build label - uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea with: github-token: ${{ secrets.CANARY_ACTION_TOKEN }} script: | From edee10d1ac6b91f37520e4a1a193d2da0305042a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 19:50:14 +0000 Subject: [PATCH 332/366] Bump actions/checkout from 3.6.0 to 4.1.2 in /.github/workflows (#5289) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.6.0 to 4.1.2. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3.6.0...9bb56186c3b09b4f86b1c65136769dd318469633) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builds-review.yaml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/tests.yml | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index c3923030a6..d789e536c6 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -46,7 +46,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: ref: ${{ github.ref }} clean: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8f0a834341..74856dd859 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e80a02f323..cbcc0f1308 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,7 +45,7 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -180,7 +180,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -259,7 +259,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -357,7 +357,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: fetch-depth: 0 @@ -493,7 +493,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 with: ref: ${{ github.ref }} clean: true From 03c3924c12ee021d76897befb99a1d70d2de0b32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 20:49:08 +0000 Subject: [PATCH 333/366] Bump actions/setup-python from 4.7.1 to 5.1.0 in /.github/workflows (#5290) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4.7.1 to 5.1.0. 
- [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236...82c7e631bb3cdc910f68e0081d67478d79c6982d) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cbcc0f1308..8f35cc10a5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -501,7 +501,7 @@ jobs: # Explicitly use Python 3.12 since each of the OSes has a different default Python - name: Setup Python - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 + uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d with: python-version: '3.12' From 51793ced20453a2851671eca1f0b9b6c5f5c152b Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 Apr 2024 05:25:24 +0200 Subject: [PATCH 334/366] Short-circuit `get_output_dicts_from_metadata`'s if clause for faster evaluation (#5278) Signed-off-by: Marcel Bargull Co-authored-by: Marcel Bargull --- conda_build/metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 07425e404e..b92b8aa6ec 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -889,8 +889,8 @@ def get_output_dicts_from_metadata(metadata, outputs=None): outputs.append(OrderedDict(name=metadata.name())) for out in outputs: if ( - "package:" in metadata.get_recipe_text() - and out.get("name") == metadata.name() + out.get("name") == metadata.name() + and "package:" in metadata.get_recipe_text() ): combine_top_level_metadata_with_output(metadata, out) return outputs From e7bc71c12c3ccef5dd16620c70924cc10c0449d6 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 Apr 2024 07:19:31 +0200 Subject: [PATCH 335/366] Memoize `conda_build.metadata._filter_recipe_text` (#5279) Signed-off-by: Marcel Bargull Co-authored-by: Marcel Bargull --- conda_build/metadata.py | 1 + 1 file changed, 1 insertion(+) diff --git a/conda_build/metadata.py b/conda_build/metadata.py index b92b8aa6ec..ac44564e17 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -1002,6 +1002,7 @@ def get_updated_output_dict_from_reparsed_metadata(original_dict, new_outputs): return output_d +@lru_cache(maxsize=200) def _filter_recipe_text(text, extract_pattern=None): if extract_pattern: match = re.search(extract_pattern, text, flags=re.MULTILINE | re.DOTALL) From f6d779862f7eec442ad26b36fe37249dedd799d8 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 Apr 2024 23:37:52 +0200 Subject: [PATCH 336/366] Add yamlfmt to pre-commit (#5293) --- .github/ISSUE_TEMPLATE/0_bug.yml | 1 - .github/ISSUE_TEMPLATE/1_feature.yml | 1 - .github/ISSUE_TEMPLATE/2_documentation.yml | 1 - .github/ISSUE_TEMPLATE/epic.yml | 1 - .github/dependabot.yml | 17 +-- .github/labels.yml | 42 +------ .github/workflows/cla.yml | 1 - .github/workflows/docs.yml | 12 +- .github/workflows/issues.yml | 1 - .github/workflows/labels.yml | 1 - .github/workflows/lock.yml | 1 - .github/workflows/project.yml | 1 - .github/workflows/stale.yml | 1 - .github/workflows/tests.yml | 23 ++-- .pre-commit-config.yaml | 27 ++++- .readthedocs.yml | 6 +- conda_build/skeletons/cpan.py | 25 
++-- docs/requirements.txt | 8 +- pyproject.toml | 129 +++++++++++---------- recipe/conda_build_config.yaml | 10 +- 20 files changed, 151 insertions(+), 158 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/0_bug.yml b/.github/ISSUE_TEMPLATE/0_bug.yml index cfccd360ed..a53f6fba87 100644 --- a/.github/ISSUE_TEMPLATE/0_bug.yml +++ b/.github/ISSUE_TEMPLATE/0_bug.yml @@ -1,4 +1,3 @@ ---- name: Bug Report description: Create a bug report. labels: diff --git a/.github/ISSUE_TEMPLATE/1_feature.yml b/.github/ISSUE_TEMPLATE/1_feature.yml index a1e739821d..bc022b4122 100644 --- a/.github/ISSUE_TEMPLATE/1_feature.yml +++ b/.github/ISSUE_TEMPLATE/1_feature.yml @@ -1,4 +1,3 @@ ---- name: Feature Request description: Create a feature request. labels: diff --git a/.github/ISSUE_TEMPLATE/2_documentation.yml b/.github/ISSUE_TEMPLATE/2_documentation.yml index 68ae890de9..f3ce040892 100644 --- a/.github/ISSUE_TEMPLATE/2_documentation.yml +++ b/.github/ISSUE_TEMPLATE/2_documentation.yml @@ -1,4 +1,3 @@ ---- name: Documentation description: Create a documentation related issue. labels: diff --git a/.github/ISSUE_TEMPLATE/epic.yml b/.github/ISSUE_TEMPLATE/epic.yml index eca723f4b6..9b3637f973 100644 --- a/.github/ISSUE_TEMPLATE/epic.yml +++ b/.github/ISSUE_TEMPLATE/epic.yml @@ -1,4 +1,3 @@ ---- name: Epic description: A collection of related tickets. labels: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 139f804e5a..508818874b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,19 +1,14 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. -# Please see the documentation for all configuration options: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - version: 2 updates: - - package-ecosystem: "pip" - directory: "/docs/" + - package-ecosystem: pip + directory: /docs/ schedule: - interval: "weekly" + interval: weekly allow: # Allow only production updates for Sphinx - - dependency-name: "sphinx" - dependency-type: "production" + - dependency-name: sphinx + dependency-type: production - package-ecosystem: github-actions directory: /.github/workflows schedule: - interval: "weekly" + interval: weekly diff --git a/.github/labels.yml b/.github/labels.yml index ba799038bb..bcc616d339 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -1,39 +1,9 @@ # Builds -- name: build::review - description: trigger a build for this PR - color: "7B4052" + - name: build::review + description: trigger a build for this PR + color: '#7b4052' # Tags -- name: tag::noarch - description: related to noarch builds - color: "86C579" - aliases: [] - -# Deprecated -- name: 3_In_Progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: 4_Needs_Review - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: effort-high - description: "[deprecated]" - color: "888888" -- name: effort-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: effort-medium - description: "[deprecated]" - color: "888888" -- name: in_progress - description: "[deprecated] use milestones/project boards" - color: "888888" -- name: knowledge-high - description: "[deprecated]" - color: "888888" -- name: knowledge-low - description: "[deprecated] use good-first-issue" - color: "888888" -- name: knowledge-medium - description: "[deprecated]" - color: "888888" + - name: tag::noarch 
+ description: related to noarch builds + color: '#86c579' diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 0f793ef038..07c7f75b12 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -1,4 +1,3 @@ ---- name: CLA on: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 74856dd859..03b32fc111 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,15 +7,15 @@ on: branches: - main paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** # NOTE: github.event context is pull_request payload: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: paths: - - '.github/workflows/docs.yml' - - 'docs/**' + - .github/workflows/docs.yml + - docs/** jobs: docs: @@ -26,9 +26,9 @@ jobs: with: fetch-depth: 0 - name: Setup - run : | + run: | make env-docs - name: Build the docs - run : | + run: | cd docs conda run --name conda-build-docs make html diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml index 8391b0ee68..7a114d6d41 100644 --- a/.github/workflows/issues.yml +++ b/.github/workflows/issues.yml @@ -1,4 +1,3 @@ ---- name: Automate Issues on: diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 45cb754d33..f13985fb0a 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -1,4 +1,3 @@ ---- name: Sync Labels on: diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index c4294bea31..2204b62dda 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -1,4 +1,3 @@ ---- name: Lock on: diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index e1d5d4de39..7d06584c86 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -1,4 +1,3 @@ ---- name: Add to Project on: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index cfc9528718..bcda1fea30 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,4 +1,3 @@ ---- name: Stale on: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8f35cc10a5..aafe0ed977 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,7 +18,7 @@ on: # no payload schedule: # https://crontab.guru/#37_18_*_*_* - - cron: 37 18 * * * + - cron: 37 18 * * * concurrency: # Concurrency group that uses the workflow name and PR number if available @@ -122,7 +122,8 @@ jobs: run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -142,7 +143,8 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" @@ -200,7 +202,8 @@ jobs: run-post: false # skip post cleanup - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -283,7 +286,8 @@ jobs: run: choco install visualstudio2017-workload-vctools - name: Conda Install - run: conda install + run: > + conda install --yes --file tests\requirements.txt --file tests\requirements-${{ runner.os }}.txt @@ -305,7 +309,8 @@ jobs: - name: Run Tests # Windows is sensitive to long paths, using `--basetemp=${{ runner.temp }} to # keep the test directories shorter - run: pytest + run: > + pytest 
--cov=conda_build --basetemp=${{ runner.temp }} -n auto @@ -380,7 +385,8 @@ jobs: run: sudo xcode-select --switch /Applications/Xcode_11.7.app - name: Conda Install - run: conda install + run: > + conda install --yes --file tests/requirements.txt --file tests/requirements-${{ runner.os }}.txt @@ -400,7 +406,8 @@ jobs: run: conda list --show-channel-urls - name: Run Tests - run: pytest + run: > + pytest --cov=conda_build -n auto -m "${{ env.PYTEST_MARKER }}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 55275ac4ee..da60f66ed2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -38,7 +38,11 @@ repos: - id: check-merge-conflict # sort requirements files - id: file-contents-sorter - files: ^tests/requirements.*\.txt + files: | + (?x)^( + docs/requirements.txt | + tests/requirements.*\.txt + ) args: [--unique] # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks @@ -61,6 +65,27 @@ repos: args: [--fix] # compatible replacement for black - id: ruff-format + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.13.0 + hooks: + - id: pretty-format-toml + args: [--autofix, --trailing-commas] + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 + hooks: + - id: yamlfmt + # ruamel.yaml doesn't line wrap correctly (?) so set width to 1M to avoid issues + args: [--mapping=2, --offset=2, --sequence=4, --width=1000000, --implicit_start] + exclude: | + (?x)^( + .authors.yml | + conda_build/templates/npm.yaml | + conda_build/templates/setuptools.yaml | + docs/click/meta.yaml | + docs/source/user-guide/tutorials/meta.yaml | + recipe/meta.yaml | + tests/ + ) - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.28.1 hooks: diff --git a/.readthedocs.yml b/.readthedocs.yml index abdbda6254..64f8768db5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,13 +1,13 @@ version: 2 build: - os: "ubuntu-22.04" + os: ubuntu-22.04 tools: - python: "3.11" + python: '3.11' python: install: - - requirements: docs/requirements.txt + - requirements: docs/requirements.txt # Build PDF, ePub and zipped HTML formats: diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 507086e4fe..7b9cca26de 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -355,19 +355,22 @@ def install_perl_get_core_modules(version): "my @modules = grep {Module::CoreList::is_core($_)} Module::CoreList->find_modules(qr/.*/); " 'print join "\n", @modules;', ] - all_core_modules = ( - subprocess.check_output(args, shell=False) - .decode("utf-8") - .replace("\r\n", "\n") - .split("\n") - ) + try: + all_core_modules = ( + subprocess.check_output(args, shell=False) + .decode("utf-8") + .replace("\r\n", "\n") + .split("\n") + ) + except Exception as e: + print( + f"Failed to query perl={version} for core modules list, ran:\n" + f"{' '.join(args)}" + ) + print(e.message) return all_core_modules except Exception as e: - print( - "Failed to query perl={} for core modules list, attempted command was:\n{}".format( - version, " ".join(args) - ) - ) + print(f"Failed to query perl={version} for core modules list.") print(e.message) return [] diff --git a/docs/requirements.txt b/docs/requirements.txt index 37666a374b..993e9ea9e4 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,11 +1,12 @@ +Pillow==10.0.1 +PyYAML==6.0.1 +Sphinx==7.2.6 conda-sphinx-theme==0.2.1 linkify-it-py==2.0.2 myst-parser==2.0.0 -Pillow==10.0.1 -PyYAML==6.0.1 +pylint==2.17.5 
requests==2.31.0 ruamel.yaml==0.17.32 -Sphinx==7.2.6 sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 sphinx-sitemap==2.5.1 @@ -18,4 +19,3 @@ sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 -pylint==2.17.5 diff --git a/pyproject.toml b/pyproject.toml index 2096038835..229333b6a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,16 +1,12 @@ [build-system] +build-backend = "hatchling.build" requires = [ "hatchling >=1.12.2", "hatch-vcs >=0.2.0", ] -build-backend = "hatchling.build" [project] -name = "conda-build" -description="tools for building conda packages" -readme = "README.md" authors = [{name = "Anaconda, Inc.", email = "conda@continuum.io"}] -license = {file = "LICENSE"} classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -24,9 +20,8 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy" + "Programming Language :: Python :: Implementation :: PyPy", ] -requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", @@ -48,34 +43,46 @@ dependencies = [ "tomli ; python_version<'3.11'", "tqdm", ] +description = "tools for building conda packages" dynamic = ["version"] +license = {file = "LICENSE"} +name = "conda-build" +readme = "README.md" +requires-python = ">=3.8" -[project.urls] -documentation = "https://docs.conda.io/projects/conda-build/en/stable/" -repository = "https://github.com/conda/conda-build" -changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +[project.entry-points.conda] +conda-build = "conda_build.plugin" + +[project.entry-points."distutils.commands"] +bdist_conda = "conda_build.bdist_conda:bdist_conda" [project.scripts] conda-build = "conda_build.cli.main_build:execute" conda-convert = "conda_build.cli.main_convert:execute" +conda-debug = "conda_build.cli.main_debug:execute" conda-develop = "conda_build.cli.main_develop:execute" conda-inspect = "conda_build.cli.main_inspect:execute" conda-metapackage = "conda_build.cli.main_metapackage:execute" conda-render = "conda_build.cli.main_render:execute" conda-skeleton = "conda_build.cli.main_skeleton:execute" -conda-debug = "conda_build.cli.main_debug:execute" - -[project.entry-points."distutils.commands"] -bdist_conda = "conda_build.bdist_conda:bdist_conda" -[project.entry-points.conda] -conda-build = "conda_build.plugin" +[project.urls] +changelog = "https://github.com/conda/conda-build/blob/main/CHANGELOG.md" +documentation = "https://docs.conda.io/projects/conda-build/en/stable/" +repository = "https://github.com/conda/conda-build" -[tool.hatch.version] -source = "vcs" +[tool.coverage.report] +exclude_lines = [ + "if TYPE_CHECKING:", # ignoring type checking imports +] +omit = ["conda_build/skeletons/_example_skeleton.py"] +show_missing = true +skip_covered = true +sort = "Miss" -[tool.hatch.version.raw-options] -local_scheme = "dirty-tag" +[tool.coverage.run] +# store relative paths in coverage information +relative_files = true [tool.hatch.build] include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] @@ -83,44 +90,13 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] [tool.hatch.build.hooks.vcs] version-file = "conda_build/_version.py" -[tool.coverage.run] -# store relative paths in coverage information -relative_files = true - 
-[tool.coverage.report] -show_missing = true -sort = "Miss" -skip_covered = true -omit = ["conda_build/skeletons/_example_skeleton.py"] -exclude_lines = [ - "if TYPE_CHECKING:", # ignoring type checking imports -] - -[tool.ruff] -target-version = "py38" +[tool.hatch.version] +source = "vcs" -[tool.ruff.lint] -# E, W = pycodestyle errors and warnings -# F = pyflakes -# I = isort -# UP = pyupgrade -# ISC = flake8-implicit-str-concat -# TCH = flake8-type-checking -# T10 = flake8-debugger -# FA = flake8-future-annotations -# see also https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "UP", "ISC", "TCH", "T10", "FA"] -# E402 module level import not at top of file -# E722 do not use bare 'except' -# E731 do not assign a lambda expression, use a def -ignore = ["E402", "E722", "E731"] -pycodestyle = {max-line-length = 120} -flake8-type-checking = {exempt-modules = [], strict = true} +[tool.hatch.version.raw-options] +local_scheme = "dirty-tag" [tool.pytest.ini_options] -minversion = 3.0 -testpaths = ["tests"] -norecursedirs = ["tests/test-recipes/*"] addopts = [ "--color=yes", # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) @@ -144,13 +120,6 @@ doctest_optionflags = [ "ALLOW_UNICODE", "ELLIPSIS", ] -markers = [ - "serial: execute test serially (to avoid race conditions)", - "slow: execute the slow tests if active", - "sanity: execute the sanity tests", - "no_default_testing_config: used internally to disable monkeypatching for testing_config", - "benchmark: execute the benchmark tests", -] filterwarnings = [ # elevate conda's deprecated warning to an error "error::PendingDeprecationWarning:conda", @@ -161,3 +130,37 @@ filterwarnings = [ # ignore numpy.distutils error 'ignore:\s+`numpy.distutils` is deprecated:DeprecationWarning:conda_build._load_setup_py_data', ] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", + "benchmark: execute the benchmark tests", +] +minversion = 3.0 +norecursedirs = ["tests/test-recipes/*"] +testpaths = ["tests"] + +[tool.ruff] +target-version = "py38" + +[tool.ruff.lint] +flake8-type-checking = {exempt-modules = [], strict = true} +ignore = [ + "E402", # module level import not at top of file + "E722", # do not use bare 'except' + "E731", # do not assign a lambda expression, use a def +] +pycodestyle = {max-line-length = 120} +# see https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "FA", # flake8-future-annotations + "I", # isort + "ISC", # flake8-implicit-str-concat + "T10", # flake8-debugger + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle warnings +] diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml index 42847d7ead..3959a519bd 100644 --- a/recipe/conda_build_config.yaml +++ b/recipe/conda_build_config.yaml @@ -1,6 +1,6 @@ python: - - "3.8" - - "3.9" - - "3.10" - - "3.11" - - "3.12" + - '3.8' + - '3.9' + - '3.10' + - '3.11' + - '3.12' From 2d3270d6e02fab91b75cb644642ebb71f9184a9a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 Apr 2024 23:40:59 +0200 Subject: [PATCH 337/366] Remove/avoid redundant function calls (#5280) * Remove/avoid redundant function calls Signed-off-by: Marcel Bargull * Refactor get_vars * Deprecate get_vars(loop_only) --------- Signed-off-by: Marcel Bargull Co-authored-by: Marcel Bargull --- 
conda_build/metadata.py | 64 +++++++++++--------------- conda_build/variants.py | 34 ++++++++------ news/5280-deprecate-get_vars-loop_only | 19 ++++++++ tests/test_variants.py | 15 ++++++ 4 files changed, 82 insertions(+), 50 deletions(-) create mode 100644 news/5280-deprecate-get_vars-loop_only diff --git a/conda_build/metadata.py b/conda_build/metadata.py index ac44564e17..4347dc8842 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -20,7 +20,7 @@ from conda.gateways.disk.read import compute_sum from frozendict import deepfreeze -from . import exceptions, utils, variants +from . import exceptions, utils from .conda_interface import MatchSpec from .config import Config, get_or_merge_config from .features import feature_list @@ -34,6 +34,15 @@ insert_variant_versions, on_win, ) +from .variants import ( + dict_of_lists_to_list_of_dicts, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, + find_used_variables_in_text, + get_default_variant, + get_vars, + list_of_dicts_to_dict_of_lists, +) if TYPE_CHECKING: from typing import Literal @@ -156,7 +165,7 @@ def get_selectors(config: Config) -> dict[str, bool]: if arch == "32": d["x86"] = plat.endswith(("-32", "-64")) - defaults = variants.get_default_variant(config) + defaults = get_default_variant(config) py = config.variant.get("python", defaults["python"]) # there are times when python comes in as a tuple if not hasattr(py, "split"): @@ -2435,9 +2444,7 @@ def append_parent_metadata(self, out_metadata): def get_reduced_variant_set(self, used_variables): # reduce variable space to limit work we need to do - full_collapsed_variants = variants.list_of_dicts_to_dict_of_lists( - self.config.variants - ) + full_collapsed_variants = list_of_dicts_to_dict_of_lists(self.config.variants) reduced_collapsed_variants = full_collapsed_variants.copy() reduce_keys = set(self.config.variants[0].keys()) - set(used_variables) @@ -2469,7 +2476,7 @@ def get_reduced_variant_set(self, used_variables): # save only one element from this key reduced_collapsed_variants[key] = utils.ensure_list(next(iter(values))) - out = variants.dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) + out = dict_of_lists_to_list_of_dicts(reduced_collapsed_variants) return out def get_output_metadata_set( @@ -2613,21 +2620,14 @@ def get_output_metadata_set( return output_tuples def get_loop_vars(self): - _variants = ( - self.config.input_variants - if hasattr(self.config, "input_variants") - else self.config.variants - ) - return variants.get_vars(_variants, loop_only=True) + return get_vars(getattr(self.config, "input_variants", self.config.variants)) def get_used_loop_vars(self, force_top_level=False, force_global=False): - return { - var - for var in self.get_used_vars( - force_top_level=force_top_level, force_global=force_global - ) - if var in self.get_loop_vars() - } + loop_vars = self.get_loop_vars() + used_vars = self.get_used_vars( + force_top_level=force_top_level, force_global=force_global + ) + return set(loop_vars).intersection(used_vars) def get_rendered_recipe_text( self, permit_undefined_jinja=False, extract_pattern=None @@ -2803,7 +2803,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): apply_selectors=False, ) - all_used_selectors = variants.find_used_variables_in_text( + all_used_selectors = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=True ) @@ -2812,7 +2812,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): 
force_global=force_global, apply_selectors=True, ) - all_used_reqs = variants.find_used_variables_in_text( + all_used_reqs = find_used_variables_in_text( variant_keys, recipe_text, selectors_only=False ) @@ -2823,9 +2823,7 @@ def _get_used_vars_meta_yaml(self, force_top_level=False, force_global=False): if force_global: used = all_used else: - requirements_used = variants.find_used_variables_in_text( - variant_keys, reqs_text - ) + requirements_used = find_used_variables_in_text(variant_keys, reqs_text) outside_reqs_used = all_used - requirements_used requirements_used = trim_build_only_deps(self, requirements_used) @@ -2838,16 +2836,12 @@ def _get_used_vars_build_scripts(self): buildsh = os.path.join(self.path, "build.sh") if os.path.isfile(buildsh): used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, buildsh - ) + find_used_variables_in_shell_script(self.config.variant, buildsh) ) bldbat = os.path.join(self.path, "bld.bat") if self.config.platform == "win" and os.path.isfile(bldbat): used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, bldbat - ) + find_used_variables_in_batch_script(self.config.variant, bldbat) ) return used_vars @@ -2860,15 +2854,11 @@ def _get_used_vars_output_script(self): script = os.path.join(self.path, this_output["script"]) if os.path.splitext(script)[1] == ".sh": used_vars.update( - variants.find_used_variables_in_shell_script( - self.config.variant, script - ) + find_used_variables_in_shell_script(self.config.variant, script) ) elif os.path.splitext(script)[1] == ".bat": used_vars.update( - variants.find_used_variables_in_batch_script( - self.config.variant, script - ) + find_used_variables_in_batch_script(self.config.variant, script) ) else: log = utils.get_logger(__name__) @@ -2879,7 +2869,7 @@ def _get_used_vars_output_script(self): return used_vars def get_variants_as_dict_of_lists(self): - return variants.list_of_dicts_to_dict_of_lists(self.config.variants) + return list_of_dicts_to_dict_of_lists(self.config.variants) def clean(self): """This ensures that clean is called with the correct build id""" diff --git a/conda_build/variants.py b/conda_build/variants.py index c5bbe9a41e..2ea4091b88 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -3,6 +3,8 @@ """This file handles the parsing of feature specifications from files, ending up with a configuration matrix""" +from __future__ import annotations + import os.path import re import sys @@ -10,14 +12,19 @@ from copy import copy from functools import lru_cache from itertools import product +from typing import TYPE_CHECKING import yaml from conda.base.context import context from .conda_interface import cc_conda_build +from .deprecations import deprecated from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version +if TYPE_CHECKING: + from typing import Any, Iterable + DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", "numpy": { @@ -694,21 +701,22 @@ def get_package_variants(recipedir_or_metadata, config=None, variants=None): return filter_combined_spec_to_used_keys(combined_spec, specs=specs) -def get_vars(variants, loop_only=False): +@deprecated.argument("24.5", "24.7", "loop_only") +def get_vars(variants: Iterable[dict[str, Any]]) -> set[str]: """For purposes of naming/identifying, provide a way of identifying which variables contribute to the matrix dimensionality""" - special_keys = {"pin_run_as_build", "zip_keys", 
"ignore_version"} - special_keys.update(set(ensure_list(variants[0].get("extend_keys")))) - loop_vars = [ - k - for k in variants[0] - if k not in special_keys - and ( - not loop_only - or any(variant[k] != variants[0][k] for variant in variants[1:]) - ) - ] - return loop_vars + first, *others = variants + special_keys = { + "pin_run_as_build", + "zip_keys", + "ignore_version", + *ensure_list(first.get("extend_keys")), + } + return { + var + for var in set(first) - special_keys + if any(first[var] != other[var] for other in others) + } @lru_cache(maxsize=None) diff --git a/news/5280-deprecate-get_vars-loop_only b/news/5280-deprecate-get_vars-loop_only new file mode 100644 index 0000000000..e18d5cfe8c --- /dev/null +++ b/news/5280-deprecate-get_vars-loop_only @@ -0,0 +1,19 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.variants.get_vars(loop_only)`. Unused. (#5280) + +### Docs + +* + +### Other + +* diff --git a/tests/test_variants.py b/tests/test_variants.py index 50e9cea4f2..71b2e7e627 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -18,6 +18,7 @@ dict_of_lists_to_list_of_dicts, filter_combined_spec_to_used_keys, get_package_variants, + get_vars, validate_spec, ) @@ -700,3 +701,17 @@ def test_zip_key_filtering( } assert filter_combined_spec_to_used_keys(combined_spec, specs=specs) == expected + + +def test_get_vars(): + variants = [ + { + "python": "3.12", + "nodejs": "20", + "zip_keys": [], # ignored + }, + {"python": "3.12", "nodejs": "18"}, + {"python": "3.12", "nodejs": "20"}, + ] + + assert get_vars(variants) == {"nodejs"} From 28b51fb2c9b4cd37bcaa2c3a0f60c4ad696f903e Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Wed, 17 Apr 2024 23:41:34 +0200 Subject: [PATCH 338/366] Properly `re.escape` variant variable in `find_used_variables_*` (#5283) Signed-off-by: Marcel Bargull Co-authored-by: Marcel Bargull --- conda_build/variants.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conda_build/variants.py b/conda_build/variants.py index 2ea4091b88..be59e36603 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -771,7 +771,7 @@ def find_used_variables_in_shell_script(variant, file_path): text = f.read() used_variables = set() for v in variant: - variant_regex = r"(^[^$]*?\$\{?\s*%s\s*[\s|\}])" % v + variant_regex = rf"(^[^$]*?\$\{{?\s*{re.escape(v)}\s*[\s|\}}])" if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): used_variables.add(v) return used_variables @@ -782,7 +782,7 @@ def find_used_variables_in_batch_script(variant, file_path): text = f.read() used_variables = set() for v in variant: - variant_regex = r"\%" + v + r"\%" + variant_regex = rf"\%{re.escape(v)}\%" if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): used_variables.add(v) return used_variables From 683ea916432254125c58efc365a191f1f1fd816a Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 17 Apr 2024 17:29:16 -0500 Subject: [PATCH 339/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5294)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot From e35daf4bcd313e30023cc7a2d0f2d40c383901de Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 17:53:11 +0200 Subject: [PATCH 340/366] Add `test_prefix_files` (#5260) --- conda_build/utils.py | 28 +++++++++++++++------------- tests/test_utils.py | 22 ++++++++++++++++++++++ 2 
files changed, 37 insertions(+), 13 deletions(-) diff --git a/conda_build/utils.py b/conda_build/utils.py index 970073f4e2..dcaab55666 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1784,22 +1784,24 @@ def merge_dicts_of_lists( return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} -def prefix_files(prefix): +def prefix_files(prefix: str | os.PathLike | Path) -> set[str]: """ Returns a set of all files in prefix. """ - res = set() - prefix_rep = prefix + os.path.sep - for root, dirs, files in walk(prefix): - for fn in files: - # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, "", 1)) - for dn in dirs: - path = join(root, dn) - if islink(path): - res.add(path.replace(prefix_rep, "", 1)) - res.update(expand_globs((path,), prefix)) - return res + prefix = f"{os.path.abspath(prefix)}{os.path.sep}" + prefix_files: set[str] = set() + for root, directories, files in walk(prefix): + # this is effectively os.path.relpath, just hacked to be faster + relroot = root[len(prefix) :].lstrip(os.path.sep) + # add all files + prefix_files.update(join(relroot, file) for file in files) + # add all symlink directories (they are "files") + prefix_files.update( + join(relroot, directory) + for directory in directories + if islink(join(root, directory)) + ) + return prefix_files def mmap_mmap( diff --git a/tests/test_utils.py b/tests/test_utils.py index d245e65796..0cc76ac8a3 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -433,3 +433,25 @@ def test_is_conda_pkg(tmpdir, value: str, expected: bool, is_dir: bool, create: fp.write("test") assert utils.is_conda_pkg(value) == expected + + +def test_prefix_files(tmp_path: Path): + # all files within the prefix are found + (prefix := tmp_path / "prefix1").mkdir() + (file1 := prefix / "file1").touch() + (dirA := prefix / "dirA").mkdir() + (file2 := dirA / "file2").touch() + (dirB := prefix / "dirB").mkdir() + (file3 := dirB / "file3").touch() + + # files outside of the prefix are not found + (prefix2 := tmp_path / "prefix2").mkdir() + (prefix2 / "file4").touch() + (dirC := prefix2 / "dirC").mkdir() + (dirC / "file5").touch() + + # even if they are symlinked + (link1 := prefix / "dirC").symlink_to(dirC) + + paths = {str(path.relative_to(prefix)) for path in (file1, file2, file3, link1)} + assert paths == utils.prefix_files(str(prefix)) From c7c69b1419a4a37f52cff77d21b9a9f5decf8f03 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 17:53:58 +0200 Subject: [PATCH 341/366] Add cached `_split_line_selector` to avoid redundant parsing in `select_lines` (#5237) Co-authored-by: Marcel Bargull --- conda_build/metadata.py | 87 +++++++++++++++++++++++----------- news/5237-select_lines-caching | 19 ++++++++ tests/test_metadata.py | 22 +++++---- 3 files changed, 91 insertions(+), 37 deletions(-) create mode 100644 news/5237-select_lines-caching diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 4347dc8842..dd02f9a65c 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -45,7 +45,7 @@ ) if TYPE_CHECKING: - from typing import Literal + from typing import Any, Literal try: import yaml @@ -277,38 +277,68 @@ def eval_selector(selector_string, namespace, variants_in_place): return eval_selector(next_string, namespace, variants_in_place) -def select_lines(data, namespace, variants_in_place): - lines = [] - - for i, line in enumerate(data.splitlines()): +@lru_cache(maxsize=None) +def _split_line_selector(text: str) -> tuple[tuple[str | None, str], ...]: + lines: 
list[tuple[str | None, str]] = [] + for line in text.splitlines(): line = line.rstrip() + # skip comment lines, include a blank line as a placeholder + if line.lstrip().startswith("#"): + lines.append((None, "")) + continue + + # include blank lines + if not line: + lines.append((None, "")) + continue + + # user may have quoted entire line to make YAML happy trailing_quote = "" if line and line[-1] in ("'", '"'): trailing_quote = line[-1] - if line.lstrip().startswith("#"): - # Don't bother with comment only lines - continue - m = sel_pat.match(line) - if m: - cond = m.group(3) - try: - if eval_selector(cond, namespace, variants_in_place): - lines.append(m.group(1) + trailing_quote) - except Exception as e: - sys.exit( - """\ -Error: Invalid selector in meta.yaml line %d: -offending line: -%s -exception: -%s -""" - % (i + 1, line, str(e)) - ) + # Checking for "[" and "]" before regex matching every line is a bit faster. + if ( + ("[" in line and "]" in line) + and (match := sel_pat.match(line)) + and (selector := match.group(3)) + ): + # found a selector + lines.append((selector, (match.group(1) + trailing_quote).rstrip())) else: + # no selector found + lines.append((None, line)) + return tuple(lines) + + +def select_lines(text: str, namespace: dict[str, Any], variants_in_place: bool) -> str: + lines = [] + selector_cache: dict[str, bool] = {} + for i, (selector, line) in enumerate(_split_line_selector(text)): + if not selector: + # no selector? include line as is lines.append(line) + else: + # include lines with a selector that evaluates to True + try: + if selector_cache[selector]: + lines.append(line) + except KeyError: + # KeyError: cache miss + try: + value = bool(eval_selector(selector, namespace, variants_in_place)) + selector_cache[selector] = value + if value: + lines.append(line) + except Exception as e: + sys.exit( + f"Error: Invalid selector in meta.yaml line {i + 1}:\n" + f"offending line:\n" + f"{line}\n" + f"exception:\n" + f"{e.__class__.__name__}: {e}\n" + ) return "\n".join(lines) + "\n" @@ -2083,8 +2113,11 @@ def uses_vcs_in_build(self) -> Literal["git", "svn", "mercurial"] | None: return None def get_recipe_text( - self, extract_pattern=None, force_top_level=False, apply_selectors=True - ): + self, + extract_pattern: str | None = None, + force_top_level: bool = False, + apply_selectors: bool = True, + ) -> str: meta_path = self.meta_path if meta_path: recipe_text = read_meta_file(meta_path) diff --git a/news/5237-select_lines-caching b/news/5237-select_lines-caching new file mode 100644 index 0000000000..434a832350 --- /dev/null +++ b/news/5237-select_lines-caching @@ -0,0 +1,19 @@ +### Enhancements + +* Add `conda_build.metadata._split_line_selector` to cache line-selector parsed text. 
(#5237) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 0f6da9b089..1b9fc34258 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -57,14 +57,14 @@ def test_uses_vcs_in_metadata(testing_workdir, testing_metadata): def test_select_lines(): lines = "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test ' ", ' " test " ', - "", - "# comment line", + "", # preserve newline + "# comment line", # preserve comment line (but not the comment) "test [abc]", " 'quoted # [abc] '", ' "quoted # [abc] yes "', @@ -74,19 +74,20 @@ def test_select_lines(): "test {{ JINJA_VAR[:2] }} # stuff yes [abc]", "test {{ JINJA_VAR[:2] }} # [abc] stuff yes", '{{ environ["test"] }} # [abc]', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": True}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test", " 'quoted'", ' "quoted"', @@ -96,20 +97,21 @@ def test_select_lines(): "test {{ JINJA_VAR[:2] }}", "test {{ JINJA_VAR[:2] }}", '{{ environ["test"] }}', - "", # trailing newline + "", # preserve trailing newline ) ) assert select_lines(lines, {"abc": False}, variants_in_place=True) == "\n".join( ( - "", + "", # preserve leading newline "test", "test [abc] no", "test [abc] # no", " ' test '", ' " test "', - "", + "", # preserve newline + "", # preserve comment line (but not the comment) "test {{ JINJA_VAR[:2] }}", - "", # trailing newline + "", # preserve trailing newline ) ) From 7dcab5772d20e442125a198ef79d8d74fbdb698c Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 17:57:06 +0200 Subject: [PATCH 342/366] Finish `conda_build.conda_interface` deprecations (#5276) --- conda_build/bdist_conda.py | 3 +- conda_build/build.py | 34 +-- conda_build/cli/main_build.py | 71 ++--- conda_build/cli/main_convert.py | 5 +- conda_build/cli/main_develop.py | 11 +- conda_build/cli/main_inspect.py | 11 +- conda_build/cli/main_metapackage.py | 11 +- conda_build/cli/main_render.py | 16 +- conda_build/cli/main_skeleton.py | 5 +- conda_build/conda_interface.py | 357 ++++++++++++++++++++++---- conda_build/config.py | 36 +-- conda_build/environ.py | 5 +- conda_build/index.py | 2 +- conda_build/inspect_pkg.py | 4 +- conda_build/metadata.py | 4 +- conda_build/os_utils/ldd.py | 3 +- conda_build/os_utils/liefldd.py | 3 +- conda_build/post.py | 8 +- conda_build/render.py | 10 +- conda_build/skeletons/cpan.py | 11 +- conda_build/skeletons/cran.py | 5 +- conda_build/skeletons/pypi.py | 13 +- conda_build/source.py | 4 +- conda_build/utils.py | 33 ++- conda_build/variants.py | 5 +- news/5276-deprecating-conda_interface | 56 ++++ tests/cli/test_main_build.py | 20 +- tests/cli/test_main_convert.py | 2 +- tests/cli/test_main_develop.py | 3 +- tests/cli/test_main_render.py | 58 +++-- tests/test_api_build.py | 2 +- tests/test_api_convert.py | 2 +- tests/test_api_render.py | 37 ++- tests/test_misc.py | 3 +- tests/test_source.py | 2 +- tests/test_utils.py | 7 +- tests/utils.py | 7 +- 37 files changed, 611 insertions(+), 258 deletions(-) create mode 100644 news/5276-deprecating-conda_interface diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 6e965c409d..45a1ff845e 100644 --- a/conda_build/bdist_conda.py +++ 
b/conda_build/bdist_conda.py @@ -4,14 +4,15 @@ import sys import time from collections import defaultdict +from io import StringIO +from conda.cli.common import spec_from_line from setuptools.command.install import install from setuptools.dist import Distribution from setuptools.errors import BaseError, OptionError from . import api from .build import handle_anaconda_upload -from .conda_interface import StringIO, spec_from_line from .config import Config from .deprecations import deprecated from .metadata import MetaData diff --git a/conda_build/build.py b/conda_build/build.py index d0c939d9e8..88461ac941 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -26,23 +26,19 @@ import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version +from conda.auxlib.entity import EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER from conda.base.context import context, reset_context from conda.core.prefix_data import PrefixData from conda.exceptions import CondaError, NoPackagesFoundError, UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory from conda.models.channel import Channel +from conda.models.enums import FileMode, PathType +from conda.models.match_spec import MatchSpec +from conda.utils import url_path from . import __version__ as conda_build_version from . import environ, noarch_python, source, tarcheck, utils -from .conda_interface import ( - EntityEncoder, - FileMode, - MatchSpec, - PathType, - TemporaryDirectory, - env_path_backup_var_exists, - prefix_placeholder, - url_path, -) from .config import Config from .create_test import create_all_test_files from .deprecations import deprecated @@ -195,7 +191,7 @@ def have_prefix_files(files, prefix): """ prefix_bytes = prefix.encode(utils.codec) - prefix_placeholder_bytes = prefix_placeholder.encode(utils.codec) + prefix_placeholder_bytes = PREFIX_PLACEHOLDER.encode(utils.codec) searches = {prefix: prefix_bytes} if utils.on_win: # some windows libraries use unix-style path separators @@ -206,7 +202,7 @@ def have_prefix_files(files, prefix): double_backslash_prefix = prefix.replace("\\", "\\\\") double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) searches[double_backslash_prefix] = double_backslash_prefix_bytes - searches[prefix_placeholder] = prefix_placeholder_bytes + searches[PREFIX_PLACEHOLDER] = prefix_placeholder_bytes min_prefix = min(len(k) for k, _ in searches.items()) # mm.find is incredibly slow, so ripgrep is used to pre-filter the list. @@ -1149,13 +1145,13 @@ def get_files_with_prefix(m, replacements, files_in, prefix): prefix[0].upper() + prefix[1:], prefix[0].lower() + prefix[1:], prefix_u, - prefix_placeholder.replace("\\", "'"), - prefix_placeholder.replace("/", "\\"), + PREFIX_PLACEHOLDER.replace("\\", "'"), + PREFIX_PLACEHOLDER.replace("/", "\\"), ] # some python/json files store an escaped version of prefix pfx_variants.extend([pfx.replace("\\", "\\\\") for pfx in pfx_variants]) else: - pfx_variants = (prefix, prefix_placeholder) + pfx_variants = (prefix, PREFIX_PLACEHOLDER) # replacing \ with \\ here is for regex escaping re_test = ( b"(" @@ -2351,8 +2347,6 @@ def create_build_envs(m: MetaData, notest): ) except DependencyNeedsBuildingError as e: # subpackages are not actually missing. We just haven't built them yet. 
- from .conda_interface import MatchSpec - other_outputs = ( m.other_outputs.values() if hasattr(m, "other_outputs") @@ -2416,8 +2410,6 @@ def build( with utils.path_prepended(m.config.build_prefix): env = environ.get_dict(m=m) env["CONDA_BUILD_STATE"] = "BUILD" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] # this should be a no-op if source is already here if m.needs_source_for_render: @@ -3447,8 +3439,6 @@ def test( env.update(environ.get_dict(m=metadata, prefix=config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" env["CONDA_BUILD"] = "1" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if not metadata.config.activate or metadata.name() == "conda": # prepend bin (or Scripts) directory @@ -3531,8 +3521,6 @@ def test( env = dict(os.environ.copy()) env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix)) env["CONDA_BUILD_STATE"] = "TEST" - if env_path_backup_var_exists: - env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"] if config.test_run_post: from .utils import get_installed_packages diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index bdcaaa25d6..18e24827e0 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -17,7 +17,6 @@ from conda.common.io import dashlist from .. import api, build, source, utils -from ..conda_interface import add_parser_channels, cc_conda_build from ..config import ( get_channel_urls, get_or_merge_config, @@ -27,12 +26,16 @@ from .actions import KeyValueAction from .main_render import get_render_parser +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence - from ..conda_interface import ArgumentParser - def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser = get_render_parser() @@ -70,7 +73,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: action="store_false", help="Don't include the recipe inside the built package.", dest="include_recipe", - default=cc_conda_build.get("include_recipe", "true").lower() == "true", + default=context.conda_build.get("include_recipe", "true").lower() == "true", ) parser.add_argument( "-s", @@ -125,7 +128,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Skip recipes for which there already exists an existing build " "(locally or in the channels)." 
), - default=cc_conda_build.get("skip_existing", "false").lower() == "true", + default=context.conda_build.get("skip_existing", "false").lower() == "true", ) parser.add_argument( "--keep-old-work", @@ -145,7 +148,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--quiet", action="store_true", help="do not display progress bar", - default=cc_conda_build.get("quiet", "false").lower() == "true", + default=context.conda_build.get("quiet", "false").lower() == "true", ) parser.add_argument( "--debug", @@ -155,12 +158,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: parser.add_argument( "--token", help="Token to pass through to anaconda upload", - default=cc_conda_build.get("anaconda_token"), + default=context.conda_build.get("anaconda_token"), ) parser.add_argument( "--user", help="User/organization to upload packages to on anaconda.org or pypi", - default=cc_conda_build.get("user"), + default=context.conda_build.get("user"), ) parser.add_argument( "--label", @@ -185,7 +188,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), type=int, choices=range(1, 23), - default=cc_conda_build.get( + default=context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ) @@ -210,23 +213,23 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--config-file", help="path to .pypirc file to use when uploading to pypi", default=( - abspath(expanduser(expandvars(cc_conda_build.get("pypirc")))) - if cc_conda_build.get("pypirc") - else cc_conda_build.get("pypirc") + abspath(expanduser(expandvars(pypirc))) + if (pypirc := context.conda_build.get("pypirc")) + else None ), ) pypi_grp.add_argument( "--repository", "-r", help="PyPI repository to upload to", - default=cc_conda_build.get("pypi_repository", "pypitest"), + default=context.conda_build.get("pypi_repository", "pypitest"), ) parser.add_argument( "--no-activate", action="store_false", help="do not activate the build and test envs; just prepend to PATH", dest="activate", - default=cc_conda_build.get("activate", "true").lower() == "true", + default=context.conda_build.get("activate", "true").lower() == "true", ) parser.add_argument( "--no-build-id", @@ -237,7 +240,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: ), dest="set_build_id", # note: inverted - dest stores positive logic - default=cc_conda_build.get("set_build_id", "true").lower() == "true", + default=context.conda_build.get("set_build_id", "true").lower() == "true", ) parser.add_argument( "--build-id-pat", @@ -246,7 +249,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "paths being too long." 
), dest="build_id_pat", - default=cc_conda_build.get("build_id_pat", "{n}_{t}"), + default=context.conda_build.get("build_id_pat", "{n}_{t}"), ) parser.add_argument( "--croot", @@ -259,21 +262,22 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "--verify", action="store_true", help="run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--no-verify", action="store_false", dest="verify", help="do not run verification on recipes or packages when building", - default=cc_conda_build.get("verify", "true").lower() == "true", + default=context.conda_build.get("verify", "true").lower() == "true", ) parser.add_argument( "--strict-verify", action="store_true", dest="exit_on_verify_error", help="Exit if any conda-verify check fail, instead of only printing them", - default=cc_conda_build.get("exit_on_verify_error", "false").lower() == "true", + default=context.conda_build.get("exit_on_verify_error", "false").lower() + == "true", ) parser.add_argument( "--output-folder", @@ -281,7 +285,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "folder to dump output package to. Package are moved here if build or test succeeds." " Destination folder must exist prior to using this." ), - default=cc_conda_build.get("output_folder"), + default=context.conda_build.get("output_folder"), ) parser.add_argument( "--no-prefix-length-fallback", @@ -350,7 +354,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is disabled " "by default, but will be enabled by default in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--no-error-overlinking", @@ -361,7 +365,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "linked to any executables or shared libraries in built packages. This is currently " "the default behavior, but will change in conda-build 4.0." ), - default=cc_conda_build.get("error_overlinking", "false").lower() == "true", + default=context.conda_build.get("error_overlinking", "false").lower() == "true", ) parser.add_argument( "--error-overdepending", @@ -372,7 +376,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." ), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--no-error-overdepending", @@ -383,7 +388,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by " "any of the files in this package." ), - default=cc_conda_build.get("error_overdepending", "false").lower() == "true", + default=context.conda_build.get("error_overdepending", "false").lower() + == "true", ) parser.add_argument( "--long-test-prefix", @@ -393,7 +399,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Linux and Mac. Prefix length matches the --prefix-length flag. 
This is on by " "default in conda-build 3.0+" ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--no-long-test-prefix", @@ -403,7 +409,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Do not use a long prefix for the test prefix, as well as the build prefix." " Affects only Linux and Mac. Prefix length matches the --prefix-length flag. " ), - default=cc_conda_build.get("long_test_prefix", "true").lower() == "true", + default=context.conda_build.get("long_test_prefix", "true").lower() == "true", ) parser.add_argument( "--keep-going", @@ -420,16 +426,17 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Path to store the source files (archives, git clones, etc.) during the build." ), default=( - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") - else cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) + else None ), ) parser.add_argument( "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false", - default=cc_conda_build.get("copy_test_source_files", "true").lower() == "true", + default=context.conda_build.get("copy_test_source_files", "true").lower() + == "true", help=( "Disables copying the files necessary for testing the package into " "the info/test folder. Passing this argument means it may not be possible " @@ -445,7 +452,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: "Merge the build and host directories, even when host section or compiler " "jinja2 is present" ), - default=cc_conda_build.get("merge_build_host", "false").lower() == "true", + default=context.conda_build.get("merge_build_host", "false").lower() == "true", ) parser.add_argument( "--stats-file", diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index ce92a71ddc..cd12f21ddc 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -7,10 +7,9 @@ from typing import TYPE_CHECKING from .. import api -from ..conda_interface import ArgumentParser if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) @@ -41,6 +40,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda convert", description=""" diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index cb67c40696..326c5fd2a7 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -8,16 +8,23 @@ from conda.base.context import context, determine_target_prefix from .. 
import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda develop", description=""" diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index eefbcf97da..88b31cb837 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -11,16 +11,23 @@ from conda.base.context import context, determine_target_prefix from .. import api -from ..conda_interface import ArgumentParser, add_parser_prefix + +try: + from conda.cli.helpers import add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_prefix if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda inspect", description="Tools for inspecting conda packages.", diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index a11c581702..0e4507359e 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -9,16 +9,23 @@ from conda.base.context import context from .. import api -from ..conda_interface import ArgumentParser, add_parser_channels + +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence logging.basicConfig(level=logging.INFO) def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda metapackage", description=""" diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 933528b114..3e0bf845f5 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -8,16 +8,22 @@ from typing import TYPE_CHECKING import yaml +from conda.base.context import context from yaml.parser import ParserError from .. 
import __version__, api -from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars +try: + from conda.cli.helpers import add_parser_channels +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels + if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence log = logging.getLogger(__name__) @@ -43,7 +49,9 @@ def __call__(self, parser, namespace, values, option_string=None): ) -def get_render_parser(): +def get_render_parser() -> ArgumentParser: + from conda.cli.conda_argparse import ArgumentParser + p = ArgumentParser( prog="conda render", description=""" @@ -138,7 +146,7 @@ def get_render_parser(): "--old-build-string", dest="filename_hashing", action="store_false", - default=cc_conda_build.get("filename_hashing", "true").lower() == "true", + default=context.conda_build.get("filename_hashing", "true").lower() == "true", help=( "Disable hash additions to filenames to distinguish package " "variants from one another. NOTE: any filename collisions are " diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 1a87487e26..825f3742de 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -10,11 +10,10 @@ from typing import TYPE_CHECKING from .. import api -from ..conda_interface import ArgumentParser from ..config import Config if TYPE_CHECKING: - from argparse import Namespace + from argparse import ArgumentParser, Namespace from typing import Sequence thisdir = os.path.dirname(os.path.abspath(__file__)) @@ -22,6 +21,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: + from conda.cli.conda_argparse import ArgumentParser + parser = ArgumentParser( prog="conda skeleton", description=""" diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index c6e31b24af..c5acfbfd06 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -4,14 +4,24 @@ import configparser as _configparser import os as _os +from builtins import input as _input from functools import partial as _partial from importlib import import_module as _import_module +from io import StringIO as _StringIO from conda import __version__ +from conda.auxlib.entity import EntityEncoder as _EntityEncoder +from conda.base.constants import PREFIX_PLACEHOLDER as _PREFIX_PLACEHOLDER from conda.base.context import context as _context from conda.base.context import determine_target_prefix as _determine_target_prefix from conda.base.context import non_x86_machines as _non_x86_linux_machines from conda.base.context import reset_context as _reset_context +from conda.cli.common import spec_from_line as _spec_from_line +from conda.cli.common import specs_from_args as _specs_from_args +from conda.cli.common import specs_from_url as _specs_from_url +from conda.cli.conda_argparse import ArgumentParser as _ArgumentParser +from conda.common.path import win_path_to_unix as _win_path_to_unix +from conda.common.toposort import _toposort as __toposort from conda.core.package_cache_data import ( ProgressiveFetchExtract as _ProgressiveFetchExtract, ) @@ -21,50 +31,286 @@ from conda.exceptions import LockError as _LockError from conda.exceptions import NoPackagesFoundError as _NoPackagesFoundError from conda.exceptions import 
PaddingError as _PaddingError +from conda.exceptions import ResolvePackageNotFound as _ResolvePackageNotFound from conda.exceptions import UnsatisfiableError as _UnsatisfiableError -from conda.exports import ( # noqa: F401 - ArgumentParser, - Channel, - Completer, - CondaSession, - EntityEncoder, - FileMode, - InstalledPackages, - MatchSpec, - NoPackagesFound, - PackageRecord, - PathType, - Resolve, - StringIO, - TemporaryDirectory, - TmpDownload, - Unsatisfiable, - VersionOrder, - _toposort, - add_parser_channels, - add_parser_prefix, - download, - human_bytes, - input, - lchmod, - normalized_version, - prefix_placeholder, - rm_rf, - spec_from_line, - specs_from_args, - specs_from_url, - symlink_conda, - unix_path_to_win, - untracked, - url_path, - walk_prefix, - win_path_to_unix, -) +from conda.exports import Completer as _Completer +from conda.exports import InstalledPackages as _InstalledPackages from conda.exports import get_index as _get_index +from conda.exports import symlink_conda as _symlink_conda +from conda.gateways.connection.download import TmpDownload as _TmpDownload +from conda.gateways.connection.download import download as _download +from conda.gateways.connection.session import CondaSession as _CondaSession +from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory +from conda.gateways.disk.link import lchmod as _lchmod from conda.gateways.disk.read import compute_sum as _compute_sum +from conda.misc import untracked as _untracked +from conda.misc import walk_prefix as _walk_prefix +from conda.models.channel import Channel as _Channel from conda.models.channel import get_conda_build_local_url as _get_conda_build_local_url +from conda.models.enums import FileMode as _FileMode +from conda.models.enums import PathType as _PathType +from conda.models.match_spec import MatchSpec as _MatchSpec +from conda.models.records import PackageRecord as _PackageRecord +from conda.models.version import VersionOrder as _VersionOrder +from conda.models.version import normalized_version as _normalized_version +from conda.resolve import Resolve as _Resolve +from conda.utils import human_bytes as _human_bytes +from conda.utils import unix_path_to_win as _unix_path_to_win +from conda.utils import url_path as _url_path from .deprecations import deprecated +from .utils import rm_rf as _rm_rf + +try: + from conda.cli.helpers import add_parser_channels as _add_parser_channels + from conda.cli.helpers import add_parser_prefix as _add_parser_prefix +except ImportError: + # conda<23.11 + from conda.cli.conda_argparse import add_parser_channels as _add_parser_channels + from conda.cli.conda_argparse import add_parser_prefix as _add_parser_prefix + +deprecated.constant( + "24.5", + "24.7", + "Completer", + _Completer, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "CondaSession", + _CondaSession, + addendum="Use `conda.gateways.connection.session.CondaSession` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "InstalledPackages", + _InstalledPackages, + addendum="Unused.", +) +deprecated.constant( + "24.5", + "24.7", + "NoPackagesFound", + _ResolvePackageNotFound, + addendum="Use `conda.exceptions.ResolvePackageNotFound` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Unsatisfiable", + _UnsatisfiableError, + addendum="Use `conda.exceptions.UnsatisfiableError` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "symlink_conda", + _symlink_conda, + addendum="Unused.", +) + + +deprecated.constant( + "24.5", + "24.7", + "ArgumentParser", 
+ _ArgumentParser, + addendum="Use `conda.cli.conda_argparse.ArgumentParser` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_channels", + _add_parser_channels, + addendum="Use `conda.cli.helpers.add_parser_channels` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "add_parser_prefix", + _add_parser_prefix, + addendum="Use `conda.cli.helpers.add_parser_prefix` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "Channel", + _Channel, + addendum="Use `conda.models.channel.Channel` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "FileMode", + _FileMode, + addendum="Use `conda.models.enums.FileMode` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PathType", + _PathType, + addendum="Use `conda.models.enums.PathType` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "MatchSpec", + _MatchSpec, + addendum="Use `conda.models.match_spec.MatchSpec` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "PackageRecord", + _PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "VersionOrder", + _VersionOrder, + addendum="Use `conda.models.version.VersionOrder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "normalized_version", + _normalized_version, + addendum="Use `conda.models.version.normalized_version` instead.", +) + +deprecated.constant( + "24.5", + "24.7", + "EntityEncoder", + _EntityEncoder, + addendum="Use `conda.auxlib.entity.EntityEncoder` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "Resolve", + _Resolve, + addendum="Use `conda.resolve.Resolve` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TemporaryDirectory", + _TemporaryDirectory, + addendum="Use `conda.gateways.disk.create.TemporaryDirectory` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "TmpDownload", + _TmpDownload, + addendum="Use `conda.gateways.connection.download.TmpDownload` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "download", + _download, + addendum="Use `conda.gateways.connection.download.download` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "_toposort", + __toposort, + addendum="Use `conda.common.toposort._toposort` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "human_bytes", + _human_bytes, + addendum="Use `conda.utils.human_bytes` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "lchmod", + _lchmod, + addendum="Use `conda.gateways.disk.link.lchmod` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "prefix_placeholder", + _PREFIX_PLACEHOLDER, + addendum="Use `conda.base.constants.PREFIX_PLACEHOLDER` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "rm_rf", + _rm_rf, + addendum="Use `conda_build.utils.rm_rf` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "spec_from_line", + _spec_from_line, + addendum="Use `conda.cli.common.spec_from_line` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_args", + _specs_from_args, + addendum="Use `conda.cli.common.specs_from_args` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "specs_from_url", + _specs_from_url, + addendum="Use `conda.cli.common.specs_from_url` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "unix_path_to_win", + _unix_path_to_win, + addendum="Use `conda.utils.unix_path_to_win` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "untracked", + _untracked, + addendum="Use `conda.misc.untracked` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "url_path", 
+ _url_path, + addendum="Use `conda.utils.url_path` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "walk_prefix", + _walk_prefix, + addendum="Use `conda.misc.walk_prefix` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "win_path_to_unix", + _win_path_to_unix, + addendum="Use `conda.common.path.win_path_to_unix` instead.", +) deprecated.constant( "24.5", @@ -88,6 +334,21 @@ _import_module, addendum="Use `importlib.import_module` instead.", ) +deprecated.constant( + "24.5", + "24.7", + "StringIO", + _StringIO, + addendum="Use `io.StringIO` instead.", +) +deprecated.constant( + "24.5", + "24.7", + "input", + _input, + addendum="Use `input` instead.", +) + deprecated.constant( "24.5", "24.7", @@ -264,19 +525,29 @@ _partial(_determine_target_prefix, _context), addendum="Use `conda.base.context.context.target_prefix` instead.", ) -cc_conda_build = _context.conda_build if hasattr(_context, "conda_build") else {} +deprecated.constant( + "24.5", + "24.7", + "cc_conda_build", + _context.conda_build, + addendum="Use `conda.base.context.context.conda_build` instead.", +) deprecated.constant( "24.5", "24.7", "get_conda_channel", - Channel.from_value, + _Channel.from_value, addendum="Use `conda.models.channel.Channel.from_value` instead.", ) -# When deactivating envs (e.g. switching from root to build/test) this env var is used, -# except the PR that removed this has been reverted (for now) and Windows doesn't need it. -env_path_backup_var_exists = _os.getenv("CONDA_PATH_BACKUP") +deprecated.constant( + "24.5", + "24.7", + "env_path_backup_var_exists", + _os.getenv("CONDA_PATH_BACKUP"), + addendum="Unused.", +) @deprecated( diff --git a/conda_build/config.py b/conda_build/config.py index fd599506bc..98f3e8b447 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -17,8 +17,8 @@ from typing import TYPE_CHECKING from conda.base.context import context +from conda.utils import url_path -from .conda_interface import cc_conda_build, url_path from .utils import ( get_build_folders, get_conda_operation_locks, @@ -110,14 +110,16 @@ def _get_default_settings(): Setting("test_run_post", False), Setting( "filename_hashing", - cc_conda_build.get("filename_hashing", filename_hashing_default).lower() + context.conda_build.get( + "filename_hashing", filename_hashing_default + ).lower() == "true", ), Setting("keep_old_work", False), Setting( "_src_cache_root", - abspath(expanduser(expandvars(cc_conda_build.get("cache_dir")))) - if cc_conda_build.get("cache_dir") + abspath(expanduser(expandvars(cache_dir))) + if (cache_dir := context.conda_build.get("cache_dir")) else _src_cache_root_default, ), Setting("copy_test_source_files", True), @@ -142,30 +144,32 @@ def _get_default_settings(): # cli/main_build.py that this default will switch in conda-build 4.0. 
Setting( "error_overlinking", - cc_conda_build.get("error_overlinking", error_overlinking_default).lower() + context.conda_build.get( + "error_overlinking", error_overlinking_default + ).lower() == "true", ), Setting( "error_overdepending", - cc_conda_build.get( + context.conda_build.get( "error_overdepending", error_overdepending_default ).lower() == "true", ), Setting( "noarch_python_build_age", - cc_conda_build.get( + context.conda_build.get( "noarch_python_build_age", noarch_python_build_age_default ), ), Setting( "enable_static", - cc_conda_build.get("enable_static", enable_static_default).lower() + context.conda_build.get("enable_static", enable_static_default).lower() == "true", ), Setting( "no_rewrite_stdout_env", - cc_conda_build.get( + context.conda_build.get( "no_rewrite_stdout_env", no_rewrite_stdout_env_default ).lower() == "true", @@ -204,11 +208,13 @@ def _get_default_settings(): Setting("verify", True), Setting( "ignore_verify_codes", - cc_conda_build.get("ignore_verify_codes", ignore_verify_codes_default), + context.conda_build.get("ignore_verify_codes", ignore_verify_codes_default), ), Setting( "exit_on_verify_error", - cc_conda_build.get("exit_on_verify_error", exit_on_verify_error_default), + context.conda_build.get( + "exit_on_verify_error", exit_on_verify_error_default + ), ), # Recipes that have no host section, only build, should bypass the build/host line. # This is to make older recipes still work with cross-compiling. True cross-compiling @@ -226,17 +232,17 @@ def _get_default_settings(): Setting("_pip_cache_dir", None), Setting( "zstd_compression_level", - cc_conda_build.get( + context.conda_build.get( "zstd_compression_level", zstd_compression_level_default ), ), # this can be set to different values (currently only 2 means anything) to use package formats Setting( "conda_pkg_format", - cc_conda_build.get("pkg_format", conda_pkg_format_default), + context.conda_build.get("pkg_format", conda_pkg_format_default), ), Setting("suppress_variables", False), - Setting("build_id_pat", cc_conda_build.get("build_id_pat", "{n}_{t}")), + Setting("build_id_pat", context.conda_build.get("build_id_pat", "{n}_{t}")), ] @@ -449,7 +455,7 @@ def croot(self) -> str: """This is where source caches and work folders live""" if not self._croot: _bld_root_env = os.getenv("CONDA_BLD_PATH") - _bld_root_rc = cc_conda_build.get("root-dir") + _bld_root_rc = context.conda_build.get("root-dir") if _bld_root_env: self._croot = abspath(expanduser(_bld_root_env)) elif _bld_root_rc: diff --git a/conda_build/environ.py b/conda_build/environ.py index 36f6b78171..5a24d83172 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -38,11 +38,12 @@ PaddingError, UnsatisfiableError, ) -from conda.models.channel import prioritize_channels +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.channel import Channel, prioritize_channels from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord from . 
import utils -from .conda_interface import Channel, PackageRecord, TemporaryDirectory from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list diff --git a/conda_build/index.py b/conda_build/index.py index 28a470f5c4..28f29063aa 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -11,10 +11,10 @@ from conda.base.context import context from conda.core.index import get_index from conda.exceptions import CondaHTTPError +from conda.utils import url_path from conda_index.index import update_index as _update_index from . import utils -from .conda_interface import url_path from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 7d7c61f8f9..b202a7eb68 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -15,13 +15,11 @@ from conda.api import Solver from conda.base.context import context +from conda.cli.common import specs_from_args from conda.core.index import get_index from conda.core.prefix_data import PrefixData from conda.models.records import PrefixRecord -from .conda_interface import ( - specs_from_args, -) from .os_utils.ldd import ( get_linkages, get_package_obj_files, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index dd02f9a65c..b05c27d8ae 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -18,10 +18,10 @@ from bs4 import UnicodeDammit from conda.base.context import context from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec from frozendict import deepfreeze from . import exceptions, utils -from .conda_interface import MatchSpec from .config import Config, get_or_merge_config from .features import feature_list from .license_family import ensure_valid_license_family @@ -854,7 +854,7 @@ def toposort(output_metadata_map): will naturally lead to non-overlapping files in each package and also the correct files being present during the install and test procedures, provided they are run in this order.""" - from .conda_interface import _toposort + from conda.common.toposort import _toposort # We only care about the conda packages built by this recipe. Non-conda # packages get sorted to the end. 
diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index 6f15173f29..c07a7adb71 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -9,7 +9,8 @@ from pathlib import Path from typing import TYPE_CHECKING -from ..conda_interface import untracked +from conda.misc import untracked + from ..utils import on_linux, on_mac from .macho import otool from .pyldd import codefile_class, inspect_linkages, machofile diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9b14454c4f..d02cd2bd30 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -13,6 +13,8 @@ from pathlib import Path from subprocess import PIPE, Popen +from conda.models.version import VersionOrder + from ..utils import on_mac, on_win, rec_glob from .external import find_executable @@ -963,7 +965,6 @@ def get_static_lib_exports_dumpbin(filename): results.append((result, version)) except: pass - from ..conda_interface import VersionOrder results = sorted(results, key=lambda x: VersionOrder(x[1])) dumpbin_exe = results[-1][0] diff --git a/conda_build/post.py b/conda_build/post.py index eea8a584b6..30a4057a30 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -35,15 +35,13 @@ from typing import TYPE_CHECKING from conda.core.prefix_data import PrefixData +from conda.gateways.disk.create import TemporaryDirectory +from conda.gateways.disk.link import lchmod from conda.gateways.disk.read import compute_sum +from conda.misc import walk_prefix from conda.models.records import PrefixRecord from . import utils -from .conda_interface import ( - TemporaryDirectory, - lchmod, - walk_prefix, -) from .exceptions import OverDependingError, OverLinkingError, RunPathError from .inspect_pkg import which_package from .os_utils import external, macho diff --git a/conda_build/render.py b/conda_build/render.py index be17eaa461..78a9ed643e 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -10,7 +10,6 @@ import subprocess import sys import tarfile -import tempfile from collections import OrderedDict, defaultdict from contextlib import contextmanager from functools import lru_cache @@ -27,11 +26,14 @@ import yaml from conda.base.context import context +from conda.cli.common import specs_from_url from conda.core.package_cache_data import ProgressiveFetchExtract from conda.exceptions import UnsatisfiableError +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder from . import environ, exceptions, source, utils -from .conda_interface import PackageRecord, TemporaryDirectory, specs_from_url from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output @@ -806,8 +808,6 @@ def distribute_variants( # which python version we prefer. `python_age` can use used to tweak which # python gets used here. 
if metadata.noarch or metadata.noarch_python: - from .conda_interface import VersionOrder - age = int( metadata.get_value( "build/noarch_python_build_age", metadata.config.noarch_python_build_age @@ -943,7 +943,7 @@ def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]: yield recipe elif recipe.suffixes in [[".tar"], [".tar", ".gz"], [".tgz"], [".tar", ".bz2"]]: # extract the recipe to a temporary directory - with tempfile.TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: + with TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar: tar.extractall(path=tmp) yield Path(tmp) elif recipe.suffix == ".yaml": diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 7b9cca26de..4d65ef7cb1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -21,15 +21,12 @@ import requests from conda.core.index import get_index from conda.exceptions import CondaError, CondaHTTPError +from conda.gateways.connection.download import TmpDownload, download +from conda.gateways.disk.create import TemporaryDirectory +from conda.models.match_spec import MatchSpec +from conda.resolve import Resolve from .. import environ -from ..conda_interface import ( - MatchSpec, - Resolve, - TemporaryDirectory, - TmpDownload, - download, -) from ..config import Config, get_or_merge_config from ..utils import check_call_env, on_linux, on_win from ..variants import get_default_variant diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 7140c9a89f..fbd959dba2 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -41,10 +41,11 @@ from typing import TYPE_CHECKING +from conda.base.context import context from conda.common.io import dashlist +from conda.gateways.disk.create import TemporaryDirectory from .. import source -from ..conda_interface import TemporaryDirectory, cc_conda_build from ..config import get_or_merge_config from ..license_family import allowed_license_families, guess_license_family from ..metadata import MetaData @@ -454,7 +455,7 @@ def add_parser(repos): cran.add_argument( "-m", "--variant-config-files", - default=cc_conda_build.get("skeleton_config_yaml", None), + default=context.conda_build.get("skeleton_config_yaml", None), help="""Variant config file to add. These yaml files can contain keys such as `cran_mirror`. 
Only one can be provided here.""", ) diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 8dc6719f63..7df95a9ad5 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -12,6 +12,7 @@ import subprocess import sys from collections import OrderedDict, defaultdict +from io import StringIO from os import chdir, getcwd, listdir, makedirs from os.path import abspath, exists, isdir, isfile, join from shutil import copy2 @@ -22,17 +23,13 @@ import requests import yaml from conda.base.context import context +from conda.cli.common import spec_from_line +from conda.gateways.connection.download import download from conda.gateways.disk.read import compute_sum +from conda.models.version import normalized_version +from conda.utils import human_bytes from requests.packages.urllib3.util.url import parse_url -from ..conda_interface import ( - StringIO, - download, - human_bytes, - input, - normalized_version, - spec_from_line, -) from ..config import Config from ..environ import create_env from ..license_family import allowed_license_families, guess_license_family diff --git a/conda_build/source.py b/conda_build/source.py index 984fb239e8..c7b3d1921b 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -16,9 +16,11 @@ from urllib.parse import urljoin from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum +from conda.utils import url_path -from .conda_interface import TemporaryDirectory, download, url_path from .exceptions import MissingDependency from .os_utils import external from .utils import ( diff --git a/conda_build/utils.py b/conda_build/utils.py index dcaab55666..92de8b24a1 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -23,6 +23,7 @@ from collections import OrderedDict, defaultdict from functools import lru_cache from glob import glob +from io import StringIO from itertools import filterfalse from json.decoder import JSONDecodeError from locale import getpreferredencoding @@ -54,22 +55,17 @@ KNOWN_SUBDIRS, ) from conda.base.context import context +from conda.common.path import win_path_to_unix from conda.exceptions import CondaHTTPError +from conda.gateways.connection.download import download +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda.models.channel import Channel from conda.models.match_spec import MatchSpec +from conda.models.records import PackageRecord +from conda.models.version import VersionOrder +from conda.utils import unix_path_to_win -from .conda_interface import ( - PackageRecord, - StringIO, - TemporaryDirectory, - VersionOrder, - cc_conda_build, - download, - unix_path_to_win, - win_path_to_unix, -) -from .conda_interface import rm_rf as _rm_rf from .deprecations import deprecated from .exceptions import BuildLockError @@ -1621,8 +1617,13 @@ def filter_info_files(files_list, prefix): ) -def rm_rf(path, config=None): - return _rm_rf(path) +@deprecated.argument("24.5", "24.7", "config") +def rm_rf(path): + from conda.core.prefix_data import delete_prefix_from_linked_data + from conda.gateways.disk.delete import rm_rf as rm_rf + + rm_rf(path) + delete_prefix_from_linked_data(path) # https://stackoverflow.com/a/31459386/1170370 @@ -1680,10 +1681,8 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None 
- if cc_conda_build.get("log_config_file"): - config_file = abspath( - expanduser(expandvars(cc_conda_build.get("log_config_file"))) - ) + if log_config_file := context.conda_build.get("log_config_file"): + config_file = abspath(expanduser(expandvars(log_config_file))) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. if config_file: diff --git a/conda_build/variants.py b/conda_build/variants.py index be59e36603..82f84793f3 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -17,7 +17,6 @@ import yaml from conda.base.context import context -from .conda_interface import cc_conda_build from .deprecations import deprecated from .utils import ensure_list, get_logger, islist, on_win, trim_empty_keys from .version import _parse as parse_version @@ -231,8 +230,8 @@ def find_config_files(metadata_or_path, config): if not files and not config.ignore_system_variants: # user config - if cc_conda_build.get("config_file"): - cfg = resolve(cc_conda_build["config_file"]) + if config_file := context.conda_build.get("config_file"): + cfg = resolve(config_file) else: cfg = resolve(os.path.join("~", "conda_build_config.yaml")) if os.path.isfile(cfg): diff --git a/news/5276-deprecating-conda_interface b/news/5276-deprecating-conda_interface new file mode 100644 index 0000000000..701b9a53f1 --- /dev/null +++ b/news/5276-deprecating-conda_interface @@ -0,0 +1,56 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Deprecate `conda_build.conda_interface._toposort`. Use `conda.common.toposort._toposort` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_channels`. Use `conda.cli.helpers.add_parser_channels` instead. (#5276) +* Deprecate `conda_build.conda_interface.add_parser_prefix`. Use `conda.cli.helpers.add_parser_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.ArgumentParser`. Use `conda.cli.conda_argparse.ArgumentParser` instead. (#5276) +* Deprecate `conda_build.conda_interface.cc_conda_build`. Use `conda.base.context.context.conda_build` instead. (#5276) +* Deprecate `conda_build.conda_interface.Channel`. Use `conda.models.channel.Channel` instead. (#5276) +* Deprecate `conda_build.conda_interface.Completer`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.CondaSession`. Use `conda.gateways.connection.session.CondaSession` instead. (#5276) +* Deprecate `conda_build.conda_interface.download`. Use `conda.gateways.connection.download.download` instead. (#5276) +* Deprecate `conda_build.conda_interface.EntityEncoder`. Use `conda.auxlib.entity.EntityEncoder` instead. (#5276) +* Deprecate `conda_build.conda_interface.env_path_backup_var_exists`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.FileMode`. Use `conda.models.enums.FileMode` instead. (#5276) +* Deprecate `conda_build.conda_interface.human_bytes`. Use `conda.utils.human_bytes` instead. (#5276) +* Deprecate `conda_build.conda_interface.input`. Use `input` instead. (#5276) +* Deprecate `conda_build.conda_interface.InstalledPackages`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.lchmod`. Use `conda.gateways.disk.link.lchmod` instead. (#5276) +* Deprecate `conda_build.conda_interface.MatchSpec`. Use `conda.models.match_spec.MatchSpec` instead. (#5276) +* Deprecate `conda_build.conda_interface.NoPackagesFound`. Use `conda.exceptions.ResolvePackageNotFound` instead. (#5276) +* Deprecate `conda_build.conda_interface.normalized_version`. 
Use `conda.models.version.normalized_version` instead. (#5276) +* Deprecate `conda_build.conda_interface.PackageRecord`. Use `conda.models.records.PackageRecord` instead. (#5276) +* Deprecate `conda_build.conda_interface.PathType`. Use `conda.models.enums.PathType` instead. (#5276) +* Deprecate `conda_build.conda_interface.prefix_placeholder`. Use `conda.base.constants.PREFIX_PLACEHOLDER` instead. (#5276) +* Deprecate `conda_build.conda_interface.Resolve`. Use `conda.resolve.Resolve` instead. (#5276) +* Deprecate `conda_build.conda_interface.rm_rf`. Use `conda_build.utils.rm_rf` instead. (#5276) +* Deprecate `conda_build.conda_interface.spec_from_line`. Use `conda.cli.common.spec_from_line` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_args`. Use `conda.cli.common.specs_from_args` instead. (#5276) +* Deprecate `conda_build.conda_interface.specs_from_url`. Use `conda.cli.common.specs_from_url` instead. (#5276) +* Deprecate `conda_build.conda_interface.StringIO`. Use `io.StringIO` instead. (#5276) +* Deprecate `conda_build.conda_interface.symlink_conda`. Unused. (#5276) +* Deprecate `conda_build.conda_interface.TempDirectory`. Use `conda.gateways.disk.create.TemporaryDirectory` instead. (#5276) +* Deprecate `conda_build.conda_interface.TmpDownload`. Use `conda.gateways.connection.download.TmpDownload` instead. (#5276) +* Deprecate `conda_build.conda_interface.unix_path_to_win`. Use `conda.utils.unix_path_to_win` instead. (#5276) +* Deprecate `conda_build.conda_interface.Unsatisfiable`. Use `conda.exceptions.UnsatisfiableError` instead. (#5276) +* Deprecate `conda_build.conda_interface.untracked`. Use `conda.misc.untracked` instead. (#5276) +* Deprecate `conda_build.conda_interface.url_path`. Use `conda.utils.url_path` instead. (#5276) +* Deprecate `conda_build.conda_interface.VersionOrder`. Use `conda.models.version.VersionOrder` instead. (#5276) +* Deprecate `conda_build.conda_interface.walk_prefix`. Use `conda.misc.walk_prefix` instead. (#5276) +* Deprecate `conda_build.conda_interface.win_path_to_unix`. Use `conda.common.path.win_path_to_unix` instead. 
(#5276) + +### Docs + +* + +### Other + +* diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 9da5b48418..15b3d67237 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -11,7 +11,6 @@ from conda_build import api from conda_build.cli import main_build, main_render -from conda_build.conda_interface import TemporaryDirectory from conda_build.config import ( Config, zstd_compression_level_default, @@ -264,20 +263,19 @@ def test_purge(testing_workdir, testing_metadata): @pytest.mark.serial -def test_purge_all(testing_workdir, testing_metadata): +def test_purge_all( + testing_workdir: str, testing_metadata: MetaData, tmp_path: Path +) -> None: """ purge-all clears out build folders as well as build packages in the osx-64 folders and such """ api.output_yaml(testing_metadata, "meta.yaml") - with TemporaryDirectory() as tmpdir: - testing_metadata.config.croot = tmpdir - outputs = api.build( - testing_workdir, config=testing_metadata.config, notest=True - ) - args = ["purge-all", "--croot", tmpdir] - main_build.execute(args) - assert not get_build_folders(testing_metadata.config.croot) - assert not any(os.path.isfile(fn) for fn in outputs) + testing_metadata.config.croot = str(tmp_path) + outputs = api.build(testing_workdir, config=testing_metadata.config, notest=True) + args = ["purge-all", f"--croot={tmp_path}"] + main_build.execute(args) + assert not get_build_folders(testing_metadata.config.croot) + assert not any(os.path.isfile(fn) for fn in outputs) @pytest.mark.serial diff --git a/tests/cli/test_main_convert.py b/tests/cli/test_main_convert.py index 0be658b9d3..9ff65849d9 100644 --- a/tests/cli/test_main_convert.py +++ b/tests/cli/test_main_convert.py @@ -3,9 +3,9 @@ import os import pytest +from conda.gateways.connection.download import download from conda_build.cli import main_convert -from conda_build.conda_interface import download from conda_build.tarcheck import TarCheck from conda_build.utils import on_win diff --git a/tests/cli/test_main_develop.py b/tests/cli/test_main_develop.py index ede3758cfb..c0c3cdca3d 100644 --- a/tests/cli/test_main_develop.py +++ b/tests/cli/test_main_develop.py @@ -3,8 +3,9 @@ import os import sys +from conda.gateways.connection.download import download + from conda_build.cli import main_develop -from conda_build.conda_interface import download from conda_build.utils import get_site_packages, tar_xf diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index 59fff7901c..bf00ac6fd1 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -1,47 +1,51 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import os import sys +from typing import TYPE_CHECKING import pytest import yaml from conda_build import api from conda_build.cli import main_render -from conda_build.conda_interface import TemporaryDirectory from ..utils import metadata_dir +if TYPE_CHECKING: + from pathlib import Path + -def test_render_add_channel(): +def test_render_add_channel(tmp_path: Path) -> None: """This recipe requires the conda_build_test_requirement package, which is only on the conda_build_test channel. 
This verifies that the -c argument works for rendering.""" - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, "out.yaml") - args = [ - "-c", - "conda_build_test", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - rendered_filename, - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta["requirements"]["build"] - if "conda_build_test_requirement" in pkg - ][0] - required_package_details = required_package_string.split(" ") - assert len(required_package_details) > 1, ( - "Expected version number on successful " - f"rendering, but got only {required_package_details}" - ) - assert ( - required_package_details[1] == "1.0" - ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" + rendered_filename = os.path.join(tmp_path, "out.yaml") + args = [ + "-c", + "conda_build_test", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + rendered_filename, + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta["requirements"]["build"] + if "conda_build_test_requirement" in pkg + ][0] + required_package_details = required_package_string.split(" ") + assert len(required_package_details) > 1, ( + "Expected version number on successful " + f"rendering, but got only {required_package_details}" + ) + assert ( + required_package_details[1] == "1.0" + ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" def test_render_without_channel_fails(tmp_path): diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 5932bf4f1a..8871fcedf7 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -30,10 +30,10 @@ from conda.base.context import context, reset_context from conda.common.compat import on_linux, on_mac, on_win from conda.exceptions import ClobberError, CondaError, CondaMultiError, LinkError +from conda.utils import url_path from conda_index.api import update_index from conda_build import __version__, api, exceptions -from conda_build.conda_interface import url_path from conda_build.config import Config from conda_build.exceptions import ( CondaBuildException, diff --git a/tests/test_api_convert.py b/tests/test_api_convert.py index 7da9ede2d3..c0e46b7bf3 100644 --- a/tests/test_api_convert.py +++ b/tests/test_api_convert.py @@ -7,9 +7,9 @@ import tarfile import pytest +from conda.gateways.connection.download import download from conda_build import api -from conda_build.conda_interface import download from conda_build.utils import on_win, package_has_file from .utils import assert_package_consistency, metadata_dir diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 7849daa01c..60a381ebf1 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -15,7 +15,6 @@ from conda.common.compat import on_win from conda_build import api, render -from conda_build.conda_interface import cc_conda_build from conda_build.variants import validate_spec from .utils import metadata_dir, variants_dir @@ -213,7 +212,7 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config): assert len(build_ids) == 1 -def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): +def 
test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it python_versions = ["2.6", "3.4", "3.11"] @@ -221,27 +220,25 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) - cc_conda_build_backup = cc_conda_build.copy() - # hacky equivalent of changing condarc - # careful, this is global and affects other tests! make sure to clear it! - cc_conda_build.update( - {"config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml"} + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={ + "config_file": "${TEST_WORKDIR}/config/conda_build_config.yaml", + **context.conda_build, + }, ) os.environ["TEST_WORKDIR"] = testing_workdir - try: - m = api.render( - os.path.join(variants_dir, "19_used_variables"), - bypass_env_check=True, - finalize=False, - )[0][0] - # this one should have gotten clobbered by the values in the recipe - assert m.config.variant["python"] not in python_versions - # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants["bzip2"]) == 2 - finally: - cc_conda_build.clear() - cc_conda_build.update(cc_conda_build_backup) + m = api.render( + os.path.join(variants_dir, "19_used_variables"), + bypass_env_check=True, + finalize=False, + )[0][0] + # this one should have gotten clobbered by the values in the recipe + assert m.config.variant["python"] not in python_versions + # this confirms that we loaded the config file correctly + assert len(m.config.squished_variants["bzip2"]) == 2 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): diff --git a/tests/test_misc.py b/tests/test_misc.py index bcdafcb196..4a5bb0d95c 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -4,9 +4,10 @@ from pathlib import Path import pytest +from conda.auxlib.entity import EntityEncoder +from conda.models.enums import PathType from conda_build._link import pyc_f -from conda_build.conda_interface import EntityEncoder, PathType @pytest.mark.parametrize( diff --git a/tests/test_source.py b/tests/test_source.py index 711407d153..1cae2f9997 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,10 +5,10 @@ import tarfile import pytest +from conda.gateways.disk.create import TemporaryDirectory from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator diff --git a/tests/test_utils.py b/tests/test_utils.py index 0cc76ac8a3..70a2981203 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -204,8 +204,11 @@ def test_logger_config_from_file(testing_workdir, capfd, mocker): handlers: [console] """ ) - cc_conda_build = mocker.patch.object(utils, "cc_conda_build") - cc_conda_build.get.return_value = test_file + mocker.patch( + "conda.base.context.Context.conda_build", + new_callable=mocker.PropertyMock, + return_value={"log_config_file": test_file}, + ) log = utils.get_logger(__name__) # default log level is INFO, but our config file should set level to DEBUG log.warn("test message") diff --git a/tests/utils.py b/tests/utils.py index 125cda7c91..b4ed64912b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,10 +8,9 @@ from pathlib import Path 
from typing import TYPE_CHECKING -from conda.base.context import context, reset_context +from conda.base.context import reset_context from conda.common.compat import on_mac -from conda_build.conda_interface import cc_conda_build from conda_build.metadata import MetaData if TYPE_CHECKING: @@ -153,7 +152,3 @@ def get_noarch_python_meta(meta): def reset_config(search_path=None): reset_context(search_path) - cc_conda_build.clear() - cc_conda_build.update( - context.conda_build if hasattr(context, "conda_build") else {} - ) From f2c3f3bcb07f9f4f9e0dc10b13206ebd9f9cdb5a Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 18:01:23 +0200 Subject: [PATCH 343/366] Minor refactor of `find_used_variables_*` functions (#5296) --- conda_build/variants.py | 53 +++++++++++++++++++++++++++-------------- tests/test_variants.py | 24 +++++++++++++++++++ 2 files changed, 59 insertions(+), 18 deletions(-) diff --git a/conda_build/variants.py b/conda_build/variants.py index 82f84793f3..1e2b1adc0c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -12,6 +12,7 @@ from copy import copy from functools import lru_cache from itertools import product +from pathlib import Path from typing import TYPE_CHECKING import yaml @@ -765,23 +766,39 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): return used_variables -def find_used_variables_in_shell_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = rf"(^[^$]*?\$\{{?\s*{re.escape(v)}\s*[\s|\}}])" - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_shell_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"(^[^$]*?\$\{{?\s*{re.escape(variant)}\s*[\s|\}}])", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } -def find_used_variables_in_batch_script(variant, file_path): - with open(file_path) as f: - text = f.read() - used_variables = set() - for v in variant: - variant_regex = rf"\%{re.escape(v)}\%" - if re.search(variant_regex, text, flags=re.MULTILINE | re.DOTALL): - used_variables.add(v) - return used_variables +def find_used_variables_in_batch_script( + variants: Iterable[str], + file_path: str | os.PathLike | Path, +) -> set[str]: + text = Path(file_path).read_text() + return { + variant + for variant in variants + if ( + variant in text # str in str is faster than re.search + and re.search( + rf"\%{re.escape(variant)}\%", + text, + flags=re.MULTILINE | re.DOTALL, + ) + ) + } diff --git a/tests/test_variants.py b/tests/test_variants.py index 71b2e7e627..e853f172fd 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -17,6 +17,8 @@ combine_specs, dict_of_lists_to_list_of_dicts, filter_combined_spec_to_used_keys, + find_used_variables_in_batch_script, + find_used_variables_in_shell_script, get_package_variants, get_vars, validate_spec, @@ -715,3 +717,25 @@ def test_get_vars(): ] assert get_vars(variants) == {"nodejs"} + + +def test_find_used_variables_in_shell_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"${variants[0]}\n" + f"${{{variants[1]}}}\n" + f"${{{{{variants[2]}}}}}\n" + f"$${variants[3]}\n" + ) + assert 
find_used_variables_in_shell_script(variants, script) == {"FOO", "BAR"} + + +def test_find_used_variables_in_batch_script(tmp_path: Path) -> None: + variants = ("FOO", "BAR", "BAZ", "QUX") + (script := tmp_path / "script.sh").write_text( + f"%{variants[0]}%\n" + f"%%{variants[1]}%%\n" + f"${variants[2]}\n" + f"${{{variants[3]}}}\n" + ) + assert find_used_variables_in_batch_script(variants, script) == {"FOO", "BAR"} From e4434d99f46c4a3cbf66d02e6f0d8f253ab93948 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 21:38:02 +0200 Subject: [PATCH 344/366] Typing `conda_build.api` (#5234) Co-authored-by: Bianca Henderson --- conda_build/api.py | 241 +++++++++++++++++------------- conda_build/build.py | 26 +++- conda_build/config.py | 15 +- conda_build/convert.py | 36 +++-- conda_build/develop.py | 16 +- conda_build/inspect_pkg.py | 4 +- conda_build/metadata.py | 25 +++- conda_build/os_utils/ldd.py | 4 +- conda_build/os_utils/pyldd.py | 6 +- conda_build/render.py | 56 +++---- conda_build/skeletons/cpan.py | 20 +-- conda_build/skeletons/cran.py | 60 +++++--- conda_build/skeletons/luarocks.py | 9 +- conda_build/skeletons/pypi.py | 47 +++--- conda_build/skeletons/rpm.py | 45 +++--- conda_build/utils.py | 15 +- tests/cli/test_main_skeleton.py | 6 +- tests/test_api_build.py | 83 +++++----- tests/test_api_render.py | 79 +++++----- tests/test_api_skeleton.py | 38 ++--- tests/test_api_skeleton_cpan.py | 8 +- tests/test_api_skeleton_cran.py | 8 +- tests/test_build.py | 8 +- tests/test_subpackages.py | 134 ++++++++++------- tests/test_variants.py | 224 +++++++++++++++++---------- 25 files changed, 716 insertions(+), 497 deletions(-) diff --git a/conda_build/api.py b/conda_build/api.py index 2d4e3ef567..8c47ef1e6d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -17,11 +17,13 @@ import sys from os.path import dirname, expanduser, join from pathlib import Path +from typing import TYPE_CHECKING, Iterable # make the Config class available in the api namespace from .config import DEFAULT_PREFIX_LENGTH as _prefix_length from .config import Config, get_channel_urls, get_or_merge_config from .deprecations import deprecated +from .metadata import MetaData, MetaDataTuple from .utils import ( CONDA_PACKAGE_EXTENSIONS, LoggingContext, @@ -32,21 +34,25 @@ on_win, ) +if TYPE_CHECKING: + from typing import Any, Literal + + StatsDict = dict[str, Any] + def render( - recipe_path, - config=None, - variants=None, - permit_unsatisfiable_variants=True, - finalize=True, - bypass_env_check=False, + recipe_path: str | os.PathLike | Path, + config: Config | None = None, + variants: dict[str, Any] | None = None, + permit_unsatisfiable_variants: bool = True, + finalize: bool = True, + bypass_env_check: bool = False, **kwargs, -): +) -> list[MetaDataTuple]: """Given path to a recipe, return the MetaData object(s) representing that recipe, with jinja2 templates evaluated. 
- Returns a list of (metadata, needs_download, needs_reparse in env) tuples""" - from collections import OrderedDict + Returns a list of (metadata, need_download, need_reparse in env) tuples""" from conda.exceptions import NoPackagesFoundError @@ -63,7 +69,7 @@ def render( variants=variants, permit_unsatisfiable_variants=permit_unsatisfiable_variants, ) - output_metas = OrderedDict() + output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {} for meta, download, render_in_env in metadata_tuples: if not meta.skip() or not config.trim_skip: for od, om in meta.get_output_metadata_set( @@ -95,7 +101,7 @@ def render( (var, om.config.variant[var]) for var in om.get_used_vars() ), - ] = (om, download, render_in_env) + ] = MetaDataTuple(om, download, render_in_env) else: output_metas[ f"{om.type}: {om.name()}", @@ -104,12 +110,16 @@ def render( (var, om.config.variant[var]) for var in om.get_used_vars() ), - ] = (om, download, render_in_env) + ] = MetaDataTuple(om, download, render_in_env) return list(output_metas.values()) -def output_yaml(metadata, file_path=None, suppress_outputs=False): +def output_yaml( + metadata: MetaData, + file_path: str | os.PathLike | Path | None = None, + suppress_outputs: bool = False, +) -> str: """Save a rendered recipe in its final form to the path given by file_path""" from .render import output_yaml @@ -117,12 +127,16 @@ def output_yaml(metadata, file_path=None, suppress_outputs=False): def get_output_file_paths( - recipe_path_or_metadata, - no_download_source=False, - config=None, - variants=None, + recipe_path_or_metadata: str + | os.PathLike + | Path + | MetaData + | Iterable[MetaDataTuple], + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, **kwargs, -): +) -> list[str]: """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, @@ -132,22 +146,9 @@ def get_output_file_paths( config = get_or_merge_config(config, **kwargs) - if hasattr(recipe_path_or_metadata, "__iter__") and not isinstance( - recipe_path_or_metadata, str - ): - list_of_metas = [ - hasattr(item[0], "config") - for item in recipe_path_or_metadata - if len(item) == 3 - ] - - if list_of_metas and all(list_of_metas): - metadata = recipe_path_or_metadata - else: - raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}") - elif isinstance(recipe_path_or_metadata, (str, Path)): + if isinstance(recipe_path_or_metadata, (str, Path)): # first, render the parent recipe (potentially multiple outputs, depending on variants). 
- metadata = render( + metadata_tuples = render( recipe_path_or_metadata, no_download_source=no_download_source, variants=variants, @@ -155,29 +156,48 @@ def get_output_file_paths( finalize=True, **kwargs, ) + + elif isinstance(recipe_path_or_metadata, MetaData): + metadata_tuples = [MetaDataTuple(recipe_path_or_metadata, False, False)] + + elif isinstance(recipe_path_or_metadata, Iterable) and all( + isinstance(recipe, MetaDataTuple) + and isinstance(recipe.metadata, MetaData) + and isinstance(recipe.need_download, bool) + and isinstance(recipe.need_reparse, bool) + for recipe in recipe_path_or_metadata + ): + metadata_tuples = recipe_path_or_metadata + else: - assert hasattr( - recipe_path_or_metadata, "config" - ), f"Expecting metadata object - got {recipe_path_or_metadata}" - metadata = [(recipe_path_or_metadata, None, None)] - # Next, loop over outputs that each metadata defines + raise ValueError( + f"Unknown input type: {type(recipe_path_or_metadata)}; expecting " + "PathLike object, MetaData object, or a list of tuples containing " + "(MetaData, bool, bool)." + ) + + # Next, loop over outputs that each metadata defines outs = [] - for m, _, _ in metadata: - if m.skip(): - outs.append(get_skip_message(m)) + for metadata, _, _ in metadata_tuples: + if metadata.skip(): + outs.append(get_skip_message(metadata)) else: - outs.append(bldpkg_path(m)) - return sorted(list(set(outs))) + outs.append(bldpkg_path(metadata)) + return sorted(set(outs)) @deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.") def get_output_file_path( - recipe_path_or_metadata, - no_download_source=False, - config=None, - variants=None, + recipe_path_or_metadata: str + | os.PathLike + | Path + | MetaData + | Iterable[MetaDataTuple], + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, **kwargs, -): +) -> list[str]: """Get output file paths for any packages that would be created by a recipe Both split packages (recipes with more than one output) and build matrices, @@ -192,7 +212,13 @@ def get_output_file_path( ) -def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs): +def check( + recipe_path: str | os.PathLike | Path, + no_download_source: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + **kwargs, +) -> bool: """Check validity of input recipe path Verifies that recipe can be completely rendered, and that fields of the rendered recipe are @@ -209,16 +235,16 @@ def check(recipe_path, no_download_source=False, config=None, variants=None, **k def build( - recipe_paths_or_metadata, - post=None, - need_source_download=True, - build_only=False, - notest=False, - config=None, - variants=None, - stats=None, + recipe_paths_or_metadata: str | os.PathLike | Path | MetaData, + post: bool | None = None, + need_source_download: bool = True, + build_only: bool = False, + notest: bool = False, + config: Config | None = None, + variants: dict[str, Any] | None = None, + stats: StatsDict | None = None, **kwargs, -): +) -> list[str]: """Run the build step. If recipe paths are provided, renders recipe before building. @@ -230,16 +256,15 @@ def build( "other arguments (config) by keyword." 
) - recipes = [] + recipes: list[str | MetaData] = [] for recipe in ensure_list(recipe_paths_or_metadata): - if isinstance(recipe, str): + if isinstance(recipe, (str, os.PathLike, Path)): for recipe in expand_globs(recipe, os.getcwd()): try: - recipe = find_recipe(recipe) + recipes.append(find_recipe(recipe)) except OSError: continue - recipes.append(recipe) - elif hasattr(recipe, "config"): + elif isinstance(recipe, MetaData): recipes.append(recipe) else: raise ValueError(f"Recipe passed was unrecognized object: {recipe}") @@ -263,12 +288,12 @@ def build( def test( - recipedir_or_package_or_metadata, - move_broken=True, - config=None, - stats=None, + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + move_broken: bool = True, + config: Config | None = None, + stats: StatsDict | None = None, **kwargs, -): +) -> bool: """Run tests on either packages (.tar.bz2 or extracted) or recipe folders For a recipe folder, it renders the recipe enough to know what package to download, and obtains @@ -282,24 +307,22 @@ def test( # if people don't pass in an object to capture stats in, they won't get them returned. # We'll still track them, though. - if not stats: - stats = {} + stats = stats or {} with config: # This will create a new local build folder if and only if config # doesn't already have one. What this means is that if we're # running a test immediately after build, we use the one that the # build already provided - test_result = test( + return test( recipedir_or_package_or_metadata, config=config, move_broken=move_broken, stats=stats, ) - return test_result -def list_skeletons(): +def list_skeletons() -> list[str]: """List available skeletons for generating conda recipes from external sources. The returned list is generally the names of supported repositories (pypi, cran, etc.) @@ -315,8 +338,14 @@ def list_skeletons(): def skeletonize( - packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs -): + packages: str | Iterable[str], + repo: Literal["cpan", "cran", "luarocks", "pypi", "rpm"], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + config: Config | None = None, + **kwargs, +) -> None: """Generate a conda recipe from an external repo. Translates metadata from external sources into expected conda recipe format.""" @@ -355,7 +384,7 @@ def skeletonize( if arg in kwargs: del kwargs[arg] with config: - skeleton_return = module.skeletonize( + module.skeletonize( packages, output_dir=output_dir, version=version, @@ -363,42 +392,42 @@ def skeletonize( config=config, **kwargs, ) - return skeleton_return def develop( - recipe_dir, - prefix=sys.prefix, - no_pth_file=False, - build_ext=False, - clean=False, - uninstall=False, -): + recipe_dir: str | Iterable[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: """Install a Python package in 'development mode'. 
This works by creating a conda.pth file in site-packages.""" from .develop import execute recipe_dir = ensure_list(recipe_dir) - return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) + execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall) def convert( - package_file, - output_dir=".", - show_imports=False, - platforms=None, - force=False, - dependencies=None, - verbose=False, - quiet=True, - dry_run=False, -): + package_file: str, + output_dir: str = ".", + show_imports: bool = False, + platforms: str | Iterable[str] | None = None, + force: bool = False, + dependencies: str | Iterable[str] | None = None, + verbose: bool = False, + quiet: bool = True, + dry_run: bool = False, +) -> None: """Convert changes a package from one platform to another. It applies only to things that are portable, such as pure python, or header-only C/C++ libraries.""" from .convert import conda_convert platforms = ensure_list(platforms) + dependencies = ensure_list(dependencies) if package_file.endswith("tar.bz2"): return conda_convert( package_file, @@ -419,7 +448,7 @@ def convert( raise RuntimeError("cannot convert: %s" % package_file) -def test_installable(channel="defaults"): +def test_installable(channel: str = "defaults") -> bool: """Check to make sure that packages in channel are installable. This is a consistency check for the channel.""" from .inspect_pkg import test_installable @@ -428,14 +457,14 @@ def test_installable(channel="defaults"): def inspect_linkages( - packages, - prefix=sys.prefix, - untracked=False, - all_packages=False, - show_files=False, - groupby="package", - sysroot="", -): + packages: str | Iterable[str], + prefix: str | os.PathLike | Path = sys.prefix, + untracked: bool = False, + all_packages: bool = False, + show_files: bool = False, + groupby: Literal["package", "dependency"] = "package", + sysroot: str = "", +) -> str: from .inspect_pkg import inspect_linkages packages = ensure_list(packages) @@ -575,7 +604,7 @@ def debug( config.channel_urls = get_channel_urls(kwargs) - metadata_tuples: list[tuple[MetaData, bool, bool]] = [] + metadata_tuples: list[MetaDataTuple] = [] best_link_source_method = "skip" if isinstance(recipe_or_package_path_or_metadata_tuples, str): @@ -583,7 +612,7 @@ def debug( for metadata_conda_debug in metadatas_conda_debug: best_link_source_method = "symlink" metadata = MetaData(metadata_conda_debug, config, {}) - metadata_tuples.append((metadata, False, True)) + metadata_tuples.append(MetaDataTuple(metadata, False, True)) else: ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1] if not ext or not any(ext in _ for _ in CONDA_PACKAGE_EXTENSIONS): diff --git a/conda_build/build.py b/conda_build/build.py index 88461ac941..531b38323f 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -21,6 +21,7 @@ from collections import OrderedDict, deque from os.path import dirname, isdir, isfile, islink, join from pathlib import Path +from typing import TYPE_CHECKING import conda_package_handling.api import yaml @@ -85,6 +86,9 @@ if on_win: from . 
import windows +if TYPE_CHECKING: + from typing import Any, Iterable + if "bsd" in sys.platform: shell_path = "/bin/sh" elif utils.on_win: @@ -3322,12 +3326,12 @@ def write_test_scripts( def test( - recipedir_or_package_or_metadata, - config, - stats, - move_broken=True, - provision_only=False, -): + recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData, + config: Config, + stats: dict, + move_broken: bool = True, + provision_only: bool = False, +) -> bool: """ Execute any test scripts for the given package. @@ -3641,8 +3645,14 @@ def check_external(): def build_tree( - recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None -): + recipe_list: Iterable[str | MetaData], + config: Config, + stats: dict, + build_only: bool = False, + post: bool | None = None, + notest: bool = False, + variants: dict[str, Any] | None = None, +) -> list[str]: to_build_recursive = [] recipe_list = deque(recipe_list) diff --git a/conda_build/config.py b/conda_build/config.py index 98f3e8b447..d37479e3b2 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -30,6 +30,7 @@ if TYPE_CHECKING: from pathlib import Path + from typing import Any invocation_time = "" @@ -821,7 +822,7 @@ def clean_pkgs(self): for folder in self.bldpkgs_dirs: rm_rf(folder) - def copy(self): + def copy(self) -> Config: new = copy.copy(self) new.variant = copy.deepcopy(self.variant) if hasattr(self, "variants"): @@ -847,7 +848,11 @@ def __exit__(self, e_type, e_value, traceback): self.clean(remove_folders=False) -def _get_or_merge_config(config, variant=None, **kwargs): +def _get_or_merge_config( + config: Config | None, + variant: dict[str, Any] | None = None, + **kwargs, +) -> Config: # This function should only ever be called via get_or_merge_config. # It only exists for us to monkeypatch a default config when running tests. if not config: @@ -863,7 +868,11 @@ def _get_or_merge_config(config, variant=None, **kwargs): return config -def get_or_merge_config(config, variant=None, **kwargs): +def get_or_merge_config( + config: Config | None, + variant: dict[str, Any] | None = None, + **kwargs, +) -> Config: """Always returns a new object - never changes the config that might be passed in.""" return _get_or_merge_config(config, variant=variant, **kwargs) diff --git a/conda_build/convert.py b/conda_build/convert.py index 793f0dc93c..e910d47e21 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -4,6 +4,8 @@ Tools for converting conda packages """ +from __future__ import annotations + import glob import hashlib import json @@ -14,8 +16,12 @@ import tarfile import tempfile from pathlib import Path +from typing import TYPE_CHECKING + +from .utils import ensure_list, filter_info_files, walk -from .utils import filter_info_files, walk +if TYPE_CHECKING: + from typing import Iterable def retrieve_c_extensions(file_path, show_imports=False): @@ -776,31 +782,35 @@ def convert_from_windows_to_unix( def conda_convert( - file_path, - output_dir=".", - show_imports=False, - platforms=None, - force=False, - dependencies=None, - verbose=False, - quiet=False, - dry_run=False, -): + file_path: str, + output_dir: str = ".", + show_imports: bool = False, + platforms: str | Iterable[str] | None = None, + force: bool = False, + dependencies: str | Iterable[str] | None = None, + verbose: bool = False, + quiet: bool = False, + dry_run: bool = False, +) -> None: """Convert a conda package between different platforms and architectures. 
Positional arguments: file_path (str) -- the file path to the source package's tar file output_dir (str) -- the file path to where to output the converted tar file show_imports (bool) -- show all C extensions found in the source package - platforms (str) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64', + platforms list[str] -- the platforms to convert to: 'win-64', 'win-32', 'linux-64', 'linux-32', 'osx-64', or 'all' force (bool) -- force conversion of packages that contain C extensions - dependencies (List[str]) -- the new dependencies to add to the source package's + dependencies (list[str]) -- the new dependencies to add to the source package's existing dependencies verbose (bool) -- show output of items that are updated quiet (bool) -- hide all output except warnings and errors dry_run (bool) -- show which conversions will take place """ + + platforms = ensure_list(platforms) + dependencies = ensure_list(dependencies) + if show_imports: imports = retrieve_c_extensions(file_path) if len(imports) == 0: diff --git a/conda_build/develop.py b/conda_build/develop.py index 5b83185fdc..59b31a3231 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import shutil import sys from os.path import abspath, exists, expanduser, isdir, join @@ -126,13 +128,13 @@ def _uninstall(sp_dir, pkg_path): def execute( - recipe_dirs, - prefix=sys.prefix, - no_pth_file=False, - build_ext=False, - clean=False, - uninstall=False, -): + recipe_dirs: list[str], + prefix: str = sys.prefix, + no_pth_file: bool = False, + build_ext: bool = False, + clean: bool = False, + uninstall: bool = False, +) -> None: if not isdir(prefix): sys.exit( """\ diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index b202a7eb68..19c0db7ca3 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -216,8 +216,8 @@ def inspect_linkages( all_packages: bool = False, show_files: bool = False, groupby: Literal["package", "dependency"] = "package", - sysroot="", -): + sysroot: str = "", +) -> str: if not packages and not untracked and not all_packages: sys.exit("At least one package or --untracked or --all must be provided") elif on_win: diff --git a/conda_build/metadata.py b/conda_build/metadata.py index b05c27d8ae..6fd065e0b2 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -13,7 +13,7 @@ from collections import OrderedDict from functools import lru_cache from os.path import isfile, join -from typing import TYPE_CHECKING, overload +from typing import TYPE_CHECKING, NamedTuple, overload from bs4 import UnicodeDammit from conda.base.context import context @@ -907,7 +907,10 @@ def toposort(output_metadata_map): return result -def get_output_dicts_from_metadata(metadata, outputs=None): +def get_output_dicts_from_metadata( + metadata: MetaData, + outputs: list[dict[str, Any]] | None = None, +) -> list[dict[str, Any]]: outputs = outputs or metadata.get_section("outputs") if not outputs: @@ -2514,16 +2517,16 @@ def get_reduced_variant_set(self, used_variables): def get_output_metadata_set( self, - permit_undefined_jinja=False, - permit_unsatisfiable_variants=False, - bypass_env_check=False, - ): + permit_undefined_jinja: bool = False, + permit_unsatisfiable_variants: bool = False, + bypass_env_check: bool = False, + ) -> list[tuple[dict[str, Any], MetaData]]: from .source import provide out_metadata_map = {} if self.final: - outputs = 
get_output_dicts_from_metadata(self)[0] - output_tuples = [(outputs, self)] + outputs = get_output_dicts_from_metadata(self) + output_tuples = [(outputs[0], self)] else: all_output_metadata = OrderedDict() @@ -2972,3 +2975,9 @@ def get_test_deps(self, py_files, pl_files, lua_files, r_files): specs.extend(utils.ensure_list(self.config.extra_deps)) return specs + + +class MetaDataTuple(NamedTuple): + metadata: MetaData + need_download: bool + need_reparse: bool diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index c07a7adb71..b2de763074 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -52,7 +52,7 @@ def ldd(path): def get_linkages( obj_files: Iterable[str], prefix: str | os.PathLike | Path, - sysroot, + sysroot: str, ) -> dict[str, list[tuple[str, str]]]: return _get_linkages(tuple(obj_files), Path(prefix), sysroot) @@ -61,7 +61,7 @@ def get_linkages( def _get_linkages( obj_files: tuple[str], prefix: Path, - sysroot, + sysroot: str, ) -> dict[str, list[tuple[str, str]]]: linkages = {} for file in obj_files: diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py index ceffb1dbc6..ff48d5f891 100644 --- a/conda_build/os_utils/pyldd.py +++ b/conda_build/os_utils/pyldd.py @@ -1048,7 +1048,7 @@ def _get_magic_bit(path: Path) -> bytes: return None -def _trim_sysroot(sysroot): +def _trim_sysroot(sysroot: str) -> str: if sysroot: while sysroot.endswith("/") or sysroot.endswith("\\"): sysroot = sysroot[:-1] @@ -1066,7 +1066,7 @@ def _get_arch_if_native(arch): # TODO :: Consider memoizing instead of repeatedly scanning # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True) -def _inspect_linkages_this(filename, sysroot="", arch="native"): +def _inspect_linkages_this(filename, sysroot: str = "", arch="native"): """ :param filename: @@ -1100,7 +1100,7 @@ def _inspect_linkages_this(filename, sysroot="", arch="native"): # TODO :: Consider returning a tree structure or a dict when recurse is True? def inspect_linkages( - filename, resolve_filenames=True, recurse=True, sysroot="", arch="native" + filename, resolve_filenames=True, recurse=True, sysroot: str = "", arch="native" ): already_seen = set() todo = {filename} diff --git a/conda_build/render.py b/conda_build/render.py index 78a9ed643e..9bbdcc6efa 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -14,7 +14,6 @@ from contextlib import contextmanager from functools import lru_cache from os.path import ( - dirname, isabs, isdir, isfile, @@ -36,7 +35,7 @@ from . import environ, exceptions, source, utils from .exceptions import DependencyNeedsBuildingError from .index import get_build_index -from .metadata import MetaData, combine_top_level_metadata_with_output +from .metadata import MetaData, MetaDataTuple, combine_top_level_metadata_with_output from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, @@ -49,7 +48,8 @@ ) if TYPE_CHECKING: - from typing import Iterator + import os + from typing import Any, Iterable, Iterator from .config import Config @@ -63,7 +63,7 @@ def odict_representer(dumper, data): yaml.add_representer(OrderedDict, odict_representer) -def bldpkg_path(m): +def bldpkg_path(m: MetaData) -> str: """ Returns path to built package's tarball given its ``Metadata``. 
""" @@ -800,8 +800,10 @@ def distribute_variants( permit_unsatisfiable_variants=False, allow_no_other_outputs=False, bypass_env_check=False, -): - rendered_metadata = {} +) -> list[MetaDataTuple]: + rendered_metadata: dict[ + tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple + ] = {} need_source_download = True # don't bother distributing python if it's a noarch package, and figure out @@ -906,23 +908,25 @@ def distribute_variants( mv.config.variant.get("target_platform", mv.config.subdir), tuple((var, mv.config.variant.get(var)) for var in mv.get_used_vars()), ) - ] = (mv, need_source_download, None) + ] = MetaDataTuple(mv, need_source_download, False) # list of tuples. # each tuple item is a tuple of 3 items: - # metadata, need_download, need_reparse_in_env + # metadata, need_download, need_reparse return list(rendered_metadata.values()) -def expand_outputs(metadata_tuples): +def expand_outputs( + metadata_tuples: Iterable[MetaDataTuple], +) -> list[tuple[dict, MetaData]]: """Obtain all metadata objects for all outputs from recipe. Useful for outputting paths.""" - expanded_outputs = OrderedDict() + from copy import deepcopy - for _m, download, reparse in metadata_tuples: - from .build import get_all_replacements + from .build import get_all_replacements - get_all_replacements(_m.config) - from copy import deepcopy + expanded_outputs: dict[str, tuple[dict, MetaData]] = {} + for _m, download, reparse in metadata_tuples: + get_all_replacements(_m.config) for output_dict, m in deepcopy(_m).get_output_metadata_set( permit_unsatisfiable_variants=False ): @@ -957,11 +961,11 @@ def render_recipe( recipe_dir: str | os.PathLike | Path, config: Config, no_download_source: bool = False, - variants: dict | None = None, + variants: dict[str, Any] | None = None, permit_unsatisfiable_variants: bool = True, reset_build_id: bool = True, bypass_env_check: bool = False, -) -> list[tuple[MetaData, bool, bool]]: +) -> list[MetaDataTuple]: """Returns a list of tuples, each consisting of (metadata-object, needs_download, needs_render_in_env) @@ -994,7 +998,7 @@ def render_recipe( m.config.variant_config_files = [cbc_yaml] m.config.variants = get_package_variants(m, variants=variants) m.config.variant = m.config.variants[0] - return [(m, False, False)] + return [MetaDataTuple(m, False, False)] else: # merge any passed-in variants with any files found variants = get_package_variants(m, variants=variants) @@ -1055,7 +1059,11 @@ def ignore_aliases(self, data): unicode = None # silence pyflakes about unicode not existing in py3 -def output_yaml(metadata, filename=None, suppress_outputs=False): +def output_yaml( + metadata: MetaData, + filename: str | os.PathLike | Path | None = None, + suppress_outputs: bool = False, +) -> str: local_metadata = metadata.copy() if ( suppress_outputs @@ -1070,13 +1078,9 @@ def output_yaml(metadata, filename=None, suppress_outputs=False): indent=2, ) if filename: - if any(sep in filename for sep in ("\\", "/")): - try: - os.makedirs(dirname(filename)) - except OSError: - pass - with open(filename, "w") as f: - f.write(output) - return "Wrote yaml to %s" % filename + filename = Path(filename) + filename.parent.mkdir(parents=True, exist_ok=True) + filename.write_text(output) + return f"Wrote yaml to {filename}" else: return output diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index 4d65ef7cb1..c9bd5c398c 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -4,6 +4,8 @@ Tools for converting CPAN packages to conda 
recipes. """ +from __future__ import annotations + import codecs import gzip import hashlib @@ -384,15 +386,15 @@ def get_core_modules_for_this_perl_version(version, cache_dir): # meta_cpan_url="http://api.metacpan.org", def skeletonize( - packages, - output_dir=".", - version=None, - meta_cpan_url="https://fastapi.metacpan.org/v1", - recursive=False, - force=False, - config=None, - write_core=False, -): + packages: list[str], + output_dir: str = ".", + version: str | None = None, + meta_cpan_url: str = "https://fastapi.metacpan.org/v1", + recursive: bool = False, + force: bool = False, + config: Config | None = None, + write_core: bool = False, +) -> None: """ Loops over packages, outputting conda recipes converted from CPAN metata. """ diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index fbd959dba2..38628a52f4 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -55,6 +55,8 @@ if TYPE_CHECKING: from typing import Literal + from ..config import Config + SOURCE_META = """\ {archive_keys} {git_url_key} {git_url} @@ -863,28 +865,36 @@ def remove_comments(template): def skeletonize( - in_packages, - output_dir=".", - output_suffix="", - add_maintainer=None, - version=None, - git_tag=None, - cran_url=None, - recursive=False, - archive=True, - version_compare=False, - update_policy="", - r_interp="r-base", - use_binaries_ver=None, - use_noarch_generic=False, - use_when_no_binary: Literal["error" | "src" | "old" | "old-src"] = "src", - use_rtools_win=False, - config=None, - variant_config_files=None, - allow_archived=False, - add_cross_r_base=False, - no_comments=False, -): + in_packages: list[str], + output_dir: str = ".", + output_suffix: str = "", + add_maintainer: str | None = None, + version: str | None = None, + git_tag: str | None = None, + cran_url: str | None = None, + recursive: bool = False, + archive: bool = True, + version_compare: bool = False, + update_policy: Literal[ + "error", + "skip-up-to-date", + "skip-existing", + "overwrite", + "merge-keep-build-num", + "merge-incr-build-num", + ] + | None = None, + r_interp: str = "r-base", + use_binaries_ver: str | None = None, + use_noarch_generic: bool = False, + use_when_no_binary: Literal["error", "src", "old", "old-src"] = "src", + use_rtools_win: bool = False, + config: Config | None = None, + variant_config_files: list[str] | None = None, + allow_archived: bool = False, + add_cross_r_base: bool = False, + no_comments: bool = False, +) -> None: if ( use_when_no_binary != "error" and use_when_no_binary != "src" @@ -1089,7 +1099,11 @@ def skeletonize( script_env = [] extra_recipe_maintainers = [] build_number = 0 - if update_policy.startswith("merge") and inputs["old-metadata"]: + if ( + update_policy + and update_policy.startswith("merge") + and inputs["old-metadata"] + ): m = inputs["old-metadata"] patches = make_array(m, "source/patches") script_env = make_array(m, "build/script_env") diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index 14d9c44f77..da8e641928 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -8,6 +8,8 @@ # - mingw32 support (really any windows support, completely untested) # - replace manual "luajit -e require 'blah'" with built-in entry-point testing +from __future__ import annotations + import json import os import subprocess @@ -224,7 +226,12 @@ def ensure_base_deps(deps): return deps -def skeletonize(packages, output_dir=".", version=None, recursive=False): +def skeletonize( + 
packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, +) -> None: # Check that we have Lua installed (any version) # Check that we have luarocks installed diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 7df95a9ad5..c45c843a6d 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -4,6 +4,8 @@ Tools for converting PyPI packages to conda recipes. """ +from __future__ import annotations + import configparser import keyword import logging @@ -17,6 +19,7 @@ from os.path import abspath, exists, isdir, isfile, join from shutil import copy2 from tempfile import mkdtemp +from typing import TYPE_CHECKING from urllib.parse import urljoin, urlsplit import pkginfo @@ -46,6 +49,9 @@ ) from ..version import _parse as parse_version +if TYPE_CHECKING: + from typing import Iterable + pypi_example = """ Examples: @@ -251,30 +257,27 @@ def _formating_value(attribute_name, attribute_value): def skeletonize( - packages, - output_dir=".", - version=None, - recursive=False, - all_urls=False, - pypi_url="https://pypi.io/pypi/", - noprompt=True, - version_compare=False, - python_version=None, - manual_url=False, - all_extras=False, - noarch_python=False, - config=None, - setup_options=None, - extra_specs=[], - pin_numpy=False, -): + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + all_urls: bool = False, + pypi_url: str = "https://pypi.io/pypi/", + noprompt: bool = True, + version_compare: bool = False, + python_version: str | None = None, + manual_url: bool = False, + all_extras: bool = False, + noarch_python: bool = False, + config: Config | None = None, + setup_options: str | Iterable[str] | None = None, + extra_specs: str | Iterable[str] | None = None, + pin_numpy: bool = False, +) -> None: package_dicts = {} - if not setup_options: - setup_options = [] - - if isinstance(setup_options, str): - setup_options = [setup_options] + setup_options = ensure_list(setup_options) + extra_specs = ensure_list(extra_specs) if not config: config = Config() diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py index f0abb8c747..d44477171f 100644 --- a/conda_build/skeletons/rpm.py +++ b/conda_build/skeletons/rpm.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import argparse import gzip import hashlib @@ -9,13 +11,20 @@ from os import chmod, makedirs from os.path import basename, dirname, exists, join, splitext from textwrap import wrap +from typing import TYPE_CHECKING from urllib.request import urlopen from xml.etree import ElementTree as ET from ..license_family import guess_license_family from ..source import download_to_cache +from ..utils import ensure_list from .cran import yaml_quote_string +if TYPE_CHECKING: + from typing import Iterable + + from ..config import Config + # This is used in two places default_architecture = "x86_64" default_distro = "centos6" @@ -637,14 +646,14 @@ def write_conda_recipes( # Do I want to pass just the package name, the CDT and the arch and rely on # expansion to form the URL? I have been going backwards and forwards here. 
def write_conda_recipe( - packages, - distro, - output_dir, - architecture, - recursive, - override_arch, - dependency_add, - config, + packages: list[str], + distro: str, + output_dir: str, + architecture: str, + recursive: bool, + override_arch: bool, + dependency_add: list[str], + config: Config | None, ): cdt_name = distro bits = "32" if architecture in ("armv6", "armv7a", "i686", "i386") else "64" @@ -706,16 +715,18 @@ def write_conda_recipe( def skeletonize( - packages, - output_dir=".", - version=None, - recursive=False, - architecture=default_architecture, - override_arch=True, - dependency_add=[], - config=None, - distro=default_distro, + packages: list[str], + output_dir: str = ".", + version: str | None = None, + recursive: bool = False, + architecture: str = default_architecture, + override_arch: bool = True, + dependency_add: str | Iterable[str] | None = None, + config: Config | None = None, + distro: str = default_distro, ): + dependency_add = ensure_list(dependency_add) + write_conda_recipe( packages, distro, diff --git a/conda_build/utils.py b/conda_build/utils.py index 92de8b24a1..4a3e1f782c 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -72,6 +72,8 @@ if TYPE_CHECKING: from typing import Mapping, TypeVar + from .metadata import MetaData + T = TypeVar("T") K = TypeVar("K") V = TypeVar("V") @@ -1124,7 +1126,7 @@ def convert_path_for_cygwin_or_msys2(exe, path): return path -def get_skip_message(m): +def get_skip_message(m: MetaData) -> str: return ( f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration " f"({({k: m.config.variant[k] for k in m.get_used_vars()})})." @@ -1250,9 +1252,13 @@ def tmp_chdir(dest): os.chdir(curdir) -def expand_globs(path_list, root_dir): +def expand_globs( + path_list: str | os.PathLike | Path | Iterable[str | os.PathLike | Path], + root_dir: str | os.PathLike | Path, +) -> list[str]: files = [] for path in ensure_list(path_list): + path = str(path) if not os.path.isabs(path): path = os.path.join(root_dir, path) if os.path.isfile(path): @@ -1276,11 +1282,10 @@ def expand_globs(path_list, root_dir): # Avoid this potential ambiguity by sorting. (see #4185) files.extend(sorted(glob_files)) prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}")) - files = [prefix_path_re.sub("", f, 1) for f in files] - return files + return [prefix_path_re.sub("", f, 1) for f in files] -def find_recipe(path): +def find_recipe(path: str) -> str: """recurse through a folder, locating valid meta files (see VALID_METAS). Raises error if more than one is found. Returns full path to meta file to be built. diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py index 0333d77c1f..c2dd0a65b5 100644 --- a/tests/cli/test_main_skeleton.py +++ b/tests/cli/test_main_skeleton.py @@ -54,6 +54,6 @@ def test_skeleton_pypi_arguments_work(testing_workdir): assert os.path.isdir("photutils") # Check that the setup option occurs in bld.bat and build.sh. 
- m = api.render("photutils")[0][0] - assert "--offline" in m.meta["build"]["script"] - assert m.version() == "1.10.0" + metadata = api.render("photutils")[0][0] + assert "--offline" in metadata.meta["build"]["script"] + assert metadata.version() == "1.10.0" diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 8871fcedf7..a663f18e73 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -237,8 +237,8 @@ def test_offline( def test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") - m = api.render(recipe_path, config=testing_config)[0][0] - output = api.get_output_file_paths(m)[0] + metadata = api.render(recipe_path, config=testing_config)[0][0] + output = api.get_output_file_paths(metadata)[0] # missing hash because we set custom build string in meta.yaml test_path = os.path.join( testing_config.croot, @@ -613,13 +613,13 @@ def test_numpy_setup_py_data(testing_config): # - cython subprocess.call("conda remove -y cython".split()) with pytest.raises(CondaBuildException) as exc_info: - api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] + api.render(recipe_path, config=testing_config, numpy="1.16") assert exc_info.match("Cython") subprocess.check_call(["conda", "install", "-y", "cython"]) - m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] - _hash = m.hash_dependencies() + metadata = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0] + _hash = metadata.hash_dependencies() assert ( - os.path.basename(api.get_output_file_paths(m)[0]) + os.path.basename(api.get_output_file_paths(metadata)[0]) == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2" ) @@ -1178,9 +1178,9 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): # will be merged when build subdir == host_subdir, the weak run_exports should be present. testing_metadata.meta["requirements"]["build"] = ["test_has_run_exports"] api.output_yaml(testing_metadata, "meta.yaml") - m = api.render(testing_workdir, config=testing_config)[0][0] - assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] - assert "weak_pinned_package 1.0.*" in m.meta["requirements"]["run"] + metadata = api.render(testing_workdir, config=testing_config)[0][0] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. @@ -1190,10 +1190,12 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): ] testing_metadata.meta["requirements"]["host"] = ["python"] api.output_yaml(testing_metadata, "host_present_weak/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "host_present_weak"), config=testing_config )[0][0] - assert "weak_pinned_package 2.0.*" not in m.meta["requirements"].get("run", []) + assert "weak_pinned_package 2.0.*" not in metadata.meta["requirements"].get( + "run", [] + ) # 3. host present, and deps in build have "strong" run_exports section. use host, add # in "strong" from build. 
@@ -1205,15 +1207,15 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir): "test_has_run_exports_implicit_weak" ] api.output_yaml(testing_metadata, "host_present_strong/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "host_present_strong"), config=testing_config )[0][0] - assert "strong_pinned_package 1.0 0" in m.meta["requirements"]["host"] - assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"] + assert "strong_pinned_package 1.0 0" in metadata.meta["requirements"]["host"] + assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports should be excluded, since it is a build dep - assert "weak_pinned_package 1.0.*" not in m.meta["requirements"]["run"] + assert "weak_pinned_package 1.0.*" not in metadata.meta["requirements"]["run"] # weak one from test_has_run_exports_implicit_weak should be present, since it is a host dep - assert "weak_pinned_package 2.0.*" in m.meta["requirements"]["run"] + assert "weak_pinned_package 2.0.*" in metadata.meta["requirements"]["run"] @pytest.mark.sanity @@ -1279,20 +1281,20 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi testing_metadata.meta["requirements"]["build"] = ["run_exports_constrains"] testing_metadata.meta["requirements"]["host"] = [] api.output_yaml(testing_metadata, "in_build/meta.yaml") - m = api.render(os.path.join(testing_workdir, "in_build"), config=testing_config)[0][ - 0 - ] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + metadata = api.render( + os.path.join(testing_workdir, "in_build"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"strong_run_export"} == reqs_set("run") assert {"strong_constrains_export"} == reqs_set("run_constrained") testing_metadata.meta["requirements"]["build"] = [] testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains"] api.output_yaml(testing_metadata, "in_host/meta.yaml") - m = api.render(os.path.join(testing_workdir, "in_host"), config=testing_config)[0][ - 0 - ] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + metadata = api.render( + os.path.join(testing_workdir, "in_host"), config=testing_config + )[0][0] + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"strong_run_export", "weak_run_export"} == reqs_set("run") assert {"strong_constrains_export", "weak_constrains_export"} == reqs_set( "run_constrained" @@ -1303,32 +1305,32 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi ] testing_metadata.meta["requirements"]["host"] = [] api.output_yaml(testing_metadata, "only_weak_in_build/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "only_weak_in_build"), config=testing_config )[0][0] - reqs_set = lambda section: set(m.meta["requirements"].get(section, [])) + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert set() == reqs_set("run") assert set() == reqs_set("run_constrained") testing_metadata.meta["requirements"]["build"] = [] testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains_only_weak"] api.output_yaml(testing_metadata, "only_weak_in_host/meta.yaml") - m = api.render( + metadata = api.render( os.path.join(testing_workdir, "only_weak_in_host"), config=testing_config )[0][0] - reqs_set = lambda section: set(m.meta["requirements"].get(section, 
[])) + reqs_set = lambda section: set(metadata.meta["requirements"].get(section, [])) assert {"weak_run_export"} == reqs_set("run") assert {"weak_constrains_export"} == reqs_set("run_constrained") def test_pin_subpackage_exact(testing_config): recipe = os.path.join(metadata_dir, "_pin_subpackage_exact") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 assert any( re.match(r"run_exports_subpkg\ 1\.0\ 0", req) - for (m, _, _) in ms - for req in m.meta.get("requirements", {}).get("run", []) + for metadata, _, _ in metadata_tuples + for req in metadata.meta.get("requirements", {}).get("run", []) ) @@ -1420,12 +1422,12 @@ def test_unknown_selectors(testing_config): @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_failed_recipe_leaves_folders(testing_config): recipe = os.path.join(fail_dir, "recursive-build") - m = api.render(recipe, config=testing_config)[0][0] - locks = get_conda_operation_locks(m.config) + metadata = api.render(recipe, config=testing_config)[0][0] + locks = get_conda_operation_locks(metadata.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): - api.build(m) - assert os.path.isdir(m.config.build_folder), "build folder was removed" - assert os.listdir(m.config.build_folder), "build folder has no files" + api.build(metadata) + assert os.path.isdir(metadata.config.build_folder), "build folder was removed" + assert os.listdir(metadata.config.build_folder), "build folder has no files" # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations @@ -1646,13 +1648,14 @@ def test_pin_depends(testing_config): dependencies """ recipe = os.path.join(metadata_dir, "_pin_depends_record") - m = api.render(recipe, config=testing_config)[0][0] + metadata = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record # will not show it in record assert not any( - re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] ) - output = api.build(m, config=testing_config)[0] + output = api.build(metadata, config=testing_config)[0] requires = package_has_file(output, "info/requires") assert requires if hasattr(requires, "decode"): diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 60a381ebf1..293ca09815 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -139,9 +139,9 @@ def test_resolved_packages_recipe(testing_config): @pytest.mark.slow def test_host_entries_finalized(testing_config): recipe = os.path.join(metadata_dir, "_host_entries_finalized") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - outputs = api.get_output_file_paths(metadata) + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + outputs = api.get_output_file_paths(metadata_tuples) assert any("py27" in out for out in outputs) assert any("py39" in out for out in outputs) @@ -159,10 +159,11 @@ def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir) def test_pin_depends(testing_config): """This is deprecated functionality - replaced by the more general variants pinning scheme""" recipe = os.path.join(metadata_dir, "_pin_depends_strict") - m = api.render(recipe, config=testing_config)[0][0] + metadata = api.render(recipe, 
config=testing_config)[0][0] # the recipe python is not pinned, but having pin_depends set will force it to be. assert any( - re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"] + re.search(r"python\s+[23]\.", dep) + for dep in metadata.meta["requirements"]["run"] ) @@ -189,10 +190,10 @@ def test_noarch_with_platform_deps(testing_workdir, testing_config): build_ids = {} for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]: platform, arch = subdir_.split("-") - m = api.render( + metadata = api.render( recipe_path, config=testing_config, platform=platform, arch=arch )[0][0] - build_ids[subdir_] = m.build_id() + build_ids[subdir_] = metadata.build_id() # one hash for each platform, plus one for the archspec selector assert len(set(build_ids.values())) == 4 @@ -206,8 +207,10 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config): recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps") build_ids = set() for platform in ["osx", "linux", "win"]: - m = api.render(recipe_path, config=testing_config, platform=platform)[0][0] - build_ids.add(m.build_id()) + metadata = api.render(recipe_path, config=testing_config, platform=platform)[0][ + 0 + ] + build_ids.add(metadata.build_id()) assert len(build_ids) == 1 @@ -230,21 +233,21 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker): ) os.environ["TEST_WORKDIR"] = testing_workdir - m = api.render( + metadata = api.render( os.path.join(variants_dir, "19_used_variables"), bypass_env_check=True, finalize=False, )[0][0] # this one should have gotten clobbered by the values in the recipe - assert m.config.variant["python"] not in python_versions + assert metadata.config.variant["python"] not in python_versions # this confirms that we loaded the config file correctly - assert len(m.config.squished_variants["bzip2"]) == 2 + assert len(metadata.config.squished_variants["bzip2"]) == 2 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): recipe = os.path.join(metadata_dir, "_self_reference_run_exports") - m = api.render(recipe)[0][0] - run_exports = m.meta.get("build", {}).get("run_exports", []) + metadata = api.render(recipe)[0][0] + run_exports = metadata.meta.get("build", {}).get("run_exports", []) assert run_exports assert len(run_exports) == 1 assert run_exports[0].split()[1] == ">=1.0.0,<2.0a0" @@ -252,11 +255,11 @@ def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): def test_run_exports_with_pin_compatible_in_subpackages(testing_config): recipe = os.path.join(metadata_dir, "_run_exports_in_outputs") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - if m.name().startswith("gfortran_"): + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + if metadata.name().startswith("gfortran_"): run_exports = set( - m.meta.get("build", {}).get("run_exports", {}).get("strong", []) + metadata.meta.get("build", {}).get("run_exports", {}).get("strong", []) ) assert len(run_exports) == 1 # len after splitting should be more than one because of pin_compatible. 
If it's only zlib, we've lost the @@ -266,38 +269,46 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): def test_ignore_build_only_deps(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "python_in_build_only"), bypass_env_check=True, finalize=False, ) - assert len(ms) == 1 + assert len(metadata_tuples) == 1 def test_merge_build_host_build_key(): - m = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_merge_build_host_empty_host_section(): - m = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_pin_expression_works_with_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "foo >=3.10.0.rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "foo >=3.10.0.rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] def test_pin_expression_works_with_python_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease_python") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "python >=3.10.0rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "python >=3.10.0rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] @pytest.mark.benchmark @@ -326,7 +337,7 @@ def create_variants(): validate_spec("", variant) return variant - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, channels=[], variants=create_variants() ) - assert len(ms) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests + assert len(metadata_tuples) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index a8273492b0..963312ee44 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -234,8 +234,8 @@ def test_sympy(package: str, version: str | None, tmp_path: Path, testing_config config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] - assert m.version() == "1.10" + metadata = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] + assert metadata.version() == "1.10" def test_get_entry_points(pylint_pkginfo, pylint_metadata): @@ -350,8 +350,8 @@ def test_pypi_with_setup_options(tmp_path: Path, testing_config): ) # Check that the setup option occurs in bld.bat and build.sh. 
- m = api.render(str(tmp_path / "photutils"))[0][0] - assert "--offline" in m.meta["build"]["script"] + metadata = api.render(str(tmp_path / "photutils"))[0][0] + assert "--offline" in metadata.meta["build"]["script"] def test_pypi_pin_numpy(tmp_path: Path, testing_config: Config): @@ -377,8 +377,8 @@ def test_pypi_version_sorting(tmp_path: Path, testing_config: Config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "fasttext"))[0][0] - assert parse_version(m.version()) >= parse_version("0.9.2") + metadata = api.render(str(tmp_path / "fasttext"))[0][0] + assert parse_version(metadata.version()) >= parse_version("0.9.2") def test_list_skeletons(): @@ -394,8 +394,8 @@ def test_pypi_with_entry_points(tmp_path: Path): def test_pypi_with_version_arg(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1442 api.skeletonize("PrettyTable", "pypi", version="0.7.2", output_dir=tmp_path) - m = api.render(str(tmp_path / "prettytable"))[0][0] - assert parse_version(m.version()) == parse_version("0.7.2") + metadata = api.render(str(tmp_path / "prettytable"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.7.2") @pytest.mark.slow @@ -415,10 +415,10 @@ def test_pypi_with_extra_specs(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "bigfile"))[0][0] - assert parse_version(m.version()) == parse_version("0.1.24") - assert any("cython" in req for req in m.meta["requirements"]["host"]) - assert any("mpi4py" in req for req in m.meta["requirements"]["host"]) + metadata = api.render(str(tmp_path / "bigfile"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.1.24") + assert any("cython" in req for req in metadata.meta["requirements"]["host"]) + assert any("mpi4py" in req for req in metadata.meta["requirements"]["host"]) @pytest.mark.slow @@ -438,17 +438,17 @@ def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "mpi4py_test"))[0][0] - assert parse_version(m.version()) == parse_version("0.0.10") + metadata = api.render(str(tmp_path / "mpi4py_test"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.0.10") def test_pypi_with_basic_environment_markers(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1974 api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) - m = api.render(tmp_path / "coconut")[0][0] + metadata = api.render(tmp_path / "coconut")[0][0] - build_reqs = str(m.meta["requirements"]["host"]) - run_reqs = str(m.meta["requirements"]["run"]) + build_reqs = str(metadata.meta["requirements"]["host"]) + run_reqs = str(metadata.meta["requirements"]["run"]) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs @@ -458,8 +458,8 @@ def test_pypi_with_basic_environment_markers(tmp_path: Path): def test_setuptools_test_requirements(tmp_path: Path): api.skeletonize(packages="hdf5storage", repo="pypi", output_dir=tmp_path) - m = api.render(str(tmp_path / "hdf5storage"))[0][0] - assert m.meta["test"]["requires"] == ["nose >=1.0"] + metadata = api.render(str(tmp_path / "hdf5storage"))[0][0] + assert metadata.meta["test"]["requires"] == ["nose >=1.0"] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py index 
9f08ccbae6..5945158023 100644 --- a/tests/test_api_skeleton_cpan.py +++ b/tests/test_api_skeleton_cpan.py @@ -17,8 +17,8 @@ def test_xs_needs_c_compiler(testing_config): """Perl packages with XS files need a C compiler""" # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) - m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][ - 0 - ] - build_requirements = m.get_value("requirements/build") + metadata = api.render( + "perl-sub-identify/0.14", finalize=False, bypass_env_check=True + )[0][0] + build_requirements = metadata.get_value("requirements/build") assert compiler("c", testing_config) in build_requirements diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 912b2bee0c..57e9d02550 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -41,12 +41,12 @@ def test_cran_license( api.skeletonize( packages=package, repo="cran", output_dir=tmp_path, config=testing_config ) - m = api.render(str(tmp_path / package / "meta.yaml"))[0][0] + metadata = api.render(str(tmp_path / package / "meta.yaml"))[0][0] - assert m.get_value("about/license") == license_id - assert m.get_value("about/license_family") == license_family + assert metadata.get_value("about/license") == license_id + assert metadata.get_value("about/license_family") == license_family assert { - Path(license).name for license in m.get_value("about/license_file", "") + Path(license).name for license in metadata.get_value("about/license_file", "") } == set(license_files) diff --git a/tests/test_build.py b/tests/test_build.py index eca9441af8..fd6a3a8f67 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -41,11 +41,11 @@ def test_find_prefix_files(testing_workdir): def test_build_preserves_PATH(testing_config): - m = api.render(os.path.join(metadata_dir, "source_git"), config=testing_config)[0][ - 0 - ] + metadata = api.render( + os.path.join(metadata_dir, "source_git"), config=testing_config + )[0][0] ref_path = os.environ["PATH"] - build.build(m, stats=None) + build.build(metadata, stats=None) assert os.environ["PATH"] == ref_path diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 4fe966c054..11e43383d0 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -11,6 +11,7 @@ from conda.base.context import context from conda_build import api, utils +from conda_build.metadata import MetaDataTuple from conda_build.render import finalize_metadata from .utils import get_valid_recipes, subpackage_dir @@ -56,7 +57,10 @@ def test_output_pkg_path_shows_all_subpackages(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 @@ -65,7 +69,10 @@ def test_subpackage_version_provided(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 1 assert "a-2.0-1" in outputs[0] @@ -79,7 +86,10 @@ def 
test_subpackage_independent_hash(testing_metadata): out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 assert outputs[0][-15:] != outputs[1][-15:] @@ -120,34 +130,34 @@ def test_intradependencies(testing_config): def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): recipe = os.path.join(subpackage_dir, "_git_in_output_version") - outputs = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(outputs) == 1 - assert outputs[0][0].version() == "1.22.0" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].version() == "1.22.0" def test_intradep_with_templated_output_name(testing_config): recipe = os.path.join(subpackage_dir, "_intradep_with_templated_output_name") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 expected_names = { "test_templated_subpackage_name", "templated_subpackage_nameabc", "depends_on_templated", } - assert {m.name() for (m, _, _) in metadata} == expected_names + assert {metadata.name() for metadata, _, _ in metadata_tuples} == expected_names def test_output_specific_subdir(testing_config): recipe = os.path.join(subpackage_dir, "_output_specific_subdir") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 - for m, _, _ in metadata: - if m.name() in ("default_subdir", "default_subdir_2"): - assert m.config.target_subdir == context.subdir - elif m.name() == "custom_subdir": - assert m.config.target_subdir == "linux-aarch64" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("default_subdir", "default_subdir_2"): + assert metadata.config.target_subdir == context.subdir + elif metadata.name() == "custom_subdir": + assert metadata.config.target_subdir == "linux-aarch64" else: raise AssertionError( "Test for output_specific_subdir written incorrectly - " @@ -157,17 +167,17 @@ def test_output_specific_subdir(testing_config): def test_about_metadata(testing_config): recipe = os.path.join(subpackage_dir, "_about_metadata") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - for m, _, _ in metadata: - if m.name() == "abc": - assert "summary" in m.meta["about"] - assert m.meta["about"]["summary"] == "weee" - assert "home" not in m.meta["about"] - elif m.name() == "def": - assert "home" in m.meta["about"] - assert "summary" not in m.meta["about"] - assert m.meta["about"]["home"] == "http://not.a.url" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + for metadata, _, _ in metadata_tuples: + if metadata.name() == "abc": + assert "summary" in metadata.meta["about"] + assert metadata.meta["about"]["summary"] == "weee" + assert "home" not in metadata.meta["about"] + elif metadata.name() == "def": + assert "home" in metadata.meta["about"] + assert "summary" not in metadata.meta["about"] + assert metadata.meta["about"]["home"] == "http://not.a.url" outs = api.build(recipe, config=testing_config) for out in outs: about_meta = utils.package_has_file(out, 
"info/about.json") @@ -288,23 +298,24 @@ def test_per_output_tests_script(testing_config): def test_pin_compatible_in_outputs(testing_config): recipe_dir = os.path.join(subpackage_dir, "_pin_compatible_in_output") - m = api.render(recipe_dir, config=testing_config)[0][0] + metadata = api.render(recipe_dir, config=testing_config)[0][0] assert any( - re.search(r"numpy\s*>=.*,<.*", req) for req in m.meta["requirements"]["run"] + re.search(r"numpy\s*>=.*,<.*", req) + for req in metadata.meta["requirements"]["run"] ) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config): recipe_dir = os.path.join(subpackage_dir, "_output_named_same_as_top_level") - ms = api.render(recipe_dir, config=testing_config) + metadata_tuples = api.render(recipe_dir, config=testing_config) # TODO: need to decide what best behavior is for saying whether the # top-level build reqs or the output reqs for the similarly naemd output # win. I think you could have both, but it means rendering a new, extra, # build-only metadata in addition to all the outputs - for m, _, _ in ms: - if m.name() == "ipp": + for metadata, _, _ in metadata_tuples: + if metadata.name() == "ipp": for env in ("build", "host", "run"): - assert not m.meta.get("requirements", {}).get(env) + assert not metadata.meta.get("requirements", {}).get(env) def test_subpackage_order_natural(testing_config): @@ -361,23 +372,34 @@ def test_strong_run_exports_from_build_applies_to_host(testing_config): def test_python_line_up_with_compiled_lib(recipe, testing_config): recipe = os.path.join(subpackage_dir, recipe) # we use windows so that we have 2 libxyz results (VS2008, VS2015) - ms = api.render(recipe, config=testing_config, platform="win", arch="64") + metadata_tuples = api.render( + recipe, config=testing_config, platform="win", arch="64" + ) # 2 libxyz, 3 py-xyz, 3 xyz - assert len(ms) == 8 - for m, _, _ in ms: - if m.name() in ("py-xyz" or "xyz"): - deps = m.meta["requirements"]["run"] + assert len(metadata_tuples) == 8 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("py-xyz" or "xyz"): + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("libxyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) - assert any(dep.startswith("zlib >") for dep in deps), (m.name(), deps) - if m.name() == "xyz": - deps = m.meta["requirements"]["run"] + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) + assert any(dep.startswith("zlib >") for dep in deps), ( + metadata.name(), + deps, + ) + if metadata.name() == "xyz": + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("py-xyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) @pytest.mark.xfail( @@ -385,17 +407,17 @@ def test_python_line_up_with_compiled_lib(recipe, testing_config): ) def test_merge_build_host_applies_in_outputs(testing_config): recipe = os.path.join(subpackage_dir, "_merge_build_host") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: # top level - if m.name() == "test_build_host_merge": - assert not m.meta.get("requirements", {}).get("run") + 
if metadata.name() == "test_build_host_merge": + assert not metadata.meta.get("requirements", {}).get("run") # output else: - run_exports = set(m.meta.get("build", {}).get("run_exports", [])) + run_exports = set(metadata.meta.get("build", {}).get("run_exports", [])) assert len(run_exports) == 2 assert all(len(export.split()) > 1 for export in run_exports) - run_deps = set(m.meta.get("requirements", {}).get("run", [])) + run_deps = set(metadata.meta.get("requirements", {}).get("run", [])) assert len(run_deps) == 2 assert all(len(dep.split()) > 1 for dep in run_deps) @@ -411,11 +433,13 @@ def test_activation_in_output_scripts(testing_config): def test_inherit_build_number(testing_config): recipe = os.path.join(subpackage_dir, "_inherit_build_number") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - assert "number" in m.meta["build"], "build number was not inherited at all" + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + assert ( + "number" in metadata.meta["build"] + ), "build number was not inherited at all" assert ( - int(m.meta["build"]["number"]) == 1 + int(metadata.meta["build"]["number"]) == 1 ), "build number should have been inherited as '1'" diff --git a/tests/test_variants.py b/tests/test_variants.py index e853f172fd..3c79e36e16 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -73,7 +73,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): testing_config.variant_config_files = [str(variants_path)] # render the metadata - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "variant_recipe"), no_download_source=False, config=testing_config, @@ -82,14 +82,14 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): ) # we should have one package/metadata per python version - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 # there should only be one run requirement for each package/metadata - assert len(metadata[0][0].meta["requirements"]["run"]) == 1 - assert len(metadata[1][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[0][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[1][0].meta["requirements"]["run"]) == 1 # the run requirements should be python ranges assert { - *metadata[0][0].meta["requirements"]["run"], - *metadata[1][0].meta["requirements"]["run"], + *metadata_tuples[0][0].meta["requirements"]["run"], + *metadata_tuples[1][0].meta["requirements"]["run"], } == {"python >=3.11,<3.12.0a0", "python >=3.12,<3.13.0a0"} @@ -109,7 +109,7 @@ def test_use_selectors_in_variants(testing_workdir, testing_config): ) ) def test_variant_with_ignore_version_reduces_matrix(): - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "03_ignore_version_reduces_matrix"), variants={ "packageA": ["1.2", "3.4"], @@ -119,13 +119,13 @@ def test_variant_with_ignore_version_reduces_matrix(): }, finalize=False, ) - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 def test_variant_with_numpy_pinned_has_matrix(): recipe = os.path.join(variants_dir, "04_numpy_matrix_pinned") - metadata = api.render(recipe, finalize=False) - assert len(metadata) == 4 + metadata_tuples = api.render(recipe, finalize=False) + assert len(metadata_tuples) == 4 def test_pinning_in_build_requirements(): @@ -223,13 +223,13 @@ def test_validate_spec(): def test_cross_compilers(): recipe = os.path.join(variants_dir, "09_cross") - ms = api.render( + metadata_tuples = api.render( recipe, 
permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert len(ms) == 3 + assert len(metadata_tuples) == 3 def test_variants_in_output_names(): @@ -252,11 +252,11 @@ def test_variants_in_versions_with_setup_py_data(): def test_git_variables_with_variants(testing_config): recipe = os.path.join(variants_dir, "13_git_vars") - m = api.render( + metadata = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True )[0][0] - assert m.version() == "1.20.2" - assert m.build_number() == 0 + assert metadata.version() == "1.20.2" + assert metadata.build_number() == 0 def test_variant_input_with_zip_keys_keeps_zip_keys_list(): @@ -305,57 +305,109 @@ def test_serial_builds_have_independent_configs(testing_config): def test_subspace_selection(testing_config): recipe = os.path.join(variants_dir, "18_subspace_selection") testing_config.variant = {"a": "coffee"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, so we should end up with 2 variants - assert len(ms) == 2 + assert len(metadata_tuples) == 2 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test compound selection testing_config.variant = {"a": "coffee", "b": "123"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, but one with both 'coffee' for a, and '123' for b, # so we should end up with 1 variants - assert len(ms) == 1 + assert len(metadata_tuples) == 1 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 0 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 0 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples 
+ ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test when configuration leads to no valid combinations - only c provided, and its value # doesn't match any other existing values of c, so it's then ambiguous which zipped # values to choose testing_config.variant = {"c": "not an animal"} with pytest.raises(ValueError): - ms = api.render( - recipe, config=testing_config, finalize=False, bypass_env_check=True - ) + api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) # all zipped keys provided by the new variant. It should clobber the old one. testing_config.variant = {"a": "some", "b": "new", "c": "animal"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(ms) == 1 - assert ms[0][0].config.variant["a"] == "some" - assert ms[0][0].config.variant["b"] == "new" - assert ms[0][0].config.variant["c"] == "animal" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].config.variant["a"] == "some" + assert metadata_tuples[0][0].config.variant["b"] == "new" + assert metadata_tuples[0][0].config.variant["c"] == "animal" def test_get_used_loop_vars(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "19_used_variables"), finalize=False, bypass_env_check=True, @@ -363,9 +415,9 @@ def test_get_used_loop_vars(): # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable - assert m.get_used_loop_vars() == {"python", "some_package"} + assert metadata.get_used_loop_vars() == {"python", "some_package"} # these are all used vars - including those with only one value (and thus not loop vars) - assert m.get_used_vars() == { + assert metadata.get_used_vars() == { "python", "some_package", "zlib", @@ -380,49 +432,63 @@ def test_reprovisioning_source(): def test_reduced_hashing_behavior(testing_config): # recipes using any compiler jinja2 function need a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_compiler"), finalize=False, bypass_env_check=True, )[0][0] assert ( - "c_compiler" in m.get_hash_contents() + "c_compiler" in metadata.get_hash_contents() ), "hash contents should contain c_compiler" assert re.search( - "h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id() + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() ), "hash should be present when compiler jinja2 function is used" # recipes that use some variable in conda_build_config.yaml to control what # versions are present at build time also must have a hash (except # python, r_base, and the other stuff covered by legacy build string # behavior) - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_pinned"), finalize=False, bypass_env_check=True, )[0][0] - assert "zlib" in m.get_hash_contents() - assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert "zlib" in metadata.get_hash_contents() + assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id()) # anything else does not get a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_no_python"), 
finalize=False, bypass_env_check=True, )[0][0] - assert not m.get_hash_contents() - assert not re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert not metadata.get_hash_contents() + assert not re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() + ) def test_variants_used_in_jinja2_conditionals(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "21_conditional_sections"), finalize=False, bypass_env_check=True, ) - assert len(ms) == 2 - assert sum(m.config.variant["blas_impl"] == "mkl" for m, _, _ in ms) == 1 - assert sum(m.config.variant["blas_impl"] == "openblas" for m, _, _ in ms) == 1 + assert len(metadata_tuples) == 2 + assert ( + sum( + metadata.config.variant["blas_impl"] == "mkl" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["blas_impl"] == "openblas" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) def test_build_run_exports_act_on_host(caplog): @@ -436,14 +502,14 @@ def test_build_run_exports_act_on_host(caplog): def test_detect_variables_in_build_and_output_scripts(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="linux", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -452,7 +518,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -461,14 +527,14 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" in used_vars # on windows, we find variables in bat scripts as well as shell scripts - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="win", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -478,7 +544,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -522,11 +588,11 @@ def test_exclusive_config_files(): os.path.join("config_dir", "config-0.yaml"), os.path.join("config_dir", "config-1.yaml"), ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_files=exclusive_config_files, )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? assert "cwd" not in variant # did we load the exclusive configs? 
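(Illustrative aside, not part of the patch.) The test hunks above and below uniformly rename `m`/`ms` to `metadata`/`metadata_tuples`, reflecting that `conda_build.api.render()` returns a list of metadata tuples, one per rendered variant. A rough sketch of that shape as the tests consume it, with `recipe_dir` as a placeholder path:

```python
from conda_build import api

# render() yields one (MetaData, download, reparse) tuple per variant,
# which the refactored tests unpack as "for metadata, _, _ in metadata_tuples".
metadata_tuples = api.render(recipe_dir, finalize=False, bypass_env_check=True)
for metadata, _, _ in metadata_tuples:
    print(metadata.name(), metadata.version())

# Single-variant recipes are commonly unpacked with [0][0] to get just the MetaData.
metadata = api.render(recipe_dir, finalize=False, bypass_env_check=True)[0][0]
```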
@@ -547,11 +613,11 @@ def test_exclusive_config_file(): yaml.dump( {"abc": ["super"], "exclusive": ["someval"]}, f, default_flow_style=False ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_file=os.path.join("config_dir", "config.yaml"), )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? assert "cwd" not in variant # did we load the exclusive config @@ -610,27 +676,27 @@ def test_inner_python_loop_with_output(testing_config): def test_variant_as_dependency_name(testing_config): - outputs = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "27_requirements_host"), config=testing_config ) - assert len(outputs) == 2 + assert len(metadata_tuples) == 2 def test_custom_compiler(): recipe = os.path.join(variants_dir, "28_custom_compiler") - ms = api.render( + metadata_tuples = api.render( recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert len(ms) == 3 + assert len(metadata_tuples) == 3 def test_different_git_vars(): recipe = os.path.join(variants_dir, "29_different_git_vars") - ms = api.render(recipe) - versions = [m[0].version() for m in ms] + metadata_tuples = api.render(recipe) + versions = [metadata[0].version() for metadata in metadata_tuples] assert "1.20.0" in versions assert "1.21.11" in versions @@ -647,7 +713,7 @@ def test_top_level_finalized(testing_config): def test_variant_subkeys_retained(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "31_variant_subkeys"), finalize=False, bypass_env_check=True, @@ -655,11 +721,11 @@ def test_variant_subkeys_retained(): found_replacements = False from conda_build.build import get_all_replacements - for variant in m.config.variants: + for variant in metadata.config.variants: found_replacements = get_all_replacements(variant) assert len(found_replacements), "Did not find replacements" - m.final = False - outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False) + metadata.final = False + outputs = metadata.get_output_metadata_set(permit_unsatisfiable_variants=False) get_all_replacements(outputs[0][1].config.variant) From a6d53af0a23675cd93dc4a0c3f7fdcd2291fbae0 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Thu, 18 Apr 2024 23:35:51 +0200 Subject: [PATCH 345/366] Properly initialize context with parsed arguments (#5271) --- .devcontainer/post_create.sh | 2 +- .github/workflows/tests.yml | 4 ++-- conda_build/cli/main_build.py | 3 ++- conda_build/cli/main_convert.py | 4 ++++ conda_build/cli/main_debug.py | 3 +++ conda_build/cli/main_develop.py | 7 ++++--- conda_build/cli/main_inspect.py | 7 ++++--- conda_build/cli/main_metapackage.py | 10 +++++++--- conda_build/cli/main_render.py | 3 +-- conda_build/cli/main_skeleton.py | 4 ++++ conda_build/config.py | 15 +++++++++++++- news/5271-context | 19 ++++++++++++++++++ pyproject.toml | 2 +- recipe/meta.yaml | 2 +- tests/cli/test_main_build.py | 12 +++++++++++ tests/cli/test_main_render.py | 31 ++++++++++------------------- tests/conftest.py | 9 +++++++++ tests/requirements.txt | 2 +- 18 files changed, 100 insertions(+), 39 deletions(-) create mode 100644 news/5271-context diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh index 73ea60380c..766bcb9f29 100644 --- a/.devcontainer/post_create.sh +++ b/.devcontainer/post_create.sh @@ -24,4 +24,4 @@ echo "Installing dev dependencies" --file "$SRC_CONDA_BUILD/tests/requirements.txt" \ --file 
"$SRC_CONDA_BUILD/tests/requirements-Linux.txt" \ --file "$SRC_CONDA_BUILD/tests/requirements-ci.txt" \ - "conda>=23.5.0" + "conda>=23.7.0" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index aafe0ed977..29f98a129d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -83,10 +83,10 @@ jobs: include: # minimum Python/conda combo - python-version: '3.8' - conda-version: 23.5.0 + conda-version: 23.7.0 test-type: serial - python-version: '3.8' - conda-version: 23.5.0 + conda-version: 23.7.0 test-type: parallel # maximum Python/conda combo - python-version: '3.12' diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index 18e24827e0..a966677471 100644 --- a/conda_build/cli/main_build.py +++ b/conda_build/cli/main_build.py @@ -532,13 +532,14 @@ def check_action(recipe, config): def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + config = get_or_merge_config(None, **parsed.__dict__) build.check_external() # change globals in build module, see comment there as well config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = parsed.override_channels config.verbose = not parsed.quiet or parsed.debug if "purge" in parsed.recipe: diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index cd12f21ddc..d30b725b3d 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -6,6 +6,8 @@ from os.path import abspath, expanduser from typing import TYPE_CHECKING +from conda.base.context import context + from .. import api if TYPE_CHECKING: @@ -126,6 +128,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + files = parsed.files del parsed.__dict__["files"] diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 59689bfa05..731f964217 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -6,6 +6,8 @@ import sys from typing import TYPE_CHECKING +from conda.base.context import context + from .. import api from ..utils import on_win from . import validators as valid @@ -94,6 +96,7 @@ def get_parser() -> ArgumentParser: def execute(args: Sequence[str] | None = None) -> int: parser = get_parser() parsed = parser.parse_args(args) + context.__init__(argparse_args=parsed) try: activation_string = api.debug( diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index 326c5fd2a7..9b680cbf5a 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -5,7 +5,7 @@ import logging from typing import TYPE_CHECKING -from conda.base.context import context, determine_target_prefix +from conda.base.context import context from .. 
import api @@ -88,10 +88,11 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) - prefix = determine_target_prefix(context, parsed) + context.__init__(argparse_args=parsed) + api.develop( parsed.source, - prefix=prefix, + prefix=context.target_prefix, no_pth_file=parsed.no_pth_file, build_ext=parsed.build_ext, clean=parsed.clean, diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 88b31cb837..b1c47c0586 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -8,7 +8,7 @@ from pprint import pprint from typing import TYPE_CHECKING -from conda.base.context import context, determine_target_prefix +from conda.base.context import context from .. import api @@ -196,6 +196,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) if not parsed.subcommand: parser.print_help() @@ -206,7 +207,7 @@ def execute(args: Sequence[str] | None = None) -> int: print( api.inspect_linkages( parsed.packages, - prefix=determine_target_prefix(context, parsed), + prefix=context.target_prefix, untracked=parsed.untracked, all_packages=parsed.all, show_files=parsed.show_files, @@ -218,7 +219,7 @@ def execute(args: Sequence[str] | None = None) -> int: print( api.inspect_objects( parsed.packages, - prefix=determine_target_prefix(context, parsed), + prefix=context.target_prefix, groupby=parsed.groupby, ) ) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 0e4507359e..91d2edcebb 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -121,8 +121,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: - _, args = parse_args(args) - channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or () - api.create_metapackage(channel_urls=channel_urls, **args.__dict__) + _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + + api.create_metapackage( + channel_urls=context.channels, + **parsed.__dict__, + ) return 0 diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 3e0bf845f5..a5cbb8b443 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -202,6 +202,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) + context.__init__(argparse_args=parsed) config = get_or_merge_config(None, **parsed.__dict__) @@ -213,8 +214,6 @@ def execute(args: Sequence[str] | None = None) -> int: config.channel_urls = get_channel_urls(parsed.__dict__) - config.override_channels = parsed.override_channels - if parsed.output: config.verbose = False config.debug = False diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 825f3742de..7013e2ffab 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -9,6 +9,8 @@ from importlib import import_module from typing import TYPE_CHECKING +from conda.base.context import context + from .. 
import api from ..config import Config @@ -52,6 +54,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: def execute(args: Sequence[str] | None = None) -> int: parser, parsed = parse_args(args) + context.__init__(argparse_args=parsed) + config = Config(**parsed.__dict__) if not parsed.repo: diff --git a/conda_build/config.py b/conda_build/config.py index d37479e3b2..1949fbc071 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -19,6 +19,7 @@ from conda.base.context import context from conda.utils import url_path +from .deprecations import deprecated from .utils import ( get_build_folders, get_conda_operation_locks, @@ -89,7 +90,6 @@ def _get_default_settings(): Setting("dirty", False), Setting("include_recipe", True), Setting("no_download_source", False), - Setting("override_channels", False), Setting("skip_existing", False), Setting("token", None), Setting("user", None), @@ -297,6 +297,10 @@ def set_lang(variant, lang): for lang in ("perl", "lua", "python", "numpy", "r_base"): set_lang(self.variant, lang) + # --override-channels is a valid CLI argument but we no longer wish to set it here + # use conda.base.context.context.override_channels instead + kwargs.pop("override_channels", None) + self._build_id = kwargs.pop("build_id", getattr(self, "_build_id", "")) source_cache = kwargs.pop("cache_dir", None) croot = kwargs.pop("croot", None) @@ -779,6 +783,15 @@ def test_dir(self): def subdirs_same(self): return self.host_subdir == self.build_subdir + @property + @deprecated( + "24.5", + "24.7", + addendum="Use `conda.base.context.context.override_channels` instead.", + ) + def override_channels(self): + return context.override_channels + def clean(self, remove_folders=True): # build folder is the whole burrito containing envs and source folders # It will only exist if we download source, or create a build or test environment diff --git a/news/5271-context b/news/5271-context new file mode 100644 index 0000000000..b4143e00f4 --- /dev/null +++ b/news/5271-context @@ -0,0 +1,19 @@ +### Enhancements + +* Require `conda >=23.7.0`. (#5271) + +### Bug fixes + +* Fix all CLI arguments to properly initialize `conda.base.context.context` with parsed arguments. Fixes issue with arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271) + +### Deprecations + +* Deprecate `conda_build.config.Config.override_channels`. Use `conda.base.context.context.override_channels` instead. 
(#5271) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 229333b6a5..334c119996 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ dependencies = [ "beautifulsoup4", "chardet", - "conda >=23.5.0", + "conda >=23.7.0", "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 8171f8167d..d1b6440118 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -30,7 +30,7 @@ requirements: run: - beautifulsoup4 - chardet - - conda >=23.5.0 + - conda >=23.7.0 - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py index 15b3d67237..9f4ce1cbb0 100644 --- a/tests/cli/test_main_build.py +++ b/tests/cli/test_main_build.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING import pytest +from conda.exceptions import PackagesNotFoundError from conda_build import api from conda_build.cli import main_build, main_render @@ -549,3 +550,14 @@ def test_user_warning(tmpdir, recwarn): main_build.parse_args([str(dir_recipe_path)]) assert not recwarn.list + + +def test_build_with_empty_channel_fails(empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_build.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + ] + ) diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py index bf00ac6fd1..ef5fdf077d 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -8,6 +8,7 @@ import pytest import yaml +from conda.exceptions import PackagesNotFoundError from conda_build import api from conda_build.cli import main_render @@ -48,26 +49,16 @@ def test_render_add_channel(tmp_path: Path) -> None: ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" -def test_render_without_channel_fails(tmp_path): - # do make extra channel available, so the required package should not be found - rendered_filename = tmp_path / "out.yaml" - args = [ - "--override-channels", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - str(rendered_filename), - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta.get("requirements", {}).get("build", []) - if "conda_build_test_requirement" in pkg - ][0] - assert ( - required_package_string == "conda_build_test_requirement" - ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" +def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None: + with pytest.raises(PackagesNotFoundError): + main_render.execute( + [ + "--override-channels", + f"--channel={empty_channel}", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + f"--file={tmp_path / 'out.yaml'}", + ] + ) def test_render_output_build_path( diff --git a/tests/conftest.py b/tests/conftest.py index f055b05d80..7dc0ae021c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ import pytest from conda.common.compat import on_mac, on_win +from conda_index.api import update_index from pytest import MonkeyPatch import conda_build @@ -251,3 +252,11 @@ def conda_build_test_recipe_envvar( name = "CONDA_BUILD_TEST_RECIPE_PATH" monkeypatch.setenv(name, 
str(conda_build_test_recipe_path)) return name + + +@pytest.fixture(scope="session") +def empty_channel(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Create a temporary, empty conda channel.""" + channel = tmp_path_factory.mktemp("empty_channel", numbered=False) + update_index(channel) + return channel diff --git a/tests/requirements.txt b/tests/requirements.txt index e005250f59..acb3317206 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,6 +1,6 @@ beautifulsoup4 chardet -conda >=23.5.0 +conda >=23.7.0 conda-index >=0.4.0 conda-libmamba-solver # ensure we use libmamba conda-package-handling >=1.3 From 9870f9b9a429fae5f96632771061d923c329cfcf Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 19 Apr 2024 16:34:32 +0200 Subject: [PATCH 346/366] Remove deprecated code for `conda-build 24.5` (#5299) --- conda_build/api.py | 27 -- conda_build/bdist_conda.py | 298 ------------------ conda_build/build.py | 119 +------ conda_build/conda_interface.py | 47 --- conda_build/environ.py | 69 ---- conda_build/index.py | 76 +---- .../recipes/build-without-recipe.rst | 157 --------- docs/source/user-guide/recipes/index.rst | 1 - news/5299-remove-deprecations | 39 +++ pyproject.toml | 3 - recipe/meta.yaml | 4 - recipe/test_bdist_conda_setup.py | 11 - tests/bdist-recipe/bin/test-script-setup.py | 13 - .../bdist-recipe/conda_build_test/__init__.py | 7 - tests/bdist-recipe/conda_build_test/empty.py | 2 - .../conda_build_test/manual_entry.py | 10 - tests/bdist-recipe/setup.py | 31 -- tests/test_api_consistency.py | 2 +- tests/test_api_render.py | 10 +- tests/test_build.py | 20 -- tests/test_environ.py | 22 +- 21 files changed, 48 insertions(+), 920 deletions(-) delete mode 100644 conda_build/bdist_conda.py delete mode 100644 docs/source/user-guide/recipes/build-without-recipe.rst create mode 100644 news/5299-remove-deprecations delete mode 100644 recipe/test_bdist_conda_setup.py delete mode 100644 tests/bdist-recipe/bin/test-script-setup.py delete mode 100644 tests/bdist-recipe/conda_build_test/__init__.py delete mode 100644 tests/bdist-recipe/conda_build_test/empty.py delete mode 100644 tests/bdist-recipe/conda_build_test/manual_entry.py delete mode 100644 tests/bdist-recipe/setup.py diff --git a/conda_build/api.py b/conda_build/api.py index 8c47ef1e6d..571f08f534 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -22,7 +22,6 @@ # make the Config class available in the api namespace from .config import DEFAULT_PREFIX_LENGTH as _prefix_length from .config import Config, get_channel_urls, get_or_merge_config -from .deprecations import deprecated from .metadata import MetaData, MetaDataTuple from .utils import ( CONDA_PACKAGE_EXTENSIONS, @@ -186,32 +185,6 @@ def get_output_file_paths( return sorted(set(outs)) -@deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.") -def get_output_file_path( - recipe_path_or_metadata: str - | os.PathLike - | Path - | MetaData - | Iterable[MetaDataTuple], - no_download_source: bool = False, - config: Config | None = None, - variants: dict[str, Any] | None = None, - **kwargs, -) -> list[str]: - """Get output file paths for any packages that would be created by a recipe - - Both split packages (recipes with more than one output) and build matrices, - created with variants, contribute to the list of file paths here. 
- """ - return get_output_file_paths( - recipe_path_or_metadata, - no_download_source=no_download_source, - config=config, - variants=variants, - **kwargs, - ) - - def check( recipe_path: str | os.PathLike | Path, no_download_source: bool = False, diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py deleted file mode 100644 index 45a1ff845e..0000000000 --- a/conda_build/bdist_conda.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import configparser -import sys -import time -from collections import defaultdict -from io import StringIO - -from conda.cli.common import spec_from_line -from setuptools.command.install import install -from setuptools.dist import Distribution -from setuptools.errors import BaseError, OptionError - -from . import api -from .build import handle_anaconda_upload -from .config import Config -from .deprecations import deprecated -from .metadata import MetaData -from .skeletons import pypi - -deprecated.module("24.3", "24.5") - - -class GetoptError(BaseError): - """The option table provided to 'fancy_getopt()' is bogus.""" - - -class CondaDistribution(Distribution): - """ - Distribution subclass that supports bdist_conda options - - This class is required if you want to pass any bdist_conda specific - options to setup(). To use, set distclass=CondaDistribution in setup(). - - Options that can be passed to setup() (must include - distclass=CondaDistribution): - - - conda_buildnum: The build number. Defaults to 0. Can be overridden on - the command line with the --buildnum flag. - - - conda_buildstr: The build string. Default is generated automatically - from the Python version, NumPy version if relevant, and the build - number, like py34_0. - - - conda_import_tests: Whether to automatically run import tests. The - default is True, which runs import tests for the all the modules in - "packages". Also allowed are False, which runs no tests, or a list of - module names to be tested on import. - - - conda_command_tests: Command line tests to run. Default is True, which - runs ``command --help`` for each ``command`` in the console_scripts and - gui_scripts entry_points. Also allowed are False, which doesn't run any - command tests, or a list of command tests to run. - - - conda_binary_relocation: Whether binary files should be made relocatable - (using install_name_tool on OS X or patchelf on Linux). The default is - True. See the "making packages relocatable" section in the conda build - documentation for more information on this. - - - conda_preserve_egg_dir: Whether to preserve the egg directory as - installed by setuptools. The default is True if the package depends on - setuptools or has a setuptools entry_points other than console_scripts - and gui_scripts. - - Command line options: - - --buildnum: Set the build number. Defaults to the conda_buildnum passed to - setup(), or 0. Overrides any conda_buildnum passed to setup(). - - """ - - # Unfortunately, there's no way to warn the users that they need to use - # distclass=CondaDistribution when they try to use a conda option to - # setup(). Distribution.__init__ will just print a warning when it sees an - # attr it doesn't recognize, and then it is discarded. 
- - # attr: default - conda_attrs = { - "conda_buildnum": 0, - "conda_buildstr": None, - "conda_import_tests": True, - "conda_command_tests": True, - "conda_binary_relocation": True, - "conda_preserve_egg_dir": None, - "conda_features": None, - "conda_track_features": None, - } - - def __init__(self, attrs=None): - given_attrs = {} - # We need to remove the attrs so that Distribution.__init__ doesn't - # warn about them. - if attrs: - for attr in self.conda_attrs: - if attr in attrs: - given_attrs[attr] = attrs.pop(attr) - - super().__init__(attrs) - - for attr in self.conda_attrs: - setattr(self.metadata, attr, given_attrs.get(attr, self.conda_attrs[attr])) - - -class bdist_conda(install): - description = "create a conda package" - config = Config( - build_id="bdist_conda" + "_" + str(int(time.time() * 1000)), build_is_host=True - ) - - def initialize_options(self): - super().initialize_options() - self.buildnum = None - self.anaconda_upload = False - - def finalize_options(self): - opt_dict = self.distribution.get_option_dict("install") - if self.prefix: - raise OptionError("--prefix is not allowed") - opt_dict["prefix"] = ("bdist_conda", self.config.host_prefix) - super().finalize_options() - - def run(self): - # Make sure the metadata has the conda attributes, even if the - # distclass isn't CondaDistribution. We primarily do this to simplify - # the code below. - - metadata = self.distribution.metadata - - for attr in CondaDistribution.conda_attrs: - if not hasattr(metadata, attr): - setattr(metadata, attr, CondaDistribution.conda_attrs[attr]) - - # The command line takes precedence - if self.buildnum is not None: - metadata.conda_buildnum = self.buildnum - - d = defaultdict(dict) - # PyPI allows uppercase letters but conda does not, so we fix the - # name here. - d["package"]["name"] = metadata.name.lower() - d["package"]["version"] = metadata.version - d["build"]["number"] = metadata.conda_buildnum - - # MetaData does the auto stuff if the build string is None - d["build"]["string"] = metadata.conda_buildstr - - d["build"]["binary_relocation"] = metadata.conda_binary_relocation - d["build"]["preserve_egg_dir"] = metadata.conda_preserve_egg_dir - d["build"]["features"] = metadata.conda_features - d["build"]["track_features"] = metadata.conda_track_features - - # XXX: I'm not really sure if it is correct to combine requires - # and install_requires - d["requirements"]["run"] = d["requirements"]["build"] = [ - spec_from_line(i) - for i in (metadata.requires or []) - + (getattr(self.distribution, "install_requires", []) or []) - ] + ["python"] - if hasattr(self.distribution, "tests_require"): - # A lot of packages use extras_require['test'], but - # tests_require is the one that is officially supported by - # setuptools. - d["test"]["requires"] = [ - spec_from_line(i) for i in self.distribution.tests_require or [] - ] - - d["about"]["home"] = metadata.url - # Don't worry about classifiers. This isn't skeleton pypi. We - # don't need to make this work with random stuff in the wild. If - # someone writes their setup.py wrong and this doesn't work, it's - # their fault. 
- d["about"]["license"] = metadata.license - d["about"]["summary"] = metadata.description - - # This is similar logic from conda skeleton pypi - entry_points = getattr(self.distribution, "entry_points", []) - if entry_points: - if isinstance(entry_points, str): - # makes sure it is left-shifted - newstr = "\n".join(x.strip() for x in entry_points.splitlines()) - c = configparser.ConfigParser() - entry_points = {} - try: - c.read_file(StringIO(newstr)) - except Exception as err: - # This seems to be the best error here - raise GetoptError( - "ERROR: entry-points not understood: " - + str(err) - + "\nThe string was" - + newstr - ) - else: - for section in c.sections(): - if section in ["console_scripts", "gui_scripts"]: - value = [ - f"{option}={c.get(section, option)}" - for option in c.options(section) - ] - entry_points[section] = value - else: - # Make sure setuptools is added as a dependency below - entry_points[section] = None - - if not isinstance(entry_points, dict): - raise GetoptError( - "ERROR: Could not add entry points. They were:\n" + entry_points - ) - else: - rs = entry_points.get("scripts", []) - cs = entry_points.get("console_scripts", []) - gs = entry_points.get("gui_scripts", []) - # We have *other* kinds of entry-points so we need - # setuptools at run-time - if not rs and not cs and not gs and len(entry_points) > 1: - d["requirements"]["run"].append("setuptools") - d["requirements"]["build"].append("setuptools") - entry_list = rs + cs + gs - if gs and self.config.platform == "osx": - d["build"]["osx_is_app"] = True - if len(cs + gs) != 0: - d["build"]["entry_points"] = entry_list - if metadata.conda_command_tests is True: - d["test"]["commands"] = list( - map(str, pypi.make_entry_tests(entry_list)) - ) - - if "setuptools" in d["requirements"]["run"]: - d["build"]["preserve_egg_dir"] = True - - if metadata.conda_import_tests: - if metadata.conda_import_tests is True: - d["test"]["imports"] = (self.distribution.packages or []) + ( - self.distribution.py_modules or [] - ) - else: - d["test"]["imports"] = metadata.conda_import_tests - - if metadata.conda_command_tests and not isinstance( - metadata.conda_command_tests, bool - ): - d["test"]["commands"] = list(map(str, metadata.conda_command_tests)) - - d = dict(d) - self.config.keep_old_work = True - m = MetaData.fromdict(d, config=self.config) - # Shouldn't fail, but do you really trust the code above? - m.check_fields() - m.config.set_build_id = False - m.config.variant["python"] = ".".join( - (str(sys.version_info.major), str(sys.version_info.minor)) - ) - api.build(m, build_only=True, notest=True) - self.config = m.config - # prevent changes in the build ID from here, so that we're working in the same prefix - # Do the install - super().run() - output = api.build(m, post=True, notest=True)[0] - api.test(output, config=m.config) - m.config.clean() - if self.anaconda_upload: - - class args: - anaconda_upload = self.anaconda_upload - - handle_anaconda_upload(output, args) - else: - no_upload_message = ( - """\ -# If you want to upload this package to anaconda.org later, type: -# -# $ anaconda upload %s -""" - % output - ) - print(no_upload_message) - - -# Distutils looks for user_options on the class (not instance). It also -# requires that it is an instance of list. So we do this here because we want -# to keep the options from the superclass (and because I don't feel like -# making a metaclass just to make this work). 
- -bdist_conda.user_options.extend( - [ - ( - "buildnum=", - None, - """The build number of - the conda package. Defaults to 0, or the conda_buildnum specified in the - setup() function. The command line flag overrides the option to - setup().""", - ), - ("anaconda-upload", None, ("""Upload the finished package to anaconda.org""")), - ] -) - -bdist_conda.boolean_options.extend(["anaconda-upload"]) diff --git a/conda_build/build.py b/conda_build/build.py index 531b38323f..f1bf8eec02 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -42,7 +42,6 @@ from . import environ, noarch_python, source, tarcheck, utils from .config import Config from .create_test import create_all_test_files -from .deprecations import deprecated from .exceptions import CondaBuildException, DependencyNeedsBuildingError from .index import _delegated_update_index, get_build_index from .metadata import FIELDS, MetaData @@ -184,121 +183,6 @@ def prefix_replacement_excluded(path): return False -@deprecated("24.3", "24.5") -def have_prefix_files(files, prefix): - """ - Yields files that contain the current prefix in them, and modifies them - to replace the prefix with a placeholder. - - :param files: Filenames to check for instances of prefix - :type files: list of tuples containing strings (prefix, mode, filename) - """ - - prefix_bytes = prefix.encode(utils.codec) - prefix_placeholder_bytes = PREFIX_PLACEHOLDER.encode(utils.codec) - searches = {prefix: prefix_bytes} - if utils.on_win: - # some windows libraries use unix-style path separators - forward_slash_prefix = prefix.replace("\\", "/") - forward_slash_prefix_bytes = forward_slash_prefix.encode(utils.codec) - searches[forward_slash_prefix] = forward_slash_prefix_bytes - # some windows libraries have double backslashes as escaping - double_backslash_prefix = prefix.replace("\\", "\\\\") - double_backslash_prefix_bytes = double_backslash_prefix.encode(utils.codec) - searches[double_backslash_prefix] = double_backslash_prefix_bytes - searches[PREFIX_PLACEHOLDER] = prefix_placeholder_bytes - min_prefix = min(len(k) for k, _ in searches.items()) - - # mm.find is incredibly slow, so ripgrep is used to pre-filter the list. - # Really, ripgrep could be used on its own with a bit more work though. - rg_matches = [] - prefix_len = len(prefix) + 1 - rg = external.find_executable("rg") - if rg: - for rep_prefix, _ in searches.items(): - try: - args = [ - rg, - "--unrestricted", - "--no-heading", - "--with-filename", - "--files-with-matches", - "--fixed-strings", - "--text", - rep_prefix, - prefix, - ] - matches = subprocess.check_output(args) - rg_matches.extend( - matches.decode("utf-8").replace("\r\n", "\n").splitlines() - ) - except subprocess.CalledProcessError: - continue - # HACK: this is basically os.path.relpath, just simpler and faster - # NOTE: path normalization needs to be in sync with create_info_files - if utils.on_win: - rg_matches = [ - rg_match.replace("\\", "/")[prefix_len:] for rg_match in rg_matches - ] - else: - rg_matches = [rg_match[prefix_len:] for rg_match in rg_matches] - else: - print( - "WARNING: Detecting which files contain PREFIX is slow, installing ripgrep makes it faster." - " 'conda install ripgrep'" - ) - - for f in files: - if os.path.isabs(f): - f = f[prefix_len:] - if rg_matches and f not in rg_matches: - continue - path = os.path.join(prefix, f) - if prefix_replacement_excluded(path): - continue - - # dont try to mmap an empty file, and no point checking files that are smaller - # than the smallest prefix. 
- if os.stat(path).st_size < min_prefix: - continue - - try: - fi = open(path, "rb+") - except OSError: - log = utils.get_logger(__name__) - log.warn("failed to open %s for detecting prefix. Skipping it." % f) - continue - try: - mm = utils.mmap_mmap( - fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE - ) - except OSError: - mm = fi.read() - - mode = "binary" if mm.find(b"\x00") != -1 else "text" - if mode == "text": - # TODO :: Ask why we do not do this on Windows too?! - if not utils.on_win and mm.find(prefix_bytes) != -1: - # Use the placeholder for maximal backwards compatibility, and - # to minimize the occurrences of usernames appearing in built - # packages. - data = mm[:] - mm.close() - fi.close() - rewrite_file_with_new_prefix( - path, data, prefix_bytes, prefix_placeholder_bytes - ) - fi = open(path, "rb+") - mm = utils.mmap_mmap( - fi.fileno(), 0, tagname=None, flags=utils.mmap_MAP_PRIVATE - ) - for rep_prefix, rep_prefix_bytes in searches.items(): - if mm.find(rep_prefix_bytes) != -1: - yield (rep_prefix, mode, f) - mm.close() - fi.close() - - # It may be that when using the list form of passing args to subprocess # what matters is the number of arguments rather than the accumulated # string length. In that case, len(l[i]) should become 1, and we should @@ -3734,8 +3618,7 @@ def build_tree( reset_build_id=not cfg.dirty, bypass_env_check=True, ) - # restrict to building only one variant for bdist_conda. The way it splits the build - # job breaks variants horribly. + if post in (True, False): metadata_tuples = metadata_tuples[:1] diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index c5acfbfd06..18056cc368 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -35,14 +35,12 @@ from conda.exceptions import UnsatisfiableError as _UnsatisfiableError from conda.exports import Completer as _Completer from conda.exports import InstalledPackages as _InstalledPackages -from conda.exports import get_index as _get_index from conda.exports import symlink_conda as _symlink_conda from conda.gateways.connection.download import TmpDownload as _TmpDownload from conda.gateways.connection.download import download as _download from conda.gateways.connection.session import CondaSession as _CondaSession from conda.gateways.disk.create import TemporaryDirectory as _TemporaryDirectory from conda.gateways.disk.link import lchmod as _lchmod -from conda.gateways.disk.read import compute_sum as _compute_sum from conda.misc import untracked as _untracked from conda.misc import walk_prefix as _walk_prefix from conda.models.channel import Channel as _Channel @@ -433,13 +431,6 @@ _get_conda_build_local_url, addendum="Use `conda.models.channel.get_conda_build_local_url` instead.", ) -deprecated.constant( - "24.1.0", - "24.5.0", - "get_index", - _get_index, - addendum="Use `conda.core.index.get_index` instead.", -) deprecated.constant( "24.5", "24.7", @@ -550,33 +541,6 @@ ) -@deprecated( - "24.3", - "24.5", - addendum="Handled by `conda.gateways.connection.session.CondaSession`.", -) -def handle_proxy_407(x, y): - pass - - -deprecated.constant( - "24.3", - "24.5", - "hashsum_file", - _compute_sum, - addendum="Use `conda.gateways.disk.read.compute_sum` instead.", -) - - -@deprecated( - "24.3", - "24.5", - addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.", -) -def md5_file(path: str | _os.PathLike) -> str: - return _compute_sum(path, "md5") - - deprecated.constant( "24.5", "24.7", @@ -584,14 +548,3 @@ def md5_file(path: 
str | _os.PathLike) -> str: __version__, addendum="Use `conda.__version__` instead.", ) - - -@deprecated( - "24.3", - "24.5", - addendum="Use `conda_build.environ.get_version_from_git_tag` instead.", -) -def get_version_from_git_tag(tag): - from .environ import get_version_from_git_tag - - return get_version_from_git_tag(tag) diff --git a/conda_build/environ.py b/conda_build/environ.py index 5a24d83172..5aae94e682 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -3,7 +3,6 @@ from __future__ import annotations import contextlib -import json import logging import multiprocessing import os @@ -44,7 +43,6 @@ from conda.models.records import PackageRecord from . import utils -from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index @@ -73,9 +71,6 @@ class InstallActionsType(TypedDict): log = getLogger(__name__) -deprecated.constant("24.3", "24.5", "PREFIX_ACTION", _PREFIX_ACTION := "PREFIX") -deprecated.constant("24.3", "24.5", "LINK_ACTION", _LINK_ACTION := "LINK") - # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") @@ -819,71 +814,9 @@ def os_vars(m, prefix): return d -@deprecated("24.3", "24.5") -class InvalidEnvironment(Exception): - pass - - -# Stripped-down Environment class from conda-tools ( https://github.com/groutr/conda-tools ) -# Vendored here to avoid the whole dependency for just this bit. -@deprecated("24.3", "24.5") -def _load_json(path): - with open(path) as fin: - x = json.load(fin) - return x - - -@deprecated("24.3", "24.5") -def _load_all_json(path): - """ - Load all json files in a directory. Return dictionary with filenames mapped to json - dictionaries. - """ - root, _, files = next(utils.walk(path)) - result = {} - for f in files: - if f.endswith(".json"): - result[f] = _load_json(join(root, f)) - return result - - -@deprecated("24.3", "24.5", addendum="Use `conda.core.prefix_data.PrefixData` instead.") -class Environment: - def __init__(self, path): - """ - Initialize an Environment object. - - To reflect changes in the underlying environment, a new Environment object should be - created. - """ - self.path = path - self._meta = join(path, "conda-meta") - if os.path.isdir(path) and os.path.isdir(self._meta): - self._packages = {} - else: - raise InvalidEnvironment(f"Unable to load environment {path}") - - def _read_package_json(self): - if not self._packages: - self._packages = _load_all_json(self._meta) - - def package_specs(self): - """ - List all package specs in the environment. - """ - self._read_package_json() - json_objs = self._packages.values() - specs = [] - for i in json_objs: - p, v, b = i["name"], i["version"], i["build"] - specs.append(f"{p} {v} {b}") - return specs - - cached_precs: dict[ tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord] ] = {} -deprecated.constant("24.3", "24.5", "cached_actions", cached_precs) last_index_ts = 0 @@ -1378,7 +1311,6 @@ def install_actions( del install_actions -@deprecated.argument("24.3", "24.5", "actions", rename="precs") def _execute_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 # but reduced to only the functionality actually used within conda-build. 
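# Illustrative aside on the `@deprecated.argument(...)` shims removed in the hunks
# above and below: this is a hedged sketch of the usual keyword-renaming pattern,
# NOT the actual code in conda_build.deprecations; the decorator name, the
# `_execute_actions_sketch` helper, and the example prefix are hypothetical.
import functools
import warnings


def deprecated_argument(deprecate_in, remove_in, argument, *, rename=None):
    """Accept a renamed keyword argument and warn callers that still use it."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if argument in kwargs:
                # Pull out the old keyword, warn, and forward it under the new name.
                value = kwargs.pop(argument)
                warnings.warn(
                    f"'{argument}' is deprecated in {deprecate_in} and will be "
                    f"removed in {remove_in}"
                    + (f"; use '{rename}' instead." if rename else "."),
                    DeprecationWarning,
                    stacklevel=2,
                )
                if rename is not None:
                    kwargs.setdefault(rename, value)
            return func(*args, **kwargs)

        return wrapper

    return decorator


@deprecated_argument("24.3", "24.5", "actions", rename="precs")
def _execute_actions_sketch(prefix, precs):
    # Old callers passing actions=... still work but trigger a DeprecationWarning.
    print(prefix, precs)


# Hypothetical usage: the legacy keyword is transparently mapped to `precs`.
_execute_actions_sketch("/opt/conda/envs/test", actions=["pkg-1.0-0"])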
@@ -1403,7 +1335,6 @@ def _execute_actions(prefix, precs): unlink_link_transaction.execute() -@deprecated.argument("24.3", "24.5", "actions", rename="precs") def _display_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 # but reduced to only the functionality actually used within conda-build. diff --git a/conda_build/index.py b/conda_build/index.py index 28f29063aa..3a2f9ab10b 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -4,7 +4,6 @@ import logging import os import time -from concurrent.futures import Executor from functools import partial from os.path import dirname @@ -17,42 +16,20 @@ from . import utils from .deprecations import deprecated from .utils import ( - CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, JSONDecodeError, get_logger, - on_win, ) log = get_logger(__name__) -@deprecated("24.3", "24.5") -class DummyExecutor(Executor): - def map(self, func, *iterables): - for iterable in iterables: - for thing in iterable: - yield func(thing) - - local_index_timestamp = 0 cached_index = None local_subdir = "" local_output_folder = "" cached_channels = [] _channel_data = {} -deprecated.constant("24.1", "24.5", "channel_data", _channel_data) - - -# TODO: support for libarchive seems to have broken ability to use multiple threads here. -# The new conda format is so much faster that it more than makes up for it. However, it -# would be nice to fix this at some point. -_MAX_THREADS_DEFAULT = os.cpu_count() or 1 -if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a - _MAX_THREADS_DEFAULT = min(48, _MAX_THREADS_DEFAULT) -deprecated.constant("24.3", "24.5", "MAX_THREADS_DEFAULT", _MAX_THREADS_DEFAULT) -deprecated.constant("24.3", "24.5", "LOCK_TIMEOUT_SECS", 3 * 3600) -deprecated.constant("24.3", "24.5", "LOCKFILE_NAME", ".lock") +deprecated.constant("24.1", "24.7", "channel_data", _channel_data) # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" @@ -251,54 +228,3 @@ def _delegated_update_index( current_index_versions=current_index_versions, debug=debug, ) - - -@deprecated( - "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." 
-) -def _apply_instructions(subdir, repodata, instructions): - repodata.setdefault("removed", []) - utils.merge_or_update_dict( - repodata.get("packages", {}), - instructions.get("packages", {}), - merge=False, - add_missing_keys=False, - ) - # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume - # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes - new_pkg_fixes = { - k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get("packages", {}).items() - } - - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - new_pkg_fixes, - merge=False, - add_missing_keys=False, - ) - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - instructions.get("packages.conda", {}), - merge=False, - add_missing_keys=False, - ) - - for fn in instructions.get("revoke", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - if fn in repodata[key]: - repodata[key][fn]["revoked"] = True - repodata[key][fn]["depends"].append("package_has_been_revoked") - - for fn in instructions.get("remove", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - popped = repodata[key].pop(fn, None) - if popped: - repodata["removed"].append(fn) - repodata["removed"].sort() - - return repodata diff --git a/docs/source/user-guide/recipes/build-without-recipe.rst b/docs/source/user-guide/recipes/build-without-recipe.rst deleted file mode 100644 index 51c465db4a..0000000000 --- a/docs/source/user-guide/recipes/build-without-recipe.rst +++ /dev/null @@ -1,157 +0,0 @@ -================================================= -Building a package without a recipe (bdist_conda) -================================================= - -You can use conda-build to build packages for Python to install -rather than conda by using ``setup.py bdist_conda``. This is a -quick way to build packages without using a recipe, but it has -limitations. The script is limited to the Python version used in -the build and it is not as reproducible as using a recipe. We -recommend using a recipe with conda-build. - -.. note:: - If you use Setuptools, you must first import Setuptools and - then import ``conda_build.bdist_conda``, because Setuptools - monkey patches ``distutils.dist.Distribution``. - -EXAMPLE: A minimal ``setup.py`` file using the setup options -``name`` and ``version``: - -.. code:: - - from setuptools import setup - import conda_build.bdist_conda - - setup( - name="foo", - version="1.0", - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - ) - - -Setup options -============= - -You can pass the following options to ``setup()``. You must -include ``distclass=conda_build.bdist_conda.CondaDistribution``. - -Build number ------------- - -The number of the build. Can be overridden on the command line -with the ``--buildnum`` flag. Defaults to ``0``. - -.. code:: - - conda_buildnum=1 - - -Build string ------------- - -The build string. Default is generated automatically from the -Python version, NumPy version---if relevant---and the build -number, such as ``py34_0``. - -.. code:: - - conda_buildstr=py34_0 - - -Import tests ------------- - -Whether to automatically run import tests. 
The default is -``True``, which runs import tests for all the modules in -``packages``. Also allowed are ``False``, which runs no tests, or -a list of module names to be tested on import. - -.. code:: - - conda_import_tests=False - - -Command line tests ------------------- - -Command line tests to run. Default is ``True``, which runs -``command --help`` for each command in the console_scripts and -gui_scripts entry_points. Also allowed are ``False``, which does -not run any command tests, or a list of command tests to run. - -.. code:: - - conda_command_tests=False - - -Binary files relocatable ------------------------- - -Whether binary files should be made relocatable, using -install_name_tool on macOS or patchelf on Linux. The default is -``True``. - -.. code:: - - conda_binary_relocation=False - -For more information, see :ref:`Making packages relocatable `. - - -Preserve egg directory ----------------------- - -Whether to preserve the egg directory as installed by Setuptools. -The default is ``True`` if the package depends on Setuptools or -has Setuptools entry_points other than console_scripts and -gui_scripts. - -.. code:: - - conda_preserve_egg_dir=False - - -Command line options -==================== - -Build number ------------- - -Set the build number. Defaults to the conda_buildnum passed -to ``setup()`` or ``0``. Overrides any conda_buildnum passed to -``setup()``. - -.. code:: - - --buildnum=1 - - -Notes -===== - -* You must install ``bdist_conda`` into a root conda environment, - as it imports ``conda`` and ``conda_build``. It is included as - part of the ``conda-build`` package. - -* All metadata is gathered from the standard metadata from the - ``setup()`` function. Metadata that are not directly supported - by ``setup()`` can be added using one of the options specified - above. - -* By default, import tests are run for each subpackage specified - by packages, and command line tests ``command --help`` are run - for each ``setuptools entry_points`` command. This is done to - ensure that the package is built correctly. You can disable or - change these using the ``conda_import_tests`` and - ``conda_command_tests`` options specified above. - -* The Python version used in the build must be the same as where - conda is installed, as ``bdist_conda`` uses ``conda-build``. - -* ``bdist_conda`` uses the metadata provided to the ``setup()`` - function. - -* If you want to pass any ``bdist_conda`` specific options to - ``setup()``, in ``setup()`` you must set - ``distclass=conda_build.bdist_conda.CondaDistribution``. diff --git a/docs/source/user-guide/recipes/index.rst b/docs/source/user-guide/recipes/index.rst index d482f27415..876b500396 100644 --- a/docs/source/user-guide/recipes/index.rst +++ b/docs/source/user-guide/recipes/index.rst @@ -8,6 +8,5 @@ conda-build recipes. .. toctree:: :maxdepth: 1 - build-without-recipe sample-recipes debugging diff --git a/news/5299-remove-deprecations b/news/5299-remove-deprecations new file mode 100644 index 0000000000..c78531ea4d --- /dev/null +++ b/news/5299-remove-deprecations @@ -0,0 +1,39 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Postpone `conda_build.index.channel_data` deprecation. (#5299) +* Remove `conda_build.api.get_output_file_path`. Use `conda_build.api.get_output_file_paths` instead. (#5299) +* Remove `conda_build.bdist_conda`. (#5299) +* Remove `conda_build.build.have_prefix_files`. (#5299) +* Remove `conda_build.conda_interface.get_index`. Use `conda.core.index.get_index` instead. 
(#5299) +* Remove `conda_build.conda_interface.get_version_from_git_tag`. Use `conda_build.environ.get_version_from_git_tag` instead. (#5299) +* Remove `conda_build.conda_interface.handle_proxy_407`. Handled by `conda.gateways.connection.session.CondaSession`. (#5299) +* Remove `conda_build.conda_interface.hashsum_file`. Use `conda.gateways.disk.read.compute_sum` instead. (#5299) +* Remove `conda_build.conda_interface.md5_file`. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5299) +* Remove `conda_build.environ._load_all_json`. (#5299) +* Remove `conda_build.environ._load_json`. (#5299) +* Remove `conda_build.environ.cached_actions`. (#5299) +* Remove `conda_build.environ.Environment`. Use `conda.core.prefix_data.PrefixData` instead. (#5299) +* Remove `conda_build.environ.InvalidEnvironment`. (#5299) +* Remove `conda_build.environ.LINK_ACTION`. (#5299) +* Remove `conda_build.environ.PREFIX_ACTION`. (#5299) +* Remove `conda_build.index._apply_instructions`. Use `conda_index._apply_instructions` instead. (#5299) +* Remove `conda_build.index.DummyExecutor`. (#5299) +* Remove `conda_build.index.LOCK_TIMEOUT_SECS`. (#5299) +* Remove `conda_build.index.LOCKFILE_NAME`. (#5299) +* Remove `conda_build.index.MAX_THREADS_DEFAULT`. (#5299) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 334c119996..a8b907644a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,9 +53,6 @@ requires-python = ">=3.8" [project.entry-points.conda] conda-build = "conda_build.plugin" -[project.entry-points."distutils.commands"] -bdist_conda = "conda_build.bdist_conda:bdist_conda" - [project.scripts] conda-build = "conda_build.cli.main_build:execute" conda-convert = "conda_build.cli.main_convert:execute" diff --git a/recipe/meta.yaml b/recipe/meta.yaml index d1b6440118..33f8fe9125 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -64,8 +64,6 @@ test: requires: - setuptools - pip - files: - - test_bdist_conda_setup.py commands: - python -m pip check # subcommands @@ -91,8 +89,6 @@ test: - conda-render --help - conda-skeleton --help - conda-debug --help - # bdist_conda - - python test_bdist_conda_setup.py bdist_conda --help about: home: https://conda.org diff --git a/recipe/test_bdist_conda_setup.py b/recipe/test_bdist_conda_setup.py deleted file mode 100644 index c7b3d34abf..0000000000 --- a/recipe/test_bdist_conda_setup.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup - -import conda_build.bdist_conda - -setup( - name="package", - version="1.0.0", - distclass=conda_build.bdist_conda.CondaDistribution, -) diff --git a/tests/bdist-recipe/bin/test-script-setup.py b/tests/bdist-recipe/bin/test-script-setup.py deleted file mode 100644 index c515fb849e..0000000000 --- a/tests/bdist-recipe/bin/test-script-setup.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build_test - -conda_build_test - -print("Test script setup.py") - -if __name__ == "__main__": - from conda_build_test import manual_entry - - manual_entry.main() diff --git a/tests/bdist-recipe/conda_build_test/__init__.py b/tests/bdist-recipe/conda_build_test/__init__.py deleted file mode 100644 index 1f22b11325..0000000000 --- a/tests/bdist-recipe/conda_build_test/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -conda build test package -""" - 
-print("conda_build_test has been imported") diff --git a/tests/bdist-recipe/conda_build_test/empty.py b/tests/bdist-recipe/conda_build_test/empty.py deleted file mode 100644 index 3f48e8b789..0000000000 --- a/tests/bdist-recipe/conda_build_test/empty.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause diff --git a/tests/bdist-recipe/conda_build_test/manual_entry.py b/tests/bdist-recipe/conda_build_test/manual_entry.py deleted file mode 100644 index 1a63c8a8e9..0000000000 --- a/tests/bdist-recipe/conda_build_test/manual_entry.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -def main(): - import argparse - - # Just picks them up from `sys.argv`. - parser = argparse.ArgumentParser(description="Basic parser.") - parser.parse_args() - - print("Manual entry point") diff --git a/tests/bdist-recipe/setup.py b/tests/bdist-recipe/setup.py deleted file mode 100644 index 74982e5865..0000000000 --- a/tests/bdist-recipe/setup.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -from setuptools import setup - -import conda_build.bdist_conda - -setup( - name="conda-build-test-project", - version="1.0", - distclass=conda_build.bdist_conda.CondaDistribution, - conda_buildnum=1, - conda_features=[], - author="Continuum Analytics, Inc.", - url="https://github.com/conda/conda-build", - license="BSD", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - ], - description="test package for testing conda-build", - packages=["conda_build_test"], - scripts=[ - "bin/test-script-setup.py", - ], -) diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 9d88b60eee..9dac14351c 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -42,7 +42,7 @@ def test_api_output_yaml(): assert argspec.defaults == (None, False) -def test_api_get_output_file_path(): +def test_api_get_output_file_paths(): argspec = getargspec(api.get_output_file_paths) assert argspec.args == [ "recipe_path_or_metadata", diff --git a/tests/test_api_render.py b/tests/test_api_render.py index 293ca09815..0882de0df1 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -56,7 +56,7 @@ def test_render_yaml_output(testing_workdir, testing_config): assert "package:" in open(os.path.join(testing_workdir, "output.yaml")).read() -def test_get_output_file_path(testing_workdir, testing_metadata): +def test_get_output_file_paths(testing_workdir, testing_metadata): testing_metadata = render.finalize_metadata(testing_metadata) api.output_yaml(testing_metadata, "recipe/meta.yaml") @@ -68,21 +68,21 @@ def test_get_output_file_path(testing_workdir, testing_metadata): assert build_path == os.path.join( testing_metadata.config.croot, testing_metadata.config.host_subdir, - "test_get_output_file_path-1.0-1.tar.bz2", + "test_get_output_file_paths-1.0-1.tar.bz2", ) -def test_get_output_file_path_metadata_object(testing_metadata): +def test_get_output_file_paths_metadata_object(testing_metadata): testing_metadata.final = True build_path = api.get_output_file_paths(testing_metadata)[0] assert build_path == os.path.join( testing_metadata.config.croot, 
testing_metadata.config.host_subdir, - "test_get_output_file_path_metadata_object-1.0-1.tar.bz2", + "test_get_output_file_paths_metadata_object-1.0-1.tar.bz2", ) -def test_get_output_file_path_jinja2(testing_config): +def test_get_output_file_paths_jinja2(testing_config): # If this test does not raise, it's an indicator that the workdir is not # being cleaned as it should. recipe = os.path.join(metadata_dir, "source_git_jinja2") diff --git a/tests/test_build.py b/tests/test_build.py index fd6a3a8f67..839cce4b9e 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -14,31 +14,11 @@ from pathlib import Path import pytest -from conda.common.compat import on_win from conda_build import api, build from .utils import get_noarch_python_meta, metadata_dir -PREFIX_TESTS = {"normal": os.path.sep} -if on_win: - PREFIX_TESTS.update({"double_backslash": "\\\\", "forward_slash": "/"}) - - -def test_find_prefix_files(testing_workdir): - """ - Write test output that has the prefix to be found, then verify that the prefix finding - identified the correct number of files. - """ - # create text files to be replaced - files = [] - for style, replacement in PREFIX_TESTS.items(): - filename = Path(testing_workdir, f"{style}.txt") - filename.write_text(testing_workdir.replace(os.path.sep, replacement)) - files.append(str(filename)) - - assert len(list(build.have_prefix_files(files, testing_workdir))) == len(files) - def test_build_preserves_PATH(testing_config): metadata = api.render( diff --git a/tests/test_environ.py b/tests/test_environ.py index 327accaeea..f446420feb 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -1,14 +1,8 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os -import sys -import pytest -from conda.core.prefix_data import PrefixData -from packaging.version import parse - -import conda_build -from conda_build.environ import Environment, create_env +from conda_build.environ import create_env def test_environment_creation_preserves_PATH(testing_workdir, testing_config): @@ -21,17 +15,3 @@ def test_environment_creation_preserves_PATH(testing_workdir, testing_config): subdir=testing_config.build_subdir, ) assert os.environ["PATH"] == ref_path - - -def test_environment(): - """Asserting PrefixData can accomplish the same thing as Environment.""" - with pytest.warns( - PendingDeprecationWarning - if parse(conda_build.__version__) < parse("24.3") - else DeprecationWarning, - ): - assert (specs := Environment(sys.prefix).package_specs()) - assert specs == [ - f"{prec.name} {prec.version} {prec.build}" - for prec in PrefixData(sys.prefix).iter_records() - ] From 5d8444b2114e621839eb276f6a2b9266117fb9fa Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 19 Apr 2024 16:35:30 +0200 Subject: [PATCH 347/366] Deprecate `noarch_python_build_age` (#5298) --- conda_build/config.py | 8 +------- conda_build/render.py | 34 ++++++++++------------------------ tests/conftest.py | 3 --- 3 files changed, 11 insertions(+), 34 deletions(-) diff --git a/conda_build/config.py b/conda_build/config.py index 1949fbc071..09ce6b0718 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -54,7 +54,7 @@ def set_invocation_time(): _src_cache_root_default = None error_overlinking_default = "false" error_overdepending_default = "false" -noarch_python_build_age_default = 0 +deprecated.constant("24.5", "24.7", "noarch_python_build_age_default", 0) enable_static_default = "false" no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] @@ 
-157,12 +157,6 @@ def _get_default_settings(): ).lower() == "true", ), - Setting( - "noarch_python_build_age", - context.conda_build.get( - "noarch_python_build_age", noarch_python_build_age_default - ), - ), Setting( "enable_static", context.conda_build.get("enable_static", enable_static_default).lower() diff --git a/conda_build/render.py b/conda_build/render.py index 9bbdcc6efa..b021f8a5b6 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -795,11 +795,11 @@ def reparse(metadata): def distribute_variants( - metadata, + metadata: MetaData, variants, - permit_unsatisfiable_variants=False, - allow_no_other_outputs=False, - bypass_env_check=False, + permit_unsatisfiable_variants: bool = False, + allow_no_other_outputs: bool = False, + bypass_env_check: bool = False, ) -> list[MetaDataTuple]: rendered_metadata: dict[ tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple @@ -810,27 +810,13 @@ def distribute_variants( # which python version we prefer. `python_age` can use used to tweak which # python gets used here. if metadata.noarch or metadata.noarch_python: - age = int( - metadata.get_value( - "build/noarch_python_build_age", metadata.config.noarch_python_build_age - ) - ) - versions = [] - for variant in variants: - if "python" in variant: - vo = variant["python"] - if vo not in versions: - versions.append(vo) - version_indices = sorted( - range(len(versions)), key=lambda k: VersionOrder(versions[k].split(" ")[0]) - ) - if age < 0: - age = 0 - elif age > len(versions) - 1: - age = len(versions) - 1 - build_ver = versions[version_indices[len(versions) - 1 - age]] + # filter variants by the newest Python version + version = sorted( + {version for variant in variants if (version := variant.get("python"))}, + key=lambda key: VersionOrder(key.split(" ")[0]), + )[-1] variants = filter_by_key_value( - variants, "python", build_ver, "noarch_python_reduction" + variants, "python", version, "noarch_python_reduction" ) # store these for reference later diff --git a/tests/conftest.py b/tests/conftest.py index 7dc0ae021c..465cab6fcc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,7 +27,6 @@ filename_hashing_default, ignore_verify_codes_default, no_rewrite_stdout_env_default, - noarch_python_build_age_default, ) from conda_build.metadata import MetaData from conda_build.utils import check_call_env, copy_into, prepend_bin_path @@ -100,7 +99,6 @@ def boolify(v): _src_cache_root=_src_cache_root_default, error_overlinking=boolify(error_overlinking_default), error_overdepending=boolify(error_overdepending_default), - noarch_python_build_age=noarch_python_build_age_default, enable_static=boolify(enable_static_default), no_rewrite_stdout_env=boolify(no_rewrite_stdout_env_default), ignore_verify_codes=ignore_verify_codes_default, @@ -112,7 +110,6 @@ def boolify(v): assert result.no_rewrite_stdout_env is False assert result._src_cache_root is None assert result.src_cache_root == testing_workdir - assert result.noarch_python_build_age == 0 return result From b2d97b37b1ab8e1a416af6479d73668d3a173906 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 12:34:24 -0400 Subject: [PATCH 348/366] [pre-commit.ci] pre-commit autoupdate (#5303) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.7 → v0.4.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.7...v0.4.1) - 
[github.com/python-jsonschema/check-jsonschema: 0.28.1 → 0.28.2](https://github.com/python-jsonschema/check-jsonschema/compare/0.28.1...0.28.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index da60f66ed2..b29ba4260c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.7 + rev: v0.4.1 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff @@ -87,7 +87,7 @@ repos: tests/ ) - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.28.1 + rev: 0.28.2 hooks: # verify github syntaxes - id: check-github-workflows From aea5a2231d89efcca88ef456e639aa727b69b9dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 14:45:53 -0400 Subject: [PATCH 349/366] Bump sphinx from 7.2.6 to 7.3.7 in /docs (#5307) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 7.2.6 to 7.3.7. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES.rst) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v7.2.6...v7.3.7) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 993e9ea9e4..58f1311df7 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,6 +1,6 @@ Pillow==10.0.1 PyYAML==6.0.1 -Sphinx==7.2.6 +Sphinx==7.3.7 conda-sphinx-theme==0.2.1 linkify-it-py==2.0.2 myst-parser==2.0.0 From d38a151f9199fcbc67996ae166454a733c974d0d Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Wed, 24 Apr 2024 11:03:36 -0500 Subject: [PATCH 350/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5308)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index f13985fb0a..cef565aa68 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -19,7 +19,7 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f - id: has_local uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: From 1cb17c0d36fa5653f406bb12540210656bf426a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Apr 2024 12:03:50 -0400 Subject: [PATCH 351/366] Bump actions/download-artifact from 4.1.4 to 4.1.6 in /.github/workflows (#5305) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.4 to 4.1.6. 
- [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/c850b930e6ba138125429b7e5c93fc707a7f8427...9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Bianca Henderson --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 29f98a129d..657354dacc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -441,7 +441,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 + uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis From 757341269084b5b6d43d001b9cdfd6eefa8de2db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Apr 2024 16:44:51 -0400 Subject: [PATCH 352/366] Bump actions/checkout from 4.1.2 to 4.1.3 in /.github/workflows (#5304) Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.2 to 4.1.3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/9bb56186c3b09b4f86b1c65136769dd318469633...1d96c772d19495a3b5c517cd2bc0cb401ea0529f) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Bianca Henderson --- .github/workflows/builds-review.yaml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/tests.yml | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index d789e536c6..68a7b7503a 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -46,7 +46,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: ref: ${{ github.ref }} clean: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 03b32fc111..208c1e1027 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 657354dacc..151d6c8c43 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,7 +45,7 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: fetch-depth: 0 @@ -182,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: fetch-depth: 0 @@ -262,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: fetch-depth: 0 @@ -362,7 +362,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: fetch-depth: 0 @@ -500,7 +500,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f with: ref: ${{ github.ref }} clean: true From 3f9346fe63514ed01ce01e74b3c0cc61bd13dd11 Mon Sep 17 00:00:00 2001 From: jaimergp Date: Thu, 25 Apr 2024 15:39:13 +0200 Subject: [PATCH 353/366] Be explicit about macOS runner choices in GHA workflows (#5309) * Be explicit about macOS runner choices in GHA workflows * install latest miniconda * Bump SDK * correct macos version * pre-commit * revert and document choice * pre-commit --- .github/workflows/tests.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git 
a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 151d6c8c43..a6b1071e3e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -337,6 +337,10 @@ jobs: needs: changes if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' + # Old macOS needed for old SDK (see xcode step below) + # This is needed for some MACOSX_DEPLOYMENT_TARGET tests + # We could also install SDKs from a external provider in the future + # if we want to update this runner to a non-deprecated version runs-on: macos-11 defaults: run: @@ -489,9 +493,9 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-13 subdir: osx-64 - - runner: macos-14 + - runner: macos-14 # FUTURE: Use -latest subdir: osx-arm64 - runner: windows-latest subdir: win-64 From 8a7e45cb5b0bc8161d0b1e345ff5d0408dedd2f9 Mon Sep 17 00:00:00 2001 From: conda-bot <18747875+conda-bot@users.noreply.github.com> Date: Thu, 25 Apr 2024 11:29:53 -0500 Subject: [PATCH 354/366] =?UTF-8?q?=F0=9F=94=84=20synced=20file(s)=20with?= =?UTF-8?q?=20conda/infrastructure=20(#5312)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Conda Bot --- .github/workflows/cla.yml | 2 +- .github/workflows/labels.yml | 2 +- .github/workflows/stale.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 07c7f75b12..ad56cf6e64 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/check-cla@1e442e090ad28c9b0f85697105703a303320ffd1 with: # [required] # A token with ability to comment, label, and modify the commit status diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index cef565aa68..e6817ddf7b 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -19,7 +19,7 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b - id: has_local uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index bcda1fea30..78f4ac5eee 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -33,7 +33,7 @@ jobs: days-before-issue-stale: 90 days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + - uses: conda/actions/read-yaml@1e442e090ad28c9b0f85697105703a303320ffd1 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml From 1793464a98d69a610dbde229279f84a07b1ec8a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 11:30:41 -0500 Subject: [PATCH 355/366] Bump actions/upload-artifact from 4.3.1 to 4.3.3 in /.github/workflows (#5306) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.3.1 to 4.3.3. 
- [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/5d5d22a31266ced268874388b861e4b58bb5c2f3...65462800fd760344b1a7b4382951275a0abb4808) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Bianca Henderson --- .github/workflows/tests.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a6b1071e3e..08439f51ca 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -156,7 +156,7 @@ jobs: - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -323,7 +323,7 @@ jobs: - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -423,7 +423,7 @@ jobs: - name: Upload Test Results if: '!cancelled()' - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ env.HASH }} path: | @@ -449,7 +449,7 @@ jobs: - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 with: name: test-results-${{ github.sha }}-all path: test-results-* From f62dac203d40234583677595d20c90bbe153fa7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 19:35:44 +0000 Subject: [PATCH 356/366] Bump actions/checkout from 4.1.3 to 4.1.4 in /.github/workflows (#5313) Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.3 to 4.1.4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4.1.3...0ad4b8fadaa221de15dcec353f45205ec38ea70b) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builds-review.yaml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/tests.yml | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 68a7b7503a..ce39170ac7 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -46,7 +46,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: ref: ${{ github.ref }} clean: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 208c1e1027..23a93bb620 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 08439f51ca..df9b8e5759 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,7 +45,7 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: fetch-depth: 0 @@ -182,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: fetch-depth: 0 @@ -262,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: fetch-depth: 0 @@ -366,7 +366,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: fetch-depth: 0 @@ -504,7 +504,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f + uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b with: ref: ${{ github.ref }} clean: true From 53828aadcbadf69c2e1a549bdb659c1805b41fa8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 19:48:47 +0000 Subject: [PATCH 357/366] Bump actions/download-artifact from 4.1.6 to 4.1.7 in /.github/workflows (#5314) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.1.6 to 4.1.7. 
- [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395...65a9edc5881444af0b9093a5e628f2fe47ea3b2e) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index df9b8e5759..78f907a231 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -445,7 +445,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download Artifacts - uses: actions/download-artifact@9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395 + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e - name: Upload Combined Test Results # provides one downloadable archive of all matrix run test results for further analysis From 87ebde78238c9e5eee7cb1b4fb7c76ed93512bf2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 21:06:20 +0000 Subject: [PATCH 358/366] Bump conda/actions from 24.2.0 to 24.4.0 in /.github/workflows (#5315) Bumps [conda/actions](https://github.com/conda/actions) from 24.2.0 to 24.4.0. - [Release notes](https://github.com/conda/actions/releases) - [Commits](https://github.com/conda/actions/compare/v24.2.0...1e442e090ad28c9b0f85697105703a303320ffd1) --- updated-dependencies: - dependency-name: conda/actions dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builds-review.yaml | 2 +- .github/workflows/tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index ce39170ac7..7f7b44ed4c 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -53,7 +53,7 @@ jobs: fetch-depth: 0 - name: Create and upload review build - uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/canary-release@1e442e090ad28c9b0f85697105703a303320ffd1 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 78f907a231..939c3471b9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -538,7 +538,7 @@ jobs: Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") - name: Create & Upload - uses: conda/actions/canary-release@f46142e89fa703cc69f0421ca1d313ab2d5bfff6 + uses: conda/actions/canary-release@1e442e090ad28c9b0f85697105703a303320ffd1 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} From ace8391549668be5d382054216c90d0c28078d18 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 21:34:10 +0000 Subject: [PATCH 359/366] Bump conda-incubator/setup-miniconda from 3.0.3 to 3.0.4 in /.github/workflows (#5316) Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.0.3 to 3.0.4. 
- [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/030178870c779d9e5e1b4e563269f3aa69b04081...a4260408e20b96e80095f42ff7f1a15b27dd94ca) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 939c3471b9..f736a231e7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -116,7 +116,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -196,7 +196,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup @@ -277,7 +277,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github\condarc run-post: false # skip post cleanup @@ -380,7 +380,7 @@ jobs: key: cache-${{ env.HASH }} - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca with: condarc-file: .github/condarc run-post: false # skip post cleanup From 1ee3ed3c3dcd53196758d91d55a587a14f706db7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 22:27:33 +0000 Subject: [PATCH 360/366] Bump CodSpeedHQ/action from 2.3.0 to 2.3.1 in /.github/workflows (#5317) Bumps [CodSpeedHQ/action](https://github.com/codspeedhq/action) from 2.3.0 to 2.3.1. - [Release notes](https://github.com/codspeedhq/action/releases) - [Changelog](https://github.com/CodSpeedHQ/action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codspeedhq/action/compare/1dbf41f0ae41cebfe61e084e535aebe533409b4d...aa9939428da62f095e83438509c41499e7a2d751) --- updated-dependencies: - dependency-name: CodSpeedHQ/action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f736a231e7..04a47e76b9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -229,7 +229,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@1dbf41f0ae41cebfe61e084e535aebe533409b4d + uses: CodSpeedHQ/action@aa9939428da62f095e83438509c41499e7a2d751 with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed From 440a34edaa3fefbba9689a588ee9f545575a0963 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 26 Apr 2024 10:32:01 -0500 Subject: [PATCH 361/366] Add arm64 review builds (#5310) --- .github/workflows/builds-review.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 7f7b44ed4c..40651286f4 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -17,7 +17,9 @@ jobs: include: - runner: ubuntu-latest subdir: linux-64 - - runner: macos-latest + - runner: macos-14 + subdir: osx-arm64 + - runner: macos-13 subdir: osx-64 - runner: windows-latest subdir: win-64 From 3e9fe719aecc61c92cdfc20fe131cde1c36e3a3c Mon Sep 17 00:00:00 2001 From: "Justin Wood (Callek)" Date: Mon, 29 Apr 2024 16:21:41 -0400 Subject: [PATCH 362/366] Provide codesigned stub exe's (#5252) Co-authored-by: Schuyler Martin Co-authored-by: Bianca Henderson Co-authored-by: Jannis Leidel --- conda_build/cli-32.exe | Bin 40960 -> 53016 bytes conda_build/cli-64.exe | Bin 41984 -> 54032 bytes conda_build/gui-32.exe | Bin 41472 -> 53528 bytes conda_build/gui-64.exe | Bin 42496 -> 54544 bytes news/5252-sign-stubs | 19 ++++++++ tests/test_codesigned.py | 97 +++++++++++++++++++++++++++++++++++++++ 6 files changed, 116 insertions(+) create mode 100644 news/5252-sign-stubs create mode 100644 tests/test_codesigned.py diff --git a/conda_build/cli-32.exe b/conda_build/cli-32.exe index b17d9c7b234b74abc6ff5c2753c3beba4485f9ad..eaf5188c315ffa66186bfe039e5ae5c6df234cf2 100755 GIT binary patch delta 12180 zcmcI~cOcc@|G#@}nb~B^hns^#l*rr7bG9FMF7EuA`v{B0D@-~3W30g00Jyrqab;R89D|O zf{6y`W8raOK`IO)++A^4~T-GrS2aIw&}FtA9WS{g8Nfb_6|M~W|NWnm>_=IqL-U~dAWMlBQ7 zeTtOW!^4Be6g6aG20FX)m^j$Mm;i>uF?3R@pSNf_ySccUnKG(7IJh#(2>|qz1TcO; z1c1T#0Mtj1l7RoPA$ZjMzbyj%zKY*p1Om!@^5s##208*tg!=WMcu)isf>t*d>0&wY zeAYnm0>BJd@h>%IpXF&|l2{EHz1BF*nJuB4@1EFIber}LK@-5RMhj+&P%LV{inF83 zFT1tcPSQVXIE;Bgok*SGRSNlI-xsO5IuzHmoQo}^9UIG@xg}V;@Wu2I%M$ZUBG$0) z%(;r&IlViNPy3m1sEv`%g!6p&(v7RF(O;R29|Df|I z+Vs?#N(7xaY;;gKlLyo1?gm1V_<>`-76Q&UGLGSL&i(&0actuOd;Fn#;F7=1IS?-6H>3^W+r_v`(Kh zu`eUGn99mXDxZU^L!pk`aHG6Z6DrwQ8uy#Nn|>v`3#M88efO}ZYb)nuGO4PvJRs9V zG5Ab$WtyC4@7ca5R96G#TwkyiRKVRPyb27%F1FPSz)4PDe^0Es2y1S4reMaM^j31a zXtKm)*uy5VW=K6^AQeQl_gpFA%-gvt68)La;bnj`QsN)C8 zkOrgxBGjIyBZWW?Ffajh)E|KGu!HEZ0|Rxkf&e=3u!9hT0)uR6WCqdiWMHZ^$zySG z{#ks{86uUFTMc#2P+JXx_S|^PC)f4`Wzz%Nmucx}VKb(dMA|(bMULBhiHZr;ZJzr0 z_=dA{WSrWkgcJ;9O{NNy9#WapvJgnz`e0`>togP7LW7J{5uNIW`nO1N@OgZKl}$@S zc8+Bx`;#j%^+JZzMH1FL@2@NOGoG5j_#FC3&fl4!o{Vo9xW-d7<+Q3jYaQnhrE2#u%+f2d zOo+@2Yn8Pcmqc|0r?l#x7kG!oSeo;FMprr`e55bQbnGKr+I_MSCF(L&xRb<1Wk;4E ze8o5Z{hfR*XOl>6KYdfRv5 zRWgtB23}&ys&ETEdbjiLQWcAaEtZ3Y7I^uo(nWg?y<2SknK?J(`su-UpA_%c(C*18 z5=>f{`<7jY_lnQ1i?3std{}POL{meCoNMd$>>Tql~FeoEA7 
z7~di8p{~5INE-pEV2{-R7b@=AU9YhmWLdbcmae_MlI4dSrZ2A{BGPc<%=<{{F*_0J z&s2y_EJsopEP>tUR@1X)?h7EWN&o~#{84zuDU>QR>Q7Qiz0s9h8(lQ^r)~33krDha z@%i^7BPu-e!2kh1(Zk^UBRmVCnj(Dsf}(<=dVtj*F@h2xKMeRF9x35*#OFRl2_6_6 zxOg~%PAYOdz<3M=asrGp4yI;|8deteR`wQ*>KbqXKvalPLmtjABEZN8@WB{m;4oA) z{^uw?^n_Qsm!r_L(f}M1y+&JlS9*g#bWy@T&awGB>q75_xsY3F2BZ9fJibQnO~TOeK(8K2!US;P5t2QXU$4pYT2n zR!pOJ>qlC?x%S5O>yUOh9xfD_u0;3l?4pyZ<13heY!bygMW2lVF@q01fA`8(*6%4_8~5z-SCo{JGi>u$ zbqOS9VY+4%VyfBD$wadtLhWsSrpr_05|E&BV4|Y*JdXfJ1MaOWAy>Xru@uUO3+=SM zfUiyRBT9n^$Ta`~(E?5%2IB9b8VA5a1!z>H1?YYTXG`I!#R~DSh9p6|`^L9%?wYM% zmHy6H=vXkQ01CgqkKTWd_J^#9?komAfQN;HB5;_P*ytcS;Bnl5Krn0oD}eR54M2jZ zEL~k4#dvugOk5m~7}Jjv3BZ3|1q~8By2`}Z1jV&n{%|4h@gX`oLx*fiPJqY6*%gpM zEeE7yz_le#7>@hrh#Ikk0Z@AiO7r-uyuccF#hk18R*Z8gT6?iAz)pHPB~mu^%3ex_ zH`|bKjhKPFUF^_Bg?0Al6#kb~_mXNW0;k-4g`GN>xAgKNdp4r2XBHXzhE#9eO$-TX zsHjbor}BL>>PJIDP1R+b;IF;eHse0uG)rqJ^HnnDz+F@#WiX*f@h!-)Xvmfpt1CZ5 zvp>RqaWAsZJ?QP35+nUJf}GD&_tI)br$w&5wtp^^y!)O>L+@q#2kS5ClH)Q}{H-b? zWKjtlZ@V?^#tfij3{w2w=;7e(z$t# ziv_2P9`iEW!nYCn44{6@eTz#^;5!fjIB35!kh3 zs&ai)Jd^Tx51Gw?NT_l-4)~GCmjkoF4?6XfQK5M&0+rQgYb{ukfrm<0~VRCxTIE#9iXk_dL*R#!5z+hH(vRZKWQX@esziM1wQ@79Te8lyA`= zfIei^R6PWJvX0?l&nb)>^}VvWL~ipY(%N_qOR7tR+AeMJ5;QaA<3@^7mzNLh;_Gu% z8j*cVw)KFV%NoCGlClVFs-Vs}5Ne9V&9WfRh8q1PvD!$lG>E&K?t>3pfcaSq5 z8Ln%CP~23k693k`4)aMV3&%4(6LTT-gBqF{outi`7b^)&w;O%20&XrQ55-&UrqKvI zEfjcU#BN3$V)w-mbEmqJm}*I677;C7num_A1~swKlGrqE0?@CJ$qc*VnSSW7Rk#k; zK8&*Vx#|35zPhOOQ(RJmg1pmmWB)UmTs6w-Y}t;z06jZ%4^kdUM^Yy__gnq+-(YaQ2x=ln_`Nym-&ilco<3MTau34+fOHNO7*IB zmi~Z91Qd!w$o)US-QVKjFSz7eX;+~dYl_2GR zK(!of|2nI{KVs!SP73~<)j#I=i$L)J|6suX z&TldS{p0%pXprjhtNsQP_`WXizr3yMcVCB&6axyN{V6Da2lKkEbx#HPa|?JwRjeTw zm!{0GWZB#^-^niJMhvf-Y&fF-~Cd-na5rE4>2 z-y`7vks9l(mTX)`foq6`5L-tMTqK?`dDffD6^$ar!2+L#_Q4LevgAZd_*X1Sv><;s z2JDqL6p&Ps=}+wof=YC34z?WocbLa}YO~x&Ml?#4^0Mxx#QQ55r`^po|Qr zPcMO{$)KP-lQw!vQv8YyH=si!sd`c~ZcnnI9&)ZoRVvH-p&oAFRZZEI&!ujC+Y=GWRuwtTLv zm${8}=I!BHpvr#L{(c7OR}v?^Sn!%UP)K%5_v&S$b4IWFR_>k>ejP9k#S#o8h&22X z?Z9Z(bHEWz=|5gjv;4G(J4ugYJy5w-4!Ra>o!88#6tHbhU;{(YxBv(m2b5O{`1~)S z8vHjp4@&%Zyy1jXibNeEsuR{IJ`4}QL85%ov_o%{8f9W0;U6wK4FAM+7${cz%VIqR zI|SfgZE}G3r~QeG0b|Bs%ty+Jy{K6>ngk0t^xggJ?Z=Vr-GYg);imUDPT5$yojf@x zq{To$(eA(P;NTGj!2L0(X3QPkpwZEhLRC;5D|Mhh-ie^1H7ax!KmN|BsXczK>U2pL z|F;|a+GC+HQ7xKHISv%x%EFdr>Mm6t*r;1$U&D>qXA_x^vAduim`sE%$)qyiRvAX# zY<=_Wz+)Y^d3Vva?oBmv`<32{Yjr`LSY!LLLLRME0V5*0m%w+iRn;9~JZ-IJShiUP z4i8daOP5f@!6y8%Nkw=t}g;2Dbq5qku@j>t`Rd6I`A%y zViWS2CC9qKujgjG7n8YkSvW54l;j7A5AP)0my&2w4ZlG@TZ8WUG|z^Ov3y*+vDf#> z$b8P{R1?G-&^3pRiQmS~Es_plR7md(*TH|9k+PL9V(^kW))}3ZDoj^sw!R_a6&8O{ zAH_HIz(|sM`s|i82^jG%z4D_-O11h{U51$z+rijWAC?!Xv68UKn!S657EEY{Wkd(B z?er+PIIb(CAt*s906}@-=jK93hrke&+^9Z|pIhuO89+&m>LN!m&A%oCJh4t4S@F2U zsBC~0h6P}TI*C6r20?qa*pRc8-UhkPQ4VQ^3a$2*U(^GI)Lg7y#oxX7KXARskO7D}K@EjjdU-{K`!Vaz zNRtiIvjql$cwVuOHjt97-E%=px79^*<8jkKr!rfoV#z9pheOlEr2EElc%l0%6)i(8 z6Sk}?b<&7-q?1HaR5ZpDD@S$$mCC0=eQky~nT{Dtcwb0(Os6`Sx!S*QkTNN#`C^$5 zh`-obd>}HahTR@h`A#)$O>7_}phN2R5sil2N73luKhapw@4L49;XEC3EQ&+^diF3w zaMC8i0T>Va88J{I^QXH41x}qj<%bEhV>{{@#h9IPAg>Fjdm^t}s5N6mN~^;OJL_@9 zGU@zsSFdRrIHa9x2FsWjno$7s4kuv1&;Zl`6{IrqN|3@aoBVHfket9TvWz;AC(ok{ z{5xm%k343nhwL^0@b~+{Zoxwr>>rIT-Qyck^j7J3LBBxS-(HWmYeGmQP-~-`hW%ac- zBc>*8xlgZ2u7vWvygR4+u3}tr+pt7>IAVtKL0?|I)=W@c0XsLmH<4d4M`*Cx`Jr#z zEfIOc_Qp37e3Nd&LKb@WzHVl>yjKytfL8k=>74B?$eY))b7B)B;@CGtb=SRJU|L8& zk`6L_-=dm*Fmv&iS)S!EbG!IrB0^AygQCM~l`pZZe^jih^m35RGs?Xt4_k8EqWoeP zu?2<8l|x`>g-HGA(wU8lv?>|y$*W2}^p=kG9(aS$DSe~o90wxr)%Au8D4AKVt}hKx z?yA}xV0$n)#mU0ksxA-M@_Fm>TF9Bbr*)}F*tX%u0xvJa=~q?^r^d$$BmHEH{fOZq 
zC`>CtVcNqZOv4ruu4L8p9+fND<1jyWQBa%BvtjEQRt031#LI5>1p~J~fx5^c#!tk7^Pt!`g%M9OoAYN}syTGhms8#c&RcEm=*DMFw9+ zyFD%ozxT%5TE}Ze_*SYxLC3-!(a`gCuI zp0-#fd~9Mno7GI$B}AY?%X)Q(fg-`y&x9(FNS1?;gt!uav+@ zb9Fc$vh!>NXGL6fC)Z+gjAsLpG;)kGGcqQAx83a7&ql_JiI9|UZl=tMUtjW{U?v+w zoNI}yLr%;{rn~ISGw6Y~F{|paBwV_#QXKg%GToDkX=iLinmO0n^B%fLTc%cAE>9NA z7($<_TS>prF8zi|9wZa!=~v2Mek8F^SY>){$Bly({z*U3Fb;O-MocK4U+( zj^Hh>oV80vH7sGeTW^dfqW4AXx66RXN+TcQ6?G*C(C_Y-M-1EmuehI9i*5_W7r!3D zoYTODvrR^P8r{c`<+_oaM;%Wm(CGIVhv9B71hG(UHLFlfC2 zoUXl`@^rwxx}VS{0l%OBN!PJy1TjFN-L?OcR(VDN{tZr@1hT))ivLDl1?M-@D&YU8 zyvpyURSxS<4hv-doSm`#4ZuH9gmVmEKG(wQqbnTZb7h$!zUITDOw!XOR6NAWq&kr9E1bKIAq07xR^O}|dO$kt50XZ$qo>~MUmvjx-nqkgRsd$vRdM>ELzAk~ zO*(uWL=Hu5u67T(OBjUtYH=+ z-tUFhn6d@iM%5;J_d4QvFXz;_sXWu$$|Wu{_;A3Xd2eMX#L3<4b?1t62Fngu&yJ8S zN?@nWXQgLrsGK;rxS3LVE5ScQJ18ITQrl!x7;-*-cQ-59xwxsNp~2c|>=mgWZc{~S zS8y=vbCnE*Y7g@VU!0RZT!E@*3;RZE6o}K|C=kVTiF&;~IBSx?&Mk4#Ci{!|OL&(` zh14S^CaK;^Qr50Fbe>|fDTVI^K&Pk?Hk(-ym;5Ycu#NO5%}RpJL&aVngKduqQWhiSTOu$4K| z9mE9RB*wiJv@V0j?OqlT?bNJ0lx=Lq%w0N*GZ;3R)ps~jRC*`j z3m+3i1I&J@q5oy^)=_Z)E5M9mjwr20`%@*sU0mFpQMFqNvOmhVjzk&XVfofCQ3iv- zKtS{u!u}PG{w9q61;D_6S-(XL+@p@DT0(M^Qes(rf@|s;M(H{jMh$;fRb*yku(#Q_5 z`Ayqj=J+m@E)xD7U37FN6=V-f0KUsISKt!(L-R=js3HI)st6!R;CQP4sQLank1Z-4 z`dyu(+*JSz3j-xo|Bx*BUb+T>pgAJzWc0P>MGTP{uCx*r6eC-8pbmCzVnJFk_fp`E zT$<<1UxR6^G?_nT-u1Anr6F4stkYSK|G$8-qQ4GO~|km=+&8r^!D_=tx${#zH6cV3!@72N$m8tkm(I%KES>aA9c+ zy(KuOUrCBu@Upd)W(io8rX9YNIV7=A`eyIU=(>lVzGkSya+<2dH+NjiJE;!sKGUT-5pg|>wIXHdmOr9;K{n#|L8;g8#fG9z56$& z+_6cH(%)!E|2}}`r`?FM!U_IhDWbga<6>W2Oc>FR?D$U;5a2p-F%%zm2A}~@6)_lO z<||x$jg4(Y2!rmtkhWJU)JZ*3e=*LH;Jd~+G;A@fZ;=EX;$)LAG||rZwZ-{omhg%q z4vNbB0pXL=^TId)HUNb;7>xa!vv!|cnLGOirF1OeLJo+K*IJ21{1nB?G>L=wo*XA( z?#OZvjeOWvSPp;8Ulpf!y42^*MWu3gu)kBXcKE)utlMs#*CWWscLg<%CVR>F7R1F6 zy-}I?i}Tn8d^8uQ((UkJ-cJ8Pz+pFli?rnVhkF+WQ)dtDT2@D{YxaDD0|!cc}=CT>3Gqe1?xg%iyr@ij2+JbvLm10bEB0Q&N-f?lE)p(uQAc;?g36{ zS283{bL5AbN1kG)Y{hsB&-f-1H@{QM7T%hd4z delta 23 fcmbO+kGbIh(}WvLz1$OT*-w7Mq`ujK@xVL)dMpYe diff --git a/conda_build/cli-64.exe b/conda_build/cli-64.exe index 7b7f9c67d274efcee6e1991445828124222ab978..0251e7a4bca1830f4777cb4f855f3e5fd3b2046a 100755 GIT binary patch delta 12172 zcmcJVcOaGT|NotH?2*0Kv1i=J9@)x>j1VGZ@4bp{ju6QvgpNHTBat#9QeT+>u_JM$Mf}ky@FmqWAkCLBhWJWxG^oPTnr2n!2}i< z>jP~_c{WHQh=G9tg<=3mK}>8s-Z0g$Eg}dG6onMn0FVNop%4gy7$CyNGY?aRSi>-( z5G)M91RI|Z8-fWzibElosB?e@Kz(e67{G%;Fi@D;&>#!|@t0>Y;gmkj4=N*`!QifO z6|)lHEYb^SSHhY{RS`>9ZNqEC{2>ZSfdwG3VgV$~4+VolpimO5JMzkRJ^2$qAM1ZS9q;J-py*E|v&d^fuAc z1mq;XzPZ71zk2e zU#J$^T)1#o&oG*huSTl?07nfzwO=)zE`6zW>AmP2qGK%K((CA$pt0(>6!}>HXMSzf z6)g`PXe3}Hw?+oUa|N&hZ)_lCN#fk@=_3_{hQ~2Id0pZl0W2B=b@%CE-ll#wqf!Xv zG-3Hh++kz2+~(OLl{Dsi0U+DTE1&f6RY|xoEL;gTi>3=#CR3R zkn}8Oi>0!XwB`x8IvC}~uV7wWVM(K$MCWt)+GbGs&b(E=aBl?8RDI2?QZCJ-d|${E zaUvlLW2GJsM}*V+6S_K}idO??NwtFa#FY}WTXHY!`V~mYF1{zxUO=?Adr-0By$#Ut zma|-BF>8VAIJ#Ec3%jY+JapPD1O6?Gsr6}0cEvS(&eV+=d%+aOlN+N&m5lG{q3GiW z$x#I40b=w!%}5S`>|rxJ1W5*L{BM%mH{Jy`;+dg1~U$~UE{T#lW*Bw zNvjm2xPraPUW-SnJ&arNDB=ouoy|g#=Y39RHe6x2H{ELVBWKo4ieU}fN^J#q897Zi zwlG5V*TH?Sgc7e4;o{w9(0cmG5$*-_O-hQ8gtAaVaOd@fCfwCVxv8}^XB#woy-Rx9 zcN88eW7T)2(>Is0-8FW>HC0^cpE->jqG zRZ%B;Yio0@@}feI^vt^SI;Q1^+A;D=5Oj`8+={K4?+zwJ*XolKJU#_{E^&u zW+Ez>r4)xmg|swW2B+7%u6xbq8i2$u2auR4hv6BwRKC)@FJ0sArOu-Ig!`j^d2RkJ zGJ^jlKL2`TM2BY~1Rx?Lc@Ug`gl94IP(nypR8myZ7_k2{Bd7t&gMbg>lb<+>`1}Wy z5I`sZatBvn5rHeYT3N%-+uOR>yV$~Y&nt)kl49`lstUpqB5)x<2mx18 zK%k@Xzenl8DnZ2_?oz)RQ*cnyh`!>6;)Zb4f=uvjx7H^up`5_o+-SQIlZ^!tcATlR zTK#@X>(s+cV_*9QZRvEiPGnv9b|bYZ?o}{MR@05$>cOpdxPc;p8bKGx9F@{iS6ii- zSC>0fjLGvg9zft6Yb5~`drv#~qhRf*C&d0L&ofZJI?($F3G}FCm 
z{gj~QrYID%^ZX;jnzdbAwx$M>R8`k*f~20}g6TXBxpga8sTInvnrm=fbm*%dg@{RA!fEahFSduRM;7NZ(P-G=LBgZ3GT7?O>0?K#KyH}!nv)N*9-(P8?$;fxE-SBM z_A*%8Gn9ml#mGF;O0Ve^3*Ec~ZGa6!r=P@WAXRJMQcL4WF%j;0g#betl?U6^i%~*& z&{s~U2n|SoL}?HSF#?bfeSqvB5PuKVxBxafK%*lq!1yyb+lfyuR7-y`BMsZxv$%?T z!+QOk;&;Wu$c9M+PzC*S^!{tKKTt)C9GHXvJ~l2|z+qwGz(5$_do+MRFdYGVfc3o!(~lDgDEzz&8YX&pm!*XzT5Ea!=|;h$Lv(nC4%C#2h=8Ss z7oddR4#>!ic?ysMBv4`~kuag7g*=>ue{B^ffZ^av!7KJS8wn263-I1SZT}l- zk&#W+^;xPk*IthV(UH>9bXuea8*ILu_L*y$p*K_dBAdAHBPo+PklL;O2IO8aQT~cK;i?^q4$C;595r*u%v$r~3eZ=`g<>>*Cun)ums zW@u%i)-90h6<<1jDmOihdAiAfo{w!m=HbU1SKS02w5++9nbX$)|91xZ>6aji}X$K2i4I zxnZD#n9j2%+p-vpd`GT4wLW4=Y5r}g1-5LE@|qjwylZo1MqTx1r%B@?Bd@1>j=cMY zbFtBej!5-Q^+(d*TGtVQnU&G_Rt$-!qCV(gSTo8xo^4o3Wx3jX4LjuWLdIZ<{Z1C0 z$iq^RJLX*0B$3Xa&9Js>Ye;AoC1#KbiWS8$m=4s^QD0`$q6L7RrBE93!ngX+;iPsE ztWS=22)yi3HCKE8#nSEcCN)*}<>tP}N<})o{8sA7f z3*eP6M+B|h2r8V;Hco5BBN@APw&*Jr6R3J;+*>`<c!rede@DE%Ht+Z=Tyq}DjuzFaV5cMztF#P-+kof@$v?v6GjzGZ! z0$fKRgF>k25Bc+lKk$Qgf=b%w|C~GF1bz1y{z6k25d84qQOM&2G;5j9O2htL)EYaL_`unOP|H{gLT@?JeYH;GwF9NLtg2RE} z>%XW3OpcxdpkdlaU-cK1!1sND|K;0yefM=3$uXe-#$ST+cbGSBt^28|^3LNAK4OoQ zTb#5xo9`H5vt3ZZj~rUH+;D4~AMaS(Ms=2VsXg%Dbbj5@sA<+G(bu>7hU5LDomVU9 z(k&AFkrw-lzVhvy60b;GG0u)c1&I`R#!LX87Y0?Pt1TfNecU#VrtCyp^cQSujIdyD zW}KDRRFJ!*Q%mjhq8f~xu1?&0_&$^P3B5i#Mhb-_=M-a1wDThu&FR>vfkd8ZjXnSs zIdAbUZL3WuVZkQegeiVpWj09hVy6IDO{lS_w14|;?%s3ia=YlB)@rl0Vob|<(@F}2 z36l(l9`IO%3%D1xKF)C5v5z~e{l_5ecj^k141`ZxpS2Ds3SjP;~orJvD z&+waHm~&Ck2etMd?d|1|e<5`@P6CZ|fl>;8!*geddCi~quG}CH9}Ag+VvB|n#hQIi zaD`iU?{g(9;*351-cgQP~0k{5%SfB$Pt00^8k=^E@-b32>f57 z8vLC+4@&Y+y5X2pib5YEnq$@|Ap#%3MWKDstOIY97Hwi4@(&M&AUt*-CR)}0VX+b* z4gmbem=X~DX@BBjB3LouMhp2f!;i5^s=PG`)e7z##M*hC>KMWBkPl>?dm0v3e*EO1 zNsEbuVwh?67&JWp1P*mL;A(Fhk0SS3T+{{11voOItN8BMn+aox1RD$}E$GzNzT=%8 za;AcB%4`RDy(r+ourf(kVQyFu(cP+Aq z1Utt~6tR=`fsvJzqFH&>M&@KtGljz!B_e0q3YZLQrQ({iniT9Yp`fT>$T#8G9G^hU zS^vGc1?lj(^CEfgj7DDOtpvMYzLLFBTKeIpCzOKZnc-VkBhP|I+nK6#JdTceo+T@7 zXHJhxjy62sR`*E)r>5%ecyL&FISxP?7Zk!K$Q~)Hj8Q=_vtW{HN}Kj2EqJC@QmTEP z1O=&>L#C^CTLt-KWODYJ9djOB?bUvq4J57DpUU}Zacyh_L~iH9$r75*qE=nPJU+fZ zBSxQZURbtX5{8vjmCtLxhOb#N5xK*scPH2xUyV{QP3VR*C^9p-*6A_x;<7qUe6q1( z3fT}nJUD^()T!)I6@@vs5go^I|B(Cm9Qbm!B{E2!K%EH*_U?X$v-(yGG?&b1-gXEq zb>uXCHguHbiNKGk_EooADg6?Dr&ZiTAVq-Sg|uPn{UXO9mxkBepRaS6gESL~n|Kuv?5qChLnzZV01u?Y^Xcsvqx zHNXzj2Czb(#2*!duw5q{$UhVTviRO{&5|=fPJ*N*cGMR|OvW1IM$d^#%x}E@(tct3 z@hFtUpx=bzu9l|p^2o+p1EX0{8e%j9(Yjbj2q7*kgm!dLNOC;15!H+~qKc2|1j=P= zi5a+(`HkapX74Sij=%Q=Xc=$$XI+a{rw9Hgx=`>}4@er2Jh%WO7dlE`k9$DJY#$*} zF}R++v-No|3um_jOGruKpim$r42UB{ehPSD5d?q`J`(V%$94gt%N2j9=c5lhRL^jL z>Blapk1l-wEYE|_Ixw4rf6D0pq@r`qpAKgXiN07OtM`Cyvit7v`@T8TtP^;z1yyry zUyuKnzUfD&9wcT z{ociALws1}kUUku5bjVbkC-{@)ER0U67I_;h8z4wMBi$0|3(X4LLLzQ7a z9!bYZKcm~z%6#gBNnOjh)E|*nn)__i?O~s)a#^h076#v=#w;HQe8Botyve@7Sa*k^ zvc~M~$|s*dM5?uleug@`UpU|`JZY$~Gg84nrBhZ_N{LPJ@iH!Xzdcicm1YL<+pgYA zQN{PPDPKNluN!WWwo}f@lUCgI4E*#4f8Sq(pdztJb*W|$|67oF@75u2ZmFl*^ zwh1Tpl}1HmJIY-qJw5@m%HEBONUP?dSno?S++4SuMf}gC0#=h9tbFaCxyf17bgp6B z^h-BvFT_cV=-{*`*1XfsT9fLJ4C#=+dZ?lyH_<9O{I4n&{GQtVFrE%178Ux1W8^p} z5FB@jZ~^$iJRB39WBxQ(pa8+~Lw-;{J948QQ;j-nh6;M}_@oJXM>)V_GGDkJbF=cQ zm&xarJA16M20?=&Yede(;ItZGe6RpB=6OH|&_ZdV&W5QSiOK)w2C0brp_S1G^7wgF zQur>JgJX|G>Vdco0fK`L#Vv?7!T#0fGCX<^t!_1rHuOvC_TTs9^&>Wc-|MXqba47E zXM^d6CQwBWQ>g3QZfQlHvP&P~*ze$<>HKKQ8owCD3N3on9aK0vH{H6#iHvj?qKKX-P&k`jO+~0Mq(r3voL&-<*ZmVzObb(6wl$__1briMysRPy!r&92vY= zSFZDct61Qhc-wYIU2dGbDR)DF{=GgJF3i}UZZvDOe=SP>6{XV8#wVQC6hJ+_HiIid|6X> z?WH+Oi-F4080pz4p=UQ{4c}Fd%YHR0R~(9&rjF|^eyTqm)>y*Duh5ewtX?QKQ0tL= 
z?Y6&!s#$yUYZ;-p-a}%x#t~mO3)_wKFw?;Hzqhrz49D8`W8ll=$DC(+Fg;b9?HGKLDLHF@M6nFY#X2c&LP$mF?X6 zVn6kcw&OmIFSGk?WrdfI&h$G81sDq2s#w3L_pDC+>d23M23%HPT3Ioh93L%>4N@)( zB2kD$GwnW_X~~C7!x0m&Vb=>7Q7PHww&A^W@vlt#zt_GGa$E41Yu`dbX!ic$NePP| zp!f)Be+Mi2F#ifyL9hP>SpQpD<$0JWw%UUx84oob{WcC*%W9l>(QTcqp}vsbOpR=3 zRQaorug06CS;~}(X1O=k?6w2f!1M?LFoQXhSeRLxoPg6a+4;+bLTKQwQq}b_($Ah1 z4?gh)U5{j?dAl$s(?Sj2qvexTd~F;3QAcKF$Y5ZO`{Y7@#Y68oW^C(Qi9G$Ii}r6v zW5ds2Tpd?dh|DfIp2&kGW|b4_ zfn6JtWVXvE@+_TZ@>>}@#fVG>XXy)@ti0q4o_e%CKF3_~&`{HVZ#c#?p@d)ZU7qUD zN;!hO=mJkNm%v7Ne#|)^N_|eZ6iyIDrvg`+Rg~eC5t||%`bKmn+>Uyi5#o#E>%Cpv9ob0J+cenb? zgAA#hLD*#DO7^LC#n&{dAcaVGpT@oAJ2HDG9$E2jdvmiZEGaf(rQ4|KYOm|8iK%Jc zg!l1Z5cTIPTsv)E$9Btb>$Sy1!k*;Qua!Wa#_)#}bwk;H*p0mhG5wd$f<9zA2`{4v zr7uRZ7B+F>ex)EGg9VzgJ+Yd$oXnq+-E8)%lw-p4S-fX@#`~Rljv*d|8{@=vy-!^q zOkZ3A$m-8zKJ53Y?K|O^O4uh{)p_I@K}^xKGx{%86*Q*6zjNw1$o{b?{tIOl++VG# zfd7BWD!*J+`LqA@vpD1Q3&20tgL8zhK%?lV3DvGCMary@Yc@k8EQ(X*Gy)`=XbUaffKux&pDxsb~D zQ@QBxu`_Wt)lBd%Y-U#Yrv~R3gx$kG{qk+gE!14f&Q5-YM_Ef-QYJF|uK6|8pI1AM+5WkjizC@Z4SB*HaQ}W82eGbc1E`Av~$AZr`&lEbf zs^#ynu*mn+kh6EbX7rPq$t-;@;ts|Sb2`pQc;3s0Xw}O}`;}^x?zALbUBWjLmJC<$ zyCU%+ed-O{E+cn;+t>NeDP}u-BiB~XWO(y4aq)AGyHZ|dqP;2hC}CiBl{(d^-EBp@ zF$Lo-oJxyp$J~}G%jWSL{ijkqVdW_(zNI9E8mU3b6hzf!JYxL%;8F6u#@t?T?ZRjs z<~DLd=q+Z%3Pz`C^Vc3W5QAXG2g*6RV(p`t&l8TP!Z!=j<%SY}OG)0;*W+Uj+J3rv zg75v%PLZH8rTQHC3C3}83A!x4&84A$Z|JhhEi}Ho5Ak*44o&peo|ALS%N{m(2(!PC>*1Kql%ru?IO>o6%3I_Tc|BPl~5 z5CAB7gs|V^=&xb)?*IeQvhO$Aw|={O3;ezIt>5n60)MZ4>$khNz~5`%`t9y5@ISS0 zAt6#|f{FdTh3k(h$3Md9IK56UWS6bScOVfbb#4|Euw<+`zaai|0tf-a0RN*52b~%@ z1CGDw`YRnhh0cq_e-pDcjx0YIU90K!C$mimu5-{049 zLRUk-XQya$6~M;EL?^0$sug_iT!TO`+)#~5Ci-&{W~dx5dYNjf;jP9CAWnT+Nme-j zV(6tJx+kn(!s+bwSeJ5d_&V3qQLK}zUlBZ6BeA@`A5{A#YireGg1t@`_TpR*@5J^= zgYK{ETP47iw40Te2D*EdlL$cNs?3eT_}@qFk4=v}L~e;MrZj>)JjJ zza?tU0EKnH%S#MVC*&<#Q&^iSjW+ZhySeT;`E?!e~Ax=;2hR>5+q27^vW0fbOT;h<3t>{^Tg4t?;8>Upy=X@sHy8PZJQ}J9aaa5WxV@0qBMp zObVM7KB4C3m&8cZuHwj-&$VdNyXF7po8u6H24gzvW{<`E`}Xa3g;W8}=gmrTUEMnK zy15@A-46lr$?!+!e7&hoSQ z_^oA((L&Cog;Ost2@m-9+~0${)td1&5awdjWIZ0nXYP496c%x%oD*wyPeHAV^Um~5 z5Y;N{ryP*Rzw-7r7lnatX+vqqH~-78_33cmSl5~thkM%86Y1J|D=VjoJmp@V8nV`~ zjAdnO!-$!EP{>OZ`f6sWJZgq>iW)I7($(=yQaAKH{TQ!zKQqHLV{?1x>~aKZLnxER zI8$(o)Y_rZxgkgY<^}3Ir1vKx@(r(T27=1yc7rjQ(`UjnPT^}f+Rx&I z?biAlHh#-}h1}RJZX#V=lMYEZ!I?gC`5Kt_TD8j3lX9S z-+PPl9-sH8@Av)t-G4aeyk6&ZUiY5!e2nKaq#2r=1&benk@aIGj4R+|VPVmUBvL>) z9xFl`P#~QM78Vv1iUlA=aBv9(BGe+bNFaDn6jFE-KnkxxArLqjK!QtP5upaLfnh@- zI9PxwE}nm)H9E(S;g zaJVRd{+Q5`h+!HM2hIQ6BEavf`29s7p&UnF9{p>;kWe!8uLmWBBB2nh`nh;d`-x{W zW*R2|4!}WdsVR?JxRqUgEo$U^(-eQ6ym66FYUjg1#;YXF0P8vfGSEjDmAC=M6m`*(bU+l~sGET=o>b)l05IF8mg`j(2s= zOU~K--Em^Zk8FdjY|K{t$Ges;o_&@0h28S5_#)1Wi%S<6Y8G3&J0FNEnwJLHe)N5% zmT!CRT!w*hEU{pvb{+tZ8hvaFG@CAN)xJ0@F%NGaOT73xHZEkWA|Y8Z-v7DZ%Zk$G z^2>D6F!Gp@0jV5eoGaHpBNfOK+=>j52+`qjY)=7~1V|9O=0MFuMwqvmpY5nDTqRXZ zu^xZeL_Mczt^j5CCWp5sQ(LB-!kBi8%k*@d^;V<9HJ@Z3`SWI=l27FIB!>XC)yb@5 zjJBy>Z4+l$=1Hv{&JC>GyeXS5{~BxN6!-OtR26!`;D%dI`YfFn#YS7+p25cIxWaV% zdE6FzSs6v;GjMG%+D!;yQCMn4r*fOY=Th)yNXgEE^?kA4>v&VOm2=8DbWiX5LZ--W z5wkOw8SryocbX;D(*;$%>UbVhAiO669+=0RY^~`>P#n83ORlpBZ)x+O;UIV$r0IRq zYKh&vnN8QxwX`VWhIZ4?Df4@5-!fTR8sf4_g9&+3KF>IaBr_lXJX%o3Jj)10A3sR8 z5}*i>q4zX1B?PjMjRU~Ye*jWU2N|XV8-21O0A>)=L5fX-O|>*UjqGzbv(|hoYq*W;uGLz^RV5If3zcW)@b=w6#5%QMd0yx39aY8Yxw+ex}64 z<}-6t{6<8Q>Siidlf`Lw=xiA|N#yyyEBUoG2EMGJ^Fzs^9I$hCViO>!d?EPmK$dT zvBvArz7Ek_S4r>*l9;p`K02~_0ezD);^g7%)DZjyL(vI$^-(@r9j&=KtzPd3J#9OP zr^>m7{m*e!w1p(^zT19x>M5tb6RxYBA$Yo6^P~%(i9b(YPX48&J{IuZvc`=X#$8nn zlDBrY!DSZ^J#sS}avRuI?^l`(uyj#T0=0i)_hdnmUYj4rNxsgv3uRDCj1h!Xdg<@U^(?% z-hFSMP_QhFBigkdf(o^A~=g^qrZA 
z4rMRKBUhy;j+Dpi^{(k&w+#l6xFrA*JNY0y;}JBO^LIi^2hMQ5j5d{t=!f(M@SlF$oz7856+aj~GDm4DKoriVjDVw~ z@jplD!6zb0J$%J}m1f|e%z4JrYf7KRq8H^uliXUKHHY&689A}`Bc`7h#kugN&S>}h zDR0mYvy6T18?H0I9zlam-ga_`xRf(am!fAzA)V)%B z47j(Nf?j=K6rF2p+Q`d>k$UAmlBp=NI5xd7RJ8@4dR;t2hZGW$ZyVS-kJtbUd3@(^>aTVAvbi$vXYY(m45Z5R~Rb7+_h*<&> zeW>nVIAi8zsde&Lg3m;~ak~dOKVo&T#hJ&)o&rH>ocfwa5VCPy>-@N1cc_MfqN;go zsE%hiIVbyhizsV@#twFd1!?*qTc%Dw=~F<8_P&+2=CeW)e0@ZavApWz?TV#nQ37a( z(+OfDiXTxLL_*F3NQfbD3=@dIhiZHP7agF{krrV78Jz8X79PKfzh?yIk7$C&OM-w<496T5Z1AGq~5D2y--~ezPwgE^4 zoxPWro2-b4tCgqQ0b}}cA_2tDtDq4Q2Ul5HTA{g?=N~Qg-O?+LbV$REU>y}3;R@);dfS2-Edb~=;ncej4 zAf7>~8d)2&px5(+=b(8cqft}Y(0 zL>at&jScCx+R?VM%P(Qd3SO5s+>(mAL6F%_^d8QzFHCPL<33fFmPd#?#A8=^bMCA; znwh9~3+H&deL^hmAz$=~_PA?AB=vF|T`YmIE9Kx~d*!Mx<=i~q z$p^o^SGq5#yaf_2WS=UseRv&WPvhvOOzi499)-Yw>N zD@NG}Bfql%60ZuaF)~r$4}k&8U_36x4&A?fe3?tN?~&QjeM5p47=I5Q{|~SO{4021 zfJ__^Lu-vgAQP2DH$@MDEVL0$2~gk?9Z0YfPzdb1>cPmHBb`u6d=(ofOJ7S58#pt- zh(0CQWW-cD9u7X1UN&sn9(I;44gr>+mxHSdocud6#Kxh-lK`Z|#AT%Zh?S%OG3IuB zN*n_lKTFT=f)L%tqr^p^C6a>+`~XbR-v`3Q(#qAv+VTKP4+da=9{7on;19tG`HPS# zrIHgPbvXMnaKMMrR-DLP#gK(o|4kM>@63~mk4D~!F7I}<+(zq8qg1FS!^60I7e%GB z$4XMaY}0r*y|A!)B5@Ooyi;!Kx-YC|v}A;Rh`_woN%4-2FzGGNL~vTn?)@gAmOslr zc!jEpuA8J+#Vt1G8I5I=X+WN!bf8Eoqod!Df}VV|)6$j*Neg=sLA(t8}n2EEkFQu@88y6m(Q4n5j!$r8q z(3M!~9Gu7=XPz~CF8pEdx}H@^$#xYv{A(pkDw{RO;*%?nkG_23r^^hmvT%;nzma|( zB%oLV4_Unyl0ThglG;K*J{EkY;42LasCIYUTO-{gz;;z8HijbS4MlrFVENliiaoj= zr9U7N35B8&a_)#!aWc&MN+oSox2Wfsy><9Spm{)OBoKP_ zH<^Ix;e7x!Lg(;Re}f5pUl;ga-q!28uft4<4F$0N6qLV%dHvRgpSqgB0^#6OuBeks zleTB>J6^Zl&MOr{4y{>zc6+%n-oCz#>MZF}f9${M{JOnf%e+szuW#)Q_w1y-R}1LU zEgt%T9`}o(N>cU%uP8f7p7wl%bTZq$nIJ(gEShvzJ7NaLgl#-6g^8E3UvO!$B0{}c z@m62cKr$$%mfIF2G?{r^o%r?$eI^MLdwq1zBl5}5D#e-V+>ctaVBn+$Zt+iR_5rA< z1E&}-){0!Gb7&Bp0eb;!AB%{H;aO{#V&S$QfKoz zs<|AjOhTXuxb|(N^;BIiCxpgBHr7uH*ECcX>4xM}5JHW-7xp$;P4`T*v!}@fEMD}kUL%qk3!8%CN`#Zdn}14l zWwYtt=S!pw9e+@>QvOgV&4h0wTvwf6F@S!qPTTx!fHpCPf1JV&{|T!Xn}~TZ3F~mo7`vwib?d zgmo4vPxvL5KAvJ9+^4JThpvdY507aXPZ~-y4fa<*sj<9I?2K1_!oD$Wmba`6-juue zG|-#xmc4f7m7=c$mnMxmxnR5v-%MI>tg@Eq8x9y}+|a$7Cj=HwOXFzSz!=)NlkPV3 z$|>yc<(teI%lP2HV$WQgvGgV(Gh-C*SWA%Cd0}~76EFH(4@tD*;}G%JW7Z>VZn2)U z?trQV|F9U|dBmFEFpb~NxB{qm{0qRyBA(Yai}I3f0K=$pD+ z)m?7OIMSl+4hIdeqb#*Ro)o*pAT_x6Gwz1++0_q_=s4jF-lr4dW(Bl0(-)wg(hdU0 zoRg?7kp#T^yni<8+g&W#FH+y=WhHGbwfwRLP8NQ=s@Cd1XqHA;S6$K9 z101mJ02}m4{E;z;*mc5#{7n)-%QgA}J3I=h2G@rGkIlA$Q&^+GikHnT10KdCHejwaf0ifaSs1rT>?e=@Pc%AP|5PM%SnP!d3I>lkC%ZF-XZz;OGD!(ui>PHM1;`12 zX?CwKQ5!cFBvRyc7M5LpL(U(?a>>e}-#AUey^;6WfQEsd6HBY^NRO9ri9f z9};9^YD~KIBv&VNt-BrXK49M#rZn~qB;dN0gJc-HTs6y*>Af z&A6#2R$GMq$rx?*re6&yn2#yc)tv@9`f#tJLY4Q%e6h+ z?8sidz$%_BBKyG+QnIxp5U~`fCtZ+CkO{h%Ik`Vlc1Q+ zC$80cC1e}QT|Vt*BK8vpH(nC$%5urxR&)Fux9latPZYw|lkFUWZJ+ok+0}J}ac%qM z>b4gXq(^k|+HO_8)5%j-GN%0`1U_s-_ur)(RK#X(QbG?Lsr1AIvyzE=6-d3!Zj|R26QR@-DLWv}j=y%=A3Y?wEEJ(lI>Sd*f3hS@8WFt;D~Jpr76tNh6o z=ltWwc|aqk*Vv5}+~sPuf9Rrkd=VQ+cToqU_Ypt`E2(Sg3Yn`SLcO5CLc!SiOqBuQH=W+2A* ziub9^Tw|Wguu~JGI^{aikBn+HTc%F8@bwqZD`SCf6CKIv^w(h@1`?!JwDi_nE!dll zRF}sn&P0nozcy$5u3}u_t9gmiP~0?aLT_P%;dDg(170CSPpX(kzT`la$DQCLe`z)I zwx-weqHn#2B<)PDf7#4?Ijb#k0;{$zO~At^MM|9GqiJ_o6HP7a8=Kn64U33%%{piZ_jF8VX=2>@Fg68PMTUg)iW7-K+C~-c4MiZc1Oo? zAJ3Q7JxK-8`t)?alW35!h@GmQ3yFA3XN$G z(U^AU0Mqa!r7F1$f<{yy?DE+PT)gmSO#8nlztQ3r{Qcy&s3;nHf0u=&JI{*H^0M`GOS9uX3fvf(wQQkvc*YG7bta&X#svt=>bEq$@H(fK!8CB8|_SIyQ zB2$H0$vo%Uy8ZSQa6Roh5qOU^olJ~XhmwfL^N#bEbNSFKyUI_lj!}H_EPeb@Fyv|! 
z2i@DnG5Ka%u!vqzLFu(!><3->)ghySdA{R|{iWsJ^Q^cwF}L{pN0%Jlj>box#tIx) zL0o?wbopGss+50**@O0ltJY8S)_hdAEW_yEm_IQfVz+mZVt)oIJ}@tv(74j|S>~AC z3TdvD)6D%A=1xfx)4@5${6=dp#k>ZOmg=*trRBz2{(Hl5o{0~HWZvbf4Xu{IDGSc= z-{BSh9C<(PtPiyzk6SVih+Q=V3~3JLUf;pL7`mX)S>yt$b^CwE~fbb^Cw6e;lX zW<6?RS|Q7Gd!E$scbh`c*h`VG$WyuNtwtu3qjU@ZdRyTgPbK?|iqntp$z~72 zChJ$TPP8e#rc(o{#Jl@6i&pN+?~y*W7TEUY<3cPe)#Ieus_W@&=&nnuYu#Y$6FMj1 zFPOi6%A$rd#(3+sk1Q0$f(yIm^yWX3X<^@C-$WC&@_UWq0#RAe@UxU(|~`2Q%8a9Z?odRkypY0&9n;m|0%EX zyJ;0n{RyT(=Fiy~r{4hlBSkoe@O9;UY(rv&YjS}K2PD{bXoOv9s)SCMT#K?B(C@0J z+j*&D_U;V-j$0H-A-}^nhsJJ@8SlM<#j6pbS2$O9UQmx4VbDr`jkk8S$yNb*nc4e&K7;G4gHi52He(&D9@(7RU;{5w^}N}GK8SHBBL`h zlIxjvwtAJXZNevywD)JAdU;a8iTV%ZnDN!gZgt88yxHfrO5qifKk1nF$@V#-Q@cX( zE<3woPbDQ+=WAv^*_rg>S#i+)<}i=rjI?LbeTa7LNjbk_&ElQr+kwl3=3+9D2)_X7 z_i0maICq)(`d@xs_>^qEBRCSgcKV*T5DTvm&$ugfAPfBs$)||}b8ECI=iA&?rRtNh z-m=kX^X^#K(PY|IzcFl(<&P*yMhPz8MpViUQYXWoTp}PNtPLHd+^f$4d)Lp6)?jZV zCq&<3Uth)QG;8|W!wF&$&G7<=9e1!Ulwm26bEnt9BAf<)@qDDRT9F})7t}GyQQx3qkQW?l!;=>w|OBW=J;E1I*MMW7PB60+-yg}kL%vZ%VWQ{?)-}Ej{*=x7lHX7Djc*lat0iK z)Am1b{*p0Yr!&PW2x(-(Tl(LZ?H& zt5dYQ3gF^mqlM}pk_F#O*B}rqH&nf{so}h|IV#(WQNDs^c&i>fhgX~WATv^EDg0sq z!!wRAkqiz79LqV^e4T3+g;DzQstnA73+MA!>qUs}zh4tSv9twy z8JXXwsYowz`m%#T3HUVAD0V4lP=2BG^)A!MhOddKLA2{irjGnK9|HTU8Ln;PY%vlR zOi)-mTgd}s6se+BOEO1e+4;|gj@`UPPCi|(oX0nU-7j-Ga_k>_YH*x4ctmz|wIcG) z?uA~{2XxQ1GCV6G1sUfGFY>*sx&=qVe9(2Z9vG|K7vqM3N9$()qYw3O+%RHS7g+fVd_IMnnzj;sRP7*d`DsK zz;X{8-#L5zQq@rH^Mc2$LKl3~aTsVp<+q8O^5?dBT;2!KXd|eXm!KUmqLor?L%4hu z%NvcPCgOoD<(Azo+S-G zJ0y>=nwmDjcb-OtVck_q!PB8+jtB z>3CpU&m!FJX4a|4^tI+S>8~5oGOuDU<_0XjxHz;j&&Xaqb-9otBNX~<+BM*tm2--Z zFfYL|4g}O7E2*VW)GD>KpVOkZ2aLJvP#8bTMI0WnI>C{1il*Qip>1leaImpPgHWC* zu46Ug1-k09TGwpH#!wR)asg;2IcFc>omPo;CT9JTloKf>lrCqXdRue04#uZ>eD{+@ zf>~31_fqeL&$Fqr%r1@QUyGu%q!KS(u1&C&Og5O|J1r1OpZuI-T7Xk*L*M5jiE&C{ zkEY@NUQ4>Z?%4S(tF_EYJkVz2DYNCCTMZZ2fbN|pZh#{{7C}i!k>D%sB6?i2r!Fht zMEjSn2l3bBjGkB;k;^M(@89vuPB&DXoQgv+`*MU0g10R-OI>Pc{e<*wbk3{RoTdqU M9Z-%gw#R(_2g)|X-T(jq delta 24 gcmbQSh`Hei(}Ww$&T|=7b+lO6XtHj%v|BP*HNd#|FKQ)adlLdV{_NRbiA%E~Syvrs~W z=zA2sy~pSM>HF>X>mOa$ec#u)4)^tXJYUb(E3gY1TL_CCfo@yGjcH)zVqlQ)CoscU zsx%?x*&qo&1_lNciUA<`F|qNuLzF|dh#)vn6q4^VfaLoGg+SoM01-BxS%@;k3Wf=V zU||48*!Voy5KIVC1PZ}KT>#Vo%40Lc03Hm2fx^s$24Vp4zdVb{pt~oZg^LrM8DKoP1|y^T`G}6Ymxrg7C6l&`izkzkAizLL1Q!5A z0XQ51p#K?C5(yj(gbs)Qw@rZGck%0+KtfrLe|hv@1BQeWqyKtPd?*qM!DyI|^{}0I zGHa}Q7GMGF1eTlgPV==hORhzXTxy=?%9AuG@=ob4y+(VJs0CnLrv^eHasx|i7|xHeZl_DZ(*K-}&lktgAsM6To9 zocEM)a(jD*koE)9U^^4N1=pFE%O)3IBz$2uzbCka_1t9nGELo5drx<7G_4V)T7(^5D)M(@Z;Hch*j%&sS7W7Tm<(y@NeeA}uk zS{~X{iNZ*tMqY_z^I-*qeMU-=#JLvfB4rT6psaUj_+7iJ%BxeE zCuyxyJUb@NGc262D4l=xF+N^AUGg=?titK=>J)iup6iXbAM~3$EeVXaPM*ia=n9}q zd=|6CTvGq8Mj(K1({FC1sOzGhx7o9a=n4`iD7 zHX$>8r4HBWaL2h5+FGEJ=Tr9LY8kJItHq{K=i2KAWJph5o+Hs*g12_KQ?TGo`m1@J zvsh*}ZDG=KaH%K?xuwxOq+puH^fiN_wJ|2M;yONi^5E~#n=;H^; zk_DszV)Q#rPX>W}!^8q$=pVp|g9+k;2~70K3IXWBgNYNE6qw}8!!yW!H)Bh+Nj{r% z3y+gZ>4-IsziQ}nhJMu`7*EVc0}33UQntj=KFi3&h?=pqCD!ZlDRtf6OHoa(ZTB@I zBs86!C+E^55LPjkx0otPy+dV9%St5a_s+>-Nasuc*(N#ZQhLqL+FxTOz$Zz`b`EV# zdHJ^49Iw;1IZA=x+D#jEajBR~Vk{TsmtGjdI#x8V23%pr4}9Ta-G{<5GhNJikXM$! 
zb=cul7}QMOl8 zDuv0fVz05);*n|&<5oNhzY5-DHJ9a@%j(W#k{RwxwH$rVo^gwOSdF?;Q^rm5oVqJ( z2%+lhp#Cnz?VCincz5YE8b3HNc>?`YVuB>W%%>o@i@JyjHS*_r zREZ{StglyImg$w4-H_P8w0QTiSqDQ46~W#9(zk2yeJCbX)yp!Ku(I{|WtddihC6p3?F-mHDn#z@#{Xw?TEYOyijM)5!wqkt_!|GkLXA7=&YpZ zH;wO*^ibE_Ql*W7RC6R=1Q)Broi5kezG7XxrIo3-y_y?{8e*ubBPPD+#Z~y8%x88Y zGKjeZheV0ABvcZo&#SIy-TFF!#4ZPrm`R7>8Mj2b(yTvK?Y>EOL488$=wDu&e~XOZ ze~Hh(9vRW$836|b5n>0y`A2vbMh`_10zzU!Vupa-pBX_3oH_{jAU@fNqlnLYKnXs$ z3~=t?3XDwj=mOIb3X}wyi8Ur3WAI%l2}X_|_PMgJPFxE5c+y3q&qS2HkaSebN%l4&2XD{gV!IDamWC5{zpf zJCzK{a@DFJOsCgL25wdE;;u$arQtPNUB0x>`RZ zsJO}^Vs@Xur(3tOjmuP5Ly{_M+fIpACRSZ!c5-HMsCElJ{kmk93N_Nis-#qvLF}AW_b=l< zKONY0NiY{Qs!LGdydlhO&1qWh@$we?Y3#}yuZ%WNQLC+7nKBVrTU=bm&nt)V#8nFW zJ>4q7W$?UuJxthw$JN3%b!nW^}~+#u<*Ka}F^4bBp=kmXlk4bjEkFG}-XK-4mj z;7y);`Mj~Gx%#=2ao!UR2A%FCTr#VJtxoLTwxlv-1}U%Ec_16%>X*iSdxBJ@q?JtD zgET#YNm!XLnMGLYG<7l4EQ(V5Thn#>iYfrf8s985)SeU);a-&SH;`1S+NoZSMBqWY z9M2N!k^YF%AQEy3KtgnZlLvwLd#J_*u+ae;9ccmjpTXHyWNN8e;)^M1$nL)RHQX?( zjSI5h6$?EpCKW&t_|MV%uhITM715u@Bn0rWanS+}3kwGZ!T_J60R)2S0N4R+M`HjI zLS^ge=_=08?_%NMdZ?IwoJc_C=Uvbcp~Jf@%q`Gb%i~Ws@*f?d!!vZCrj!KvEZjW- zIrMfwdPdB%fH)wE5=IGzAdVJtcNF-wRU84jgD=H|iPn4o4mOre5FrEx;9_GR{09^A zmq_#-VB)Oul2N$cA7bjnlLtV(DJV_O7iGb9-s*Wz>#f`F6&M|5jsOSQ$@EzH`{(!4 zv;5fyMe4+jm7Nj?#1JfR3u(A-O{uMVE_zAoa{$+Bfw7~AtX!G30msc%rzFDxY@ zqN%z*Lz(LO>ybbjQfjJh^W-4C&Gs4Zg_c=bQ@Jlvx4(IdNv6L_?ooXMaxR&&XCxXZ z57O)pbDZ0Y?eh+KLsxEQv`&=&VJbYMUTj+Q!kF_D;k4a3=8J~UI^NlT%9I+HdnE8e zLzFx|`SY8XI!>d;P;y3T0e{%r#L8x7FAwcb#gP_4y3CyOQQa=yBD}%pXkN0vqk0;P zQ!WQ4hA|;sSGzhs?r{kjF@oWux?3U|PGoI-BPB1N3`Ety8;o9FMS z7s*S{M6I6Ue7W+8N)LydpQMEl*Yd1DOetsN+0K|S0-^?qjh779qCY+ampW@!eaPk) zIL{TIEY0C(ag^Cc8lAH4ay-$|8uDzDU4RhhJZb<3YHu$MD4!S1*J657LK}?x_7ag( z2bLz!dsVvhoIO+?Bev(l+K_haURkg*^R~XkgH+f`h{DDLbLR%zx9nyi?_ZP^?BAwe zu%HzW(eu3kAaQEZIYtHw{E=Y5?$e$TVTOK7JhQ^4)L&+N{JkMUCm8<>kN*ek0RIk; z1CR*<0_a@h2xJIh^bm0bvY;k789<6naF~Lfg+gH8vz~+W=GY{Z3|HRD(cH(}-3m?* z(4tQXCNUworn{ZDxu+GAhP#cqv)xs5(9_Pv8BX$D7-C|P;RpdD0)k>9e`e(gfbigP zTrw;jD_?Vu?+GD#j6;SkgHDm`oZ*KsMgKlzoXss?y9?vK2=1?lTQWTVDni* zh-QtIr+nF=@M?Z;X7NBM9)qM?VmjOhRySHc!aRg$TJI=*$BOU7ZI1+SM!@FXCce5K z!#6O1yq2nms88NCI_e39d9%^gJRZ?&{3)~!zC%*ll97(fTl_?=%td&yV$@Yt1G|Jq zoHb_TU(+0YAl$j*Hw`6JDUA)Yg)M3PDhp0iq9Wvb8~iEkVv~z@W`o~gbaYLK&OO(9 zZV+HEto5wPrYr^{*MTEPrJq<#f_F!Ji8T{Eb=?(n(WSXEt*-i$6Ov=mUOqzM!wU+u%ef8IRA7tnfY4tb?h5gmeK~3>~=F~ z1Rs_N-ZSH{B8hPNWQw&@TSG#%EINx!kgX_$!L*Z z+66)KiKGz}KdMB@f7dfF zG2*PgrhwchonGl!6jGyScX8z0$M>GXPw4a3x+IfNazQr6NHaHL*^Gvj61dGZqt*|g zA{NcxrfjuoB`jIT8!^O>E6oSWUhd`ts|nTi<-XmW%-(-aS#BHM+gfe9UWjS2Xk1AS zH)4>)&|y?jpUD^@&=KRey}~Win%_7+`gEa|)tUcmU`4C$Qz{e-v8Xz?qLQe1qHUou zj!nP&JXg-NnDp+dCSKh;-qV?<>xG?#^L_V{mRo3XZuV9te;(nUQG+pV;R#+bwqfw zDe{`>FF4ESf*Sje_V=^MzL2^ZCW4o=ff8~*gA0np+-A@FR>KHH#%@eQv4w((Vog6K zxG-7ueB(@@3>q)4`}nYwH`S1HBUt@~5_CP(zOWUcc4OO`$N`R|aR-nz&S4&3 z8vLC+4@&Y+y5X2pib5YEs$l^CR)}0VX+dx z4*>kf_!PkZ)BeQ6gtK5Waes9Xo{LHBd+8t+UdwfQX?WUgidbgYoTUK8k?E4zfBfX2 zNsEbuVlb5AG7)k;0hcM#?#7v9e08I!?xi&yoGH?_LLn7*eYj7uMXo4>;mA#*o`V^u zV7lv6ukV+Jwp+Hn5ZkF`IlUKFJvxC?eT$sp{$;}EOG4zI)lw`II%HYC$WyIE!Ffm* zSFTyOnDkPPj80XSekG&PeV90&id(pSUM!VSJ*PzPOm82{G5!;e@aJO zF$x1ukdkm&mq1vYwYKKM?kl=PpU}q&rXWGWBd@ok-d0&eciI(j%fmu;KT)fMBs_Fk zAfda*qyETZ?!C?xFl=22I!|a_6E_Wgd=D%7vc^J`d#U8ums)~Mkx!c3?KNCCHxx2G z5|br|Z*c|VFYN`~x1z&0srBuNxY;kz(N<<<>wa-q**2y(%Aiq?W2em=uY;;!w2E=8 ziOFzgxW!!zu^`6Z8_I^v+ShH`JG-yh9w>6*jwr9i%c(I=5pc#B&Z9XJqGBNwPKfYYy9&XZcH|Ay!SkpJdrIpFj1^o&Tbj zkvp)qpitN|Z=`?jHUk$zL5xzsRo>du!U5js!F}__>jlR=Ex!01@B?R`bPdlUm+|ev zQ>I0FFSy$>kd)v#07)tF^W}oUAaEol4|M zH5QbF>J7`jJ8a=hUA(~Bg6bp}6)~EDXkCmzz(oWQXh#QyB*Q}+QO#&0s_>{zAYZnA 
zI}KMXw_*I8>6|&m@%NqpE#ocztZUKg^uYf_7YhFB0Z9O22Nz&uh@=*z`YI#sMwJ@<#_`WK8dPT;-fSI)Y7 zRf0Pew0V8yltEKL0%=}%VWs^W60Qh_D;9PG2B|`B4dQo~ZfnNSF}-g~k>I~$J!7|E zw}1KB5Dybw)7{$-ax{b1dOC4(0o#rnvSVLCJht1}NSZPG$0HIQ#uL<3!Mh6YiOe0- z_T5Zcuv5;iw(|KA(^_lKycm))9g`cl`MI}kQyDyCqTqGMYyS;N2JMZeNAuc=50!>} zxx^f%d@tRdk>^nzOnmtfm-0Q*QvDn2Oh?Fv2ieS)u1kY+s4ao308h43(oIuk<1gm9`}F?o-Zc*?#0PD`-B(YRUZHpe@6Aj&TTn*&V6ioz^OQP;v^J{QVVNvH?2=e@pc=;>nkN zCXt0jny1>UI_id7By8ogawKH;JOVzv!T;tbNOC>b2;V>rLn_$LtL=v6kXLuiNpHOz zww>V?2j<6%jDku0;_n?GwhbvzGBck@8tib+YhueBvZyR?)hS zZ9O3IbZ03}bVLiM<95wk&5U*NfruNO($@}EG~^aqMTh=X#e&~cyC25WfyAOXkgul? z3IxYpB3yv!V4evRon!tqSD*mF@k4%4KRa@x9#f1usR#3WaCxWjdqvta#iYM*Ip${N zRDUE}_}JZRnK20J6XcO#TeJ+Ee2hr+Q?Px>4q;CIxPo6(w6ZpN}3PA^_ z?{YSXW@rLc@Gyz8&NWIS;;e1z$mwsLytCc!jalNCBUzvYk9q?0M;B&VSJ;seZv0el zE>urA4o{n<@~o^#Dn~zZcw!Gw>v{Degd<7b8hW+b&DCk51wji=GDeYheYHF-&#(2Q?)bh< zpz+SM^BT~cl|T8Ei7355zW2+ViVv$F+4$cGQo1DAsO>2s)R`u)mow zysIG)^(x9Iz)K+`$AH})rZ6$8`A{>mOs`(8by~5NbD(5F4#O3k=t@eb4u`#e6({mh zU3T#)U({kCNm>J5rzQRV`nUIEJ4qlnw6am$I;a9cp zcdp;{6IC|tXnrk;nDiPFwlNIjsUkXTAV3h8rFblR;UQY!mL#2-~ChB!Q?+-vc1Jmpwg zROTVRsG?Xi2zFJAHjXUQeXh=UB*!~>K`ns6*0s?G{}pt~$m|K{H_GyC%FDDrQXFtZ_#5!sC|S+XdQ>NGJMdyFf*)lVR##1@#z#wH1LeyC zNn|3>Oe;k*?am?7aD+u_*mV3yl#2H_t+`Du|CMR~_uBVCZVUc$?HhqWv-b~AN}f9-Fte|1&F@4j(y=i*Jv2ij}iN?YbPsNa}A&>>*9brxZMA`KRsR!*n} zy!9MoGXy^#uUeit_}x7zkX*2*U#;llJ<0tOk1V-&yg1opR%9EnQms|AH8-@@g;msV zG4=E63;FTnuPd0ovv7E7(%AJlM=<4tUr0pKwSf>=$^@edHNIjM22a^k1qfXiR~B=hShK{bN!57s@KQzgkrR z|NoR#ez~ghXaDJEamMi%fPbt9=LlZ`m!caJs$G%_n{MG>zY$=h}6RAS&dt`)b$JLl5G-lQMfTLNI4^dDtm*=O)u>LAXtK^~pOfEt+aq z=m~L=`4sg9dOfE+q981%Unbg}Q}Wq$_aqnt@Sa%vh4y1f( zG6!uD8v}b&%>?(-W_pERa!{6DND;n5`(#TLY9VQNH#g0_tfj4~$=+@BIawfHOZENk z&``D~8d)l}KGt!c+*9A3hid1ETu-=IEJ2T}LVUYh?CP6ur!A5>cqPv{aOaEL6t# zs_47a={Kx<^qd22+l!x)Om}%kuCFPkdGRuE@UoA)oVvz9eM|UJ!mIf;%H&HOuB##q zNf?t%6dD}6W;PTV){o!lHi~nFlqaEhRuW}u#0O6$!5>_~BgU@}8YSCr$nFEz^+)S4 zcaRf^NzCw7jBexR?Os+8gJ9Mh%09Yk}b6^f_CGY`^a2NQoyy0fXP!^0T3)3|nm zXKrY>fM5QU>H^sb`f(9anhc)Jl_CGH=(0)_8ed+A_&RZqDtf#3%)-ZU&9}JHREEd# zg>ESU0jp!#(EqM?>##R~4PZHd4UCNTXO<-6;o;?uZrxIm|Ixj5n3N$7y0`vF%HVJ~ z0E!(U?DshOYZ(1Iz(BO@`;GRk-|pT5f3JP(x4XB%-)rCc?d~n`_u999yL$`#PwiVs zh&Y;H!hdh!`lHJ6k8nCpFNLMdvW@soB>arlt-L(uv~{N!#D7izAz%pLca-6vQzIw9 z;TK(hxud7hd6CGkMDS>-|CsareH}-1 zHS~LSiZ)jPY-~()qWY&=!S~KJ2n53w)gWi2yC7Qjm{LV1^i zO$un9uzU%nvD0B$$qw^zs;42}AXB}{f2KzC64ScC&#>W$2~*&L2_rBMjked%!fn%)dIM9DSx$rpCI?$n&8>x zEm&JxyT zhu+z{+-Fow^-TS~M-8OlzJ8$z=iA!ba3std-B#;<;FSA(cf-JAZL|N;hx#2i4Bfo@ z52oBtj~v9pKn3*yG(X)&v=dJBCr1%&g&+0$;$gvwe-y`mnt%Y$v74cUa5{hnKsUr- zl3TCxAex)oiIK)H3nSW}Yfz{5NdL_@j~fN;@!BPo$Z3#t43ps>bDxz~N73ImI75u$ zeY}nAO9DiWFV7F>1lR#IZ!nqqH)owzJXyN>UPRj~ZuXp)anu8u;B z-0mCPVx~P$bhp)6IKV8T+O*~AwBZP8ENE{&X&aA|}TMMiu z7Mge`8M>Jk(4nKWc9!g1=0@i0ZebFE(Hjnh4e7}nQPy*QZ#hII790>QxOnC|@6XG9 zdJ~w%#x8ygk@s`+O?jE6q7VWt0)_F9IEb*BwyALlq-ilHWOD&YWg8lXSMJ%Ct z6`ZYfyk|N7-`+-haPFI1#2CegO;t>vRwNW9GX0wAPLyf{$6#bvpw=s0_6}`$eDR6k K88tEXt^WZ(-_2P7 delta 23 fcmbQRin-wo(}WvLQmGSf`A=41R^J@JxM2wZbTtWR diff --git a/news/5252-sign-stubs b/news/5252-sign-stubs new file mode 100644 index 0000000000..3f8bec0b49 --- /dev/null +++ b/news/5252-sign-stubs @@ -0,0 +1,19 @@ +### Enhancements + +* For Windows users, the stub executables used for Python entrypoints in packages are now codesigned. 
(#5252) + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* diff --git a/tests/test_codesigned.py b/tests/test_codesigned.py new file mode 100644 index 0000000000..3ed13086da --- /dev/null +++ b/tests/test_codesigned.py @@ -0,0 +1,97 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import os +from functools import lru_cache +from pathlib import Path +from shutil import which +from subprocess import CalledProcessError, check_output, run + +import pytest + +from conda_build.utils import on_win + +HERE = os.path.abspath(os.path.dirname(__file__)) +REPO_ROOT = (Path(HERE) / "..").resolve().absolute() +STUB_FOLDER = REPO_ROOT / "conda_build" + + +@lru_cache(maxsize=None) +def find_signtool() -> str | None: + """Tries to find signtool + + Prefers signtool on PATH otherwise searches system. + Ref: + - https://learn.microsoft.com/en-us/dotnet/framework/tools/signtool-exe + - https://learn.microsoft.com/en-us/windows/win32/seccrypto/signtool + - https://learn.microsoft.com/en-us/windows/win32/seccrypto/using-signtool-to-verify-a-file-signature + """ + signtool_path = which("signtool") + if signtool_path: + return signtool_path + + # Common installation directories where signtool might be located + common_paths = [ + "C:\\Program Files (x86)\\Windows Kits\\10\\bin", + "C:\\Program Files\\Windows Kits\\10\\bin", + "C:\\Windows\\System32", + ] + + signtool_path = None + # Search for signtool in common paths + for path in common_paths: + if signtool_path: + # We found one already + return signtool_path + if not os.path.exists(path): + continue + signtool_path = os.path.join(path, "signtool.exe") + if os.path.exists(signtool_path): + return signtool_path + elif "Windows Kits" in path: + signtool_path = None + max_version = 0 + for dirname in os.listdir(path): + # Use most recent signtool version + if not dirname.endswith(".0"): + continue # next dirname + if int(dirname.replace(".", "")) < max_version: + continue # next dirname + + maybe_signtool_path = os.path.join(path, dirname, "x64", "signtool.exe") + if os.path.exists(maybe_signtool_path): + signtool_path = maybe_signtool_path + return signtool_path + + +@lru_cache(maxsize=None) +def signtool_unsupported_because() -> str: + reason = "" + if not on_win: + reason = "Only verifying signatures of stub exe's on windows" + return reason + signtool = find_signtool() + if not signtool: + reason = "signtool: unable to locate signtool.exe" + try: + check_output([signtool, "verify", "/?"]) + except CalledProcessError as exc: + reason = f"signtool: something went wrong while running 'signtool verify /?', output:\n{exc.output}\n" + return reason + + +def signtool_unsupported() -> bool: + return bool(signtool_unsupported_because()) + + +@pytest.mark.skipif(signtool_unsupported(), reason=signtool_unsupported_because()) +@pytest.mark.parametrize( + "stub_file_name", ["cli-32.exe", "cli-64.exe", "gui-32.exe", "gui-64.exe"] +) +def test_stub_exe_signatures(stub_file_name: str) -> None: + """Verify that signtool verifies the signature of the stub exes""" + stub_file = STUB_FOLDER / stub_file_name + signtool_exe = find_signtool() + completed_process = run([signtool_exe, "verify", "/pa", "/v", stub_file]) + assert completed_process.returncode == 0 From e235104aca7c6bc0039a10f5ba2f2889c214c24d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Apr 2024 11:01:38 +0000 Subject: [PATCH 363/366] Bump 
CodSpeedHQ/action from 2.3.1 to 2.4.1 in /.github/workflows (#5321) Bumps [CodSpeedHQ/action](https://github.com/codspeedhq/action) from 2.3.1 to 2.4.1. - [Release notes](https://github.com/codspeedhq/action/releases) - [Changelog](https://github.com/CodSpeedHQ/action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codspeedhq/action/compare/aa9939428da62f095e83438509c41499e7a2d751...0b631f8998f2389eb5144632b6f9f8fabd33a86e) --- updated-dependencies: - dependency-name: CodSpeedHQ/action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 04a47e76b9..2819648dea 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -229,7 +229,7 @@ jobs: run: conda list --show-channel-urls - name: Run Benchmarks - uses: CodSpeedHQ/action@aa9939428da62f095e83438509c41499e7a2d751 + uses: CodSpeedHQ/action@0b631f8998f2389eb5144632b6f9f8fabd33a86e with: token: ${{ secrets.CODSPEED_TOKEN }} run: $CONDA/envs/test/bin/pytest --codspeed From 2fb469ab10344968feaac21f1973f791062c0442 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:58:17 -0500 Subject: [PATCH 364/366] [pre-commit.ci] pre-commit autoupdate (#5320) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.1 → v0.4.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.1...v0.4.2) * Format --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard Co-authored-by: Bianca Henderson --- .pre-commit-config.yaml | 2 +- conda_build/_link.py | 14 ++++---- conda_build/api.py | 2 +- conda_build/build.py | 27 ++++++++-------- conda_build/cli/main_render.py | 2 +- conda_build/create_test.py | 24 +++++++------- conda_build/develop.py | 5 ++- conda_build/environ.py | 13 ++++---- conda_build/exceptions.py | 6 ++-- conda_build/inspect_pkg.py | 4 +-- conda_build/license_family.py | 2 +- conda_build/metadata.py | 28 ++++++++-------- conda_build/noarch_python.py | 8 ++--- conda_build/os_utils/ldd.py | 2 +- conda_build/os_utils/liefldd.py | 10 +++--- conda_build/os_utils/macho.py | 6 ++-- conda_build/post.py | 19 ++++++----- conda_build/render.py | 2 +- conda_build/skeletons/cpan.py | 18 ++++------- conda_build/skeletons/cran.py | 37 +++++++++++---------- conda_build/skeletons/luarocks.py | 10 +++--- conda_build/skeletons/pypi.py | 54 +++++++++++++++---------------- conda_build/source.py | 22 ++++++------- conda_build/tarcheck.py | 6 ++-- conda_build/utils.py | 10 +++--- conda_build/variants.py | 6 ++-- conda_build/windows.py | 9 ++---- docs/scrape_help.py | 28 ++++++++-------- tests/test_api_build_conda_v2.py | 2 +- tests/utils.py | 21 ++++++------ 30 files changed, 192 insertions(+), 207 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b29ba4260c..fb922265c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.1 + rev: v0.4.2 hooks: # lint & attempt to correct 
failures (e.g. pyupgrade) - id: ruff diff --git a/conda_build/_link.py b/conda_build/_link.py index af841c0275..e8984fcd37 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -26,7 +26,7 @@ SITE_PACKAGES = "Lib/site-packages" else: BIN_DIR = join(PREFIX, "bin") - SITE_PACKAGES = "lib/python%s/site-packages" % sys.version[:3] + SITE_PACKAGES = f"lib/python{sys.version[:3]}/site-packages" # the list of these files is going to be store in info/_files FILES = [] @@ -110,20 +110,20 @@ def create_script(fn): dst = join(BIN_DIR, fn) if sys.platform == "win32": shutil.copy2(src, dst + "-script.py") - FILES.append("Scripts/%s-script.py" % fn) + FILES.append(f"Scripts/{fn}-script.py") shutil.copy2( join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" ) - FILES.append("Scripts/%s.exe" % fn) + FILES.append(f"Scripts/{fn}.exe") else: with open(src) as fi: data = fi.read() with open(dst, "w") as fo: - shebang = replace_long_shebang("#!%s\n" % normpath(sys.executable)) + shebang = replace_long_shebang(f"#!{normpath(sys.executable)}\n") fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append("bin/%s" % fn) + FILES.append(f"bin/{fn}") def create_scripts(files): @@ -140,9 +140,9 @@ def main(): link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, "conda-meta", "%s.files" % DATA["dist"]), "w") as fo: + with open(join(PREFIX, "conda-meta", "{}.files".format(DATA["dist"])), "w") as fo: for f in FILES: - fo.write("%s\n" % f) + fo.write(f"{f}\n") if __name__ == "__main__": diff --git a/conda_build/api.py b/conda_build/api.py index 571f08f534..cc866a865d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -418,7 +418,7 @@ def convert( "Conversion from wheel packages is not implemented yet, stay tuned." 
) else: - raise RuntimeError("cannot convert: %s" % package_file) + raise RuntimeError(f"cannot convert: {package_file}") def test_installable(channel: str = "defaults") -> bool: diff --git a/conda_build/build.py b/conda_build/build.py index f1bf8eec02..6dd2b49256 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -774,7 +774,7 @@ def copy_readme(m): if readme: src = join(m.config.work_dir, readme) if not isfile(src): - sys.exit("Error: no readme file: %s" % readme) + sys.exit(f"Error: no readme file: {readme}") dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: @@ -1187,7 +1187,7 @@ def record_prefix_files(m, files_with_prefix): if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: - ignored_because = " (not in build/%s_has_prefix_files)" % (mode) + ignored_because = f" (not in build/{mode}_has_prefix_files)" print( "{fn} ({mode}): {action}{reason}".format( @@ -1204,10 +1204,10 @@ def record_prefix_files(m, files_with_prefix): # make sure we found all of the files expected errstr = "" for f in text_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f + errstr += f"Did not detect hard-coded path in {f} from has_prefix_files\n" for f in binary_has_prefix_files: errstr += ( - "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + f"Did not detect hard-coded path in {f} from binary_has_prefix_files\n" ) if errstr: raise RuntimeError(errstr) @@ -1276,7 +1276,7 @@ def write_about_json(m): with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value("about/%s" % key) + value = m.get_value(f"about/{key}") if value: d[key] = value if default is list: @@ -1332,7 +1332,7 @@ def write_info_json(m: MetaData): "# $ conda create --name --file " ) for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): - fo.write("%s\n" % "=".join(dist.split())) + fo.write("{}\n".format("=".join(dist.split()))) mode_dict = {"mode": "w", "encoding": "utf-8"} with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: @@ -1355,10 +1355,10 @@ def get_entry_point_script_names(entry_point_scripts): for entry_point in entry_point_scripts: cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: - scripts.append("Scripts\\%s-script.py" % cmd) - scripts.append("Scripts\\%s.exe" % cmd) + scripts.append(f"Scripts\\{cmd}-script.py") + scripts.append(f"Scripts\\{cmd}.exe") else: - scripts.append("bin/%s" % cmd) + scripts.append(f"bin/{cmd}") return scripts @@ -1520,7 +1520,7 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn("file %s is a symlink with no target" % path, UserWarning) + warnings.warn(f"file {path} is a symlink with no target", UserWarning) return 0 return 0 @@ -1764,8 +1764,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): var = var.split("=", 1)[0] elif var not in os.environ: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." 
- % var, + f"The environment variable '{var}' specified in script_env is undefined.", UserWarning, ) val = "" @@ -3295,9 +3294,9 @@ def test( os.path.dirname(prefix), "_".join( ( - "%s_prefix_moved" % name, + f"{name}_prefix_moved", metadata.dist(), - getattr(metadata.config, "%s_subdir" % name), + getattr(metadata.config, f"{name}_subdir"), ) ), ) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index a5cbb8b443..6e6f2bfa41 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -66,7 +66,7 @@ def get_render_parser() -> ArgumentParser: "--version", action="version", help="Show the conda-build version number and exit.", - version="conda-build %s" % __version__, + version=f"conda-build {__version__}", ) p.add_argument( "-n", diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 1a8a0f1c34..441fe4a17c 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -122,7 +122,7 @@ def _create_test_files( fo.write( f"{comment_char} tests for {m.dist()} (this is a generated file);\n" ) - fo.write("print('===== testing package: %s =====');\n" % m.dist()) + fo.write(f"print('===== testing package: {m.dist()} =====');\n") try: with open(test_file) as fi: @@ -134,7 +134,7 @@ def _create_test_files( fo.write( "# tests were not packaged with this module, and cannot be run\n" ) - fo.write("\nprint('===== %s OK =====');\n" % m.dist()) + fo.write(f"\nprint('===== {m.dist()} OK =====');\n") return ( out_file, bool(name) and isfile(out_file) and basename(test_file) != "no-file", @@ -175,8 +175,8 @@ def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("import: %r")\n' % name) - fo.write("import %s\n" % name) + fo.write(f'print("import: {name!r}")\n') + fo.write(f"import {name}\n") fo.write("\n") return tf if (tf_exists or imports) else False @@ -202,8 +202,8 @@ def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("library(%r)")\n' % name) - fo.write("library(%s)\n" % name) + fo.write(f'print("library({name!r})")\n') + fo.write(f"library({name})\n") fo.write("\n") return tf if (tf_exists or imports) else False @@ -225,11 +225,13 @@ def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: break if tf_exists or imports: with open(tf, "a") as fo: - print(r'my $expected_version = "%s";' % m.version().rstrip("0"), file=fo) + print( + r'my $expected_version = "{}";'.format(m.version().rstrip("0")), file=fo + ) if imports: for name in imports: - print(r'print("import: %s\n");' % name, file=fo) - print("use %s;\n" % name, file=fo) + print(rf'print("import: {name}\n");', file=fo) + print(f"use {name};\n", file=fo) # Don't try to print version for complex imports if " " not in name: print( @@ -264,8 +266,8 @@ def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a+") as fo: for name in imports: - print(r'print("require \"%s\"\n");' % name, file=fo) - print('require "%s"\n' % name, file=fo) + print(rf'print("require \"{name}\"\n");', file=fo) + print(f'require "{name}"\n', file=fo) return tf if (tf_exists or imports) else False diff --git a/conda_build/develop.py b/conda_build/develop.py index 59b31a3231..d0e3d59fd6 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -137,12 +137,11 @@ def execute( ) -> None: if not isdir(prefix): sys.exit( - """\ -Error: environment does not exist: %s + f"""\ 
+Error: environment does not exist: {prefix} # # Use 'conda create' to create the environment first. #""" - % prefix ) assert find_executable("python", prefix=prefix) diff --git a/conda_build/environ.py b/conda_build/environ.py index 5aae94e682..7a3a7ca8cb 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -536,8 +536,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." - % var_name, + f"The environment variable '{var_name}' specified in script_env is undefined.", UserWarning, ) else: @@ -855,7 +854,7 @@ def get_install_actions( capture = utils.capture for feature, value in feature_list: if value: - specs.append("%s@" % feature) + specs.append(f"{feature}@") bldpkgs_dirs = ensure_list(bldpkgs_dirs) @@ -961,7 +960,7 @@ def get_install_actions( # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified if not any( - re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + re.match(rf"^{pkg}(?:$|[\s=].*)", str(dep)) for dep in specs ): precs = [prec for prec in precs if prec.name != pkg] cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() @@ -1341,7 +1340,7 @@ def _display_actions(prefix, precs): builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(" environment location: %s" % prefix) + builder.append(f" environment location: {prefix}") builder.append("") print("\n".join(builder)) @@ -1385,9 +1384,9 @@ def channel_filt(s): # string with new-style string formatting. fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" if maxchannels: - fmt[pkg] += " {channel:<%s}" % maxchannels + fmt[pkg] += f" {{channel:<{maxchannels}}}" if features[pkg]: - fmt[pkg] += " [{features:<%s}]" % maxfeatures + fmt[pkg] += f" [{{features:<{maxfeatures}}}]" lead = " " * 4 diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index f38706786a..9744ca14b4 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -110,19 +110,19 @@ class BuildLockError(CondaBuildException): class OverLinkingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overlinking check failed \n%s" % (error) + self.msg = f"overlinking check failed \n{error}" super().__init__(self.msg) class OverDependingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overdepending check failed \n%s" % (error) + self.msg = f"overdepending check failed \n{error}" super().__init__(self.msg) class RunPathError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "runpaths check failed \n%s" % (error) + self.msg = f"runpaths check failed \n{error}" super().__init__(self.msg) diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 19c0db7ca3..43fc401551 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -132,7 +132,7 @@ def print_linkages( else sort_order.get(key[0], (4, key[0])) ), ): - output_string += "%s:\n" % prec + output_string += f"{prec}:\n" if show_files: for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" @@ -296,7 +296,7 @@ def inspect_linkages( output_string += print_linkages(inverted_map[dep], show_files=show_files) else: - raise ValueError("Unrecognized groupby: %s" % groupby) + raise ValueError(f"Unrecognized groupby: {groupby}") if hasattr(output_string, "decode"): output_string 
= output_string.decode("utf-8") return output_string diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 976cc1b33a..ab101274ae 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -29,7 +29,7 @@ gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 cc_regex = re.compile(r"CC\w+") # match CC -punk_regex = re.compile("[%s]" % re.escape(string.punctuation)) # removes punks +punk_regex = re.compile(f"[{re.escape(string.punctuation)}]") # removes punks def match_gpl3(family): diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 6fd065e0b2..2552682840 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -397,7 +397,7 @@ def ensure_valid_noarch_value(meta): build_noarch = meta.get("build", {}).get("noarch") if build_noarch and build_noarch not in NOARCH_TYPES: raise exceptions.CondaBuildException( - "Invalid value for noarch: %s" % build_noarch + f"Invalid value for noarch: {build_noarch}" ) @@ -828,7 +828,7 @@ def _get_env_path(env_name_or_path): break bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta") if not os.path.isdir(bootstrap_metadir): - print("Bootstrap environment '%s' not found" % env_name_or_path) + print(f"Bootstrap environment '{env_name_or_path}' not found") sys.exit(1) return env_name_or_path @@ -1478,7 +1478,7 @@ def check_field(key, section): if section == "extra": continue if section not in FIELDS: - raise ValueError("unknown section: %s" % section) + raise ValueError(f"unknown section: {section}") for key_or_dict in submeta: if section in OPTIONALLY_ITERABLE_FIELDS and isinstance( key_or_dict, dict @@ -1492,17 +1492,17 @@ def check_field(key, section): def name(self) -> str: name = self.get_value("package/name", "") if not name and self.final: - sys.exit("Error: package/name missing in: %r" % self.meta_path) + sys.exit(f"Error: package/name missing in: {self.meta_path!r}") name = str(name) if name != name.lower(): - sys.exit("Error: package/name must be lowercase, got: %r" % name) + sys.exit(f"Error: package/name must be lowercase, got: {name!r}") check_bad_chrs(name, "package/name") return name def version(self) -> str: version = self.get_value("package/version", "") if not version and not self.get_section("outputs") and self.final: - sys.exit("Error: package/version missing in: %r" % self.meta_path) + sys.exit(f"Error: package/version missing in: {self.meta_path!r}") version = str(version) check_bad_chrs(version, "package/version") if self.final and version.startswith("."): @@ -1571,7 +1571,7 @@ def ms_depends(self, typ="run"): try: ms = MatchSpec(spec) except AssertionError: - raise RuntimeError("Invalid package specification: %r" % spec) + raise RuntimeError(f"Invalid package specification: {spec!r}") except (AttributeError, ValueError) as e: raise RuntimeError( "Received dictionary as spec. 
Note that pip requirements are " @@ -1580,7 +1580,7 @@ def ms_depends(self, typ="run"): if ms.name == self.name() and not ( typ == "build" and self.config.host_subdir != self.config.build_subdir ): - raise RuntimeError("%s cannot depend on itself" % self.name()) + raise RuntimeError(f"{self.name()} cannot depend on itself") for name, ver in name_ver_list: if ms.name == name: if self.noarch: @@ -1708,7 +1708,7 @@ def build_id(self): out = build_string_from_metadata(self) if self.config.filename_hashing and self.final: hash_ = self.hash_dependencies() - if not re.findall("h[0-9a-f]{%s}" % self.config.hash_length, out): + if not re.findall(f"h[0-9a-f]{{{self.config.hash_length}}}", out): ret = out.rsplit("_", 1) try: int(ret[0]) @@ -1718,14 +1718,14 @@ def build_id(self): if len(ret) > 1: out = "_".join([out] + ret[1:]) else: - out = re.sub("h[0-9a-f]{%s}" % self.config.hash_length, hash_, out) + out = re.sub(f"h[0-9a-f]{{{self.config.hash_length}}}", hash_, out) return out def dist(self): return f"{self.name()}-{self.version()}-{self.build_id()}" def pkg_fn(self): - return "%s.tar.bz2" % self.dist() + return f"{self.dist()}.tar.bz2" def is_app(self): return bool(self.get_value("app/entry")) @@ -1733,8 +1733,8 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % compute_sum( - join(self.path, self.get_value("app/icon")), "md5" + d["icon"] = "{}.png".format( + compute_sum(join(self.path, self.get_value("app/icon")), "md5") ) for field, key in [ @@ -2319,7 +2319,7 @@ def variant_in_source(self): # constrain the stored variants to only this version in the output # variant mapping if re.search( - r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, self.extract_source_text() + rf"\s*\{{\{{\s*{key}\s*(?:.*?)?\}}\}}", self.extract_source_text() ): return True return False diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index fb81565b3d..1e80fcd2e4 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -26,7 +26,7 @@ def rewrite_script(fn, prefix): try: data = fi.read() except UnicodeDecodeError: # file is binary - sys.exit("[noarch_python] Noarch package contains binary script: %s" % fn) + sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}") src_mode = os.stat(src).st_mode os.unlink(src) @@ -83,7 +83,7 @@ def handle_file(f, d, prefix): else: # this should be the built-in logging module, not conda-build's stuff, because this file is standalone. log = logging.getLogger(__name__) - log.debug("Don't know how to handle file: %s. Including it as-is." % f) + log.debug(f"Don't know how to handle file: {f}. 
Including it as-is.") def populate_files(m, files, prefix, entry_point_scripts=None): @@ -119,7 +119,7 @@ def transform(m, files, prefix): # Create *nix prelink script # Note: it's important to use LF newlines or it wont work if we build on Win - with open(join(bin_dir, ".%s-pre-link.sh" % name), "wb") as fo: + with open(join(bin_dir, f".{name}-pre-link.sh"), "wb") as fo: fo.write( b"""\ #!/bin/bash @@ -128,7 +128,7 @@ def transform(m, files, prefix): ) # Create windows prelink script (be nice and use Windows newlines) - with open(join(scripts_dir, ".%s-pre-link.bat" % name), "wb") as fo: + with open(join(scripts_dir, f".{name}-pre-link.bat"), "wb") as fo: fo.write( """\ @echo off diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index b2de763074..84e80b8e90 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -44,7 +44,7 @@ def ldd(path): continue if "ld-linux" in line: continue - raise RuntimeError("Unexpected output from ldd: %s" % line) + raise RuntimeError(f"Unexpected output from ldd: {line}") return res diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index d02cd2bd30..d6ee2841d6 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -1125,9 +1125,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat ) if binary.__class__ != lief.MachO.Binary: if isinstance(s, str): - s_name = "%s" % s + s_name = f"{s}" else: - s_name = "%s" % s.name + s_name = f"{s.name}" if s.exported and s.imported: print(f"Weird, symbol {s.name} is both imported and exported") if s.exported: @@ -1136,16 +1136,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat elif s.imported: is_undefined = False else: - s_name = "%s" % s.name + s_name = f"{s.name}" is_notexported = False if s.type & 1 else True # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, s.value)) # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, s.type, s.name)) if notexported is True or is_notexported is False: if is_undefined and undefined: - res.append("%s" % s_name) + res.append(f"{s_name}") elif not is_undefined and defined: - res.append("%s" % s_name) + res.append(f"{s_name}") return res diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 516df7a0a6..17fc5d5a13 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -286,7 +286,7 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-add_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "would duplicate path, file already has LC_RPATH for:" in stderr: print("Skipping -add_rpath, file already has LC_RPATH set") @@ -304,7 +304,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-delete_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "no LC_RPATH load command with path:" in stderr: print("Skipping -delete_rpath, file doesn't contain that LC_RPATH") @@ -341,7 +341,7 @@ def install_name_change(path, build_prefix, 
cb_func, dylibs, verbose=False): args.extend(("-change", dylibs[index]["name"], new_name, path)) code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}") ret = False continue else: diff --git a/conda_build/post.py b/conda_build/post.py index 30a4057a30..67c6a355a7 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -150,11 +150,11 @@ def write_pth(egg_path, config): with open( join( utils.get_site_packages(config.host_prefix, py_ver), - "%s.pth" % (fn.split("-")[0]), + "{}.pth".format(fn.split("-")[0]), ), "w", ) as fo: - fo.write("./%s\n" % fn) + fo.write(f"./{fn}\n") def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): @@ -368,7 +368,7 @@ def find_lib(link, prefix, files, path=None): if link.startswith(prefix): link = normpath(link[len(prefix) + 1 :]) if not any(link == normpath(w) for w in files): - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") return link if link.startswith("/"): # but doesn't start with the build prefix return @@ -382,7 +382,7 @@ def find_lib(link, prefix, files, path=None): for f in files: file_names[basename(f)].append(f) if link not in file_names: - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") if len(file_names[link]) > 1: if path and basename(path) == link: # The link is for the file itself, just use it @@ -403,7 +403,7 @@ def find_lib(link, prefix, files, path=None): "Choosing the first one." ) return file_names[link][0] - print("Don't know how to find %s, skipping" % link) + print(f"Don't know how to find {link}, skipping") def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): @@ -417,8 +417,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ) if not codefile_class(link, skip_symlinks=True): sys.exit( - "Error: Compiler runtime library in build prefix not found in host prefix %s" - % link + f"Error: Compiler runtime library in build prefix not found in host prefix {link}" ) else: print(f".. 
fixing linking of {link} in {path} instead") @@ -429,7 +428,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): return print(f"Fixing linking of {link} in {path}") - print("New link location is %s" % (link_loc)) + print(f"New link location is {link_loc}") lib_to_link = relpath(dirname(link_loc), "lib") # path_to_lib = utils.relative(path[len(prefix) + 1:]) @@ -647,7 +646,7 @@ def assert_relative_osx(path, host_prefix, build_prefix): for prefix in (host_prefix, build_prefix): if prefix and name.startswith(prefix): raise RuntimeError( - "library at %s appears to have an absolute path embedded" % path + f"library at {path} appears to have an absolute path embedded" ) @@ -1770,7 +1769,7 @@ def check_symlinks(files, prefix, croot): if msgs: for msg in msgs: - print("Error: %s" % msg, file=sys.stderr) + print(f"Error: {msg}", file=sys.stderr) sys.exit(1) diff --git a/conda_build/render.py b/conda_build/render.py index b021f8a5b6..cc3bcd87c0 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -115,7 +115,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant): # for sake of comparison, ignore dashes and underscores if dash_or_under.sub("", key) == dash_or_under.sub( "", spec_name - ) and not re.search(r"%s\s+[0-9a-zA-Z\_\.\<\>\=\*]" % spec_name, spec): + ) and not re.search(rf"{spec_name}\s+[0-9a-zA-Z\_\.\<\>\=\*]", spec): dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index c9bd5c398c..31213054d1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -511,9 +511,7 @@ def skeletonize( # packages, unless we're newer than what's in core if metacpan_api_is_core_version(meta_cpan_url, package): if not write_core: - print( - "We found core module %s. Skipping recipe creation." % packagename - ) + print(f"We found core module {packagename}. Skipping recipe creation.") continue d["useurl"] = "#" @@ -577,12 +575,11 @@ def skeletonize( version = None if exists(dir_path) and not force: print( - "Directory %s already exists and you have not specified --force " - % dir_path + f"Directory {dir_path} already exists and you have not specified --force " ) continue elif exists(dir_path) and force: - print("Directory %s already exists, but forcing recipe creation" % dir_path) + print(f"Directory {dir_path} already exists, but forcing recipe creation") try: d["homeurl"] = release_data["resources"]["homepage"] @@ -756,7 +753,7 @@ def deps_for_package( } packages_to_append = set() - print("Processing dependencies for %s..." % package, end="") + print(f"Processing dependencies for {package}...", end="") sys.stdout.flush() if not release_data.get("dependency"): @@ -1052,11 +1049,8 @@ def metacpan_api_is_core_version(cpan_url, module): return True else: sys.exit( - ( - "Error: Could not find module or distribution named" - " %s on MetaCPAN." - ) - % (module) + "Error: Could not find module or distribution named" + f" {module} on MetaCPAN." 
) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 38628a52f4..93958333fb 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -489,7 +489,7 @@ def dict_from_cran_lines(lines): # - Suggests in corpcor (k, v) = line.split(":", 1) except ValueError: - sys.exit("Error: Could not parse metadata (%s)" % line) + sys.exit(f"Error: Could not parse metadata ({line})") d[k] = v # if k not in CRAN_KEYS: # print("Warning: Unknown key %s" % k) @@ -597,7 +597,7 @@ def read_description_contents(fp): def get_archive_metadata(path, verbose=True): if verbose: - print("Reading package metadata from %s" % path) + print(f"Reading package metadata from {path}") if basename(path) == "DESCRIPTION": with open(path, "rb") as fp: return read_description_contents(fp) @@ -614,8 +614,8 @@ def get_archive_metadata(path, verbose=True): fp = zf.open(member, "r") return read_description_contents(fp) else: - sys.exit("Cannot extract a DESCRIPTION from file %s" % path) - sys.exit("%s does not seem to be a CRAN package (no DESCRIPTION) file" % path) + sys.exit(f"Cannot extract a DESCRIPTION from file {path}") + sys.exit(f"{path} does not seem to be a CRAN package (no DESCRIPTION) file") def get_latest_git_tag(config): @@ -638,12 +638,12 @@ def get_latest_git_tag(config): stdout = stdout.decode("utf-8") stderr = stderr.decode("utf-8") if stderr or p.returncode: - sys.exit("Error: git tag failed (%s)" % stderr) + sys.exit(f"Error: git tag failed ({stderr})") tags = stdout.strip().splitlines() if not tags: sys.exit("Error: no tags found") - print("Using tag %s" % tags[-1]) + print(f"Using tag {tags[-1]}") return tags[-1] @@ -683,7 +683,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): r.raise_for_status() except requests.exceptions.HTTPError as e: if e.response.status_code == 404: - print("No archive directory for package %s" % package) + print(f"No archive directory for package {package}") return [] raise versions = [] @@ -698,7 +698,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): def get_cran_index(cran_url, session, verbose=True): if verbose: - print("Fetching main index from %s" % cran_url) + print(f"Fetching main index from {cran_url}") r = session.get(cran_url + "/src/contrib/") r.raise_for_status() records = {} @@ -775,7 +775,7 @@ def package_to_inputs_dict( """ if isfile(package): return None - print("Parsing input package %s:" % package) + print(f"Parsing input package {package}:") package = strip_end(package, "/") package = strip_end(package, sep) if "github.com" in package: @@ -1037,7 +1037,7 @@ def skeletonize( session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) if pkg_name.lower() not in cran_index: - sys.exit("Package %s not found" % pkg_name) + sys.exit(f"Package {pkg_name} not found") package, cran_version = cran_index[pkg_name.lower()] if cran_version and (not version or version == cran_version): version = cran_version @@ -1048,8 +1048,7 @@ def skeletonize( sys.exit(1) elif not version and not cran_version and not allow_archived: print( - "ERROR: Package %s is archived; to build, use --allow-archived or a --version value" - % pkg_name + f"ERROR: Package {pkg_name} is archived; to build, use --allow-archived or a --version value" ) sys.exit(1) else: @@ -1325,7 +1324,7 @@ def skeletonize( if cran_package is None: cran_package = get_archive_metadata(description_path) d["cran_metadata"] = "\n".join( - ["# %s" % line for line in cran_package["orig_lines"] if line] + [f"# 
{line}" for line in cran_package["orig_lines"] if line] ) # Render the source and binaryN keys @@ -1377,7 +1376,7 @@ def skeletonize( d["summary"] = " " + yaml_quote_string(cran_package["Description"]) if "Suggests" in cran_package and not no_comments: - d["suggests"] = "# Suggests: %s" % cran_package["Suggests"] + d["suggests"] = "# Suggests: {}".format(cran_package["Suggests"]) else: d["suggests"] = "" @@ -1589,7 +1588,7 @@ def skeletonize( ) package_list.append(lower_name) - d["%s_depends" % dep_type] = "".join(deps) + d[f"{dep_type}_depends"] = "".join(deps) if no_comments: global CRAN_BUILD_SH_SOURCE, CRAN_META @@ -1603,7 +1602,7 @@ def skeletonize( if update_policy == "error": raise RuntimeError( "directory already exists " - "(and --update-policy is 'error'): %s" % dir_path + f"(and --update-policy is 'error'): {dir_path}" ) elif update_policy == "overwrite": rm_rf(dir_path) @@ -1626,7 +1625,7 @@ def skeletonize( makedirs(join(dir_path)) except: pass - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(dir_path, "meta.yaml"), "w") as f: f.write(clear_whitespace(CRAN_META.format(**d))) if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite": @@ -1683,14 +1682,14 @@ def get_outdated(output_dir, cran_index, packages=()): continue if recipe_name not in cran_index: - print("Skipping %s, not found on CRAN" % recipe) + print(f"Skipping {recipe}, not found on CRAN") continue version_compare( join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_") ) - print("Updating %s" % recipe) + print(f"Updating {recipe}") to_update.append(recipe_name) return to_update diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index da8e641928..41ec499bad 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -174,7 +174,7 @@ def package_exists(package_name): def getval(spec, k): if k not in spec: - raise Exception("Required key %s not in spec" % k) + raise Exception(f"Required key {k} not in spec") else: return spec[k] @@ -184,7 +184,7 @@ def warn_against_branches(branch): print("=========================================") print("") print("WARNING:") - print("Building a rock referenced to branch %s." % branch) + print(f"Building a rock referenced to branch {branch}.") print("This is not a tag. 
This is dangerous, because rebuilding") print("at a later date may produce a different package.") print("Please replace with a tag, git commit, or tarball.") @@ -253,7 +253,7 @@ def skeletonize( package = packages.pop() packagename = ( - "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() + f"lua-{package.lower()}" if package[:4] != "lua-" else package.lower() ) d = package_dicts.setdefault( package, @@ -372,13 +372,13 @@ def skeletonize( modules = spec["build"]["platforms"][our_plat]["modules"] if modules: d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'\"""" % r for r in modules.keys()] + [""] + [f"""lua -e "require '{r}'\"""" for r in modules.keys()] ) # If we didn't find any modules to import, import the base name if d["test_commands"] == "": d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'" """ % d["rockname"]] + [""] + ["""lua -e "require '{}'" """.format(d["rockname"])] ) # Build the luarocks skeleton diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index c45c843a6d..d3b716bc8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -300,7 +300,7 @@ def skeletonize( if not is_url: dir_path = join(output_dir, package.lower()) if exists(dir_path) and not version_compare: - raise RuntimeError("directory already exists: %s" % dir_path) + raise RuntimeError(f"directory already exists: {dir_path}") d = package_dicts.setdefault( package, { @@ -343,14 +343,12 @@ def skeletonize( else: # select the most visible version from PyPI. if not versions: - sys.exit( - "Error: Could not find any versions of package %s" % package - ) + sys.exit(f"Error: Could not find any versions of package {package}") if len(versions) > 1: - print("Warning, the following versions were found for %s" % package) + print(f"Warning, the following versions were found for {package}") for ver in versions: print(ver) - print("Using %s" % versions[-1]) + print(f"Using {versions[-1]}") print("Use --version to specify a different version.") d["version"] = versions[-1] @@ -404,7 +402,7 @@ def skeletonize( d = package_dicts[package] name = d["packagename"].lower() makedirs(join(output_dir, name)) - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(output_dir, name, "meta.yaml"), "w") as f: rendered_recipe = PYPI_META_HEADER.format(**d) @@ -642,8 +640,8 @@ def get_download_data( if not urls[0]["url"]: # The package doesn't have a url, or maybe it only has a wheel. sys.exit( - "Error: Could not build recipe for %s. " - "Could not find any valid urls." % package + f"Error: Could not build recipe for {package}. " + "Could not find any valid urls." ) U = parse_url(urls[0]["url"]) if not U.path: @@ -652,9 +650,9 @@ def get_download_data( fragment = U.fragment or "" digest = fragment.split("=") else: - sys.exit("Error: No source urls found for %s" % package) + sys.exit(f"Error: No source urls found for {package}") if len(urls) > 1 and not noprompt: - print("More than one source version is available for %s:" % package) + print(f"More than one source version is available for {package}:") if manual_url: for i, url in enumerate(urls): print( @@ -689,7 +687,7 @@ def get_download_data( filename = url["filename"] or "package" else: # User provided a URL, try to use it. 
- print("Using url %s" % package) + print(f"Using url {package}") pypiurl = package U = parse_url(package) digest = U.fragment.split("=") @@ -711,7 +709,7 @@ def version_compare(package, versions): recipe_dir = abspath(package.lower()) if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) + sys.exit(f"Error: no such directory: {recipe_dir}") m = MetaData(recipe_dir) local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") @@ -721,11 +719,11 @@ def version_compare(package, versions): # Comparing normalized versions, displaying non normalized ones new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: - print("Following new versions of %s are avaliable" % (package)) + print(f"Following new versions of {package} are avaliable") for ver in new_versions: print(ver) else: - print("No new version for %s is available" % (package)) + print(f"No new version for {package} is available") sys.exit() @@ -828,7 +826,7 @@ def get_package_metadata( config, setup_options, ): - print("Downloading %s" % package) + print(f"Downloading {package}") print("PyPI URL: ", metadata["pypiurl"]) pkginfo = get_pkginfo( package, @@ -982,7 +980,7 @@ def _spec_from_line(line): ) spec = _spec_from_line(dep_orig) if spec is None: - sys.exit("Error: Could not parse: %s" % dep) + sys.exit(f"Error: Could not parse: {dep}") if marker: spec = " ".join((spec, marker)) @@ -1058,10 +1056,10 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): if no_prompt: return license_name elif "\n" not in license_name: - print('Using "%s" for the license' % license_name) + print(f'Using "{license_name}" for the license') else: # Some projects put the whole license text in this field - print("This is the license for %s" % package) + print(f"This is the license for {package}") print() print(license_name) print() @@ -1070,8 +1068,8 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_name = "UNKNOWN" else: license_name = input( - "No license could be found for %s on PyPI or in the source. " - "What license should I use? " % package + f"No license could be found for {package} on PyPI or in the source. " + "What license should I use? " ) return license_name @@ -1175,7 +1173,7 @@ def unpack(src_path, tempdir): if src_path.lower().endswith(decompressible_exts): tar_xf(src_path, tempdir) else: - raise Exception("not a valid source: %s" % src_path) + raise Exception(f"not a valid source: {src_path}") def get_dir(tempdir): @@ -1209,7 +1207,7 @@ def get_requirements(package, pkginfo, all_extras=True): try: extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ",".join(extras)) + sys.exit("Error: Invalid extra features: [{}]".format(",".join(extras))) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator @@ -1297,10 +1295,10 @@ def get_pkginfo( else: new_hash_value = "" - print("Unpacking %s..." 
% package) + print(f"Unpacking {package}...") unpack(join(config.src_cache, filename), tempdir) print("done") - print("working in %s" % tempdir) + print(f"working in {tempdir}") src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy run_setuppy( @@ -1366,7 +1364,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op ) stdlib_dir = join( config.host_prefix, - "Lib" if on_win else "lib/python%s" % python_version, + "Lib" if on_win else f"lib/python{python_version}", ) patch = join(temp_dir, "pypi-distutils.patch") @@ -1421,8 +1419,8 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print("$PYTHONPATH = %s" % env["PYTHONPATH"]) - sys.exit("Error: command failed: %s" % " ".join(cmdargs)) + print("$PYTHONPATH = {}".format(env["PYTHONPATH"])) + sys.exit("Error: command failed: {}".format(" ".join(cmdargs))) finally: chdir(cwd) diff --git a/conda_build/source.py b/conda_build/source.py index c7b3d1921b..903f5d7ca0 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -55,7 +55,7 @@ def append_hash_to_fn(fn, hash_value): def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): """Download a source to the local cache.""" if verbose: - log.info("Source cache directory is: %s" % cache_folder) + log.info(f"Source cache directory is: {cache_folder}") if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) @@ -81,10 +81,10 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): path = join(cache_folder, fn) if isfile(path): if verbose: - log.info("Found source in cache: %s" % fn) + log.info(f"Found source in cache: {fn}") else: if verbose: - log.info("Downloading source to cache: %s" % fn) + log.info(f"Downloading source to cache: {fn}") for url in source_urls: if "://" not in url: @@ -98,14 +98,14 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: - log.info("Downloading %s" % url) + log.info(f"Downloading {url}") with LoggingContext(): download(url, path) except CondaHTTPError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) except RuntimeError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) else: if verbose: @@ -113,7 +113,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: # no break rm_rf(path) - raise RuntimeError("Could not download %s" % url) + raise RuntimeError(f"Could not download {url}") hashed = None for tp in ("md5", "sha1", "sha256"): @@ -344,7 +344,7 @@ def git_mirror_checkout_recursive( ) checkout = output.decode("utf-8") if verbose: - print("checkout: %r" % checkout) + print(f"checkout: {checkout!r}") if checkout: check_call_env( [git, "checkout", checkout], @@ -492,7 +492,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: if check_error: - raise Exception("git error: %s" % str(e)) + raise Exception(f"git error: {str(e)}") encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding @@ -535,7 +535,7 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): # now clone in to work directory update = source_dict.get("hg_tag") or "tip" if verbose: - 
print("checkout: %r" % update) + print(f"checkout: {update!r}") check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) check_call_env( @@ -953,7 +953,7 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): exception = None if not isfile(path): - raise RuntimeError("Error: no such patch: %s" % path) + raise RuntimeError(f"Error: no such patch: {path}") if config.verbose: stdout = None diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 3a98559187..374422f1e1 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -13,7 +13,7 @@ def dist_fn(fn): elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception("did not expect filename: %r" % fn) + raise Exception(f"did not expect filename: {fn!r}") class TarCheck: @@ -51,9 +51,9 @@ def info_files(self): return for p in sorted(seta | setb): if p not in seta: - print("%r not in info/files" % p) + print(f"{p!r} not in info/files") if p not in setb: - print("%r not in tarball" % p) + print(f"{p!r} not in tarball") raise Exception("info/files") def index_json(self): diff --git a/conda_build/utils.py b/conda_build/utils.py index 4a3e1f782c..796f849caf 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -425,7 +425,7 @@ def bytes2human(n): if n >= prefix[s]: value = float(n) / prefix[s] return f"{value:.1f}{s}" - return "%sB" % n + return f"{n}B" def seconds2human(s): @@ -458,7 +458,7 @@ def get_recipe_abspath(recipe): tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: - print("Ignoring non-recipe: %s" % recipe) + print(f"Ignoring non-recipe: {recipe}") return (None, None) else: recipe_dir = abspath(os.path.join(os.getcwd(), recipe)) @@ -1054,7 +1054,7 @@ def iter_entry_points(items): for item in items: m = entry_pat.match(item) if m is None: - sys.exit("Error cound not match entry point: %r" % item) + sys.exit(f"Error cound not match entry point: {item!r}") yield m.groups() @@ -1076,7 +1076,7 @@ def create_entry_point(path, module, func, config): os.remove(path) with open(path, "w") as fo: if not config.noarch: - fo.write("#!%s\n" % config.host_python) + fo.write(f"#!{config.host_python}\n") fo.write(pyscript) os.chmod(path, 0o775) @@ -1951,7 +1951,7 @@ def insert_variant_versions(requirements_dict, variant, env): ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) + regex = re.compile(r"^({})(?:\s*$)".format(key.replace("_", "[-_]"))) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): diff --git a/conda_build/variants.py b/conda_build/variants.py index 1e2b1adc0c..447025818c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -745,13 +745,13 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): continue v_regex = re.escape(v) v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) - variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex - selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" % v_regex + variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}" + selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" conditional_regex = ( r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" ) # plain req name, no version spec. 
Look for end of line after name, or comment or selector - requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex + requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)" if selectors_only: all_res.insert(0, selector_regex) else: diff --git a/conda_build/windows.py b/conda_build/windows.py index 706b499265..00287c50bf 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -56,16 +56,13 @@ def fix_staged_scripts(scripts_dir, config): # If it's a #!python script if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print( - "Adjusting unix-style #! script %s, " - "and adding a .bat file for it" % fn - ) + print(f"Adjusting unix-style #! script {fn}, and adding a .bat file for it") # copy it with a .py extension (skipping that first #! line) with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file copy_into( - join(dirname(__file__), "cli-%s.exe" % config.host_arch), + join(dirname(__file__), f"cli-{config.host_arch}.exe"), join(scripts_dir, fn + ".exe"), ) @@ -338,7 +335,7 @@ def build(m, bld_bat, stats, provision_only=False): rewrite_env = { k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) + print(f"Rewriting env in output: {pprint.pformat(rewrite_env)}") check_call_env( cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env ) diff --git a/docs/scrape_help.py b/docs/scrape_help.py index 2f99fbb403..66d5af1e57 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -112,7 +112,7 @@ def external_commands(): def get_help(command): command_help[command] = conda_command_help(command) - print("Checked for subcommand help for %s" % command) + print(f"Checked for subcommand help for {command}") with ThreadPoolExecutor(len(commands)) as executor: # list() is needed for force exceptions to be raised @@ -164,7 +164,7 @@ def generate_man(command): [ "help2man", "--name", - "conda %s" % command, + f"conda {command}", "--section", "1", "--source", @@ -172,36 +172,34 @@ def generate_man(command): "--version-string", conda_version, "--no-info", - "conda %s" % command, + f"conda {command}", ] ) retries -= 1 if not manpage: - sys.exit("Error: Could not get help for conda %s" % command) + sys.exit(f"Error: Could not get help for conda {command}") replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: + with open(join(manpath, "conda-{}.1".format(command.replace(" ", "-"))), "w") as f: f.write(manpage) - print("Generated manpage for conda %s" % command) + print(f"Generated manpage for conda {command}") def generate_html(command): command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen( - ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE - ) + man = Popen(["man", abspath(join(manpath, f"conda-{command_file}.1"))], stdout=PIPE) htmlpage = check_output( [ "man2html", "-bare", # Don't use HTML, HEAD, or BODY tags "title", - "conda-%s" % command_file, + f"conda-{command_file}", "-topm", "0", # No top margin "-botm", @@ -210,14 +208,14 @@ def generate_html(command): stdin=man.stdout, ) - with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: + with open(join(manpath, f"conda-{command_file}.html"), "wb") as f: f.write(htmlpage) - print("Generated html for conda %s" % command) + 
print(f"Generated html for conda {command}") def write_rst(command, sep=None): command_file = command.replace(" ", "-") - with open(join(manpath, "conda-%s.html" % command_file)) as f: + with open(join(manpath, f"conda-{command_file}.html")) as f: html = f.read() rp = rstpath @@ -225,13 +223,13 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, "conda-%s.rst" % command_file), "w") as f: + with open(join(rp, f"conda-{command_file}.rst"), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): f.write(" ") f.write(line) f.write("\n") - print("Generated rst for conda %s" % command) + print(f"Generated rst for conda {command}") def main(): diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 4c0c09b9ac..dc4078e61f 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -40,4 +40,4 @@ def test_conda_pkg_format( # Verify that test pass ran through api assert "Manual entry point" in out - assert "TEST END: %s" % output_file in out + assert f"TEST END: {output_file}" in out diff --git a/tests/utils.py b/tests/utils.py index b4ed64912b..4d6803f09d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -91,8 +91,7 @@ def assert_package_consistency(package_path): has_prefix_present = False except tarfile.ReadError: raise RuntimeError( - "Could not extract metadata from %s. " - "File probably corrupt." % package_path + f"Could not extract metadata from {package_path}. File probably corrupt." ) errors = [] member_set = set(member_list) # The tar format allows duplicates in member_list @@ -101,7 +100,7 @@ def assert_package_consistency(package_path): file_set = set(file_list) # Check that there are no duplicates in info/files if len(file_list) != len(file_set): - errors.append("Duplicate files in info/files in %s" % package_path) + errors.append(f"Duplicate files in info/files in {package_path}") # Compare the contents of files and members unlisted_members = member_set.difference(file_set) missing_members = file_set.difference(member_set) @@ -109,14 +108,16 @@ def assert_package_consistency(package_path): missing_files = [m for m in unlisted_members if not m.startswith("info/")] if len(missing_files) > 0: errors.append( - "The following package files are not listed in " - "info/files: %s" % ", ".join(missing_files) + "The following package files are not listed in info/files: {}".format( + ", ".join(missing_files) + ) ) # Find any files missing in the archive if len(missing_members) > 0: errors.append( - "The following files listed in info/files are missing: " - "%s" % ", ".join(missing_members) + "The following files listed in info/files are missing: {}".format( + ", ".join(missing_members) + ) ) # Find any files in has_prefix that are not present in files if has_prefix_present: @@ -129,15 +130,15 @@ def assert_package_consistency(package_path): elif len(parts) == 3: prefix_path_list.append(parts[2]) else: - errors.append("Invalid has_prefix file in package: %s" % package_path) + errors.append(f"Invalid has_prefix file in package: {package_path}") prefix_path_set = set(prefix_path_list) if len(prefix_path_list) != len(prefix_path_set): - errors.append("Duplicate files in info/has_prefix in %s" % package_path) + errors.append(f"Duplicate files in info/has_prefix in {package_path}") prefix_not_in_files = prefix_path_set.difference(file_set) if len(prefix_not_in_files) > 0: errors.append( "The following files listed in info/has_prefix are missing " - "from 
info/files: %s" % ", ".join(prefix_not_in_files) + "from info/files: {}".format(", ".join(prefix_not_in_files)) ) # Assert that no errors are detected From a5a63c76d9a5dda366fc0423925b17f105d1b51b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 21:08:42 +0000 Subject: [PATCH 365/366] Bump codecov/codecov-action from 4.3.0 to 4.3.1 in /.github/workflows (#5329) * Bump codecov/codecov-action from 4.3.0 to 4.3.1 in /.github/workflows Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4.3.0 to 4.3.1. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/84508663e988701840491b86de86b666e8a86bed...5ecb98a3c6b747ed38dc09f787459979aebb39be) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Apply suggestions from code review --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- .github/workflows/tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 2819648dea..3ff3dd50b4 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -150,7 +150,7 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} @@ -317,7 +317,7 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} @@ -417,7 +417,7 @@ jobs: -m "${{ env.PYTEST_MARKER }}" - name: Upload Coverage - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed + uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4.3.1 with: flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }} From cf7e02ec66fe3d59fd26ef0ded559e3a8217361b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 21:29:33 +0000 Subject: [PATCH 366/366] Bump actions/checkout from 4.1.4 to 4.1.5 in /.github/workflows (#5328) * Bump actions/checkout from 4.1.4 to 4.1.5 in /.github/workflows Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.4 to 4.1.5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/0ad4b8fadaa221de15dcec353f45205ec38ea70b...44c2b7a8a4ea60a981eaca3cf939b5f4305c123b) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Apply suggestions from code review --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Ken Odegard --- .github/workflows/builds-review.yaml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/labels.yml | 2 +- .github/workflows/tests.yml | 12 ++++++------ 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/builds-review.yaml b/.github/workflows/builds-review.yaml index 40651286f4..cad5d250b0 100644 --- a/.github/workflows/builds-review.yaml +++ b/.github/workflows/builds-review.yaml @@ -48,7 +48,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 23a93bb620..11bd69c67b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: if: '!github.event.repository.fork' runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 - name: Setup diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index e6817ddf7b..9ec951a22f 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -19,7 +19,7 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 - id: has_local uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 with: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3ff3dd50b4..e548cc609a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,7 +45,7 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 # dorny/paths-filter needs git clone for non-PR events # https://github.com/dorny/paths-filter#supported-workflows if: github.event_name != 'pull_request' @@ -102,7 +102,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -182,7 +182,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -262,7 +262,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -366,7 +366,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: fetch-depth: 0 @@ -504,7 +504,7 @@ jobs: # Clean checkout of specific git ref needed for package metadata version # which needs env vars 
GIT_DESCRIBE_TAG and GIT_BUILD_STR: - name: Checkout Source - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5 with: ref: ${{ github.ref }} clean: true
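A minimal sketch of the action-pinning convention these Dependabot bumps maintain: each workflow references an action by its full commit SHA, with the released tag kept in a trailing comment so the SHA and the human-readable version can be updated together. The SHA and tag below are taken from the diff above; the job itself is hypothetical.

```yaml
# Hypothetical workflow excerpt illustrating the SHA-pinning convention.
jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      # The full commit SHA is the immutable reference; the comment records the
      # corresponding release tag so Dependabot can bump both in one pass.
      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b  # v4.1.5
```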