diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh
index 73ea60380c..766bcb9f29 100644
--- a/.devcontainer/post_create.sh
+++ b/.devcontainer/post_create.sh
@@ -24,4 +24,4 @@ echo "Installing dev dependencies"
     --file "$SRC_CONDA_BUILD/tests/requirements.txt" \
     --file "$SRC_CONDA_BUILD/tests/requirements-Linux.txt" \
     --file "$SRC_CONDA_BUILD/tests/requirements-ci.txt" \
-    "conda>=23.5.0"
+    "conda>=23.7.0"
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index aafe0ed977..29f98a129d 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -83,10 +83,10 @@ jobs:
         include:
           # minimum Python/conda combo
           - python-version: '3.8'
-            conda-version: 23.5.0
+            conda-version: 23.7.0
            test-type: serial
           - python-version: '3.8'
-            conda-version: 23.5.0
+            conda-version: 23.7.0
            test-type: parallel
           # maximum Python/conda combo
           - python-version: '3.12'
diff --git a/conda_build/api.py b/conda_build/api.py
index 2d4e3ef567..8c47ef1e6d 100644
--- a/conda_build/api.py
+++ b/conda_build/api.py
@@ -17,11 +17,13 @@
 import sys
 from os.path import dirname, expanduser, join
 from pathlib import Path
+from typing import TYPE_CHECKING, Iterable
 
 # make the Config class available in the api namespace
 from .config import DEFAULT_PREFIX_LENGTH as _prefix_length
 from .config import Config, get_channel_urls, get_or_merge_config
 from .deprecations import deprecated
+from .metadata import MetaData, MetaDataTuple
 from .utils import (
     CONDA_PACKAGE_EXTENSIONS,
     LoggingContext,
@@ -32,21 +34,25 @@
     on_win,
 )
 
+if TYPE_CHECKING:
+    from typing import Any, Literal
+
+    StatsDict = dict[str, Any]
+
 
 def render(
-    recipe_path,
-    config=None,
-    variants=None,
-    permit_unsatisfiable_variants=True,
-    finalize=True,
-    bypass_env_check=False,
+    recipe_path: str | os.PathLike | Path,
+    config: Config | None = None,
+    variants: dict[str, Any] | None = None,
+    permit_unsatisfiable_variants: bool = True,
+    finalize: bool = True,
+    bypass_env_check: bool = False,
     **kwargs,
-):
+) -> list[MetaDataTuple]:
     """Given path to a recipe, return the MetaData object(s) representing that
     recipe, with jinja2 templates evaluated.
 
-    Returns a list of (metadata, needs_download, needs_reparse in env) tuples"""
-    from collections import OrderedDict
+    Returns a list of (metadata, need_download, need_reparse in env) tuples"""
 
     from conda.exceptions import NoPackagesFoundError
 
@@ -63,7 +69,7 @@ def render(
         variants=variants,
         permit_unsatisfiable_variants=permit_unsatisfiable_variants,
     )
-    output_metas = OrderedDict()
+    output_metas: dict[tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple] = {}
     for meta, download, render_in_env in metadata_tuples:
         if not meta.skip() or not config.trim_skip:
             for od, om in meta.get_output_metadata_set(
@@ -95,7 +101,7 @@ def render(
                         (var, om.config.variant[var])
                         for var in om.get_used_vars()
                     ),
-                ] = (om, download, render_in_env)
+                ] = MetaDataTuple(om, download, render_in_env)
             else:
                 output_metas[
                     f"{om.type}: {om.name()}",
@@ -104,12 +110,16 @@ def render(
                         (var, om.config.variant[var])
                         for var in om.get_used_vars()
                     ),
-                ] = (om, download, render_in_env)
+                ] = MetaDataTuple(om, download, render_in_env)
 
     return list(output_metas.values())
 
 
-def output_yaml(metadata, file_path=None, suppress_outputs=False):
+def output_yaml(
+    metadata: MetaData,
+    file_path: str | os.PathLike | Path | None = None,
+    suppress_outputs: bool = False,
+) -> str:
     """Save a rendered recipe in its final form to the path given by file_path"""
     from .render import output_yaml
 
@@ -117,12 +127,16 @@
 
 
 def get_output_file_paths(
-    recipe_path_or_metadata,
-    no_download_source=False,
-    config=None,
-    variants=None,
+    recipe_path_or_metadata: str
+    | os.PathLike
+    | Path
+    | MetaData
+    | Iterable[MetaDataTuple],
+    no_download_source: bool = False,
+    config: Config | None = None,
+    variants: dict[str, Any] | None = None,
     **kwargs,
-):
+) -> list[str]:
     """Get output file paths for any packages that would be created by a recipe
 
     Both split packages (recipes with more than one output) and build matrices,
@@ -132,22 +146,9 @@
 
     config = get_or_merge_config(config, **kwargs)
 
-    if hasattr(recipe_path_or_metadata, "__iter__") and not isinstance(
-        recipe_path_or_metadata, str
-    ):
-        list_of_metas = [
-            hasattr(item[0], "config")
-            for item in recipe_path_or_metadata
-            if len(item) == 3
-        ]
-
-        if list_of_metas and all(list_of_metas):
-            metadata = recipe_path_or_metadata
-        else:
-            raise ValueError(f"received mixed list of metas: {recipe_path_or_metadata}")
-    elif isinstance(recipe_path_or_metadata, (str, Path)):
+    if isinstance(recipe_path_or_metadata, (str, Path)):
         # first, render the parent recipe (potentially multiple outputs, depending on variants).
-        metadata = render(
+        metadata_tuples = render(
             recipe_path_or_metadata,
             no_download_source=no_download_source,
             variants=variants,
@@ -155,29 +156,48 @@ def get_output_file_paths(
             finalize=True,
             **kwargs,
         )
+
+    elif isinstance(recipe_path_or_metadata, MetaData):
+        metadata_tuples = [MetaDataTuple(recipe_path_or_metadata, False, False)]
+
+    elif isinstance(recipe_path_or_metadata, Iterable) and all(
+        isinstance(recipe, MetaDataTuple)
+        and isinstance(recipe.metadata, MetaData)
+        and isinstance(recipe.need_download, bool)
+        and isinstance(recipe.need_reparse, bool)
+        for recipe in recipe_path_or_metadata
+    ):
+        metadata_tuples = recipe_path_or_metadata
+
     else:
-        assert hasattr(
-            recipe_path_or_metadata, "config"
-        ), f"Expecting metadata object - got {recipe_path_or_metadata}"
-        metadata = [(recipe_path_or_metadata, None, None)]
-    # Next, loop over outputs that each metadata defines
+        raise ValueError(
+            f"Unknown input type: {type(recipe_path_or_metadata)}; expecting "
+            "PathLike object, MetaData object, or a list of tuples containing "
+            "(MetaData, bool, bool)."
+        )
+
+    # Next, loop over outputs that each metadata defines
     outs = []
-    for m, _, _ in metadata:
-        if m.skip():
-            outs.append(get_skip_message(m))
+    for metadata, _, _ in metadata_tuples:
+        if metadata.skip():
+            outs.append(get_skip_message(metadata))
         else:
-            outs.append(bldpkg_path(m))
-    return sorted(list(set(outs)))
+            outs.append(bldpkg_path(metadata))
+    return sorted(set(outs))
 
 
 @deprecated("24.3.0", "24.5.0", addendum="Use `get_output_file_paths` instead.")
 def get_output_file_path(
-    recipe_path_or_metadata,
-    no_download_source=False,
-    config=None,
-    variants=None,
+    recipe_path_or_metadata: str
+    | os.PathLike
+    | Path
+    | MetaData
+    | Iterable[MetaDataTuple],
+    no_download_source: bool = False,
+    config: Config | None = None,
+    variants: dict[str, Any] | None = None,
     **kwargs,
-):
+) -> list[str]:
     """Get output file paths for any packages that would be created by a recipe
 
     Both split packages (recipes with more than one output) and build matrices,
@@ -192,7 +212,13 @@ def get_output_file_path(
     )
 
 
-def check(recipe_path, no_download_source=False, config=None, variants=None, **kwargs):
+def check(
+    recipe_path: str | os.PathLike | Path,
+    no_download_source: bool = False,
+    config: Config | None = None,
+    variants: dict[str, Any] | None = None,
+    **kwargs,
+) -> bool:
     """Check validity of input recipe path
 
     Verifies that recipe can be completely rendered, and that fields of the rendered recipe are
@@ -209,16 +235,16 @@ def check(recipe_path, no_download_source=False, config=None, variants=None, **k
 
 
 def build(
-    recipe_paths_or_metadata,
-    post=None,
-    need_source_download=True,
-    build_only=False,
-    notest=False,
-    config=None,
-    variants=None,
-    stats=None,
+    recipe_paths_or_metadata: str | os.PathLike | Path | MetaData,
+    post: bool | None = None,
+    need_source_download: bool = True,
+    build_only: bool = False,
+    notest: bool = False,
+    config: Config | None = None,
+    variants: dict[str, Any] | None = None,
+    stats: StatsDict | None = None,
     **kwargs,
-):
+) -> list[str]:
     """Run the build step.
 
    If recipe paths are provided, renders recipe before building.
@@ -230,16 +256,15 @@ def build(
             "other arguments (config) by keyword."
         )
 
-    recipes = []
+    recipes: list[str | MetaData] = []
     for recipe in ensure_list(recipe_paths_or_metadata):
-        if isinstance(recipe, str):
+        if isinstance(recipe, (str, os.PathLike, Path)):
             for recipe in expand_globs(recipe, os.getcwd()):
                 try:
-                    recipe = find_recipe(recipe)
+                    recipes.append(find_recipe(recipe))
                 except OSError:
                     continue
-                recipes.append(recipe)
-        elif hasattr(recipe, "config"):
+        elif isinstance(recipe, MetaData):
             recipes.append(recipe)
         else:
             raise ValueError(f"Recipe passed was unrecognized object: {recipe}")
@@ -263,12 +288,12 @@ def build(
 
 
 def test(
-    recipedir_or_package_or_metadata,
-    move_broken=True,
-    config=None,
-    stats=None,
+    recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData,
+    move_broken: bool = True,
+    config: Config | None = None,
+    stats: StatsDict | None = None,
     **kwargs,
-):
+) -> bool:
     """Run tests on either packages (.tar.bz2 or extracted) or recipe folders
 
     For a recipe folder, it renders the recipe enough to know what package to download, and obtains
@@ -282,24 +307,22 @@ def test(
     # if people don't pass in an object to capture stats in, they won't get them returned.
     # We'll still track them, though.
-    if not stats:
-        stats = {}
+    stats = stats or {}
 
     with config:
         # This will create a new local build folder if and only if config
         # doesn't already have one. What this means is that if we're
         # running a test immediately after build, we use the one that the
         # build already provided
-        test_result = test(
+        return test(
             recipedir_or_package_or_metadata,
             config=config,
             move_broken=move_broken,
             stats=stats,
         )
-    return test_result
 
 
-def list_skeletons():
+def list_skeletons() -> list[str]:
     """List available skeletons for generating conda recipes from external sources.
 
     The returned list is generally the names of supported repositories (pypi, cran, etc.)
@@ -315,8 +338,14 @@ def list_skeletons():
 
 
 def skeletonize(
-    packages, repo, output_dir=".", version=None, recursive=False, config=None, **kwargs
-):
+    packages: str | Iterable[str],
+    repo: Literal["cpan", "cran", "luarocks", "pypi", "rpm"],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+    config: Config | None = None,
+    **kwargs,
+) -> None:
     """Generate a conda recipe from an external repo.
 
     Translates metadata from external sources into expected conda recipe format."""
@@ -355,7 +384,7 @@ def skeletonize(
         if arg in kwargs:
             del kwargs[arg]
     with config:
-        skeleton_return = module.skeletonize(
+        module.skeletonize(
             packages,
             output_dir=output_dir,
             version=version,
@@ -363,42 +392,42 @@ def skeletonize(
             config=config,
             **kwargs,
         )
-        return skeleton_return
 
 
 def develop(
-    recipe_dir,
-    prefix=sys.prefix,
-    no_pth_file=False,
-    build_ext=False,
-    clean=False,
-    uninstall=False,
-):
+    recipe_dir: str | Iterable[str],
+    prefix: str = sys.prefix,
+    no_pth_file: bool = False,
+    build_ext: bool = False,
+    clean: bool = False,
+    uninstall: bool = False,
+) -> None:
     """Install a Python package in 'development mode'.
 
     This works by creating a conda.pth file in site-packages."""
     from .develop import execute
 
     recipe_dir = ensure_list(recipe_dir)
-    return execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall)
+    execute(recipe_dir, prefix, no_pth_file, build_ext, clean, uninstall)
 
 
 def convert(
-    package_file,
-    output_dir=".",
-    show_imports=False,
-    platforms=None,
-    force=False,
-    dependencies=None,
-    verbose=False,
-    quiet=True,
-    dry_run=False,
-):
+    package_file: str,
+    output_dir: str = ".",
+    show_imports: bool = False,
+    platforms: str | Iterable[str] | None = None,
+    force: bool = False,
+    dependencies: str | Iterable[str] | None = None,
+    verbose: bool = False,
+    quiet: bool = True,
+    dry_run: bool = False,
+) -> None:
     """Convert changes a package from one platform to another.
 
     It applies only to things that are portable, such as pure python, or header-only C/C++ libraries."""
     from .convert import conda_convert
 
     platforms = ensure_list(platforms)
+    dependencies = ensure_list(dependencies)
     if package_file.endswith("tar.bz2"):
         return conda_convert(
             package_file,
@@ -419,7 +448,7 @@ def convert(
     raise RuntimeError("cannot convert: %s" % package_file)
 
 
-def test_installable(channel="defaults"):
+def test_installable(channel: str = "defaults") -> bool:
     """Check to make sure that packages in channel are installable.
     This is a consistency check for the channel."""
     from .inspect_pkg import test_installable
@@ -428,14 +457,14 @@
 
 
 def inspect_linkages(
-    packages,
-    prefix=sys.prefix,
-    untracked=False,
-    all_packages=False,
-    show_files=False,
-    groupby="package",
-    sysroot="",
-):
+    packages: str | Iterable[str],
+    prefix: str | os.PathLike | Path = sys.prefix,
+    untracked: bool = False,
+    all_packages: bool = False,
+    show_files: bool = False,
+    groupby: Literal["package", "dependency"] = "package",
+    sysroot: str = "",
+) -> str:
     from .inspect_pkg import inspect_linkages
 
     packages = ensure_list(packages)
@@ -575,7 +604,7 @@ def debug(
 
     config.channel_urls = get_channel_urls(kwargs)
 
-    metadata_tuples: list[tuple[MetaData, bool, bool]] = []
+    metadata_tuples: list[MetaDataTuple] = []
 
     best_link_source_method = "skip"
     if isinstance(recipe_or_package_path_or_metadata_tuples, str):
@@ -583,7 +612,7 @@ def debug(
         for metadata_conda_debug in metadatas_conda_debug:
             best_link_source_method = "symlink"
             metadata = MetaData(metadata_conda_debug, config, {})
-            metadata_tuples.append((metadata, False, True))
+            metadata_tuples.append(MetaDataTuple(metadata, False, True))
     else:
         ext = os.path.splitext(recipe_or_package_path_or_metadata_tuples)[1]
         if not ext or not any(ext in _ for _ in CONDA_PACKAGE_EXTENSIONS):
diff --git a/conda_build/build.py b/conda_build/build.py
index 88461ac941..531b38323f 100644
--- a/conda_build/build.py
+++ b/conda_build/build.py
@@ -21,6 +21,7 @@
 from collections import OrderedDict, deque
 from os.path import dirname, isdir, isfile, islink, join
 from pathlib import Path
+from typing import TYPE_CHECKING
 
 import conda_package_handling.api
 import yaml
@@ -85,6 +86,9 @@
 if on_win:
     from . import windows
 
+if TYPE_CHECKING:
+    from typing import Any, Iterable
+
 if "bsd" in sys.platform:
     shell_path = "/bin/sh"
 elif utils.on_win:
@@ -3322,12 +3326,12 @@ def write_test_scripts(
 
 
 def test(
-    recipedir_or_package_or_metadata,
-    config,
-    stats,
-    move_broken=True,
-    provision_only=False,
-):
+    recipedir_or_package_or_metadata: str | os.PathLike | Path | MetaData,
+    config: Config,
+    stats: dict,
+    move_broken: bool = True,
+    provision_only: bool = False,
+) -> bool:
     """
     Execute any test scripts for the given package.
 
@@ -3641,8 +3645,14 @@ def check_external():
 
 
 def build_tree(
-    recipe_list, config, stats, build_only=False, post=None, notest=False, variants=None
-):
+    recipe_list: Iterable[str | MetaData],
+    config: Config,
+    stats: dict,
+    build_only: bool = False,
+    post: bool | None = None,
+    notest: bool = False,
+    variants: dict[str, Any] | None = None,
+) -> list[str]:
     to_build_recursive = []
     recipe_list = deque(recipe_list)
diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py
index 18e24827e0..a966677471 100644
--- a/conda_build/cli/main_build.py
+++ b/conda_build/cli/main_build.py
@@ -532,13 +532,14 @@ def check_action(recipe, config):
 
 def execute(args: Sequence[str] | None = None) -> int:
     _, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
+
     config = get_or_merge_config(None, **parsed.__dict__)
     build.check_external()
 
     # change globals in build module, see comment there as well
     config.channel_urls = get_channel_urls(parsed.__dict__)
-    config.override_channels = parsed.override_channels
     config.verbose = not parsed.quiet or parsed.debug
 
     if "purge" in parsed.recipe:
diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py
index cd12f21ddc..d30b725b3d 100644
--- a/conda_build/cli/main_convert.py
+++ b/conda_build/cli/main_convert.py
@@ -6,6 +6,8 @@
 from os.path import abspath, expanduser
 from typing import TYPE_CHECKING
 
+from conda.base.context import context
+
 from .. import api
 
 if TYPE_CHECKING:
@@ -126,6 +128,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
     _, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
+
     files = parsed.files
     del parsed.__dict__["files"]
diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py
index 59689bfa05..731f964217 100644
--- a/conda_build/cli/main_debug.py
+++ b/conda_build/cli/main_debug.py
@@ -6,6 +6,8 @@
 import sys
 from typing import TYPE_CHECKING
 
+from conda.base.context import context
+
 from .. import api
 from ..utils import on_win
 from . import validators as valid
@@ -94,6 +96,7 @@ def get_parser() -> ArgumentParser:
 def execute(args: Sequence[str] | None = None) -> int:
     parser = get_parser()
     parsed = parser.parse_args(args)
+    context.__init__(argparse_args=parsed)
 
     try:
         activation_string = api.debug(
diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py
index 326c5fd2a7..9b680cbf5a 100644
--- a/conda_build/cli/main_develop.py
+++ b/conda_build/cli/main_develop.py
@@ -5,7 +5,7 @@
 import logging
 from typing import TYPE_CHECKING
 
-from conda.base.context import context, determine_target_prefix
+from conda.base.context import context
 
 from .. import api
 
@@ -88,10 +88,11 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
     _, parsed = parse_args(args)
-    prefix = determine_target_prefix(context, parsed)
+    context.__init__(argparse_args=parsed)
+
     api.develop(
         parsed.source,
-        prefix=prefix,
+        prefix=context.target_prefix,
         no_pth_file=parsed.no_pth_file,
         build_ext=parsed.build_ext,
         clean=parsed.clean,
diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py
index 88b31cb837..b1c47c0586 100644
--- a/conda_build/cli/main_inspect.py
+++ b/conda_build/cli/main_inspect.py
@@ -8,7 +8,7 @@
 from pprint import pprint
 from typing import TYPE_CHECKING
 
-from conda.base.context import context, determine_target_prefix
+from conda.base.context import context
 
 from .. import api
 
@@ -196,6 +196,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
     parser, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
 
     if not parsed.subcommand:
         parser.print_help()
@@ -206,7 +207,7 @@ def execute(args: Sequence[str] | None = None) -> int:
         print(
             api.inspect_linkages(
                 parsed.packages,
-                prefix=determine_target_prefix(context, parsed),
+                prefix=context.target_prefix,
                 untracked=parsed.untracked,
                 all_packages=parsed.all,
                 show_files=parsed.show_files,
@@ -218,7 +219,7 @@ def execute(args: Sequence[str] | None = None) -> int:
         print(
             api.inspect_objects(
                 parsed.packages,
-                prefix=determine_target_prefix(context, parsed),
+                prefix=context.target_prefix,
                 groupby=parsed.groupby,
             )
         )
diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py
index 0e4507359e..91d2edcebb 100644
--- a/conda_build/cli/main_metapackage.py
+++ b/conda_build/cli/main_metapackage.py
@@ -121,8 +121,12 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
-    _, args = parse_args(args)
-    channel_urls = args.__dict__.get("channel") or args.__dict__.get("channels") or ()
-    api.create_metapackage(channel_urls=channel_urls, **args.__dict__)
+    _, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
+
+    api.create_metapackage(
+        channel_urls=context.channels,
+        **parsed.__dict__,
+    )
     return 0
diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py
index 3e0bf845f5..a5cbb8b443 100644
--- a/conda_build/cli/main_render.py
+++ b/conda_build/cli/main_render.py
@@ -202,6 +202,7 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
     _, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
 
     config = get_or_merge_config(None, **parsed.__dict__)
 
@@ -213,8 +214,6 @@ def execute(args: Sequence[str] | None = None) -> int:
 
     config.channel_urls = get_channel_urls(parsed.__dict__)
 
-    config.override_channels = parsed.override_channels
-
     if parsed.output:
         config.verbose = False
         config.debug = False
diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py
index 825f3742de..7013e2ffab 100644
--- a/conda_build/cli/main_skeleton.py
+++ b/conda_build/cli/main_skeleton.py
@@ -9,6 +9,8 @@
 from importlib import import_module
 from typing import TYPE_CHECKING
 
+from conda.base.context import context
+
 from .. import api
 from ..config import Config
 
@@ -52,6 +54,8 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]:
 
 def execute(args: Sequence[str] | None = None) -> int:
     parser, parsed = parse_args(args)
+    context.__init__(argparse_args=parsed)
+
     config = Config(**parsed.__dict__)
 
     if not parsed.repo:
diff --git a/conda_build/config.py b/conda_build/config.py
index b48646f331..09ce6b0718 100644
--- a/conda_build/config.py
+++ b/conda_build/config.py
@@ -31,6 +31,7 @@
 
 if TYPE_CHECKING:
     from pathlib import Path
+    from typing import Any
 
 invocation_time = ""
 
@@ -89,7 +90,6 @@ def _get_default_settings():
         Setting("dirty", False),
         Setting("include_recipe", True),
         Setting("no_download_source", False),
-        Setting("override_channels", False),
         Setting("skip_existing", False),
         Setting("token", None),
         Setting("user", None),
@@ -291,6 +291,10 @@ def set_lang(variant, lang):
         for lang in ("perl", "lua", "python", "numpy", "r_base"):
             set_lang(self.variant, lang)
 
+        # --override-channels is a valid CLI argument but we no longer wish to set it here
+        # use conda.base.context.context.override_channels instead
+        kwargs.pop("override_channels", None)
+
         self._build_id = kwargs.pop("build_id", getattr(self, "_build_id", ""))
         source_cache = kwargs.pop("cache_dir", None)
         croot = kwargs.pop("croot", None)
@@ -773,6 +777,15 @@ def test_dir(self):
     def subdirs_same(self):
         return self.host_subdir == self.build_subdir
 
+    @property
+    @deprecated(
+        "24.5",
+        "24.7",
+        addendum="Use `conda.base.context.context.override_channels` instead.",
+    )
+    def override_channels(self):
+        return context.override_channels
+
     def clean(self, remove_folders=True):
         # build folder is the whole burrito containing envs and source folders
         # It will only exist if we download source, or create a build or test environment
@@ -816,7 +829,7 @@ def clean_pkgs(self):
         for folder in self.bldpkgs_dirs:
             rm_rf(folder)
 
-    def copy(self):
+    def copy(self) -> Config:
         new = copy.copy(self)
         new.variant = copy.deepcopy(self.variant)
         if hasattr(self, "variants"):
@@ -842,7 +855,11 @@ def __exit__(self, e_type, e_value, traceback):
         self.clean(remove_folders=False)
 
 
-def _get_or_merge_config(config, variant=None, **kwargs):
+def _get_or_merge_config(
+    config: Config | None,
+    variant: dict[str, Any] | None = None,
+    **kwargs,
+) -> Config:
     # This function should only ever be called via get_or_merge_config.
     # It only exists for us to monkeypatch a default config when running tests.
     if not config:
@@ -858,7 +875,11 @@ def _get_or_merge_config(config, variant=None, **kwargs):
     return config
 
 
-def get_or_merge_config(config, variant=None, **kwargs):
+def get_or_merge_config(
+    config: Config | None,
+    variant: dict[str, Any] | None = None,
+    **kwargs,
+) -> Config:
     """Always returns a new object - never changes the config that might be passed in."""
     return _get_or_merge_config(config, variant=variant, **kwargs)
diff --git a/conda_build/convert.py b/conda_build/convert.py
index 793f0dc93c..e910d47e21 100644
--- a/conda_build/convert.py
+++ b/conda_build/convert.py
@@ -4,6 +4,8 @@
 Tools for converting conda packages
 """
 
+from __future__ import annotations
+
 import glob
 import hashlib
 import json
@@ -14,8 +16,12 @@
 import tarfile
 import tempfile
 from pathlib import Path
+from typing import TYPE_CHECKING
+
+from .utils import ensure_list, filter_info_files, walk
 
-from .utils import filter_info_files, walk
+if TYPE_CHECKING:
+    from typing import Iterable
 
 
 def retrieve_c_extensions(file_path, show_imports=False):
@@ -776,31 +782,35 @@ def convert_from_windows_to_unix(
 
 
 def conda_convert(
-    file_path,
-    output_dir=".",
-    show_imports=False,
-    platforms=None,
-    force=False,
-    dependencies=None,
-    verbose=False,
-    quiet=False,
-    dry_run=False,
-):
+    file_path: str,
+    output_dir: str = ".",
+    show_imports: bool = False,
+    platforms: str | Iterable[str] | None = None,
+    force: bool = False,
+    dependencies: str | Iterable[str] | None = None,
+    verbose: bool = False,
+    quiet: bool = False,
+    dry_run: bool = False,
+) -> None:
     """Convert a conda package between different platforms and architectures.
 
     Positional arguments:
     file_path (str) -- the file path to the source package's tar file
     output_dir (str) -- the file path to where to output the converted tar file
     show_imports (bool) -- show all C extensions found in the source package
-    platforms (str) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64',
+    platforms (list[str]) -- the platforms to convert to: 'win-64', 'win-32', 'linux-64',
         'linux-32', 'osx-64', or 'all'
     force (bool) -- force conversion of packages that contain C extensions
-    dependencies (List[str]) -- the new dependencies to add to the source package's
+    dependencies (list[str]) -- the new dependencies to add to the source package's
         existing dependencies
     verbose (bool) -- show output of items that are updated
     quiet (bool) -- hide all output except warnings and errors
     dry_run (bool) -- show which conversions will take place
     """
+
+    platforms = ensure_list(platforms)
+    dependencies = ensure_list(dependencies)
+
     if show_imports:
         imports = retrieve_c_extensions(file_path)
         if len(imports) == 0:
diff --git a/conda_build/develop.py b/conda_build/develop.py
index 5b83185fdc..59b31a3231 100644
--- a/conda_build/develop.py
+++ b/conda_build/develop.py
@@ -1,5 +1,7 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
 import shutil
 import sys
 from os.path import abspath, exists, expanduser, isdir, join
@@ -126,13 +128,13 @@ def _uninstall(sp_dir, pkg_path):
 
 
 def execute(
-    recipe_dirs,
-    prefix=sys.prefix,
-    no_pth_file=False,
-    build_ext=False,
-    clean=False,
-    uninstall=False,
-):
+    recipe_dirs: list[str],
+    prefix: str = sys.prefix,
+    no_pth_file: bool = False,
+    build_ext: bool = False,
+    clean: bool = False,
+    uninstall: bool = False,
+) -> None:
     if not isdir(prefix):
         sys.exit(
             """\
diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py
index b202a7eb68..19c0db7ca3 100644
--- a/conda_build/inspect_pkg.py
+++ b/conda_build/inspect_pkg.py
@@ -216,8 +216,8 @@ def inspect_linkages(
     all_packages: bool = False,
     show_files: bool = False,
     groupby: Literal["package", "dependency"] = "package",
-    sysroot="",
-):
+    sysroot: str = "",
+) -> str:
     if not packages and not untracked and not all_packages:
         sys.exit("At least one package or --untracked or --all must be provided")
     elif on_win:
diff --git a/conda_build/metadata.py b/conda_build/metadata.py
index b05c27d8ae..6fd065e0b2 100644
--- a/conda_build/metadata.py
+++ b/conda_build/metadata.py
@@ -13,7 +13,7 @@
 from collections import OrderedDict
 from functools import lru_cache
 from os.path import isfile, join
-from typing import TYPE_CHECKING, overload
+from typing import TYPE_CHECKING, NamedTuple, overload
 
 from bs4 import UnicodeDammit
 from conda.base.context import context
@@ -907,7 +907,10 @@ def toposort(output_metadata_map):
     return result
 
 
-def get_output_dicts_from_metadata(metadata, outputs=None):
+def get_output_dicts_from_metadata(
+    metadata: MetaData,
+    outputs: list[dict[str, Any]] | None = None,
+) -> list[dict[str, Any]]:
     outputs = outputs or metadata.get_section("outputs")
 
     if not outputs:
@@ -2514,16 +2517,16 @@ def get_reduced_variant_set(self, used_variables):
 
     def get_output_metadata_set(
         self,
-        permit_undefined_jinja=False,
-        permit_unsatisfiable_variants=False,
-        bypass_env_check=False,
-    ):
+        permit_undefined_jinja: bool = False,
+        permit_unsatisfiable_variants: bool = False,
+        bypass_env_check: bool = False,
+    ) -> list[tuple[dict[str, Any], MetaData]]:
         from .source import provide
 
         out_metadata_map = {}
 
         if self.final:
-            outputs = get_output_dicts_from_metadata(self)[0]
-            output_tuples = [(outputs, self)]
+            outputs = get_output_dicts_from_metadata(self)
+            output_tuples = [(outputs[0], self)]
         else:
             all_output_metadata = OrderedDict()
@@ -2972,3 +2975,9 @@ def get_test_deps(self, py_files, pl_files, lua_files, r_files):
             specs.extend(utils.ensure_list(self.config.extra_deps))
 
         return specs
+
+
+class MetaDataTuple(NamedTuple):
+    metadata: MetaData
+    need_download: bool
+    need_reparse: bool
diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py
index c07a7adb71..b2de763074 100644
--- a/conda_build/os_utils/ldd.py
+++ b/conda_build/os_utils/ldd.py
@@ -52,7 +52,7 @@ def ldd(path):
 def get_linkages(
     obj_files: Iterable[str],
     prefix: str | os.PathLike | Path,
-    sysroot,
+    sysroot: str,
 ) -> dict[str, list[tuple[str, str]]]:
     return _get_linkages(tuple(obj_files), Path(prefix), sysroot)
 
@@ -61,7 +61,7 @@ def get_linkages(
 def _get_linkages(
     obj_files: tuple[str],
     prefix: Path,
-    sysroot,
+    sysroot: str,
 ) -> dict[str, list[tuple[str, str]]]:
     linkages = {}
     for file in obj_files:
diff --git a/conda_build/os_utils/pyldd.py b/conda_build/os_utils/pyldd.py
index ceffb1dbc6..ff48d5f891 100644
--- a/conda_build/os_utils/pyldd.py
+++ b/conda_build/os_utils/pyldd.py
@@ -1048,7 +1048,7 @@ def _get_magic_bit(path: Path) -> bytes:
     return None
 
 
-def _trim_sysroot(sysroot):
+def _trim_sysroot(sysroot: str) -> str:
     if sysroot:
         while sysroot.endswith("/") or sysroot.endswith("\\"):
             sysroot = sysroot[:-1]
@@ -1066,7 +1066,7 @@ def _get_arch_if_native(arch):
 
 # TODO :: Consider memoizing instead of repeatedly scanning
 # TODO :: libc.so/libSystem.dylib when inspect_linkages(recurse=True)
-def _inspect_linkages_this(filename, sysroot="", arch="native"):
+def _inspect_linkages_this(filename, sysroot: str = "", arch="native"):
     """
 
     :param filename:
@@ -1100,7 +1100,7 @@ def _inspect_linkages_this(filename, sysroot: str = "", arch="native"):
 
 # TODO :: Consider returning a tree structure or a dict when recurse is True?
 def inspect_linkages(
-    filename, resolve_filenames=True, recurse=True, sysroot="", arch="native"
+    filename, resolve_filenames=True, recurse=True, sysroot: str = "", arch="native"
 ):
     already_seen = set()
     todo = {filename}
diff --git a/conda_build/render.py b/conda_build/render.py
index 9e68031fc3..b021f8a5b6 100644
--- a/conda_build/render.py
+++ b/conda_build/render.py
@@ -14,7 +14,6 @@
 from contextlib import contextmanager
 from functools import lru_cache
 from os.path import (
-    dirname,
     isabs,
     isdir,
     isfile,
@@ -36,7 +35,7 @@
 from . import environ, exceptions, source, utils
 from .exceptions import DependencyNeedsBuildingError
 from .index import get_build_index
-from .metadata import MetaData, combine_top_level_metadata_with_output
+from .metadata import MetaData, MetaDataTuple, combine_top_level_metadata_with_output
 from .utils import (
     CONDA_PACKAGE_EXTENSION_V1,
     CONDA_PACKAGE_EXTENSION_V2,
@@ -49,7 +48,8 @@
 )
 
 if TYPE_CHECKING:
-    from typing import Iterator
+    import os
+    from typing import Any, Iterable, Iterator
 
     from .config import Config
 
@@ -63,7 +63,7 @@ def odict_representer(dumper, data):
 yaml.add_representer(OrderedDict, odict_representer)
 
 
-def bldpkg_path(m):
+def bldpkg_path(m: MetaData) -> str:
     """
     Returns path to built package's tarball given its ``Metadata``.
     """
@@ -800,8 +800,10 @@ def distribute_variants(
     permit_unsatisfiable_variants: bool = False,
     allow_no_other_outputs: bool = False,
     bypass_env_check: bool = False,
-):
-    rendered_metadata = {}
+) -> list[MetaDataTuple]:
+    rendered_metadata: dict[
+        tuple[str, str, tuple[tuple[str, str], ...]], MetaDataTuple
+    ] = {}
     need_source_download = True
 
     # don't bother distributing python if it's a noarch package, and figure out
@@ -892,23 +894,25 @@ def distribute_variants(
                 mv.config.variant.get("target_platform", mv.config.subdir),
                 tuple((var, mv.config.variant.get(var)) for var in mv.get_used_vars()),
             )
-        ] = (mv, need_source_download, None)
+        ] = MetaDataTuple(mv, need_source_download, False)
 
     # list of tuples.
     # each tuple item is a tuple of 3 items:
-    #    metadata, need_download, need_reparse_in_env
+    #    metadata, need_download, need_reparse
     return list(rendered_metadata.values())
 
 
-def expand_outputs(metadata_tuples):
+def expand_outputs(
+    metadata_tuples: Iterable[MetaDataTuple],
+) -> list[tuple[dict, MetaData]]:
     """Obtain all metadata objects for all outputs from recipe.
     Useful for outputting paths."""
-    expanded_outputs = OrderedDict()
+    from copy import deepcopy
 
-    for _m, download, reparse in metadata_tuples:
-        from .build import get_all_replacements
+    from .build import get_all_replacements
 
-        get_all_replacements(_m.config)
-        from copy import deepcopy
+    expanded_outputs: dict[str, tuple[dict, MetaData]] = {}
 
+    for _m, download, reparse in metadata_tuples:
+        get_all_replacements(_m.config)
         for output_dict, m in deepcopy(_m).get_output_metadata_set(
             permit_unsatisfiable_variants=False
         ):
@@ -943,11 +947,11 @@ def render_recipe(
     recipe_dir: str | os.PathLike | Path,
     config: Config,
     no_download_source: bool = False,
-    variants: dict | None = None,
+    variants: dict[str, Any] | None = None,
     permit_unsatisfiable_variants: bool = True,
     reset_build_id: bool = True,
     bypass_env_check: bool = False,
-) -> list[tuple[MetaData, bool, bool]]:
+) -> list[MetaDataTuple]:
     """Returns a list of tuples, each consisting of
 
     (metadata-object, needs_download, needs_render_in_env)
@@ -980,7 +984,7 @@ def render_recipe(
             m.config.variant_config_files = [cbc_yaml]
             m.config.variants = get_package_variants(m, variants=variants)
             m.config.variant = m.config.variants[0]
-            return [(m, False, False)]
+            return [MetaDataTuple(m, False, False)]
     else:
         # merge any passed-in variants with any files found
         variants = get_package_variants(m, variants=variants)
@@ -1041,7 +1045,11 @@ def ignore_aliases(self, data):
 unicode = None  # silence pyflakes about unicode not existing in py3
 
 
-def output_yaml(metadata, filename=None, suppress_outputs=False):
+def output_yaml(
+    metadata: MetaData,
+    filename: str | os.PathLike | Path | None = None,
+    suppress_outputs: bool = False,
+) -> str:
     local_metadata = metadata.copy()
     if (
         suppress_outputs
@@ -1056,13 +1064,9 @@ def output_yaml(metadata, filename=None, suppress_outputs=False):
         indent=2,
     )
     if filename:
-        if any(sep in filename for sep in ("\\", "/")):
-            try:
-                os.makedirs(dirname(filename))
-            except OSError:
-                pass
-        with open(filename, "w") as f:
-            f.write(output)
-        return "Wrote yaml to %s" % filename
+        filename = Path(filename)
+        filename.parent.mkdir(parents=True, exist_ok=True)
+        filename.write_text(output)
+        return f"Wrote yaml to {filename}"
     else:
         return output
diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py
index 4d65ef7cb1..c9bd5c398c 100644
--- a/conda_build/skeletons/cpan.py
+++ b/conda_build/skeletons/cpan.py
@@ -4,6 +4,8 @@
 Tools for converting CPAN packages to conda recipes.
 """
 
+from __future__ import annotations
+
 import codecs
 import gzip
 import hashlib
@@ -384,15 +386,15 @@ def get_core_modules_for_this_perl_version(version, cache_dir):
 
 # meta_cpan_url="http://api.metacpan.org",
 def skeletonize(
-    packages,
-    output_dir=".",
-    version=None,
-    meta_cpan_url="https://fastapi.metacpan.org/v1",
-    recursive=False,
-    force=False,
-    config=None,
-    write_core=False,
-):
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    meta_cpan_url: str = "https://fastapi.metacpan.org/v1",
+    recursive: bool = False,
+    force: bool = False,
+    config: Config | None = None,
+    write_core: bool = False,
+) -> None:
     """
     Loops over packages, outputting conda recipes converted from CPAN metata.
     """
diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py
index fbd959dba2..38628a52f4 100755
--- a/conda_build/skeletons/cran.py
+++ b/conda_build/skeletons/cran.py
@@ -55,6 +55,8 @@
 if TYPE_CHECKING:
     from typing import Literal
 
+    from ..config import Config
+
 SOURCE_META = """\
 {archive_keys}
 {git_url_key} {git_url}
@@ -863,28 +865,36 @@ def remove_comments(template):
 
 
 def skeletonize(
-    in_packages,
-    output_dir=".",
-    output_suffix="",
-    add_maintainer=None,
-    version=None,
-    git_tag=None,
-    cran_url=None,
-    recursive=False,
-    archive=True,
-    version_compare=False,
-    update_policy="",
-    r_interp="r-base",
-    use_binaries_ver=None,
-    use_noarch_generic=False,
-    use_when_no_binary: Literal["error" | "src" | "old" | "old-src"] = "src",
-    use_rtools_win=False,
-    config=None,
-    variant_config_files=None,
-    allow_archived=False,
-    add_cross_r_base=False,
-    no_comments=False,
-):
+    in_packages: list[str],
+    output_dir: str = ".",
+    output_suffix: str = "",
+    add_maintainer: str | None = None,
+    version: str | None = None,
+    git_tag: str | None = None,
+    cran_url: str | None = None,
+    recursive: bool = False,
+    archive: bool = True,
+    version_compare: bool = False,
+    update_policy: Literal[
+        "error",
+        "skip-up-to-date",
+        "skip-existing",
+        "overwrite",
+        "merge-keep-build-num",
+        "merge-incr-build-num",
+    ]
+    | None = None,
+    r_interp: str = "r-base",
+    use_binaries_ver: str | None = None,
+    use_noarch_generic: bool = False,
+    use_when_no_binary: Literal["error", "src", "old", "old-src"] = "src",
+    use_rtools_win: bool = False,
+    config: Config | None = None,
+    variant_config_files: list[str] | None = None,
+    allow_archived: bool = False,
+    add_cross_r_base: bool = False,
+    no_comments: bool = False,
+) -> None:
     if (
         use_when_no_binary != "error"
         and use_when_no_binary != "src"
@@ -1089,7 +1099,11 @@ def skeletonize(
             script_env = []
             extra_recipe_maintainers = []
             build_number = 0
-            if update_policy.startswith("merge") and inputs["old-metadata"]:
+            if (
+                update_policy
+                and update_policy.startswith("merge")
+                and inputs["old-metadata"]
+            ):
                 m = inputs["old-metadata"]
                 patches = make_array(m, "source/patches")
                 script_env = make_array(m, "build/script_env")
diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py
index 14d9c44f77..da8e641928 100644
--- a/conda_build/skeletons/luarocks.py
+++ b/conda_build/skeletons/luarocks.py
@@ -8,6 +8,8 @@
 # - mingw32 support (really any windows support, completely untested)
 # - replace manual "luajit -e require 'blah'" with built-in entry-point testing
 
+from __future__ import annotations
+
 import json
 import os
 import subprocess
@@ -224,7 +226,12 @@ def ensure_base_deps(deps):
     return deps
 
 
-def skeletonize(packages, output_dir=".", version=None, recursive=False):
+def skeletonize(
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+) -> None:
     # Check that we have Lua installed (any version)
     # Check that we have luarocks installed
diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py
index 7df95a9ad5..c45c843a6d 100644
--- a/conda_build/skeletons/pypi.py
+++ b/conda_build/skeletons/pypi.py
@@ -4,6 +4,8 @@
 Tools for converting PyPI packages to conda recipes.
 """
 
+from __future__ import annotations
+
 import configparser
 import keyword
 import logging
@@ -17,6 +19,7 @@
 from os.path import abspath, exists, isdir, isfile, join
 from shutil import copy2
 from tempfile import mkdtemp
+from typing import TYPE_CHECKING
 from urllib.parse import urljoin, urlsplit
 
 import pkginfo
@@ -46,6 +49,9 @@
 )
 from ..version import _parse as parse_version
 
+if TYPE_CHECKING:
+    from typing import Iterable
+
 pypi_example = """
 Examples:
 
@@ -251,30 +257,27 @@ def _formating_value(attribute_name, attribute_value):
 
 
 def skeletonize(
-    packages,
-    output_dir=".",
-    version=None,
-    recursive=False,
-    all_urls=False,
-    pypi_url="https://pypi.io/pypi/",
-    noprompt=True,
-    version_compare=False,
-    python_version=None,
-    manual_url=False,
-    all_extras=False,
-    noarch_python=False,
-    config=None,
-    setup_options=None,
-    extra_specs=[],
-    pin_numpy=False,
-):
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+    all_urls: bool = False,
+    pypi_url: str = "https://pypi.io/pypi/",
+    noprompt: bool = True,
+    version_compare: bool = False,
+    python_version: str | None = None,
+    manual_url: bool = False,
+    all_extras: bool = False,
+    noarch_python: bool = False,
+    config: Config | None = None,
+    setup_options: str | Iterable[str] | None = None,
+    extra_specs: str | Iterable[str] | None = None,
+    pin_numpy: bool = False,
+) -> None:
     package_dicts = {}
 
-    if not setup_options:
-        setup_options = []
-
-    if isinstance(setup_options, str):
-        setup_options = [setup_options]
+    setup_options = ensure_list(setup_options)
+    extra_specs = ensure_list(extra_specs)
 
     if not config:
         config = Config()
diff --git a/conda_build/skeletons/rpm.py b/conda_build/skeletons/rpm.py
index f0abb8c747..d44477171f 100644
--- a/conda_build/skeletons/rpm.py
+++ b/conda_build/skeletons/rpm.py
@@ -1,5 +1,7 @@
 # Copyright (C) 2014 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
 import argparse
 import gzip
 import hashlib
@@ -9,13 +11,20 @@
 from os import chmod, makedirs
 from os.path import basename, dirname, exists, join, splitext
 from textwrap import wrap
+from typing import TYPE_CHECKING
 from urllib.request import urlopen
 from xml.etree import ElementTree as ET
 
 from ..license_family import guess_license_family
 from ..source import download_to_cache
+from ..utils import ensure_list
 from .cran import yaml_quote_string
 
+if TYPE_CHECKING:
+    from typing import Iterable
+
+    from ..config import Config
+
 # This is used in two places
 default_architecture = "x86_64"
 default_distro = "centos6"
@@ -637,14 +646,14 @@ def write_conda_recipes(
 
 # Do I want to pass just the package name, the CDT and the arch and rely on
 # expansion to form the URL? I have been going backwards and forwards here.
 def write_conda_recipe(
-    packages,
-    distro,
-    output_dir,
-    architecture,
-    recursive,
-    override_arch,
-    dependency_add,
-    config,
+    packages: list[str],
+    distro: str,
+    output_dir: str,
+    architecture: str,
+    recursive: bool,
+    override_arch: bool,
+    dependency_add: list[str],
+    config: Config | None,
 ):
     cdt_name = distro
     bits = "32" if architecture in ("armv6", "armv7a", "i686", "i386") else "64"
@@ -706,16 +715,18 @@ def write_conda_recipe(
 
 
 def skeletonize(
-    packages,
-    output_dir=".",
-    version=None,
-    recursive=False,
-    architecture=default_architecture,
-    override_arch=True,
-    dependency_add=[],
-    config=None,
-    distro=default_distro,
+    packages: list[str],
+    output_dir: str = ".",
+    version: str | None = None,
+    recursive: bool = False,
+    architecture: str = default_architecture,
+    override_arch: bool = True,
+    dependency_add: str | Iterable[str] | None = None,
+    config: Config | None = None,
+    distro: str = default_distro,
 ):
+    dependency_add = ensure_list(dependency_add)
+
     write_conda_recipe(
         packages,
         distro,
diff --git a/conda_build/utils.py b/conda_build/utils.py
index 92de8b24a1..4a3e1f782c 100644
--- a/conda_build/utils.py
+++ b/conda_build/utils.py
@@ -72,6 +72,8 @@
 if TYPE_CHECKING:
     from typing import Mapping, TypeVar
 
+    from .metadata import MetaData
+
     T = TypeVar("T")
     K = TypeVar("K")
     V = TypeVar("V")
@@ -1124,7 +1126,7 @@ def convert_path_for_cygwin_or_msys2(exe, path):
     return path
 
 
-def get_skip_message(m):
+def get_skip_message(m: MetaData) -> str:
     return (
         f"Skipped: {m.name()} from {m.path} defines build/skip for this configuration "
         f"({({k: m.config.variant[k] for k in m.get_used_vars()})})."
@@ -1250,9 +1252,13 @@ def tmp_chdir(dest):
         os.chdir(curdir)
 
 
-def expand_globs(path_list, root_dir):
+def expand_globs(
+    path_list: str | os.PathLike | Path | Iterable[str | os.PathLike | Path],
+    root_dir: str | os.PathLike | Path,
+) -> list[str]:
     files = []
     for path in ensure_list(path_list):
+        path = str(path)
         if not os.path.isabs(path):
             path = os.path.join(root_dir, path)
         if os.path.isfile(path):
@@ -1276,11 +1282,10 @@ def expand_globs(path_list, root_dir):
             # Avoid this potential ambiguity by sorting. (see #4185)
             files.extend(sorted(glob_files))
     prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}"))
-    files = [prefix_path_re.sub("", f, 1) for f in files]
-    return files
+    return [prefix_path_re.sub("", f, 1) for f in files]
 
 
-def find_recipe(path):
+def find_recipe(path: str) -> str:
     """recurse through a folder, locating valid meta files (see VALID_METAS). Raises error if more than one is found.
 
     Returns full path to meta file to be built.
diff --git a/news/5271-context b/news/5271-context
new file mode 100644
index 0000000000..b4143e00f4
--- /dev/null
+++ b/news/5271-context
@@ -0,0 +1,19 @@
+### Enhancements
+
+* Require `conda >=23.7.0`. (#5271)
+
+### Bug fixes
+
+* Fix all CLI subcommands to properly initialize `conda.base.context.context` with the parsed arguments. This fixes arguments not being processed (e.g., `--override-channels` was previously ignored). (#3693 via #5271)
+
+### Deprecations
+
+* Deprecate `conda_build.config.Config.override_channels`. Use `conda.base.context.context.override_channels` instead. (#5271)
+
+### Docs
+
+*
+
+### Other
+
+*
diff --git a/pyproject.toml b/pyproject.toml
index 229333b6a5..334c119996 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,7 +25,7 @@ classifiers = [
 dependencies = [
   "beautifulsoup4",
   "chardet",
-  "conda >=23.5.0",
+  "conda >=23.7.0",
   "conda-index >=0.4.0",
   "conda-package-handling >=1.3",
   "filelock",
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 8171f8167d..d1b6440118 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -30,7 +30,7 @@ requirements:
   run:
     - beautifulsoup4
     - chardet
-    - conda >=23.5.0
+    - conda >=23.7.0
     - conda-index >=0.4.0
     - conda-package-handling >=1.3
     - filelock
diff --git a/tests/cli/test_main_build.py b/tests/cli/test_main_build.py
index 15b3d67237..9f4ce1cbb0 100644
--- a/tests/cli/test_main_build.py
+++ b/tests/cli/test_main_build.py
@@ -8,6 +8,7 @@
 from typing import TYPE_CHECKING
 
 import pytest
+from conda.exceptions import PackagesNotFoundError
 
 from conda_build import api
 from conda_build.cli import main_build, main_render
@@ -549,3 +550,14 @@ def test_user_warning(tmpdir, recwarn):
         main_build.parse_args([str(dir_recipe_path)])
 
     assert not recwarn.list
+
+
+def test_build_with_empty_channel_fails(empty_channel: Path) -> None:
+    with pytest.raises(PackagesNotFoundError):
+        main_build.execute(
+            [
+                "--override-channels",
+                f"--channel={empty_channel}",
+                os.path.join(metadata_dir, "_recipe_requiring_external_channel"),
+            ]
+        )
diff --git a/tests/cli/test_main_render.py b/tests/cli/test_main_render.py
index bf00ac6fd1..ef5fdf077d 100644
--- a/tests/cli/test_main_render.py
+++ b/tests/cli/test_main_render.py
@@ -8,6 +8,7 @@
 
 import pytest
 import yaml
+from conda.exceptions import PackagesNotFoundError
 
 from conda_build import api
 from conda_build.cli import main_render
@@ -48,26 +49,16 @@ def test_render_add_channel(tmp_path: Path) -> None:
     ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}"
 
 
-def test_render_without_channel_fails(tmp_path):
-    # do make extra channel available, so the required package should not be found
-    rendered_filename = tmp_path / "out.yaml"
-    args = [
-        "--override-channels",
-        os.path.join(metadata_dir, "_recipe_requiring_external_channel"),
-        "--file",
-        str(rendered_filename),
-    ]
-    main_render.execute(args)
-    with open(rendered_filename) as rendered_file:
-        rendered_meta = yaml.safe_load(rendered_file)
-    required_package_string = [
-        pkg
-        for pkg in rendered_meta.get("requirements", {}).get("build", [])
-        if "conda_build_test_requirement" in pkg
-    ][0]
-    assert (
-        required_package_string == "conda_build_test_requirement"
-    ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}"
+def test_render_with_empty_channel_fails(tmp_path: Path, empty_channel: Path) -> None:
+    with pytest.raises(PackagesNotFoundError):
+        main_render.execute(
+            [
+                "--override-channels",
+                f"--channel={empty_channel}",
+                os.path.join(metadata_dir, "_recipe_requiring_external_channel"),
+                f"--file={tmp_path / 'out.yaml'}",
+            ]
+        )
 
 
 def test_render_output_build_path(
diff --git a/tests/cli/test_main_skeleton.py b/tests/cli/test_main_skeleton.py
index 0333d77c1f..c2dd0a65b5 100644
--- a/tests/cli/test_main_skeleton.py
+++ b/tests/cli/test_main_skeleton.py
@@ -54,6 +54,6 @@ def test_skeleton_pypi_arguments_work(testing_workdir):
     assert os.path.isdir("photutils")
 
     # Check that the setup option occurs in bld.bat and build.sh.
-    m = api.render("photutils")[0][0]
-    assert "--offline" in m.meta["build"]["script"]
-    assert m.version() == "1.10.0"
+    metadata = api.render("photutils")[0][0]
+    assert "--offline" in metadata.meta["build"]["script"]
+    assert metadata.version() == "1.10.0"
diff --git a/tests/conftest.py b/tests/conftest.py
index 378211b3dd..465cab6fcc 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,6 +10,7 @@
 
 import pytest
 from conda.common.compat import on_mac, on_win
+from conda_index.api import update_index
 from pytest import MonkeyPatch
 
 import conda_build
@@ -248,3 +249,11 @@ def conda_build_test_recipe_envvar(
     name = "CONDA_BUILD_TEST_RECIPE_PATH"
     monkeypatch.setenv(name, str(conda_build_test_recipe_path))
     return name
+
+
+@pytest.fixture(scope="session")
+def empty_channel(tmp_path_factory: pytest.TempPathFactory) -> Path:
+    """Create a temporary, empty conda channel."""
+    channel = tmp_path_factory.mktemp("empty_channel", numbered=False)
+    update_index(channel)
+    return channel
diff --git a/tests/requirements.txt b/tests/requirements.txt
index e005250f59..acb3317206 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,6 +1,6 @@
 beautifulsoup4
 chardet
-conda >=23.5.0
+conda >=23.7.0
 conda-index >=0.4.0
 conda-libmamba-solver  # ensure we use libmamba
 conda-package-handling >=1.3
diff --git a/tests/test_api_build.py b/tests/test_api_build.py
index 8871fcedf7..a663f18e73 100644
--- a/tests/test_api_build.py
+++ b/tests/test_api_build.py
@@ -237,8 +237,8 @@ def test_offline(
 
 def test_git_describe_info_on_branch(testing_config):
     recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch")
-    m = api.render(recipe_path, config=testing_config)[0][0]
-    output = api.get_output_file_paths(m)[0]
+    metadata = api.render(recipe_path, config=testing_config)[0][0]
+    output = api.get_output_file_paths(metadata)[0]
     # missing hash because we set custom build string in meta.yaml
     test_path = os.path.join(
         testing_config.croot,
@@ -613,13 +613,13 @@ def test_numpy_setup_py_data(testing_config):
     #    - cython
     subprocess.call("conda remove -y cython".split())
     with pytest.raises(CondaBuildException) as exc_info:
-        api.render(recipe_path, config=testing_config, numpy="1.16")[0][0]
+        api.render(recipe_path, config=testing_config, numpy="1.16")
     assert exc_info.match("Cython")
     subprocess.check_call(["conda", "install", "-y", "cython"])
-    m = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0]
-    _hash = m.hash_dependencies()
+    metadata = api.render(recipe_path, config=testing_config, numpy="1.16")[0][0]
+    _hash = metadata.hash_dependencies()
     assert (
-        os.path.basename(api.get_output_file_paths(m)[0])
+        os.path.basename(api.get_output_file_paths(metadata)[0])
         == f"load_setup_py_test-0.1.0-np116py{sys.version_info.major}{sys.version_info.minor}{_hash}_0.tar.bz2"
     )
@@ -1178,9 +1178,9 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir):
     #    will be merged when build subdir == host_subdir, the weak run_exports should be present.
     testing_metadata.meta["requirements"]["build"] = ["test_has_run_exports"]
     api.output_yaml(testing_metadata, "meta.yaml")
-    m = api.render(testing_workdir, config=testing_config)[0][0]
-    assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"]
-    assert "weak_pinned_package 1.0.*" in m.meta["requirements"]["run"]
+    metadata = api.render(testing_workdir, config=testing_config)[0][0]
+    assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"]
+    assert "weak_pinned_package 1.0.*" in metadata.meta["requirements"]["run"]
 
     # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are
     #    weak by default.
@@ -1190,10 +1190,12 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir):
     ]
     testing_metadata.meta["requirements"]["host"] = ["python"]
     api.output_yaml(testing_metadata, "host_present_weak/meta.yaml")
-    m = api.render(
+    metadata = api.render(
         os.path.join(testing_workdir, "host_present_weak"), config=testing_config
     )[0][0]
-    assert "weak_pinned_package 2.0.*" not in m.meta["requirements"].get("run", [])
+    assert "weak_pinned_package 2.0.*" not in metadata.meta["requirements"].get(
+        "run", []
+    )
 
     # 3. host present, and deps in build have "strong" run_exports section. use host, add
     #    in "strong" from build.
@@ -1205,15 +1207,15 @@ def test_run_exports(testing_metadata, testing_config, testing_workdir):
         "test_has_run_exports_implicit_weak"
     ]
     api.output_yaml(testing_metadata, "host_present_strong/meta.yaml")
-    m = api.render(
+    metadata = api.render(
         os.path.join(testing_workdir, "host_present_strong"), config=testing_config
     )[0][0]
-    assert "strong_pinned_package 1.0 0" in m.meta["requirements"]["host"]
-    assert "strong_pinned_package 1.0.*" in m.meta["requirements"]["run"]
+    assert "strong_pinned_package 1.0 0" in metadata.meta["requirements"]["host"]
+    assert "strong_pinned_package 1.0.*" in metadata.meta["requirements"]["run"]
     # weak one from test_has_run_exports should be excluded, since it is a build dep
-    assert "weak_pinned_package 1.0.*" not in m.meta["requirements"]["run"]
+    assert "weak_pinned_package 1.0.*" not in metadata.meta["requirements"]["run"]
     # weak one from test_has_run_exports_implicit_weak should be present, since it is a host dep
-    assert "weak_pinned_package 2.0.*" in m.meta["requirements"]["run"]
+    assert "weak_pinned_package 2.0.*" in metadata.meta["requirements"]["run"]
 
 
 @pytest.mark.sanity
@@ -1279,20 +1281,20 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi
     testing_metadata.meta["requirements"]["build"] = ["run_exports_constrains"]
     testing_metadata.meta["requirements"]["host"] = []
     api.output_yaml(testing_metadata, "in_build/meta.yaml")
-    m = api.render(os.path.join(testing_workdir, "in_build"), config=testing_config)[0][
-        0
-    ]
-    reqs_set = lambda section: set(m.meta["requirements"].get(section, []))
+    metadata = api.render(
+        os.path.join(testing_workdir, "in_build"), config=testing_config
+    )[0][0]
+    reqs_set = lambda section: set(metadata.meta["requirements"].get(section, []))
     assert {"strong_run_export"} == reqs_set("run")
     assert {"strong_constrains_export"} == reqs_set("run_constrained")
 
     testing_metadata.meta["requirements"]["build"] = []
     testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains"]
     api.output_yaml(testing_metadata, "in_host/meta.yaml")
-    m = api.render(os.path.join(testing_workdir, "in_host"), config=testing_config)[0][
-        0
-    ]
-    reqs_set = lambda section: set(m.meta["requirements"].get(section, []))
+    metadata = api.render(
+        os.path.join(testing_workdir, "in_host"), config=testing_config
+    )[0][0]
+    reqs_set = lambda section: set(metadata.meta["requirements"].get(section, []))
     assert {"strong_run_export", "weak_run_export"} == reqs_set("run")
     assert {"strong_constrains_export", "weak_constrains_export"} == reqs_set(
         "run_constrained"
@@ -1303,32 +1305,32 @@ def test_run_exports_constrains(testing_metadata, testing_config, testing_workdi
     ]
     testing_metadata.meta["requirements"]["host"] = []
     api.output_yaml(testing_metadata, "only_weak_in_build/meta.yaml")
-    m = api.render(
+    metadata = api.render(
         os.path.join(testing_workdir, "only_weak_in_build"), config=testing_config
     )[0][0]
-    reqs_set = lambda section: set(m.meta["requirements"].get(section, []))
+    reqs_set = lambda section: set(metadata.meta["requirements"].get(section, []))
     assert set() == reqs_set("run")
     assert set() == reqs_set("run_constrained")
 
     testing_metadata.meta["requirements"]["build"] = []
     testing_metadata.meta["requirements"]["host"] = ["run_exports_constrains_only_weak"]
     api.output_yaml(testing_metadata, "only_weak_in_host/meta.yaml")
-    m = api.render(
+    metadata = api.render(
         os.path.join(testing_workdir, "only_weak_in_host"), config=testing_config
     )[0][0]
-    reqs_set = lambda section: set(m.meta["requirements"].get(section, []))
+    reqs_set = lambda section: set(metadata.meta["requirements"].get(section, []))
     assert {"weak_run_export"} == reqs_set("run")
     assert {"weak_constrains_export"} == reqs_set("run_constrained")
 
 
 def test_pin_subpackage_exact(testing_config):
     recipe = os.path.join(metadata_dir, "_pin_subpackage_exact")
-    ms = api.render(recipe, config=testing_config)
-    assert len(ms) == 2
+    metadata_tuples = api.render(recipe, config=testing_config)
+    assert len(metadata_tuples) == 2
     assert any(
         re.match(r"run_exports_subpkg\ 1\.0\ 0", req)
-        for (m, _, _) in ms
-        for req in m.meta.get("requirements", {}).get("run", [])
+        for metadata, _, _ in metadata_tuples
+        for req in metadata.meta.get("requirements", {}).get("run", [])
     )
 
 
@@ -1420,12 +1422,12 @@ def test_unknown_selectors(testing_config):
 @pytest.mark.flaky(reruns=5, reruns_delay=2)
 def test_failed_recipe_leaves_folders(testing_config):
     recipe = os.path.join(fail_dir, "recursive-build")
-    m = api.render(recipe, config=testing_config)[0][0]
-    locks = get_conda_operation_locks(m.config)
+    metadata = api.render(recipe, config=testing_config)[0][0]
+    locks = get_conda_operation_locks(metadata.config)
     with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)):
-        api.build(m)
-    assert os.path.isdir(m.config.build_folder), "build folder was removed"
-    assert os.listdir(m.config.build_folder), "build folder has no files"
+        api.build(metadata)
+    assert os.path.isdir(metadata.config.build_folder), "build folder was removed"
+    assert os.listdir(metadata.config.build_folder), "build folder has no files"
 
     # make sure that it does not leave lock files, though, as these cause permission errors on
     # centralized installations
@@ -1646,13 +1648,14 @@ def test_pin_depends(testing_config):
     dependencies
     """
     recipe = os.path.join(metadata_dir, "_pin_depends_record")
-    m = api.render(recipe, config=testing_config)[0][0]
+    metadata = api.render(recipe, config=testing_config)[0][0]
     # the recipe python is not pinned, and having pin_depends set to record
     # will not show it in record
     assert not any(
-        re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"]
+        re.search(r"python\s+[23]\.", dep)
+        for dep in metadata.meta["requirements"]["run"]
     )
-    output = api.build(m, config=testing_config)[0]
+    output = api.build(metadata, config=testing_config)[0]
     requires = package_has_file(output, "info/requires")
     assert requires
     if hasattr(requires, "decode"):
diff --git a/tests/test_api_render.py b/tests/test_api_render.py
index 60a381ebf1..293ca09815 100644
--- a/tests/test_api_render.py
+++ b/tests/test_api_render.py
@@ -139,9 +139,9 @@ def test_resolved_packages_recipe(testing_config):
 @pytest.mark.slow
 def test_host_entries_finalized(testing_config):
     recipe = os.path.join(metadata_dir, "_host_entries_finalized")
-    metadata = api.render(recipe, config=testing_config)
-    assert len(metadata) == 2
-    outputs = api.get_output_file_paths(metadata)
+    metadata_tuples = api.render(recipe, config=testing_config)
+    assert len(metadata_tuples) == 2
+    outputs = api.get_output_file_paths(metadata_tuples)
     assert any("py27" in out for out in outputs)
     assert any("py39" in out for out in outputs)
 
@@ -159,10 +159,11 @@ def test_hash_no_apply_to_custom_build_string(testing_metadata, testing_workdir)
 def test_pin_depends(testing_config):
     """This is deprecated functionality - replaced by the more general variants pinning scheme"""
     recipe = os.path.join(metadata_dir, "_pin_depends_strict")
-    m = api.render(recipe, config=testing_config)[0][0]
+    metadata = api.render(recipe, config=testing_config)[0][0]
     # the recipe python is not pinned, but having pin_depends set will force it to be.
     assert any(
-        re.search(r"python\s+[23]\.", dep) for dep in m.meta["requirements"]["run"]
+        re.search(r"python\s+[23]\.", dep)
+        for dep in metadata.meta["requirements"]["run"]
     )
 
 
@@ -189,10 +190,10 @@ def test_noarch_with_platform_deps(testing_workdir, testing_config):
     build_ids = {}
     for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]:
         platform, arch = subdir_.split("-")
-        m = api.render(
+        metadata = api.render(
             recipe_path, config=testing_config, platform=platform, arch=arch
         )[0][0]
-        build_ids[subdir_] = m.build_id()
+        build_ids[subdir_] = metadata.build_id()
 
     # one hash for each platform, plus one for the archspec selector
     assert len(set(build_ids.values())) == 4
@@ -206,8 +207,10 @@ def test_noarch_with_no_platform_deps(testing_workdir, testing_config):
     recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps")
     build_ids = set()
     for platform in ["osx", "linux", "win"]:
-        m = api.render(recipe_path, config=testing_config, platform=platform)[0][0]
-        build_ids.add(m.build_id())
+        metadata = api.render(recipe_path, config=testing_config, platform=platform)[0][
+            0
+        ]
+        build_ids.add(metadata.build_id())
 
     assert len(build_ids) == 1
 
@@ -230,21 +233,21 @@ def test_setting_condarc_vars_with_env_var_expansion(testing_workdir, mocker):
     )
 
     os.environ["TEST_WORKDIR"] = testing_workdir
-    m = api.render(
+    metadata = api.render(
         os.path.join(variants_dir, "19_used_variables"),
         bypass_env_check=True,
         finalize=False,
     )[0][0]
     # this one should have gotten clobbered by the values in the recipe
-    assert m.config.variant["python"] not in python_versions
+    assert metadata.config.variant["python"] not in python_versions
     # this confirms that we loaded the config file correctly
-    assert len(m.config.squished_variants["bzip2"]) == 2
+    assert len(metadata.config.squished_variants["bzip2"]) == 2
 
 
 def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly():
     recipe = os.path.join(metadata_dir, "_self_reference_run_exports")
-    m = api.render(recipe)[0][0]
-    run_exports = m.meta.get("build", {}).get("run_exports", [])
+    metadata = api.render(recipe)[0][0]
+    run_exports
= metadata.meta.get("build", {}).get("run_exports", []) assert run_exports assert len(run_exports) == 1 assert run_exports[0].split()[1] == ">=1.0.0,<2.0a0" @@ -252,11 +255,11 @@ def test_self_reference_run_exports_pin_subpackage_picks_up_version_correctly(): def test_run_exports_with_pin_compatible_in_subpackages(testing_config): recipe = os.path.join(metadata_dir, "_run_exports_in_outputs") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - if m.name().startswith("gfortran_"): + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: + if metadata.name().startswith("gfortran_"): run_exports = set( - m.meta.get("build", {}).get("run_exports", {}).get("strong", []) + metadata.meta.get("build", {}).get("run_exports", {}).get("strong", []) ) assert len(run_exports) == 1 # len after splitting should be more than one because of pin_compatible. If it's only zlib, we've lost the @@ -266,38 +269,46 @@ def test_run_exports_with_pin_compatible_in_subpackages(testing_config): def test_ignore_build_only_deps(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "python_in_build_only"), bypass_env_check=True, finalize=False, ) - assert len(ms) == 1 + assert len(metadata_tuples) == 1 def test_merge_build_host_build_key(): - m = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_no_merge_build_host"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_merge_build_host_empty_host_section(): - m = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] - assert not any("bzip2" in dep for dep in m.meta["requirements"]["run"]) + metadata = api.render(os.path.join(metadata_dir, "_empty_host_avoids_merge"))[0][0] + assert not any("bzip2" in dep for dep in metadata.meta["requirements"]["run"]) def test_pin_expression_works_with_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "foo >=3.10.0.rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "foo >=3.10.0.rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] def test_pin_expression_works_with_python_prereleases(testing_config): recipe = os.path.join(metadata_dir, "_pinning_prerelease_python") - ms = api.render(recipe, config=testing_config) - assert len(ms) == 2 - m = next(m_[0] for m_ in ms if m_[0].meta["package"]["name"] == "bar") - assert "python >=3.10.0rc1,<3.11.0a0" in m.meta["requirements"]["run"] + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + metadata = next( + metadata + for metadata, _, _ in metadata_tuples + if metadata.meta["package"]["name"] == "bar" + ) + assert "python >=3.10.0rc1,<3.11.0a0" in metadata.meta["requirements"]["run"] @pytest.mark.benchmark @@ -326,7 +337,7 @@ def create_variants(): validate_spec("", variant) return variant - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, channels=[], variants=create_variants() ) - assert len(ms) == 11 - 3 # omits 
libarrow-all, pyarrow, pyarrow-tests + assert len(metadata_tuples) == 11 - 3 # omits libarrow-all, pyarrow, pyarrow-tests diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index a8273492b0..963312ee44 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -234,8 +234,8 @@ def test_sympy(package: str, version: str | None, tmp_path: Path, testing_config config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] - assert m.version() == "1.10" + metadata = api.render(str(tmp_path / "sympy" / "meta.yaml"))[0][0] + assert metadata.version() == "1.10" def test_get_entry_points(pylint_pkginfo, pylint_metadata): @@ -350,8 +350,8 @@ def test_pypi_with_setup_options(tmp_path: Path, testing_config): ) # Check that the setup option occurs in bld.bat and build.sh. - m = api.render(str(tmp_path / "photutils"))[0][0] - assert "--offline" in m.meta["build"]["script"] + metadata = api.render(str(tmp_path / "photutils"))[0][0] + assert "--offline" in metadata.meta["build"]["script"] def test_pypi_pin_numpy(tmp_path: Path, testing_config: Config): @@ -377,8 +377,8 @@ def test_pypi_version_sorting(tmp_path: Path, testing_config: Config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "fasttext"))[0][0] - assert parse_version(m.version()) >= parse_version("0.9.2") + metadata = api.render(str(tmp_path / "fasttext"))[0][0] + assert parse_version(metadata.version()) >= parse_version("0.9.2") def test_list_skeletons(): @@ -394,8 +394,8 @@ def test_pypi_with_entry_points(tmp_path: Path): def test_pypi_with_version_arg(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1442 api.skeletonize("PrettyTable", "pypi", version="0.7.2", output_dir=tmp_path) - m = api.render(str(tmp_path / "prettytable"))[0][0] - assert parse_version(m.version()) == parse_version("0.7.2") + metadata = api.render(str(tmp_path / "prettytable"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.7.2") @pytest.mark.slow @@ -415,10 +415,10 @@ def test_pypi_with_extra_specs(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "bigfile"))[0][0] - assert parse_version(m.version()) == parse_version("0.1.24") - assert any("cython" in req for req in m.meta["requirements"]["host"]) - assert any("mpi4py" in req for req in m.meta["requirements"]["host"]) + metadata = api.render(str(tmp_path / "bigfile"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.1.24") + assert any("cython" in req for req in metadata.meta["requirements"]["host"]) + assert any("mpi4py" in req for req in metadata.meta["requirements"]["host"]) @pytest.mark.slow @@ -438,17 +438,17 @@ def test_pypi_with_version_inconsistency(tmp_path: Path, testing_config): config=testing_config, output_dir=tmp_path, ) - m = api.render(str(tmp_path / "mpi4py_test"))[0][0] - assert parse_version(m.version()) == parse_version("0.0.10") + metadata = api.render(str(tmp_path / "mpi4py_test"))[0][0] + assert parse_version(metadata.version()) == parse_version("0.0.10") def test_pypi_with_basic_environment_markers(tmp_path: Path): # regression test for https://github.com/conda/conda-build/issues/1974 api.skeletonize("coconut", "pypi", version="1.2.2", output_dir=tmp_path) - m = api.render(tmp_path / "coconut")[0][0] + metadata = api.render(tmp_path / "coconut")[0][0] - build_reqs = str(m.meta["requirements"]["host"]) - run_reqs = str(m.meta["requirements"]["run"]) + 
build_reqs = str(metadata.meta["requirements"]["host"]) + run_reqs = str(metadata.meta["requirements"]["run"]) # should include the right dependencies for the right version assert "futures" not in build_reqs assert "futures" not in run_reqs @@ -458,8 +458,8 @@ def test_pypi_with_basic_environment_markers(tmp_path: Path): def test_setuptools_test_requirements(tmp_path: Path): api.skeletonize(packages="hdf5storage", repo="pypi", output_dir=tmp_path) - m = api.render(str(tmp_path / "hdf5storage"))[0][0] - assert m.meta["test"]["requires"] == ["nose >=1.0"] + metadata = api.render(str(tmp_path / "hdf5storage"))[0][0] + assert metadata.meta["test"]["requires"] == ["nose >=1.0"] @pytest.mark.skipif(sys.version_info < (3, 8), reason="sympy is python 3.8+") diff --git a/tests/test_api_skeleton_cpan.py b/tests/test_api_skeleton_cpan.py index 9f08ccbae6..5945158023 100644 --- a/tests/test_api_skeleton_cpan.py +++ b/tests/test_api_skeleton_cpan.py @@ -17,8 +17,8 @@ def test_xs_needs_c_compiler(testing_config): """Perl packages with XS files need a C compiler""" # This uses Sub::Identify=0.14 since it includes no .c files but a .xs file. api.skeletonize("Sub::Identify", version="0.14", repo="cpan", config=testing_config) - m = api.render("perl-sub-identify/0.14", finalize=False, bypass_env_check=True)[0][ - 0 - ] - build_requirements = m.get_value("requirements/build") + metadata = api.render( + "perl-sub-identify/0.14", finalize=False, bypass_env_check=True + )[0][0] + build_requirements = metadata.get_value("requirements/build") assert compiler("c", testing_config) in build_requirements diff --git a/tests/test_api_skeleton_cran.py b/tests/test_api_skeleton_cran.py index 912b2bee0c..57e9d02550 100644 --- a/tests/test_api_skeleton_cran.py +++ b/tests/test_api_skeleton_cran.py @@ -41,12 +41,12 @@ def test_cran_license( api.skeletonize( packages=package, repo="cran", output_dir=tmp_path, config=testing_config ) - m = api.render(str(tmp_path / package / "meta.yaml"))[0][0] + metadata = api.render(str(tmp_path / package / "meta.yaml"))[0][0] - assert m.get_value("about/license") == license_id - assert m.get_value("about/license_family") == license_family + assert metadata.get_value("about/license") == license_id + assert metadata.get_value("about/license_family") == license_family assert { - Path(license).name for license in m.get_value("about/license_file", "") + Path(license).name for license in metadata.get_value("about/license_file", "") } == set(license_files) diff --git a/tests/test_build.py b/tests/test_build.py index eca9441af8..fd6a3a8f67 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -41,11 +41,11 @@ def test_find_prefix_files(testing_workdir): def test_build_preserves_PATH(testing_config): - m = api.render(os.path.join(metadata_dir, "source_git"), config=testing_config)[0][ - 0 - ] + metadata = api.render( + os.path.join(metadata_dir, "source_git"), config=testing_config + )[0][0] ref_path = os.environ["PATH"] - build.build(m, stats=None) + build.build(metadata, stats=None) assert os.environ["PATH"] == ref_path diff --git a/tests/test_subpackages.py b/tests/test_subpackages.py index 4fe966c054..11e43383d0 100644 --- a/tests/test_subpackages.py +++ b/tests/test_subpackages.py @@ -11,6 +11,7 @@ from conda.base.context import context from conda_build import api, utils +from conda_build.metadata import MetaDataTuple from conda_build.render import finalize_metadata from .utils import get_valid_recipes, subpackage_dir @@ -56,7 +57,10 @@ def 
test_output_pkg_path_shows_all_subpackages(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a"}, {"name": "b"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 @@ -65,7 +69,10 @@ def test_subpackage_version_provided(testing_metadata): testing_metadata.meta["outputs"] = [{"name": "a", "version": "2.0"}] out_dicts_and_metadata = testing_metadata.get_output_metadata_set() outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 1 assert "a-2.0-1" in outputs[0] @@ -79,7 +86,10 @@ def test_subpackage_independent_hash(testing_metadata): out_dicts_and_metadata = testing_metadata.get_output_metadata_set() assert len(out_dicts_and_metadata) == 2 outputs = api.get_output_file_paths( - [(m, None, None) for (_, m) in out_dicts_and_metadata] + [ + MetaDataTuple(metadata, False, False) + for _, metadata in out_dicts_and_metadata + ] ) assert len(outputs) == 2 assert outputs[0][-15:] != outputs[1][-15:] @@ -120,34 +130,34 @@ def test_intradependencies(testing_config): def test_git_in_output_version(testing_config, conda_build_test_recipe_envvar: str): recipe = os.path.join(subpackage_dir, "_git_in_output_version") - outputs = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(outputs) == 1 - assert outputs[0][0].version() == "1.22.0" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].version() == "1.22.0" def test_intradep_with_templated_output_name(testing_config): recipe = os.path.join(subpackage_dir, "_intradep_with_templated_output_name") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 expected_names = { "test_templated_subpackage_name", "templated_subpackage_nameabc", "depends_on_templated", } - assert {m.name() for (m, _, _) in metadata} == expected_names + assert {metadata.name() for metadata, _, _ in metadata_tuples} == expected_names def test_output_specific_subdir(testing_config): recipe = os.path.join(subpackage_dir, "_output_specific_subdir") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 3 - for m, _, _ in metadata: - if m.name() in ("default_subdir", "default_subdir_2"): - assert m.config.target_subdir == context.subdir - elif m.name() == "custom_subdir": - assert m.config.target_subdir == "linux-aarch64" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 3 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("default_subdir", "default_subdir_2"): + assert metadata.config.target_subdir == context.subdir + elif metadata.name() == "custom_subdir": + assert metadata.config.target_subdir == "linux-aarch64" else: raise AssertionError( "Test for output_specific_subdir written incorrectly - " @@ -157,17 +167,17 @@ def test_output_specific_subdir(testing_config): def test_about_metadata(testing_config): recipe = os.path.join(subpackage_dir, "_about_metadata") - metadata = api.render(recipe, config=testing_config) - assert len(metadata) == 2 - for m, _, _ in metadata: - if m.name() == "abc": - 
assert "summary" in m.meta["about"] - assert m.meta["about"]["summary"] == "weee" - assert "home" not in m.meta["about"] - elif m.name() == "def": - assert "home" in m.meta["about"] - assert "summary" not in m.meta["about"] - assert m.meta["about"]["home"] == "http://not.a.url" + metadata_tuples = api.render(recipe, config=testing_config) + assert len(metadata_tuples) == 2 + for metadata, _, _ in metadata_tuples: + if metadata.name() == "abc": + assert "summary" in metadata.meta["about"] + assert metadata.meta["about"]["summary"] == "weee" + assert "home" not in metadata.meta["about"] + elif metadata.name() == "def": + assert "home" in metadata.meta["about"] + assert "summary" not in metadata.meta["about"] + assert metadata.meta["about"]["home"] == "http://not.a.url" outs = api.build(recipe, config=testing_config) for out in outs: about_meta = utils.package_has_file(out, "info/about.json") @@ -288,23 +298,24 @@ def test_per_output_tests_script(testing_config): def test_pin_compatible_in_outputs(testing_config): recipe_dir = os.path.join(subpackage_dir, "_pin_compatible_in_output") - m = api.render(recipe_dir, config=testing_config)[0][0] + metadata = api.render(recipe_dir, config=testing_config)[0][0] assert any( - re.search(r"numpy\s*>=.*,<.*", req) for req in m.meta["requirements"]["run"] + re.search(r"numpy\s*>=.*,<.*", req) + for req in metadata.meta["requirements"]["run"] ) def test_output_same_name_as_top_level_does_correct_output_regex(testing_config): recipe_dir = os.path.join(subpackage_dir, "_output_named_same_as_top_level") - ms = api.render(recipe_dir, config=testing_config) + metadata_tuples = api.render(recipe_dir, config=testing_config) # TODO: need to decide what best behavior is for saying whether the # top-level build reqs or the output reqs for the similarly naemd output # win. 
I think you could have both, but it means rendering a new, extra, # build-only metadata in addition to all the outputs - for m, _, _ in ms: - if m.name() == "ipp": + for metadata, _, _ in metadata_tuples: + if metadata.name() == "ipp": for env in ("build", "host", "run"): - assert not m.meta.get("requirements", {}).get(env) + assert not metadata.meta.get("requirements", {}).get(env) def test_subpackage_order_natural(testing_config): @@ -361,23 +372,34 @@ def test_strong_run_exports_from_build_applies_to_host(testing_config): def test_python_line_up_with_compiled_lib(recipe, testing_config): recipe = os.path.join(subpackage_dir, recipe) # we use windows so that we have 2 libxyz results (VS2008, VS2015) - ms = api.render(recipe, config=testing_config, platform="win", arch="64") + metadata_tuples = api.render( + recipe, config=testing_config, platform="win", arch="64" + ) # 2 libxyz, 3 py-xyz, 3 xyz - assert len(ms) == 8 - for m, _, _ in ms: - if m.name() in ("py-xyz" or "xyz"): - deps = m.meta["requirements"]["run"] + assert len(metadata_tuples) == 8 + for metadata, _, _ in metadata_tuples: + if metadata.name() in ("py-xyz", "xyz"): + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("libxyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) - assert any(dep.startswith("zlib >") for dep in deps), (m.name(), deps) - if m.name() == "xyz": - deps = m.meta["requirements"]["run"] + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) + assert any(dep.startswith("zlib >") for dep in deps), ( + metadata.name(), + deps, + ) + if metadata.name() == "xyz": + deps = metadata.meta["requirements"]["run"] assert any( dep.startswith("py-xyz ") and len(dep.split()) == 3 for dep in deps - ), (m.name(), deps) - assert any(dep.startswith("python >") for dep in deps), (m.name(), deps) + ), (metadata.name(), deps) + assert any(dep.startswith("python >") for dep in deps), ( + metadata.name(), + deps, + ) @pytest.mark.xfail( @@ -385,17 +407,17 @@ ) def test_merge_build_host_applies_in_outputs(testing_config): recipe = os.path.join(subpackage_dir, "_merge_build_host") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: + metadata_tuples = api.render(recipe, config=testing_config) + for metadata, _, _ in metadata_tuples: # top level - if m.name() == "test_build_host_merge": - assert not m.meta.get("requirements", {}).get("run") + if metadata.name() == "test_build_host_merge": + assert not metadata.meta.get("requirements", {}).get("run") # output else: - run_exports = set(m.meta.get("build", {}).get("run_exports", [])) + run_exports = set(metadata.meta.get("build", {}).get("run_exports", [])) assert len(run_exports) == 2 assert all(len(export.split()) > 1 for export in run_exports) - run_deps = set(m.meta.get("requirements", {}).get("run", [])) + run_deps = set(metadata.meta.get("requirements", {}).get("run", [])) assert len(run_deps) == 2 assert all(len(dep.split()) > 1 for dep in run_deps) @@ -411,11 +433,13 @@ def test_activation_in_output_scripts(testing_config): def test_inherit_build_number(testing_config): recipe = os.path.join(subpackage_dir, "_inherit_build_number") - ms = api.render(recipe, config=testing_config) - for m, _, _ in ms: - assert "number" in m.meta["build"], "build number was not inherited at all" + metadata_tuples = api.render(recipe,
config=testing_config) + for metadata, _, _ in metadata_tuples: + assert ( + "number" in metadata.meta["build"] + ), "build number was not inherited at all" assert ( - int(m.meta["build"]["number"]) == 1 + int(metadata.meta["build"]["number"]) == 1 ), "build number should have been inherited as '1'" diff --git a/tests/test_variants.py b/tests/test_variants.py index e853f172fd..3c79e36e16 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -73,7 +73,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): testing_config.variant_config_files = [str(variants_path)] # render the metadata - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "variant_recipe"), no_download_source=False, config=testing_config, @@ -82,14 +82,14 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): ) # we should have one package/metadata per python version - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 # there should only be one run requirement for each package/metadata - assert len(metadata[0][0].meta["requirements"]["run"]) == 1 - assert len(metadata[1][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[0][0].meta["requirements"]["run"]) == 1 + assert len(metadata_tuples[1][0].meta["requirements"]["run"]) == 1 # the run requirements should be python ranges assert { - *metadata[0][0].meta["requirements"]["run"], - *metadata[1][0].meta["requirements"]["run"], + *metadata_tuples[0][0].meta["requirements"]["run"], + *metadata_tuples[1][0].meta["requirements"]["run"], } == {"python >=3.11,<3.12.0a0", "python >=3.12,<3.13.0a0"} @@ -109,7 +109,7 @@ def test_use_selectors_in_variants(testing_workdir, testing_config): ) ) def test_variant_with_ignore_version_reduces_matrix(): - metadata = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "03_ignore_version_reduces_matrix"), variants={ "packageA": ["1.2", "3.4"], @@ -119,13 +119,13 @@ def test_variant_with_ignore_version_reduces_matrix(): }, finalize=False, ) - assert len(metadata) == 2 + assert len(metadata_tuples) == 2 def test_variant_with_numpy_pinned_has_matrix(): recipe = os.path.join(variants_dir, "04_numpy_matrix_pinned") - metadata = api.render(recipe, finalize=False) - assert len(metadata) == 4 + metadata_tuples = api.render(recipe, finalize=False) + assert len(metadata_tuples) == 4 def test_pinning_in_build_requirements(): @@ -223,13 +223,13 @@ def test_validate_spec(): def test_cross_compilers(): recipe = os.path.join(variants_dir, "09_cross") - ms = api.render( + metadata_tuples = api.render( recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert len(ms) == 3 + assert len(metadata_tuples) == 3 def test_variants_in_output_names(): @@ -252,11 +252,11 @@ def test_variants_in_versions_with_setup_py_data(): def test_git_variables_with_variants(testing_config): recipe = os.path.join(variants_dir, "13_git_vars") - m = api.render( + metadata = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True )[0][0] - assert m.version() == "1.20.2" - assert m.build_number() == 0 + assert metadata.version() == "1.20.2" + assert metadata.build_number() == 0 def test_variant_input_with_zip_keys_keeps_zip_keys_list(): @@ -305,57 +305,109 @@ def test_serial_builds_have_independent_configs(testing_config): def test_subspace_selection(testing_config): recipe = os.path.join(variants_dir, "18_subspace_selection") testing_config.variant = {"a": "coffee"} - ms = api.render( + metadata_tuples = 
api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, so we should end up with 2 variants - assert len(ms) == 2 + assert len(metadata_tuples) == 2 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test compound selection testing_config.variant = {"a": "coffee", "b": "123"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) # there are two entries with a==coffee, but one with both 'coffee' for a, and '123' for b, # so we should end up with 1 variant - assert len(ms) == 1 + assert len(metadata_tuples) == 1 # ensure that the zipped keys still agree - assert sum(m.config.variant["b"] == "123" for m, _, _ in ms) == 1 - assert sum(m.config.variant["b"] == "abc" for m, _, _ in ms) == 0 - assert sum(m.config.variant["b"] == "concrete" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "mooo" for m, _, _ in ms) == 1 - assert sum(m.config.variant["c"] == "baaa" for m, _, _ in ms) == 0 - assert sum(m.config.variant["c"] == "woof" for m, _, _ in ms) == 0 + assert ( + sum(metadata.config.variant["b"] == "123" for metadata, _, _ in metadata_tuples) + == 1 + ) + assert ( + sum(metadata.config.variant["b"] == "abc" for metadata, _, _ in metadata_tuples) + == 0 + ) + assert ( + sum( + metadata.config.variant["b"] == "concrete" + for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "mooo" for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["c"] == "baaa" for metadata, _, _ in metadata_tuples + ) + == 0 + ) + assert ( + sum( + metadata.config.variant["c"] == "woof" for metadata, _, _ in metadata_tuples + ) + == 0 + ) # test when configuration leads to no valid combinations - only c provided, and its value # doesn't match any other existing values of c, so it's then ambiguous which zipped # values to choose testing_config.variant = {"c": "not an animal"} with pytest.raises(ValueError): - ms = api.render( - recipe, config=testing_config, finalize=False, bypass_env_check=True - ) + api.render(recipe, config=testing_config, finalize=False, bypass_env_check=True) # all zipped keys provided by the new variant. It should clobber the old one.
testing_config.variant = {"a": "some", "b": "new", "c": "animal"} - ms = api.render( + metadata_tuples = api.render( recipe, config=testing_config, finalize=False, bypass_env_check=True ) - assert len(ms) == 1 - assert ms[0][0].config.variant["a"] == "some" - assert ms[0][0].config.variant["b"] == "new" - assert ms[0][0].config.variant["c"] == "animal" + assert len(metadata_tuples) == 1 + assert metadata_tuples[0][0].config.variant["a"] == "some" + assert metadata_tuples[0][0].config.variant["b"] == "new" + assert metadata_tuples[0][0].config.variant["c"] == "animal" def test_get_used_loop_vars(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "19_used_variables"), finalize=False, bypass_env_check=True, @@ -363,9 +415,9 @@ def test_get_used_loop_vars(): # conda_build_config.yaml has 4 loop variables defined, but only 3 are used. # python and zlib are both implicitly used (depend on name matching), while # some_package is explicitly used as a jinja2 variable - assert m.get_used_loop_vars() == {"python", "some_package"} + assert metadata.get_used_loop_vars() == {"python", "some_package"} # these are all used vars - including those with only one value (and thus not loop vars) - assert m.get_used_vars() == { + assert metadata.get_used_vars() == { "python", "some_package", "zlib", @@ -380,49 +432,63 @@ def test_reprovisioning_source(): def test_reduced_hashing_behavior(testing_config): # recipes using any compiler jinja2 function need a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_compiler"), finalize=False, bypass_env_check=True, )[0][0] assert ( - "c_compiler" in m.get_hash_contents() + "c_compiler" in metadata.get_hash_contents() ), "hash contents should contain c_compiler" assert re.search( - "h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id() + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() ), "hash should be present when compiler jinja2 function is used" # recipes that use some variable in conda_build_config.yaml to control what # versions are present at build time also must have a hash (except # python, r_base, and the other stuff covered by legacy build string # behavior) - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_yes_pinned"), finalize=False, bypass_env_check=True, )[0][0] - assert "zlib" in m.get_hash_contents() - assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert "zlib" in metadata.get_hash_contents() + assert re.search("h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id()) # anything else does not get a hash - m = api.render( + metadata = api.render( os.path.join(variants_dir, "26_reduced_hashing", "hash_no_python"), finalize=False, bypass_env_check=True, )[0][0] - assert not m.get_hash_contents() - assert not re.search("h[0-9a-f]{%d}" % testing_config.hash_length, m.build_id()) + assert not metadata.get_hash_contents() + assert not re.search( + "h[0-9a-f]{%d}" % testing_config.hash_length, metadata.build_id() + ) def test_variants_used_in_jinja2_conditionals(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "21_conditional_sections"), finalize=False, bypass_env_check=True, ) - assert len(ms) == 2 - assert sum(m.config.variant["blas_impl"] == "mkl" for m, _, _ in ms) == 1 - assert sum(m.config.variant["blas_impl"] == "openblas" for m, _, _ in ms) == 1 + assert len(metadata_tuples) == 2 + assert ( + sum( + metadata.config.variant["blas_impl"] 
== "mkl" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) + assert ( + sum( + metadata.config.variant["blas_impl"] == "openblas" + for metadata, _, _ in metadata_tuples + ) + == 1 + ) def test_build_run_exports_act_on_host(caplog): @@ -436,14 +502,14 @@ def test_build_run_exports_act_on_host(caplog): def test_detect_variables_in_build_and_output_scripts(): - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="linux", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -452,7 +518,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -461,14 +527,14 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" not in used_vars assert "OUTPUT_VAR" in used_vars # on windows, we find variables in bat scripts as well as shell scripts - ms = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "24_test_used_vars_in_scripts"), platform="win", arch="64", ) - for m, _, _ in ms: - if m.name() == "test_find_used_variables_in_scripts": - used_vars = m.get_used_vars() + for metadata, _, _ in metadata_tuples: + if metadata.name() == "test_find_used_variables_in_scripts": + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" in used_vars assert "OUTPUT_SELECTOR_VAR" not in used_vars @@ -478,7 +544,7 @@ def test_detect_variables_in_build_and_output_scripts(): assert "BAT_VAR" in used_vars assert "OUTPUT_VAR" not in used_vars else: - used_vars = m.get_used_vars() + used_vars = metadata.get_used_vars() assert used_vars assert "SELECTOR_VAR" not in used_vars assert "OUTPUT_SELECTOR_VAR" in used_vars @@ -522,11 +588,11 @@ def test_exclusive_config_files(): os.path.join("config_dir", "config-0.yaml"), os.path.join("config_dir", "config-1.yaml"), ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_files=exclusive_config_files, )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? assert "cwd" not in variant # did we load the exclusive configs? @@ -547,11 +613,11 @@ def test_exclusive_config_file(): yaml.dump( {"abc": ["super"], "exclusive": ["someval"]}, f, default_flow_style=False ) - output = api.render( + metadata = api.render( os.path.join(variants_dir, "exclusive_config_file"), exclusive_config_file=os.path.join("config_dir", "config.yaml"), )[0][0] - variant = output.config.variant + variant = metadata.config.variant # is cwd ignored? 
assert "cwd" not in variant # did we load the exclusive config @@ -610,27 +676,27 @@ def test_inner_python_loop_with_output(testing_config): def test_variant_as_dependency_name(testing_config): - outputs = api.render( + metadata_tuples = api.render( os.path.join(variants_dir, "27_requirements_host"), config=testing_config ) - assert len(outputs) == 2 + assert len(metadata_tuples) == 2 def test_custom_compiler(): recipe = os.path.join(variants_dir, "28_custom_compiler") - ms = api.render( + metadata_tuples = api.render( recipe, permit_unsatisfiable_variants=True, finalize=False, bypass_env_check=True, ) - assert len(ms) == 3 + assert len(metadata_tuples) == 3 def test_different_git_vars(): recipe = os.path.join(variants_dir, "29_different_git_vars") - ms = api.render(recipe) - versions = [m[0].version() for m in ms] + metadata_tuples = api.render(recipe) + versions = [metadata[0].version() for metadata in metadata_tuples] assert "1.20.0" in versions assert "1.21.11" in versions @@ -647,7 +713,7 @@ def test_top_level_finalized(testing_config): def test_variant_subkeys_retained(): - m = api.render( + metadata = api.render( os.path.join(variants_dir, "31_variant_subkeys"), finalize=False, bypass_env_check=True, @@ -655,11 +721,11 @@ def test_variant_subkeys_retained(): found_replacements = False from conda_build.build import get_all_replacements - for variant in m.config.variants: + for variant in metadata.config.variants: found_replacements = get_all_replacements(variant) assert len(found_replacements), "Did not find replacements" - m.final = False - outputs = m.get_output_metadata_set(permit_unsatisfiable_variants=False) + metadata.final = False + outputs = metadata.get_output_metadata_set(permit_unsatisfiable_variants=False) get_all_replacements(outputs[0][1].config.variant)