From 5353b38ffd38e06918b64484c3a319c8cfeeeab3 Mon Sep 17 00:00:00 2001 From: Ken Odegard Date: Fri, 8 Mar 2024 09:57:13 -0600 Subject: [PATCH] Remove deprecations slated for 24.3 (#5203) --- .github/workflows/tests.yml | 4 +- conda_build/build.py | 2 +- conda_build/cli/main_render.py | 2 - conda_build/conda_interface.py | 51 +- conda_build/environ.py | 240 +++-- conda_build/index.py | 1520 +------------------------------- conda_build/inspect_pkg.py | 4 - conda_build/metadata.py | 7 +- conda_build/noarch_python.py | 22 +- conda_build/post.py | 4 +- conda_build/render.py | 29 +- conda_build/skeletons/cpan.py | 2 +- conda_build/skeletons/pypi.py | 10 +- conda_build/source.py | 7 +- conda_build/utils.py | 60 +- news/5203-remove-deprecations | 82 ++ pyproject.toml | 8 +- recipe/meta.yaml | 2 +- tests/requirements.txt | 4 +- tests/test_source.py | 5 +- tests/test_variants.py | 2 +- 21 files changed, 295 insertions(+), 1772 deletions(-) create mode 100644 news/5203-remove-deprecations diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6bf8249c4c..c0b0e8ff59 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -76,10 +76,10 @@ jobs: include: # minimum Python/conda combo - python-version: '3.8' - conda-version: 22.11.0 + conda-version: 23.5.0 test-type: serial - python-version: '3.8' - conda-version: 22.11.0 + conda-version: 23.5.0 test-type: parallel # maximum Python/conda combo - python-version: '3.12' diff --git a/conda_build/build.py b/conda_build/build.py index a24e468eca..087e932f81 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -3501,7 +3501,7 @@ def test( AssertionError, ) as exc: log.warn( - "failed to get install actions, retrying. exception was: %s", str(exc) + "failed to get package records, retrying. exception was: %s", str(exc) ) tests_failed( metadata, diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 61c46c1c4b..933528b114 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -13,7 +13,6 @@ from .. 
import __version__, api from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build from ..config import get_channel_urls, get_or_merge_config -from ..deprecations import deprecated from ..utils import LoggingContext from ..variants import get_package_variants, set_language_env_vars @@ -193,7 +192,6 @@ def parse_args(args: Sequence[str] | None) -> tuple[ArgumentParser, Namespace]: return parser, parser.parse_args(args) -@deprecated.argument("24.1.1", "24.3.0", "print_results") def execute(args: Sequence[str] | None = None) -> int: _, parsed = parse_args(args) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 4fa9fb3777..f309b338a0 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -45,13 +45,9 @@ add_parser_channels, add_parser_prefix, download, - handle_proxy_407, - hashsum_file, human_bytes, input, lchmod, - md5_file, - memoized, normalized_version, prefix_placeholder, rm_rf, @@ -65,29 +61,11 @@ walk_prefix, win_path_to_unix, ) -from conda.exports import display_actions as _display_actions -from conda.exports import execute_actions as _execute_actions -from conda.exports import execute_plan as _execute_plan -from conda.exports import get_index as _get_index -from conda.exports import install_actions as _install_actions -from conda.exports import linked as _linked -from conda.exports import linked_data as _linked_data -from conda.exports import package_cache as _package_cache +from conda.gateways.disk.read import compute_sum from conda.models.channel import get_conda_build_local_url # noqa: F401 -from conda.models.dist import Dist as _Dist from .deprecations import deprecated -deprecated.constant("24.1.0", "24.3.0", "Dist", _Dist) -deprecated.constant("24.1.0", "24.3.0", "display_actions", _display_actions) -deprecated.constant("24.1.0", "24.3.0", "execute_actions", _execute_actions) -deprecated.constant("24.1.0", "24.3.0", "execute_plan", _execute_plan) -deprecated.constant("24.1.0", "24.3.0", "get_index", _get_index) -deprecated.constant("24.1.0", "24.3.0", "install_actions", _install_actions) -deprecated.constant("24.1.0", "24.3.0", "linked", _linked) -deprecated.constant("24.1.0", "24.3.0", "linked_data", _linked_data) -deprecated.constant("24.1.0", "24.3.0", "package_cache", _package_cache) - # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python @@ -112,3 +90,30 @@ # When deactivating envs (e.g. switching from root to build/test) this env var is used, # except the PR that removed this has been reverted (for now) and Windows doesn't need it. 
env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None) + + +@deprecated( + "24.3", + "24.5", + addendum="Handled by `conda.gateways.connection.session.CondaSession`.", +) +def handle_proxy_407(x, y): + pass + + +deprecated.constant( + "24.3", + "24.5", + "hashsum_file", + compute_sum, + addendum="Use `conda.gateways.disk.read.compute_sum` instead.", +) + + +@deprecated( + "24.3", + "24.5", + addendum="Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead.", +) +def md5_file(path: str | os.PathLike) -> str: + return compute_sum(path, "md5") diff --git a/conda_build/environ.py b/conda_build/environ.py index 762b9c7479..ba57d39314 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1,5 +1,7 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + import contextlib import json import logging @@ -15,6 +17,7 @@ from glob import glob from logging import getLogger from os.path import join, normpath +from typing import TYPE_CHECKING from conda.base.constants import ( CONDA_PACKAGE_EXTENSIONS, @@ -26,21 +29,23 @@ from conda.core.link import PrefixSetup, UnlinkLinkTransaction from conda.core.package_cache_data import PackageCacheData from conda.core.prefix_data import PrefixData +from conda.exceptions import ( + CondaError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, +) from conda.models.channel import prioritize_channels +from conda.models.match_spec import MatchSpec from . import utils from .conda_interface import ( Channel, - CondaError, - LinkError, - LockError, - MatchSpec, - NoPackagesFoundError, PackageRecord, - PaddingError, ProgressiveFetchExtract, TemporaryDirectory, - UnsatisfiableError, context, create_default_packages, get_version_from_git_tag, @@ -48,12 +53,10 @@ reset_context, root_dir, ) -from .config import Config from .deprecations import deprecated from .exceptions import BuildLockError, DependencyNeedsBuildingError from .features import feature_list from .index import get_build_index -from .metadata import MetaData from .os_utils import external from .utils import ( ensure_list, @@ -65,10 +68,22 @@ ) from .variants import get_default_variant +if TYPE_CHECKING: + from pathlib import Path + from typing import Any, Iterable, TypedDict + + from .config import Config + from .metadata import MetaData + + class InstallActionsType(TypedDict): + PREFIX: str | os.PathLike | Path + LINK: list[PackageRecord] + + log = getLogger(__name__) -PREFIX_ACTION = "PREFIX" -LINK_ACTION = "LINK" +deprecated.constant("24.3", "24.5", "PREFIX_ACTION", _PREFIX_ACTION := "PREFIX") +deprecated.constant("24.3", "24.5", "LINK_ACTION", _LINK_ACTION := "LINK") # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. @@ -852,29 +867,35 @@ def package_specs(self): return specs -cached_actions = {} +cached_precs: dict[ + tuple[tuple[str | MatchSpec, ...], Any, Any, Any, bool], list[PackageRecord] +] = {} +deprecated.constant("24.3", "24.5", "cached_actions", cached_precs) last_index_ts = 0 -def get_package_records( - prefix, - specs, - env, - retries=0, +# NOTE: The function has to retain the "get_install_actions" name for now since +# conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build +# checks for this name in the call stack explicitly. 
+def get_install_actions( + prefix: str | os.PathLike | Path, + specs: Iterable[str | MatchSpec], + env, # unused + retries: int = 0, subdir=None, - verbose=True, - debug=False, - locking=True, + verbose: bool = True, + debug: bool = False, + locking: bool = True, bldpkgs_dirs=None, timeout=900, - disable_pip=False, - max_env_retry=3, + disable_pip: bool = False, + max_env_retry: int = 3, output_folder=None, channel_urls=None, -): - global cached_actions +) -> list[PackageRecord]: + global cached_precs global last_index_ts - actions = {} + log = utils.get_logger(__name__) conda_log_level = logging.WARN specs = list(specs) @@ -906,16 +927,15 @@ def get_package_records( utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@") ) + precs: list[PackageRecord] = [] if ( specs, env, subdir, channel_urls, disable_pip, - ) in cached_actions and last_index_ts >= index_ts: - actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() - if PREFIX_ACTION in actions: - actions[PREFIX_ACTION] = prefix + ) in cached_precs and last_index_ts >= index_ts: + precs = cached_precs[(specs, env, subdir, channel_urls, disable_pip)].copy() elif specs: # this is hiding output like: # Fetching package metadata ........... @@ -923,7 +943,7 @@ def get_package_records( with utils.LoggingContext(conda_log_level): with capture(): try: - actions = _install_actions(prefix, index, specs) + precs = _install_actions(prefix, index, specs)["LINK"] except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) except ( @@ -937,7 +957,7 @@ def get_package_records( ) as exc: if "lock" in str(exc): log.warn( - "failed to get install actions, retrying. exception was: %s", + "failed to get package records, retrying. exception was: %s", str(exc), ) elif ( @@ -966,12 +986,12 @@ def get_package_records( utils.rm_rf(pkg_dir) if retries < max_env_retry: log.warn( - "failed to get install actions, retrying. exception was: %s", + "failed to get package records, retrying. exception was: %s", str(exc), ) - actions = get_install_actions( + precs = get_package_records( prefix, - tuple(specs), + specs, env, retries=retries + 1, subdir=subdir, @@ -987,7 +1007,7 @@ def get_package_records( ) else: log.error( - "Failed to get install actions, max retries exceeded." + "Failed to get package records, max retries exceeded." 
) raise if disable_pip: @@ -997,64 +1017,28 @@ def get_package_records( if not any( re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs ): - actions[LINK_ACTION] = [ - prec for prec in actions[LINK_ACTION] if prec.name != pkg - ] - utils.trim_empty_keys(actions) - cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() + precs = [prec for prec in precs if prec.name != pkg] + cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() last_index_ts = index_ts - return actions.get(LINK_ACTION, []) + return precs -@deprecated("24.1.0", "24.3.0", addendum="Use `get_package_records` instead.") -def get_install_actions( - prefix, - specs, - env, - retries=0, - subdir=None, - verbose=True, - debug=False, - locking=True, - bldpkgs_dirs=None, - timeout=900, - disable_pip=False, - max_env_retry=3, - output_folder=None, - channel_urls=None, -): - precs = get_package_records( - prefix=prefix, - specs=specs, - env=env, - retries=retries, - subdir=subdir, - verbose=verbose, - debug=debug, - locking=locking, - bldpkgs_dirs=bldpkgs_dirs, - timeout=timeout, - disable_pip=disable_pip, - max_env_retry=max_env_retry, - output_folder=output_folder, - channel_urls=channel_urls, - ) - return {PREFIX_ACTION: prefix, LINK_ACTION: precs} +get_package_records = get_install_actions +del get_install_actions -@deprecated.argument("24.1.0", "24.3.0", "specs_or_actions", rename="specs_or_precs") def create_env( - prefix, - specs_or_precs, + prefix: str | os.PathLike | Path, + specs_or_precs: Iterable[str | MatchSpec] | Iterable[PackageRecord], env, config, subdir, - clear_cache=True, - retry=0, + clear_cache: bool = True, + retry: int = 0, locks=None, - is_cross=False, - is_conda=False, -): + is_cross: bool = False, + is_conda: bool = False, +) -> None: """ Create a conda envrionment for the given prefix and specs. 
""" @@ -1073,6 +1057,7 @@ def create_env( # if os.path.isdir(prefix): # utils.rm_rf(prefix) + specs_or_precs = tuple(ensure_list(specs_or_precs)) if specs_or_precs: # Don't waste time if there is nothing to do log.debug("Creating environment in %s", prefix) log.debug(str(specs_or_precs)) @@ -1082,14 +1067,10 @@ def create_env( try: with utils.try_acquire_locks(locks, timeout=config.timeout): # input is a list of specs in MatchSpec format - if not ( - hasattr(specs_or_precs, "keys") - or isinstance(specs_or_precs[0], PackageRecord) - ): - specs = list(set(specs_or_precs)) - actions = get_install_actions( + if not isinstance(specs_or_precs[0], PackageRecord): + precs = get_package_records( prefix, - tuple(specs), + tuple(set(specs_or_precs)), env, subdir=subdir, verbose=config.verbose, @@ -1103,10 +1084,7 @@ def create_env( channel_urls=tuple(config.channel_urls), ) else: - if not hasattr(specs_or_precs, "keys"): - actions = {LINK_ACTION: specs_or_precs} - else: - actions = specs_or_precs + precs = specs_or_precs index, _, _ = get_build_index( subdir=subdir, bldpkgs_dir=config.bldpkgs_dir, @@ -1117,14 +1095,13 @@ def create_env( locking=config.locking, timeout=config.timeout, ) - utils.trim_empty_keys(actions) - _display_actions(prefix, actions) + _display_actions(prefix, precs) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) with env_var("CONDA_QUIET", not config.verbose, reset_context): with env_var("CONDA_JSON", not config.verbose, reset_context): - _execute_actions(prefix, actions) + _execute_actions(prefix, precs) except ( SystemExit, PaddingError, @@ -1159,15 +1136,13 @@ def create_env( ) config.prefix_length = 80 - host = "_h_env" in prefix - # Set this here and use to create environ - # Setting this here is important because we use it below (symlink) - prefix = config.host_prefix if host else config.build_prefix - actions[PREFIX_ACTION] = prefix - create_env( - prefix, - actions, + ( + config.host_prefix + if "_h_env" in prefix + else config.build_prefix + ), + specs_or_precs, config=config, subdir=subdir, env=env, @@ -1308,7 +1283,7 @@ def remove_existing_packages(dirs, fns, config): def get_pinned_deps(m, section): with TemporaryDirectory(prefix="_") as tmpdir: - actions = get_install_actions( + precs = get_package_records( tmpdir, tuple(m.ms_depends(section)), section, @@ -1323,16 +1298,17 @@ def get_pinned_deps(m, section): output_folder=m.config.output_folder, channel_urls=tuple(m.config.channel_urls), ) - runtime_deps = [ - package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) - ] - return runtime_deps + return [package_record_to_requirement(prec) for prec in precs] # NOTE: The function has to retain the "install_actions" name for now since # conda_libmamba_solver.solver.LibMambaSolver._called_from_conda_build # checks for this name in the call stack explicitly. -def install_actions(prefix, index, specs): +def install_actions( + prefix: str | os.PathLike | Path, + index, + specs: Iterable[str | MatchSpec], +) -> InstallActionsType: # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L471 # but reduced to only the functionality actually used within conda-build. @@ -1344,6 +1320,8 @@ def install_actions(prefix, index, specs): callback=reset_context, ): # a hack since in conda-build we don't track channel_priority_map + channels: tuple[Channel, ...] | None + subdirs: tuple[str, ...] 
| None if LAST_CHANNEL_URLS: channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) # tuple(dict.fromkeys(...)) removes duplicates while preserving input order. @@ -1353,7 +1331,7 @@ def install_actions(prefix, index, specs): subdirs = ( tuple( dict.fromkeys( - subdir for subdir in (c.subdir for c in channels) if subdir + subdir for channel in channels if (subdir := channel.subdir) ) ) or context.subdirs @@ -1361,12 +1339,12 @@ def install_actions(prefix, index, specs): else: channels = subdirs = None - specs = tuple(MatchSpec(spec) for spec in specs) + mspecs = tuple(MatchSpec(spec) for spec in specs) PrefixData._cache_.clear() solver_backend = context.plugin_manager.get_cached_solver_backend() - solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs) + solver = solver_backend(prefix, channels, subdirs, specs_to_add=mspecs) if index: # Solver can modify the index (e.g., Solver._prepare adds virtual # package) => Copy index (just outer container, not deep copy) @@ -1374,42 +1352,33 @@ def install_actions(prefix, index, specs): solver._index = index.copy() txn = solver.solve_for_transaction(prune=False, ignore_pinned=False) prefix_setup = txn.prefix_setups[prefix] - actions = { - PREFIX_ACTION: prefix, - LINK_ACTION: [prec for prec in prefix_setup.link_precs], + return { + "PREFIX": prefix, + "LINK": [prec for prec in prefix_setup.link_precs], } - return actions _install_actions = install_actions del install_actions -def _execute_actions(prefix, actions): +@deprecated.argument("24.3", "24.5", "actions", rename="precs") +def _execute_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L575 # but reduced to only the functionality actually used within conda-build. - assert prefix - if LINK_ACTION not in actions: - log.debug(f"action {LINK_ACTION} not in actions") - return - - link_precs = actions[LINK_ACTION] - if not link_precs: - log.debug(f"action {LINK_ACTION} has None value") - return - # Always link menuinst first/last on windows in case a subsequent # package tries to import it to create/remove a shortcut - link_precs = [p for p in link_precs if p.name == "menuinst"] + [ - p for p in link_precs if p.name != "menuinst" + precs = [ + *(prec for prec in precs if prec.name == "menuinst"), + *(prec for prec in precs if prec.name != "menuinst"), ] - progressive_fetch_extract = ProgressiveFetchExtract(link_precs) + progressive_fetch_extract = ProgressiveFetchExtract(precs) progressive_fetch_extract.prepare() - stp = PrefixSetup(prefix, (), link_precs, (), [], ()) + stp = PrefixSetup(prefix, (), precs, (), [], ()) unlink_link_transaction = UnlinkLinkTransaction(stp) log.debug(" %s(%r)", "PROGRESSIVEFETCHEXTRACT", progressive_fetch_extract) @@ -1418,7 +1387,8 @@ def _execute_actions(prefix, actions): unlink_link_transaction.execute() -def _display_actions(prefix, actions): +@deprecated.argument("24.3", "24.5", "actions", rename="precs") +def _display_actions(prefix, precs): # This is copied over from https://github.com/conda/conda/blob/23.11.0/conda/plan.py#L58 # but reduced to only the functionality actually used within conda-build. 
@@ -1450,7 +1420,7 @@ def channel_filt(s): features = defaultdict(lambda: "") channels = defaultdict(lambda: "") - for prec in actions.get(LINK_ACTION, []): + for prec in precs: assert isinstance(prec, PackageRecord) pkg = prec["name"] channels[pkg] = channel_filt(channel_str(prec)) diff --git a/conda_build/index.py b/conda_build/index.py index 229c5e1632..c3968d238a 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,81 +1,25 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import bz2 -import copy -import fnmatch -import functools import json import logging import os -import subprocess -import sys import time -from collections import OrderedDict -from concurrent.futures import Executor, ProcessPoolExecutor -from datetime import datetime +from concurrent.futures import Executor from functools import partial -from itertools import groupby -from numbers import Number -from os.path import ( - abspath, - basename, - dirname, - getmtime, - getsize, - isfile, - join, - splitext, -) -from pathlib import Path -from uuid import uuid4 +from os.path import dirname -import conda_package_handling.api -import pytz -import yaml - -# Lots of conda internals here. Should refactor to use exports. -from conda.common.compat import ensure_binary - -# BAD BAD BAD - conda internals from conda.core.index import get_index -from conda.core.subdir_data import SubdirData -from conda.models.channel import Channel from conda_index.index import update_index as _update_index -from conda_package_handling.api import InvalidArchiveError -from jinja2 import Environment, PackageLoader -from tqdm import tqdm -from yaml.constructor import ConstructorError -from yaml.parser import ParserError -from yaml.reader import ReaderError -from yaml.scanner import ScannerError from . 
import conda_interface, utils -from .conda_interface import ( - CondaError, - CondaHTTPError, - MatchSpec, - Resolve, - TemporaryDirectory, - VersionOrder, - context, - human_bytes, - url_path, -) +from .conda_interface import CondaHTTPError, context, url_path from .deprecations import deprecated -from .utils import ( - CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, - CONDA_PACKAGE_EXTENSIONS, - JSONDecodeError, - get_logger, - glob, - on_win, -) +from .utils import JSONDecodeError, get_logger, on_win log = get_logger(__name__) -# use this for debugging, because ProcessPoolExecutor isn't pdb/ipdb friendly +@deprecated("24.3", "24.5") class DummyExecutor(Executor): def map(self, func, *iterables): for iterable in iterables: @@ -83,50 +27,24 @@ def map(self, func, *iterables): yield func(thing) -try: - from conda.base.constants import NAMESPACE_PACKAGE_NAMES, NAMESPACES_MAP -except ImportError: - NAMESPACES_MAP = { # base package name, namespace - "python": "python", - "r": "r", - "r-base": "r", - "mro-base": "r", - "mro-base_impl": "r", - "erlang": "erlang", - "java": "java", - "openjdk": "java", - "julia": "julia", - "latex": "latex", - "lua": "lua", - "nodejs": "js", - "perl": "perl", - "php": "php", - "ruby": "ruby", - "m2-base": "m2", - "msys2-conda-epoch": "m2w64", - } - NAMESPACE_PACKAGE_NAMES = frozenset(NAMESPACES_MAP) - NAMESPACES = frozenset(NAMESPACES_MAP.values()) - local_index_timestamp = 0 cached_index = None local_subdir = "" local_output_folder = "" cached_channels = [] _channel_data = {} -deprecated.constant("24.1.0", "24.3.0", "channel_data", _channel_data) +deprecated.constant("24.1", "24.5", "channel_data", _channel_data) # TODO: support for libarchive seems to have broken ability to use multiple threads here. # The new conda format is so much faster that it more than makes up for it. However, it # would be nice to fix this at some point. -MAX_THREADS_DEFAULT = ( - os.cpu_count() if (hasattr(os, "cpu_count") and os.cpu_count() > 1) else 1 -) +_MAX_THREADS_DEFAULT = os.cpu_count() or 1 if on_win: # see https://github.com/python/cpython/commit/8ea0fd85bc67438f679491fae29dfe0a3961900a - MAX_THREADS_DEFAULT = min(48, MAX_THREADS_DEFAULT) -LOCK_TIMEOUT_SECS = 3 * 3600 -LOCKFILE_NAME = ".lock" + _MAX_THREADS_DEFAULT = min(48, _MAX_THREADS_DEFAULT) +deprecated.constant("24.3", "24.5", "MAX_THREADS_DEFAULT", _MAX_THREADS_DEFAULT) +deprecated.constant("24.3", "24.5", "LOCK_TIMEOUT_SECS", 3 * 3600) +deprecated.constant("24.3", "24.5", "LOCKFILE_NAME", ".lock") # TODO: this is to make sure that the index doesn't leak tokens. It breaks use of private channels, though. # os.environ['CONDA_ADD_ANACONDA_TOKEN'] = "false" @@ -325,1419 +243,3 @@ def _delegated_update_index( current_index_versions=current_index_versions, debug=debug, ) - - -# Everything below is deprecated to maintain API/feature compatibility. 
- - -@deprecated("24.1.0", "24.3.0") -def _determine_namespace(info): - if info.get("namespace"): - namespace = info["namespace"] - else: - depends_names = set() - for spec in info.get("depends", []): - try: - depends_names.add(MatchSpec(spec).name) - except CondaError: - pass - spaces = depends_names & NAMESPACE_PACKAGE_NAMES - if len(spaces) == 1: - namespace = NAMESPACES_MAP[spaces.pop()] - else: - namespace = "global" - info["namespace"] = namespace - - if not info.get("namespace_in_name") and "-" in info["name"]: - namespace_prefix, reduced_name = info["name"].split("-", 1) - if namespace_prefix == namespace: - info["name_in_channel"] = info["name"] - info["name"] = reduced_name - - return namespace, info.get("name_in_channel", info["name"]), info["name"] - - -@deprecated("24.1.0", "24.3.0") -def _make_seconds(timestamp): - timestamp = int(timestamp) - if timestamp > 253402300799: # 9999-12-31 - timestamp //= ( - 1000 # convert milliseconds to seconds; see conda/conda-build#1988 - ) - return timestamp - - -# ========================================================================== - - -_REPODATA_VERSION = 1 -_CHANNELDATA_VERSION = 1 -_REPODATA_JSON_FN = "repodata.json" -_REPODATA_FROM_PKGS_JSON_FN = "repodata_from_packages.json" -_CHANNELDATA_FIELDS = ( - "description", - "dev_url", - "doc_url", - "doc_source_url", - "home", - "license", - "reference_package", - "source_url", - "source_git_url", - "source_git_tag", - "source_git_rev", - "summary", - "version", - "subdirs", - "icon_url", - "icon_hash", # "md5:abc123:12" - "run_exports", - "binary_prefix", - "text_prefix", - "activate.d", - "deactivate.d", - "pre_link", - "post_link", - "pre_unlink", - "tags", - "identifiers", - "keywords", - "recipe_origin", - "commits", -) -deprecated.constant("24.1.0", "24.3.0", "REPODATA_VERSION", _REPODATA_VERSION) -deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_VERSION", _CHANNELDATA_VERSION) -deprecated.constant("24.1.0", "24.3.0", "REPODATA_JSON_FN", _REPODATA_JSON_FN) -deprecated.constant( - "24.1.0", "24.3.0", "REPODATA_FROM_PKGS_JSON_FN", _REPODATA_FROM_PKGS_JSON_FN -) -deprecated.constant("24.1.0", "24.3.0", "CHANNELDATA_FIELDS", _CHANNELDATA_FIELDS) - - -@deprecated("24.1.0", "24.3.0") -def _clear_newline_chars(record, field_name): - if field_name in record: - try: - record[field_name] = record[field_name].strip().replace("\n", " ") - except AttributeError: - # sometimes description gets added as a list instead of just a string - record[field_name] = record[field_name][0].strip().replace("\n", " ") - - -@deprecated( - "24.1.0", "24.5.0", addendum="Use `conda_index._apply_instructions` instead." 
-) -def _apply_instructions(subdir, repodata, instructions): - repodata.setdefault("removed", []) - utils.merge_or_update_dict( - repodata.get("packages", {}), - instructions.get("packages", {}), - merge=False, - add_missing_keys=False, - ) - # we could have totally separate instructions for .conda than .tar.bz2, but it's easier if we assume - # that a similarly-named .tar.bz2 file is the same content as .conda, and shares fixes - new_pkg_fixes = { - k.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): v - for k, v in instructions.get("packages", {}).items() - } - - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - new_pkg_fixes, - merge=False, - add_missing_keys=False, - ) - utils.merge_or_update_dict( - repodata.get("packages.conda", {}), - instructions.get("packages.conda", {}), - merge=False, - add_missing_keys=False, - ) - - for fn in instructions.get("revoke", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - if fn in repodata[key]: - repodata[key][fn]["revoked"] = True - repodata[key][fn]["depends"].append("package_has_been_revoked") - - for fn in instructions.get("remove", ()): - for key in ("packages", "packages.conda"): - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1) and key == "packages.conda": - fn = fn.replace(CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2) - popped = repodata[key].pop(fn, None) - if popped: - repodata["removed"].append(fn) - repodata["removed"].sort() - - return repodata - - -@deprecated("24.1.0", "24.3.0") -def _get_jinja2_environment(): - def _filter_strftime(dt, dt_format): - if isinstance(dt, Number): - if dt > 253402300799: # 9999-12-31 - dt //= 1000 # convert milliseconds to seconds; see #1988 - dt = datetime.utcfromtimestamp(dt).replace(tzinfo=pytz.timezone("UTC")) - return dt.strftime(dt_format) - - def _filter_add_href(text, link, **kwargs): - if link: - kwargs_list = [f'href="{link}"'] - kwargs_list.append(f'alt="{text}"') - kwargs_list += [f'{k}="{v}"' for k, v in kwargs.items()] - return "{}".format(" ".join(kwargs_list), text) - else: - return text - - environment = Environment( - loader=PackageLoader("conda_build", "templates"), - ) - environment.filters["human_bytes"] = human_bytes - environment.filters["strftime"] = _filter_strftime - environment.filters["add_href"] = _filter_add_href - environment.trim_blocks = True - environment.lstrip_blocks = True - - return environment - - -@deprecated("24.1.0", "24.3.0") -def _maybe_write(path, content, write_newline_end=False, content_is_binary=False): - # Create the temp file next "path" so that we can use an atomic move, see - # https://github.com/conda/conda-build/issues/3833 - temp_path = f"{path}.{uuid4()}" - - if not content_is_binary: - content = ensure_binary(content) - with open(temp_path, "wb") as fh: - fh.write(content) - if write_newline_end: - fh.write(b"\n") - if isfile(path): - if utils.md5_file(temp_path) == utils.md5_file(path): - # No need to change mtimes. The contents already match. 
- os.unlink(temp_path) - return False - # log.info("writing %s", path) - utils.move_with_fallback(temp_path, path) - return True - - -@deprecated("24.1.0", "24.3.0") -def _make_build_string(build, build_number): - build_number_as_string = str(build_number) - if build.endswith(build_number_as_string): - build = build[: -len(build_number_as_string)] - build = build.rstrip("_") - build_string = build - return build_string - - -@deprecated("24.1.0", "24.3.0") -def _warn_on_missing_dependencies(missing_dependencies, patched_repodata): - """ - The following dependencies do not exist in the channel and are not declared - as external dependencies: - - dependency1: - - subdir/fn1.tar.bz2 - - subdir/fn2.tar.bz2 - dependency2: - - subdir/fn3.tar.bz2 - - subdir/fn4.tar.bz2 - - The associated packages are being removed from the index. - """ - - if missing_dependencies: - builder = [ - "WARNING: The following dependencies do not exist in the channel", - " and are not declared as external dependencies:", - ] - for dep_name in sorted(missing_dependencies): - builder.append(" %s" % dep_name) - for subdir_fn in sorted(missing_dependencies[dep_name]): - builder.append(" - %s" % subdir_fn) - subdir, fn = subdir_fn.split("/") - popped = patched_repodata["packages"].pop(fn, None) - if popped: - patched_repodata["removed"].append(fn) - - builder.append("The associated packages are being removed from the index.") - builder.append("") - log.warn("\n".join(builder)) - - -@deprecated("24.1.0", "24.3.0") -def _cache_post_install_details(paths_cache_path, post_install_cache_path): - post_install_details_json = { - "binary_prefix": False, - "text_prefix": False, - "activate.d": False, - "deactivate.d": False, - "pre_link": False, - "post_link": False, - "pre_unlink": False, - } - if os.path.lexists(paths_cache_path): - with open(paths_cache_path) as f: - paths = json.load(f).get("paths", []) - - # get embedded prefix data from paths.json - for f in paths: - if f.get("prefix_placeholder"): - if f.get("file_mode") == "binary": - post_install_details_json["binary_prefix"] = True - elif f.get("file_mode") == "text": - post_install_details_json["text_prefix"] = True - # check for any activate.d/deactivate.d scripts - for k in ("activate.d", "deactivate.d"): - if not post_install_details_json.get(k) and f["_path"].startswith( - "etc/conda/%s" % k - ): - post_install_details_json[k] = True - # check for any link scripts - for pat in ("pre-link", "post-link", "pre-unlink"): - if not post_install_details_json.get(pat) and fnmatch.fnmatch( - f["_path"], "*/.*-%s.*" % pat - ): - post_install_details_json[pat.replace("-", "_")] = True - - with open(post_install_cache_path, "w") as fh: - json.dump(post_install_details_json, fh) - - -@deprecated("24.1.0", "24.3.0") -def _cache_recipe(tmpdir, recipe_cache_path): - recipe_path_search_order = ( - "info/recipe/meta.yaml.rendered", - "info/recipe/meta.yaml", - "info/meta.yaml", - ) - for path in recipe_path_search_order: - recipe_path = os.path.join(tmpdir, path) - if os.path.lexists(recipe_path): - break - recipe_path = None - - recipe_json = {} - if recipe_path: - with open(recipe_path) as f: - try: - recipe_json = yaml.safe_load(f) - except (ConstructorError, ParserError, ScannerError, ReaderError): - pass - try: - recipe_json_str = json.dumps(recipe_json) - except TypeError: - recipe_json.get("requirements", {}).pop("build") - recipe_json_str = json.dumps(recipe_json) - with open(recipe_cache_path, "w") as fh: - fh.write(recipe_json_str) - return recipe_json - - 
-@deprecated("24.1.0", "24.3.0") -def _cache_run_exports(tmpdir, run_exports_cache_path): - run_exports = {} - try: - with open(os.path.join(tmpdir, "info", "run_exports.json")) as f: - run_exports = json.load(f) - except (OSError, FileNotFoundError): - try: - with open(os.path.join(tmpdir, "info", "run_exports.yaml")) as f: - run_exports = yaml.safe_load(f) - except (OSError, FileNotFoundError): - log.debug("%s has no run_exports file (this is OK)" % tmpdir) - with open(run_exports_cache_path, "w") as fh: - json.dump(run_exports, fh) - - -@deprecated("24.1.0", "24.3.0") -def _cache_icon(tmpdir, recipe_json, icon_cache_path): - # If a conda package contains an icon, also extract and cache that in an .icon/ - # directory. The icon file name is the name of the package, plus the extension - # of the icon file as indicated by the meta.yaml `app/icon` key. - # apparently right now conda-build renames all icons to 'icon.png' - # What happens if it's an ico file, or a svg file, instead of a png? Not sure! - app_icon_path = recipe_json.get("app", {}).get("icon") - if app_icon_path: - icon_path = os.path.join(tmpdir, "info", "recipe", app_icon_path) - if not os.path.lexists(icon_path): - icon_path = os.path.join(tmpdir, "info", "icon.png") - if os.path.lexists(icon_path): - icon_cache_path += splitext(app_icon_path)[-1] - utils.move_with_fallback(icon_path, icon_cache_path) - - -@deprecated("24.1.0", "24.3.0") -def _make_subdir_index_html(channel_name, subdir, repodata_packages, extra_paths): - environment = _get_jinja2_environment() - template = environment.get_template("subdir-index.html.j2") - rendered_html = template.render( - title="{}/{}".format(channel_name or "", subdir), - packages=repodata_packages, - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - extra_paths=extra_paths, - ) - return rendered_html - - -@deprecated("24.1.0", "24.3.0") -def _make_channeldata_index_html(channel_name, channeldata): - environment = _get_jinja2_environment() - template = environment.get_template("channeldata-index.html.j2") - rendered_html = template.render( - title=channel_name, - packages=channeldata["packages"], - subdirs=channeldata["subdirs"], - current_time=datetime.utcnow().replace(tzinfo=pytz.timezone("UTC")), - ) - return rendered_html - - -@deprecated("24.1.0", "24.3.0") -def _get_source_repo_git_info(path): - is_repo = subprocess.check_output( - ["git", "rev-parse", "--is-inside-work-tree"], cwd=path - ) - if is_repo.strip().decode("utf-8") == "true": - output = subprocess.check_output( - ["git", "log", "--pretty=format:'%h|%ad|%an|%s'", "--date=unix"], cwd=path - ) - commits = [] - for line in output.decode("utf-8").strip().splitlines(): - _hash, _time, _author, _desc = line.split("|") - commits.append( - { - "hash": _hash, - "timestamp": int(_time), - "author": _author, - "description": _desc, - } - ) - return commits - - -@deprecated("24.1.0", "24.3.0") -def _cache_info_file(tmpdir, info_fn, cache_path): - info_path = os.path.join(tmpdir, "info", info_fn) - if os.path.lexists(info_path): - utils.move_with_fallback(info_path, cache_path) - - -@deprecated("24.1.0", "24.3.0") -def _alternate_file_extension(fn): - cache_fn = fn - for ext in CONDA_PACKAGE_EXTENSIONS: - cache_fn = cache_fn.replace(ext, "") - other_ext = set(CONDA_PACKAGE_EXTENSIONS) - {fn.replace(cache_fn, "")} - return cache_fn + next(iter(other_ext)) - - -@deprecated("24.1.0", "24.3.0") -def _get_resolve_object(subdir, file_path=None, precs=None, repodata=None): - packages = {} - conda_packages = {} - if 
file_path: - with open(file_path) as fi: - packages = json.load(fi) - recs = json.load(fi) - for k, v in recs.items(): - if k.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[k] = v - elif k.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[k] = v - if not repodata: - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - "packages.conda": conda_packages, - } - - channel = Channel("https://conda.anaconda.org/dummy-channel/%s" % subdir) - sd = SubdirData(channel) - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in precs or sd._package_records} - r = Resolve(index, channels=(channel,)) - return r - - -@deprecated("24.1.0", "24.3.0") -def _get_newest_versions(r, pins={}): - groups = {} - for g_name, g_recs in r.groups.items(): - if g_name in pins: - matches = [] - for pin in pins[g_name]: - version = r.find_matches(MatchSpec(f"{g_name}={pin}"))[0].version - matches.extend(r.find_matches(MatchSpec(f"{g_name}={version}"))) - else: - version = r.groups[g_name][0].version - matches = r.find_matches(MatchSpec(f"{g_name}={version}")) - groups[g_name] = matches - return [pkg for group in groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _add_missing_deps(new_r, original_r): - """For each package in new_r, if any deps are not satisfiable, backfill them from original_r.""" - - expanded_groups = copy.deepcopy(new_r.groups) - seen_specs = set() - for g_name, g_recs in new_r.groups.items(): - for g_rec in g_recs: - for dep_spec in g_rec.depends: - if dep_spec in seen_specs: - continue - ms = MatchSpec(dep_spec) - if not new_r.find_matches(ms): - matches = original_r.find_matches(ms) - if matches: - version = matches[0].version - expanded_groups[ms.name] = set( - expanded_groups.get(ms.name, []) - ) | set( - original_r.find_matches(MatchSpec(f"{ms.name}={version}")) - ) - seen_specs.add(dep_spec) - return [pkg for group in expanded_groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _add_prev_ver_for_features(new_r, orig_r): - expanded_groups = copy.deepcopy(new_r.groups) - for g_name in new_r.groups: - if not any(m.track_features or m.features for m in new_r.groups[g_name]): - # no features so skip - continue - - # versions are sorted here so this is the latest - latest_version = VersionOrder(str(new_r.groups[g_name][0].version)) - if g_name in orig_r.groups: - # now we iterate through the list to find the next to latest - # without a feature - keep_m = None - for i in range(len(orig_r.groups[g_name])): - _m = orig_r.groups[g_name][i] - if VersionOrder(str(_m.version)) <= latest_version and not ( - _m.track_features or _m.features - ): - keep_m = _m - break - if keep_m is not None: - expanded_groups[g_name] = {keep_m} | set( - expanded_groups.get(g_name, []) - ) - - return [pkg for group in expanded_groups.values() for pkg in group] - - -@deprecated("24.1.0", "24.3.0") -def _shard_newest_packages(subdir, r, pins=None): - """Captures only the newest versions of software in the resolve object. - - For things where more than one version is supported simultaneously (like Python), - pass pins as a dictionary, with the key being the package name, and the value being - a list of supported versions. 
For example: - - {'python': ["2.7", "3.6"]} - """ - groups = {} - pins = pins or {} - for g_name, g_recs in r.groups.items(): - # always do the latest implicitly - version = r.groups[g_name][0].version - matches = set(r.find_matches(MatchSpec(f"{g_name}={version}"))) - if g_name in pins: - for pin_value in pins[g_name]: - version = r.find_matches(MatchSpec(f"{g_name}={pin_value}"))[0].version - matches.update(r.find_matches(MatchSpec(f"{g_name}={version}"))) - groups[g_name] = matches - - # add the deps of the stuff in the index - new_r = _get_resolve_object( - subdir, precs=[pkg for group in groups.values() for pkg in group] - ) - new_r = _get_resolve_object(subdir, precs=_add_missing_deps(new_r, r)) - - # now for any pkg with features, add at least one previous version - # also return - return set(_add_prev_ver_for_features(new_r, r)) - - -@deprecated("24.1.0", "24.3.0") -def _build_current_repodata(subdir, repodata, pins): - r = _get_resolve_object(subdir, repodata=repodata) - keep_pkgs = _shard_newest_packages(subdir, r, pins) - new_repodata = { - k: repodata[k] for k in set(repodata.keys()) - {"packages", "packages.conda"} - } - packages = {} - conda_packages = {} - for keep_pkg in keep_pkgs: - if keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - conda_packages[keep_pkg.fn] = repodata["packages.conda"][keep_pkg.fn] - # in order to prevent package churn we consider the md5 for the .tar.bz2 that matches the .conda file - # This holds when .conda files contain the same files as .tar.bz2, which is an assumption we'll make - # until it becomes more prevalent that people provide only .conda files and just skip .tar.bz2 - counterpart = keep_pkg.fn.replace( - CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1 - ) - conda_packages[keep_pkg.fn]["legacy_bz2_md5"] = ( - repodata["packages"].get(counterpart, {}).get("md5") - ) - elif keep_pkg.fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - packages[keep_pkg.fn] = repodata["packages"][keep_pkg.fn] - new_repodata["packages"] = packages - new_repodata["packages.conda"] = conda_packages - return new_repodata - - -@deprecated("24.1.0", "24.3.0") -class ChannelIndex: - def __init__( - self, - channel_root, - channel_name, - subdirs=None, - threads=MAX_THREADS_DEFAULT, - deep_integrity_check=False, - debug=False, - ): - self.channel_root = abspath(channel_root) - self.channel_name = channel_name or basename(channel_root.rstrip("/")) - self._subdirs = subdirs - self.thread_executor = ( - DummyExecutor() - if debug or sys.version_info.major == 2 or threads == 1 - else ProcessPoolExecutor(threads) - ) - self.deep_integrity_check = deep_integrity_check - - def index( - self, - patch_generator, - hotfix_source_repo=None, - verbose=False, - progress=False, - current_index_versions=None, - index_file=None, - ): - if verbose: - level = logging.DEBUG - else: - level = logging.ERROR - - with utils.LoggingContext(level, loggers=[__name__]): - if not self._subdirs: - detected_subdirs = { - subdir.name - for subdir in os.scandir(self.channel_root) - if subdir.name in utils.DEFAULT_SUBDIRS and subdir.is_dir() - } - log.debug("found subdirs %s" % detected_subdirs) - self.subdirs = subdirs = sorted(detected_subdirs | {"noarch"}) - else: - self.subdirs = subdirs = sorted(set(self._subdirs) | {"noarch"}) - - # Step 1. Lock local channel. 
- with utils.try_acquire_locks( - [utils.get_lock(self.channel_root)], timeout=900 - ): - channel_data = {} - channeldata_file = os.path.join(self.channel_root, "channeldata.json") - if os.path.isfile(channeldata_file): - with open(channeldata_file) as f: - channel_data = json.load(f) - # Step 2. Collect repodata from packages, save to pkg_repodata.json file - with tqdm( - total=len(subdirs), disable=(verbose or not progress), leave=False - ) as t: - for subdir in subdirs: - t.set_description("Subdir: %s" % subdir) - t.update() - with tqdm( - total=8, disable=(verbose or not progress), leave=False - ) as t2: - t2.set_description("Gathering repodata") - t2.update() - _ensure_valid_channel(self.channel_root, subdir) - repodata_from_packages = self.index_subdir( - subdir, - verbose=verbose, - progress=progress, - index_file=index_file, - ) - - t2.set_description("Writing pre-patch repodata") - t2.update() - self._write_repodata( - subdir, - repodata_from_packages, - _REPODATA_FROM_PKGS_JSON_FN, - ) - - # Step 3. Apply patch instructions. - t2.set_description("Applying patch instructions") - t2.update() - patched_repodata, patch_instructions = self._patch_repodata( - subdir, repodata_from_packages, patch_generator - ) - - # Step 4. Save patched and augmented repodata. - # If the contents of repodata have changed, write a new repodata.json file. - # Also create associated index.html. - - t2.set_description("Writing patched repodata") - t2.update() - self._write_repodata( - subdir, patched_repodata, _REPODATA_JSON_FN - ) - t2.set_description("Building current_repodata subset") - t2.update() - current_repodata = _build_current_repodata( - subdir, patched_repodata, pins=current_index_versions - ) - t2.set_description("Writing current_repodata subset") - t2.update() - self._write_repodata( - subdir, - current_repodata, - json_filename="current_repodata.json", - ) - - t2.set_description("Writing subdir index HTML") - t2.update() - self._write_subdir_index_html(subdir, patched_repodata) - - t2.set_description("Updating channeldata") - t2.update() - self._update_channeldata( - channel_data, patched_repodata, subdir - ) - - # Step 7. Create and write channeldata. - self._write_channeldata_index_html(channel_data) - self._write_channeldata(channel_data) - - def index_subdir(self, subdir, index_file=None, verbose=False, progress=False): - subdir_path = join(self.channel_root, subdir) - self._ensure_dirs(subdir) - repodata_json_path = join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN) - - if verbose: - log.info("Building repodata for %s" % subdir_path) - - # gather conda package filenames in subdir - # we'll process these first, because reading their metadata is much faster - fns_in_subdir = { - fn - for fn in os.listdir(subdir_path) - if fn.endswith(".conda") or fn.endswith(".tar.bz2") - } - - # load current/old repodata - try: - with open(repodata_json_path) as fh: - old_repodata = json.load(fh) or {} - except (OSError, JSONDecodeError): - # log.info("no repodata found at %s", repodata_json_path) - old_repodata = {} - - old_repodata_packages = old_repodata.get("packages", {}) - old_repodata_conda_packages = old_repodata.get("packages.conda", {}) - old_repodata_fns = set(old_repodata_packages) | set(old_repodata_conda_packages) - - # Load stat cache. 
The stat cache has the form - # { - # 'package_name.tar.bz2': { - # 'mtime': 123456, - # 'md5': 'abd123', - # }, - # } - stat_cache_path = join(subdir_path, ".cache", "stat.json") - try: - with open(stat_cache_path) as fh: - stat_cache = json.load(fh) or {} - except: - stat_cache = {} - - stat_cache_original = stat_cache.copy() - - remove_set = old_repodata_fns - fns_in_subdir - ignore_set = set(old_repodata.get("removed", [])) - try: - # calculate all the paths and figure out what we're going to do with them - # add_set: filenames that aren't in the current/old repodata, but exist in the subdir - if index_file: - with open(index_file) as fin: - add_set = set() - for line in fin: - fn_subdir, fn = line.strip().split("/") - if fn_subdir != subdir: - continue - if fn.endswith(".conda") or fn.endswith(".tar.bz2"): - add_set.add(fn) - else: - add_set = fns_in_subdir - old_repodata_fns - - add_set -= ignore_set - - # update_set: Filenames that are in both old repodata and new repodata, - # and whose contents have changed based on file size or mtime. We're - # not using md5 here because it takes too long. If needing to do full md5 checks, - # use the --deep-integrity-check flag / self.deep_integrity_check option. - update_set = self._calculate_update_set( - subdir, - fns_in_subdir, - old_repodata_fns, - stat_cache, - verbose=verbose, - progress=progress, - ) - # unchanged_set: packages in old repodata whose information can carry straight - # across to new repodata - unchanged_set = set(old_repodata_fns - update_set - remove_set - ignore_set) - - assert isinstance(unchanged_set, set) # faster `in` queries - - # clean up removed files - removed_set = old_repodata_fns - fns_in_subdir - for fn in removed_set: - if fn in stat_cache: - del stat_cache[fn] - - new_repodata_packages = { - k: v - for k, v in old_repodata.get("packages", {}).items() - if k in unchanged_set - } - new_repodata_conda_packages = { - k: v - for k, v in old_repodata.get("packages.conda", {}).items() - if k in unchanged_set - } - - for k in sorted(unchanged_set): - if not (k in new_repodata_packages or k in new_repodata_conda_packages): - fn, rec = ChannelIndex._load_index_from_cache( - self.channel_root, subdir, fn, stat_cache - ) - # this is how we pass an exception through. When fn == rec, there's been a problem, - # and we need to reload this file - if fn == rec: - update_set.add(fn) - else: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V1): - new_repodata_packages[fn] = rec - else: - new_repodata_conda_packages[fn] = rec - - # Invalidate cached files for update_set. - # Extract and cache update_set and add_set, then add to new_repodata_packages. - # This is also where we update the contents of the stat_cache for successfully - # extracted packages. - # Sorting here prioritizes .conda files ('c') over .tar.bz2 files ('b') - hash_extract_set = (*add_set, *update_set) - - extract_func = functools.partial( - ChannelIndex._extract_to_cache, self.channel_root, subdir - ) - # split up the set by .conda packages first, then .tar.bz2. This avoids race conditions - # with execution in parallel that would end up in the same place. 
- for conda_format in tqdm( - CONDA_PACKAGE_EXTENSIONS, - desc="File format", - disable=(verbose or not progress), - leave=False, - ): - for fn, mtime, size, index_json in tqdm( - self.thread_executor.map( - extract_func, - (fn for fn in hash_extract_set if fn.endswith(conda_format)), - ), - desc="hash & extract packages for %s" % subdir, - disable=(verbose or not progress), - leave=False, - ): - # fn can be None if the file was corrupt or no longer there - if fn and mtime: - stat_cache[fn] = {"mtime": int(mtime), "size": size} - if index_json: - if fn.endswith(CONDA_PACKAGE_EXTENSION_V2): - new_repodata_conda_packages[fn] = index_json - else: - new_repodata_packages[fn] = index_json - else: - log.error( - "Package at %s did not contain valid index.json data. Please" - " check the file and remove/redownload if necessary to obtain " - "a valid package." % os.path.join(subdir_path, fn) - ) - - new_repodata = { - "packages": new_repodata_packages, - "packages.conda": new_repodata_conda_packages, - "info": { - "subdir": subdir, - }, - "repodata_version": _REPODATA_VERSION, - "removed": sorted(list(ignore_set)), - } - finally: - if stat_cache != stat_cache_original: - # log.info("writing stat cache to %s", stat_cache_path) - with open(stat_cache_path, "w") as fh: - json.dump(stat_cache, fh) - return new_repodata - - def _ensure_dirs(self, subdir: str): - """Create cache directories within a subdir. - - Args: - subdir (str): name of the subdirectory - """ - # Create all cache directories in the subdir. - cache_path = Path(self.channel_root, subdir, ".cache") - cache_path.mkdir(parents=True, exist_ok=True) - (cache_path / "index").mkdir(exist_ok=True) - (cache_path / "about").mkdir(exist_ok=True) - (cache_path / "paths").mkdir(exist_ok=True) - (cache_path / "recipe").mkdir(exist_ok=True) - (cache_path / "run_exports").mkdir(exist_ok=True) - (cache_path / "post_install").mkdir(exist_ok=True) - (cache_path / "icon").mkdir(exist_ok=True) - (cache_path / "recipe_log").mkdir(exist_ok=True) - Path(self.channel_root, "icons").mkdir(exist_ok=True) - - def _calculate_update_set( - self, - subdir, - fns_in_subdir, - old_repodata_fns, - stat_cache, - verbose=False, - progress=True, - ): - # Determine the packages that already exist in repodata, but need to be updated. - # We're not using md5 here because it takes too long. - candidate_fns = fns_in_subdir & old_repodata_fns - subdir_path = join(self.channel_root, subdir) - - update_set = set() - for fn in tqdm( - iter(candidate_fns), - desc="Finding updated files", - disable=(verbose or not progress), - leave=False, - ): - if fn not in stat_cache: - update_set.add(fn) - else: - stat_result = os.stat(join(subdir_path, fn)) - if ( - int(stat_result.st_mtime) != int(stat_cache[fn]["mtime"]) - or stat_result.st_size != stat_cache[fn]["size"] - ): - update_set.add(fn) - return update_set - - @staticmethod - def _extract_to_cache(channel_root, subdir, fn, second_try=False): - # This method WILL reread the tarball. Probably need another one to exit early if - # there are cases where it's fine not to reread. Like if we just rebuild repodata - # from the cached files, but don't use the existing repodata.json as a starting point. - subdir_path = join(channel_root, subdir) - - # allow .conda files to reuse cache from .tar.bz2 and vice-versa. - # Assumes that .tar.bz2 and .conda files have exactly the same - # contents. This is convention, but not guaranteed, nor checked. 
- alternate_cache_fn = _alternate_file_extension(fn) - cache_fn = fn - - abs_fn = os.path.join(subdir_path, fn) - - stat_result = os.stat(abs_fn) - size = stat_result.st_size - mtime = stat_result.st_mtime - retval = fn, mtime, size, None - - index_cache_path = join(subdir_path, ".cache", "index", cache_fn + ".json") - about_cache_path = join(subdir_path, ".cache", "about", cache_fn + ".json") - paths_cache_path = join(subdir_path, ".cache", "paths", cache_fn + ".json") - recipe_cache_path = join(subdir_path, ".cache", "recipe", cache_fn + ".json") - run_exports_cache_path = join( - subdir_path, ".cache", "run_exports", cache_fn + ".json" - ) - post_install_cache_path = join( - subdir_path, ".cache", "post_install", cache_fn + ".json" - ) - icon_cache_path = join(subdir_path, ".cache", "icon", cache_fn) - - log.debug("hashing, extracting, and caching %s" % fn) - - alternate_cache = False - if not os.path.exists(index_cache_path) and os.path.exists( - index_cache_path.replace(fn, alternate_cache_fn) - ): - alternate_cache = True - - try: - # allow .tar.bz2 files to use the .conda cache, but not vice-versa. - # .conda readup is very fast (essentially free), but .conda files come from - # converting .tar.bz2 files, which can go wrong. Forcing extraction for - # .conda files gives us a check on the validity of that conversion. - if not fn.endswith(CONDA_PACKAGE_EXTENSION_V2) and os.path.isfile( - index_cache_path - ): - with open(index_cache_path) as f: - index_json = json.load(f) - elif not alternate_cache and ( - second_try or not os.path.exists(index_cache_path) - ): - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract( - abs_fn, dest_dir=tmpdir, components="info" - ) - index_file = os.path.join(tmpdir, "info", "index.json") - if not os.path.exists(index_file): - return retval - with open(index_file) as f: - index_json = json.load(f) - - _cache_info_file(tmpdir, "about.json", about_cache_path) - _cache_info_file(tmpdir, "paths.json", paths_cache_path) - _cache_info_file(tmpdir, "recipe_log.json", paths_cache_path) - _cache_run_exports(tmpdir, run_exports_cache_path) - _cache_post_install_details( - paths_cache_path, post_install_cache_path - ) - recipe_json = _cache_recipe(tmpdir, recipe_cache_path) - _cache_icon(tmpdir, recipe_json, icon_cache_path) - - # decide what fields to filter out, like has_prefix - filter_fields = { - "arch", - "has_prefix", - "mtime", - "platform", - "ucs", - "requires_features", - "binstar", - "target-triplet", - "machine", - "operatingsystem", - } - for field_name in filter_fields & set(index_json): - del index_json[field_name] - elif alternate_cache: - # we hit the cache of the other file type. Copy files to this name, and replace - # the size, md5, and sha256 values - paths = [ - index_cache_path, - about_cache_path, - paths_cache_path, - recipe_cache_path, - run_exports_cache_path, - post_install_cache_path, - icon_cache_path, - ] - bizarro_paths = [_.replace(fn, alternate_cache_fn) for _ in paths] - for src, dest in zip(bizarro_paths, paths): - if os.path.exists(src): - try: - os.makedirs(os.path.dirname(dest)) - except: - pass - utils.copy_into(src, dest) - - with open(index_cache_path) as f: - index_json = json.load(f) - else: - with open(index_cache_path) as f: - index_json = json.load(f) - - # calculate extra stuff to add to index.json cache, size, md5, sha256 - # This is done always for all files, whether the cache is loaded or not, - # because the cache may be from the other file type. 
We don't store this - # info in the cache to avoid confusion. - index_json.update(conda_package_handling.api.get_pkg_details(abs_fn)) - - with open(index_cache_path, "w") as fh: - json.dump(index_json, fh) - retval = fn, mtime, size, index_json - except (InvalidArchiveError, KeyError, EOFError, JSONDecodeError): - if not second_try: - return ChannelIndex._extract_to_cache( - channel_root, subdir, fn, second_try=True - ) - return retval - - @staticmethod - def _load_index_from_cache(channel_root, subdir, fn, stat_cache): - index_cache_path = join(channel_root, subdir, ".cache", "index", fn + ".json") - try: - with open(index_cache_path) as fh: - index_json = json.load(fh) - except (OSError, JSONDecodeError): - index_json = fn - - return fn, index_json - - @staticmethod - def _load_all_from_cache(channel_root, subdir, fn): - subdir_path = join(channel_root, subdir) - try: - mtime = getmtime(join(subdir_path, fn)) - except FileNotFoundError: - return {} - # In contrast to self._load_index_from_cache(), this method reads up pretty much - # all of the cached metadata, except for paths. It all gets dumped into a single map. - index_cache_path = join(subdir_path, ".cache", "index", fn + ".json") - about_cache_path = join(subdir_path, ".cache", "about", fn + ".json") - recipe_cache_path = join(subdir_path, ".cache", "recipe", fn + ".json") - run_exports_cache_path = join( - subdir_path, ".cache", "run_exports", fn + ".json" - ) - post_install_cache_path = join( - subdir_path, ".cache", "post_install", fn + ".json" - ) - icon_cache_path_glob = join(subdir_path, ".cache", "icon", fn + ".*") - recipe_log_path = join(subdir_path, ".cache", "recipe_log", fn + ".json") - - data = {} - for path in ( - recipe_cache_path, - about_cache_path, - index_cache_path, - post_install_cache_path, - recipe_log_path, - ): - try: - if os.path.getsize(path) != 0: - with open(path) as fh: - data.update(json.load(fh)) - except (OSError, EOFError): - pass - - try: - icon_cache_paths = glob(icon_cache_path_glob) - if icon_cache_paths: - icon_cache_path = sorted(icon_cache_paths)[-1] - icon_ext = icon_cache_path.rsplit(".", 1)[-1] - channel_icon_fn = "{}.{}".format(data["name"], icon_ext) - icon_url = "icons/" + channel_icon_fn - icon_channel_path = join(channel_root, "icons", channel_icon_fn) - icon_md5 = utils.md5_file(icon_cache_path) - icon_hash = f"md5:{icon_md5}:{getsize(icon_cache_path)}" - data.update(icon_hash=icon_hash, icon_url=icon_url) - # log.info("writing icon from %s to %s", icon_cache_path, icon_channel_path) - utils.move_with_fallback(icon_cache_path, icon_channel_path) - except: - pass - - # have to stat again, because we don't have access to the stat cache here - data["mtime"] = mtime - - source = data.get("source", {}) - try: - data.update({"source_" + k: v for k, v in source.items()}) - except AttributeError: - # sometimes source is a list instead of a dict - pass - _clear_newline_chars(data, "description") - _clear_newline_chars(data, "summary") - try: - with open(run_exports_cache_path) as fh: - data["run_exports"] = json.load(fh) - except (OSError, EOFError): - data["run_exports"] = {} - return data - - def _write_repodata(self, subdir, repodata, json_filename): - repodata_json_path = join(self.channel_root, subdir, json_filename) - new_repodata_binary = ( - json.dumps( - repodata, - indent=2, - sort_keys=True, - ) - .replace("':'", "': '") - .encode("utf-8") - ) - write_result = _maybe_write( - repodata_json_path, new_repodata_binary, write_newline_end=True - ) - if write_result: - 
repodata_bz2_path = repodata_json_path + ".bz2" - bz2_content = bz2.compress(new_repodata_binary) - _maybe_write(repodata_bz2_path, bz2_content, content_is_binary=True) - return write_result - - def _write_subdir_index_html(self, subdir, repodata): - repodata_packages = repodata["packages"] - subdir_path = join(self.channel_root, subdir) - - def _add_extra_path(extra_paths, path): - if isfile(join(self.channel_root, path)): - extra_paths[basename(path)] = { - "size": getsize(path), - "timestamp": int(getmtime(path)), - "sha256": utils.sha256_checksum(path), - "md5": utils.md5_file(path), - } - - extra_paths = OrderedDict() - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN)) - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_JSON_FN + ".bz2")) - _add_extra_path(extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN)) - _add_extra_path( - extra_paths, join(subdir_path, _REPODATA_FROM_PKGS_JSON_FN + ".bz2") - ) - # _add_extra_path(extra_paths, join(subdir_path, "repodata2.json")) - _add_extra_path(extra_paths, join(subdir_path, "patch_instructions.json")) - rendered_html = _make_subdir_index_html( - self.channel_name, subdir, repodata_packages, extra_paths - ) - index_path = join(subdir_path, "index.html") - return _maybe_write(index_path, rendered_html) - - def _write_channeldata_index_html(self, channeldata): - rendered_html = _make_channeldata_index_html(self.channel_name, channeldata) - index_path = join(self.channel_root, "index.html") - _maybe_write(index_path, rendered_html) - - def _update_channeldata(self, channel_data, repodata, subdir): - legacy_packages = repodata["packages"] - conda_packages = repodata["packages.conda"] - - use_these_legacy_keys = set(legacy_packages.keys()) - { - k[:-6] + CONDA_PACKAGE_EXTENSION_V1 for k in conda_packages.keys() - } - all_packages = conda_packages.copy() - all_packages.update({k: legacy_packages[k] for k in use_these_legacy_keys}) - package_data = channel_data.get("packages", {}) - - def _append_group(groups, candidates): - candidate = sorted(candidates, key=lambda x: x[1].get("timestamp", 0))[-1] - pkg_dict = candidate[1] - pkg_name = pkg_dict["name"] - - run_exports = package_data.get(pkg_name, {}).get("run_exports", {}) - if ( - pkg_name not in package_data - or subdir not in package_data.get(pkg_name, {}).get("subdirs", []) - or package_data.get(pkg_name, {}).get("timestamp", 0) - < _make_seconds(pkg_dict.get("timestamp", 0)) - or run_exports - and pkg_dict["version"] not in run_exports - ): - groups.append(candidate) - - groups = [] - for name, group in groupby(all_packages.items(), lambda x: x[1]["name"]): - if name not in package_data or package_data[name].get("run_exports"): - # pay special attention to groups that have run_exports - we need to process each version - # group by version; take newest per version group. 
We handle groups that are not - # in the index t all yet similarly, because we can't check if they have any run_exports - for _, vgroup in groupby(group, lambda x: x[1]["version"]): - _append_group(groups, vgroup) - else: - # take newest per group - _append_group(groups, group) - - def _replace_if_newer_and_present(pd, data, erec, data_newer, k): - if data.get(k) and (data_newer or not erec.get(k)): - pd[k] = data[k] - else: - pd[k] = erec.get(k) - - # unzipping - fns, fn_dicts = [], [] - if groups: - fns, fn_dicts = zip(*groups) - - load_func = functools.partial( - ChannelIndex._load_all_from_cache, - self.channel_root, - subdir, - ) - for fn_dict, data in zip(fn_dicts, self.thread_executor.map(load_func, fns)): - if data: - data.update(fn_dict) - name = data["name"] - # existing record - erec = package_data.get(name, {}) - data_v = data.get("version", "0") - erec_v = erec.get("version", "0") - data_newer = VersionOrder(data_v) > VersionOrder(erec_v) - - package_data[name] = package_data.get(name, {}) - # keep newer value for these - for k in ( - "description", - "dev_url", - "doc_url", - "doc_source_url", - "home", - "license", - "source_url", - "source_git_url", - "summary", - "icon_url", - "icon_hash", - "tags", - "identifiers", - "keywords", - "recipe_origin", - "version", - ): - _replace_if_newer_and_present( - package_data[name], data, erec, data_newer, k - ) - - # keep any true value for these, since we don't distinguish subdirs - for k in ( - "binary_prefix", - "text_prefix", - "activate.d", - "deactivate.d", - "pre_link", - "post_link", - "pre_unlink", - ): - package_data[name][k] = any((data.get(k), erec.get(k))) - - package_data[name]["subdirs"] = sorted( - list(set(erec.get("subdirs", []) + [subdir])) - ) - # keep one run_exports entry per version of the package, since these vary by version - run_exports = erec.get("run_exports", {}) - exports_from_this_version = data.get("run_exports") - if exports_from_this_version: - run_exports[data_v] = data.get("run_exports") - package_data[name]["run_exports"] = run_exports - package_data[name]["timestamp"] = _make_seconds( - max( - data.get("timestamp", 0), - channel_data.get(name, {}).get("timestamp", 0), - ) - ) - - channel_data.update( - { - "channeldata_version": _CHANNELDATA_VERSION, - "subdirs": sorted( - list(set(channel_data.get("subdirs", []) + [subdir])) - ), - "packages": package_data, - } - ) - - def _write_channeldata(self, channeldata): - # trim out commits, as they can take up a ton of space. They're really only for the RSS feed. 
- for _pkg, pkg_dict in channeldata.get("packages", {}).items(): - if "commits" in pkg_dict: - del pkg_dict["commits"] - channeldata_path = join(self.channel_root, "channeldata.json") - content = json.dumps(channeldata, indent=2, sort_keys=True).replace( - "':'", "': '" - ) - _maybe_write(channeldata_path, content, True) - - def _load_patch_instructions_tarball(self, subdir, patch_generator): - instructions = {} - with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(patch_generator, dest_dir=tmpdir) - instructions_file = os.path.join(tmpdir, subdir, "patch_instructions.json") - if os.path.isfile(instructions_file): - with open(instructions_file) as f: - instructions = json.load(f) - return instructions - - def _create_patch_instructions(self, subdir, repodata, patch_generator=None): - gen_patch_path = patch_generator or join(self.channel_root, "gen_patch.py") - if isfile(gen_patch_path): - log.debug(f"using patch generator {gen_patch_path} for {subdir}") - - # https://stackoverflow.com/a/41595552/2127762 - try: - from importlib.util import module_from_spec, spec_from_file_location - - spec = spec_from_file_location("a_b", gen_patch_path) - mod = module_from_spec(spec) - - spec.loader.exec_module(mod) - # older pythons - except ImportError: - import imp - - mod = imp.load_source("a_b", gen_patch_path) - - instructions = mod._patch_repodata(repodata, subdir) - - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return instructions - else: - if patch_generator: - raise ValueError( - f"Specified metadata patch file '{patch_generator}' does not exist. Please try an absolute " - "path, or examine your relative path carefully with respect to your cwd." - ) - return {} - - def _write_patch_instructions(self, subdir, instructions): - new_patch = json.dumps(instructions, indent=2, sort_keys=True).replace( - "':'", "': '" - ) - patch_instructions_path = join( - self.channel_root, subdir, "patch_instructions.json" - ) - _maybe_write(patch_instructions_path, new_patch, True) - - def _load_instructions(self, subdir): - patch_instructions_path = join( - self.channel_root, subdir, "patch_instructions.json" - ) - if isfile(patch_instructions_path): - log.debug("using patch instructions %s" % patch_instructions_path) - with open(patch_instructions_path) as fh: - instructions = json.load(fh) - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - return instructions - return {} - - def _patch_repodata(self, subdir, repodata, patch_generator=None): - if patch_generator and any( - patch_generator.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS - ): - instructions = self._load_patch_instructions_tarball( - subdir, patch_generator - ) - else: - instructions = self._create_patch_instructions( - subdir, repodata, patch_generator - ) - if instructions: - self._write_patch_instructions(subdir, instructions) - else: - instructions = self._load_instructions(subdir) - if instructions.get("patch_instructions_version", 0) > 1: - raise RuntimeError("Incompatible patch instructions version") - - return _apply_instructions(subdir, repodata, instructions), instructions diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 1b50a076c6..7a9985fc8a 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -22,7 +22,6 @@ from .conda_interface import ( specs_from_args, ) -from .deprecations import deprecated from .os_utils.ldd 
import ( get_linkages, get_package_obj_files, @@ -96,9 +95,6 @@ def __str__(self): untracked_package = _untracked_package() -@deprecated.argument("24.1.0", "24.3.0", "platform", rename="subdir") -@deprecated.argument("24.1.0", "24.3.0", "prepend") -@deprecated.argument("24.1.0", "24.3.0", "minimal_hint") def check_install( packages: Iterable[str], subdir: str | None = None, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index f9f0d55438..633b6de8fc 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -16,9 +16,10 @@ from typing import TYPE_CHECKING, overload from bs4 import UnicodeDammit +from conda.gateways.disk.read import compute_sum from . import exceptions, utils, variants -from .conda_interface import MatchSpec, envs_dirs, md5_file +from .conda_interface import MatchSpec, envs_dirs from .config import Config, get_or_merge_config from .features import feature_list from .license_family import ensure_valid_license_family @@ -1704,7 +1705,9 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % md5_file(join(self.path, self.get_value("app/icon"))) + d["icon"] = "%s.png" % compute_sum( + join(self.path, self.get_value("app/icon")), "md5" + ) for field, key in [ ("app/entry", "app_entry"), diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index daaf163490..fb81565b3d 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -6,30 +6,10 @@ import os import shutil import sys -from os.path import basename, dirname, isdir, isfile, join +from os.path import basename, dirname, isfile, join -from .deprecations import deprecated from .utils import on_win -deprecated.constant( - "24.1", - "24.3", - "ISWIN", - on_win, - addendum="Use `conda_build.utils.on_win` instead.", -) - - -@deprecated("24.1", "24.3", addendum="Use `os.makedirs(exist_ok=True)` instead.") -def _force_dir(dirname): - if not isdir(dirname): - os.makedirs(dirname) - - -@deprecated("24.1", "24.3") -def _error_exit(exit_message): - sys.exit("[noarch_python] %s" % exit_message) - def rewrite_script(fn, prefix): """Take a file from the bin directory and rewrite it into the python-scripts diff --git a/conda_build/post.py b/conda_build/post.py index 17edda3d6e..4512c9e508 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -35,13 +35,13 @@ from typing import TYPE_CHECKING from conda.core.prefix_data import PrefixData +from conda.gateways.disk.read import compute_sum from conda.models.records import PrefixRecord from . import utils from .conda_interface import ( TemporaryDirectory, lchmod, - md5_file, walk_prefix, ) from .exceptions import OverDependingError, OverLinkingError, RunPathError @@ -393,7 +393,7 @@ def find_lib(link, prefix, files, path=None): # multiple places. 
md5s = set() for f in file_names[link]: - md5s.add(md5_file(join(prefix, f))) + md5s.add(compute_sum(join(prefix, f), "md5")) if len(md5s) > 1: sys.exit( f"Error: Found multiple instances of {link}: {file_names[link]}" diff --git a/conda_build/render.py b/conda_build/render.py index a46130f4ed..9ba417bf23 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -35,8 +35,6 @@ pkgs_dirs, specs_from_url, ) -from .deprecations import deprecated -from .environ import LINK_ACTION from .exceptions import DependencyNeedsBuildingError from .index import get_build_index from .metadata import MetaData, combine_top_level_metadata_with_output @@ -91,13 +89,6 @@ def bldpkg_path(m): return path -@deprecated("24.1.0", "24.3.0") -def actions_to_pins(actions): - if LINK_ACTION in actions: - return [package_record_to_requirement(prec) for prec in actions[LINK_ACTION]] - return [] - - def _categorize_deps(m, specs, exclude_pattern, variant): subpackages = [] dependencies = [] @@ -158,7 +149,7 @@ def get_env_dependencies( ) with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir: try: - actions = environ.get_install_actions( + precs = environ.get_package_records( tmpdir, tuple(dependencies), env, @@ -180,19 +171,17 @@ def get_env_dependencies( else: unsat = e.message if permit_unsatisfiable_variants: - actions = {} + precs = [] else: raise - specs = [ - package_record_to_requirement(prec) for prec in actions.get(LINK_ACTION, []) - ] + specs = [package_record_to_requirement(prec) for prec in precs] return ( utils.ensure_list( (specs + subpackages + pass_through_deps) or m.get_value(f"requirements/{env}", []) ), - actions, + precs, unsat, ) @@ -329,7 +318,6 @@ def _read_specs_from_package(pkg_loc, pkg_dist): return specs -@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") def execute_download_actions(m, precs, env, package_subset=None, require_files=False): subdir = getattr(m.config, f"{env}_subdir") index, _, _ = get_build_index( @@ -359,8 +347,6 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F pkg_files = {} - if hasattr(precs, "keys"): - precs = precs.get(LINK_ACTION, []) if isinstance(package_subset, PackageRecord): package_subset = [package_subset] else: @@ -409,14 +395,11 @@ def execute_download_actions(m, precs, env, package_subset=None, require_files=F return pkg_files -@deprecated.argument("24.1.0", "24.3.0", "actions", rename="precs") def get_upstream_pins(m: MetaData, precs, env): """Download packages from specs, then inspect each downloaded package for additional downstream dependency specs. Return these additional specs.""" env_specs = m.get_value(f"requirements/{env}", []) explicit_specs = [req.split(" ")[0] for req in env_specs] if env_specs else [] - if hasattr(precs, "keys"): - precs = precs.get(LINK_ACTION, []) precs = [prec for prec in precs if prec.name in explicit_specs] ignore_pkgs_list = utils.ensure_list(m.get_value("build/ignore_run_exports_from")) @@ -453,7 +436,7 @@ def _read_upstream_pin_files( permit_unsatisfiable_variants, exclude_pattern, ): - deps, actions, unsat = get_env_dependencies( + deps, precs, unsat = get_env_dependencies( m, env, m.config.variant, @@ -462,7 +445,7 @@ def _read_upstream_pin_files( ) # extend host deps with strong build run exports. This is important for things like # vc feature activation to work correctly in the host env. 
- extra_run_specs = get_upstream_pins(m, actions, env) + extra_run_specs = get_upstream_pins(m, precs, env) return ( list(set(deps)) or m.get_value(f"requirements/{env}", []), unsat, diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index e1c061bf73..891f62f3cb 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -18,6 +18,7 @@ from os.path import basename, dirname, exists, join import requests +from conda.core.index import get_index from .. import environ from ..conda_interface import ( @@ -28,7 +29,6 @@ TemporaryDirectory, TmpDownload, download, - get_index, ) from ..config import Config, get_or_merge_config from ..utils import check_call_env, on_linux, on_win diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index fbe59199b3..92e2ff9efd 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -19,6 +19,7 @@ import pkginfo import requests import yaml +from conda.gateways.disk.read import compute_sum from requests.packages.urllib3.util.url import parse_url from ..conda_interface import ( @@ -26,7 +27,6 @@ configparser, default_python, download, - hashsum_file, human_bytes, input, normalized_version, @@ -1276,10 +1276,10 @@ def get_pkginfo( download_path = join(config.src_cache, filename) if ( not isfile(download_path) - or hashsum_file(download_path, hash_type) != hash_value + or compute_sum(download_path, hash_type) != hash_value ): download(pypiurl, join(config.src_cache, filename)) - if hashsum_file(download_path, hash_type) != hash_value: + if compute_sum(download_path, hash_type) != hash_value: raise RuntimeError( f" Download of {package} failed" f" checksum type {hash_type} expected value {hash_value}. Please" @@ -1291,7 +1291,7 @@ def get_pkginfo( # Needs to be done in this block because this is where we have # access to the source file. if hash_type != "sha256": - new_hash_value = hashsum_file(download_path, "sha256") + new_hash_value = compute_sum(download_path, "sha256") else: new_hash_value = "" @@ -1356,7 +1356,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op create_env( config.host_prefix, - specs_or_actions=specs, + specs_or_precs=specs, env="host", subdir=subdir, clear_cache=False, diff --git a/conda_build/source.py b/conda_build/source.py index d4e1ca5b69..436a4137b2 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -15,11 +15,12 @@ from typing import TYPE_CHECKING from urllib.parse import urljoin +from conda.gateways.disk.read import compute_sum + from .conda_interface import ( CondaHTTPError, TemporaryDirectory, download, - hashsum_file, url_path, ) from .exceptions import MissingDependency @@ -120,7 +121,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): for tp in ("md5", "sha1", "sha256"): if tp in source_dict: expected_hash = source_dict[tp] - hashed = hashsum_file(path, tp) + hashed = compute_sum(path, tp) if expected_hash != hashed: rm_rf(path) raise RuntimeError( @@ -132,7 +133,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): # collisions in our source cache, but the end user will get no benefit from the cache. 
if not hash_added: if not hashed: - hashed = hashsum_file(path, "sha256") + hashed = compute_sum(path, "sha256") dest_path = append_hash_to_fn(path, hashed) if not os.path.isfile(dest_path): shutil.move(path, dest_path) diff --git a/conda_build/utils.py b/conda_build/utils.py index 29baa98005..7635c45a6f 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -41,7 +41,7 @@ ) from pathlib import Path from threading import Thread -from typing import TYPE_CHECKING, Iterable +from typing import TYPE_CHECKING, Iterable, overload import conda_package_handling.api import filelock @@ -53,12 +53,11 @@ CONDA_PACKAGE_EXTENSIONS, KNOWN_SUBDIRS, ) -from conda.core.prefix_data import PrefixData -from conda.models.dist import Dist +from conda.gateways.disk.read import compute_sum +from conda.models.match_spec import MatchSpec from .conda_interface import ( CondaHTTPError, - MatchSpec, PackageRecord, StringIO, TemporaryDirectory, @@ -67,19 +66,20 @@ context, download, get_conda_channel, - hashsum_file, - md5_file, pkgs_dirs, root_dir, unix_path_to_win, win_path_to_unix, ) from .conda_interface import rm_rf as _rm_rf -from .deprecations import deprecated from .exceptions import BuildLockError if TYPE_CHECKING: - from conda.models.records import PrefixRecord + from typing import Mapping, TypeVar + + T = TypeVar("T") + K = TypeVar("K") + V = TypeVar("V") on_win = sys.platform == "win32" on_mac = sys.platform == "darwin" @@ -876,8 +876,8 @@ def tar_xf(tarball, dir_path): def file_info(path): return { "size": getsize(path), - "md5": md5_file(path), - "sha256": hashsum_file(path, "sha256"), + "md5": compute_sum(path, "md5"), + "sha256": compute_sum(path, "sha256"), "mtime": getmtime(path), } @@ -1162,7 +1162,7 @@ def package_has_file(package_path, file_path, refresh_mode="modified"): return content -def ensure_list(arg, include_dict=True): +def ensure_list(arg: T | Iterable[T] | None, include_dict: bool = True) -> list[T]: """ Ensure the object is a list. If not return it in a list. @@ -1181,7 +1181,11 @@ def ensure_list(arg, include_dict=True): return [arg] -def islist(arg, uniform=False, include_dict=True): +def islist( + arg: T | Iterable[T], + uniform: bool = False, + include_dict: bool = True, +) -> bool: """ Check whether `arg` is a `list`. Optionally determine whether the list elements are all uniform. @@ -1767,7 +1771,10 @@ def merge_or_update_dict( return base -def merge_dicts_of_lists(dol1, dol2): +def merge_dicts_of_lists( + dol1: Mapping[K, Iterable[V]], + dol2: Mapping[K, Iterable[V]], +) -> dict[K, list[V]]: """ From Alex Martelli: https://stackoverflow.com/a/1495821/3257826 """ @@ -1889,7 +1896,17 @@ def sort_list_in_nested_structure(dictionary, omissions=""): spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$") -def ensure_valid_spec(spec, warn=False): +@overload +def ensure_valid_spec(spec: str, warn: bool = False) -> str: + ... + + +@overload +def ensure_valid_spec(spec: MatchSpec, warn: bool = False) -> MatchSpec: + ... + + +def ensure_valid_spec(spec: str | MatchSpec, warn: bool = False) -> str | MatchSpec: if isinstance(spec, MatchSpec): if ( hasattr(spec, "version") @@ -2112,21 +2129,6 @@ def download_channeldata(channel_url): return data -@deprecated("24.1.0", "24.3.0") -def linked_data_no_multichannels( - prefix: str | os.PathLike | Path, -) -> dict[Dist, PrefixRecord]: - """ - Return a dictionary of the linked packages in prefix, with correct channels, hopefully. - cc @kalefranz. 
- """ - prefix = Path(prefix) - return { - Dist.from_string(prec.fn, channel_override=prec.channel.name): prec - for prec in PrefixData(str(prefix)).iter_records() - } - - def shutil_move_more_retrying(src, dest, debug_name): log = get_logger(__name__) log.info(f"Renaming {debug_name} directory '{src}' to '{dest}'") diff --git a/news/5203-remove-deprecations b/news/5203-remove-deprecations new file mode 100644 index 0000000000..fb77c3b149 --- /dev/null +++ b/news/5203-remove-deprecations @@ -0,0 +1,82 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* Mark `conda_build.conda_interface.handle_proxy_407` as deprecated. Handled by `conda.gateways.connection.session.CondaSession`. (#5203) +* Mark `conda_build.conda_interface.hashsum_file` as deprecated. Use `conda.gateways.disk.read.compute_sum` instead. (#5203) +* Mark `conda_build.conda_interface.md5_file` as deprecated. Use `conda.gateways.disk.read.compute_sum(path, 'md5')` instead. (#5203) +* Mark `conda_build.environ.PREFIX_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.LINK_ACTION` as deprecated. (#5203) +* Mark `conda_build.environ.cache_actions` as deprecated. (#5203) +* Mark `conda_build.index.DummyExecutor` as deprecated. (#5203) +* Mark `conda_build.index.MAX_THREADS_DEFAULT` as deprecated. (#5203) +* Mark `conda_build.index.LOCK_TIMEOUT_SECS` as deprecated. (#5203) +* Mark `conda_build.index.LOCKFILE_NAME` as deprecated. (#5203) +* Postpone `conda_build.index.channel_data` deprecation. (#5203) +* Rename `conda_build.environ.create_env('specs_or_actions' -> 'specs_or_precs')`. (#5203) +* Rename `conda_build.environ._execute_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.environ._display_actions('actions' -> 'precs'). (#5203) +* Rename `conda_build.inspect.check_install('platform' -> 'subdir')`. (#5203) +* Rename `conda_build.render.execute_download_actions('actions' -> 'precs')`. (#5203) +* Rename `conda_build.render.get_upstream_pins('actions' -> 'precs')`. (#5203) +* Remove `conda_build.cli.main_render.execute(print_results)`. (#5203) +* Remove `conda_build.conda_interface.Dist`. (#5203) +* Remove `conda_build.conda_interface.display_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_actions`. (#5203) +* Remove `conda_build.conda_interface.execute_plan`. (#5203) +* Remove `conda_build.conda_interface.get_index`. (#5203) +* Remove `conda_build.conda_interface.install_actions`. (#5203) +* Remove `conda_build.conda_interface.linked`. (#5203) +* Remove `conda_build.conda_interface.linked_data`. (#5203) +* Remove `conda_build.conda_interface.package_cache`. (#5203) +* Remove `conda_build.environ.get_install_actions`. Use `conda_build.environ.get_package_records` instead. (#5203) +* Remove `conda_build.index._determine_namespace`. (#5203) +* Remove `conda_build.index._make_seconds`. (#5203) +* Remove `conda_build.index.REPODATA_VERSION`. (#5203) +* Remove `conda_build.index.CHANNELDATA_VERSION`. (#5203) +* Remove `conda_build.index.REPODATA_JSON_FN`. (#5203) +* Remove `conda_build.index.REPODATA_FROM_PKGS_JSON_FN`. (#5203) +* Remove `conda_build.index.CHANNELDATA_FIELDS`. (#5203) +* Remove `conda_build.index._clear_newline_chars`. (#5203) +* Remove `conda_build.index._apply_instructions`. (#5203) +* Remove `conda_build.index._get_jinja2_environment`. (#5203) +* Remove `conda_build.index._maybe_write`. (#5203) +* Remove `conda_build.index._make_build_string`. (#5203) +* Remove `conda_build.index._warn_on_missing_dependencies`. 
(#5203) +* Remove `conda_build.index._cache_post_install_details`. (#5203) +* Remove `conda_build.index._cache_recipe`. (#5203) +* Remove `conda_build.index._cache_run_exports`. (#5203) +* Remove `conda_build.index._cache_icon`. (#5203) +* Remove `conda_build.index._make_subdir_index_html`. (#5203) +* Remove `conda_build.index._make_channeldata_index_html`. (#5203) +* Remove `conda_build.index._get_source_repo_git_info`. (#5203) +* Remove `conda_build.index._cache_info_file`. (#5203) +* Remove `conda_build.index._alternate_file_extension`. (#5203) +* Remove `conda_build.index._get_resolve_object`. (#5203) +* Remove `conda_build.index._get_newest_versions`. (#5203) +* Remove `conda_build.index._add_missing_deps`. (#5203) +* Remove `conda_build.index._add_prev_ver_for_features`. (#5203) +* Remove `conda_build.index._shard_newest_packages`. (#5203) +* Remove `conda_build.index._build_current_repodata`. (#5203) +* Remove `conda_build.index.ChannelIndex`. (#5203) +* Remove `conda_build.inspect.check_install('prepend')`. (#5203) +* Remove `conda_build.inspect.check_install('minimal_hint')`. (#5203) +* Remove `conda_build.noarch_python.ISWIN`. Use `conda_build.utils.on_win` instead. (#5203) +* Remove `conda_build.noarch_python._force_dir`. Use `os.makedirs(exist_ok=True)` instead. (#5203) +* Remove `conda_build.noarch_python._error_exit`. (#5203) +* Remove `conda_build.render.actions_to_pins`. (#5203) +* Remove `conda_build.utils.linked_data_no_multichannels`. (#5203) + +### Docs + +* + +### Other + +* diff --git a/pyproject.toml b/pyproject.toml index 21d787c86d..e8cfc5e011 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires-python = ">=3.8" dependencies = [ "beautifulsoup4", "chardet", - "conda >=22.11", + "conda >=23.5.0", "conda-index >=0.4.0", "conda-package-handling >=1.3", "filelock", @@ -125,9 +125,9 @@ addopts = [ # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) "--cov-append", "--cov-branch", - "--cov-report=term-missing", - "--cov-report=xml", - "--durations=16", + "--cov-report=term", # print summary table to screen + "--cov-report=xml", # for codecov/codecov-action upload + "--durations=16", # show 16 slowest tests "--junitxml=junit.xml", # "--splitting-algorithm=least_duration", # not available yet # "--store-durations", # not available yet diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 9b1ec2f3bc..a9062803cb 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -30,7 +30,7 @@ requirements: run: - beautifulsoup4 - chardet - - conda >=22.11.0 + - conda >=23.5.0 - conda-index >=0.4.0 - conda-package-handling >=1.3 - filelock diff --git a/tests/requirements.txt b/tests/requirements.txt index a4ecdd07a8..5f96c8fd66 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,8 +1,8 @@ beautifulsoup4 chardet -conda >=22.11.0 +conda >=23.5.0 conda-forge::anaconda-client -conda-index +conda-index >=0.4.0 conda-package-handling >=1.3 conda-verify contextlib2 diff --git a/tests/test_source.py b/tests/test_source.py index e32a133b84..711407d153 100644 --- a/tests/test_source.py +++ b/tests/test_source.py @@ -5,9 +5,10 @@ import tarfile import pytest +from conda.gateways.disk.read import compute_sum from conda_build import source -from conda_build.conda_interface import TemporaryDirectory, hashsum_file +from conda_build.conda_interface import TemporaryDirectory from conda_build.source import download_to_cache from conda_build.utils import reset_deduplicator @@ -142,7 +143,7 @@ def test_source_user_expand(): "url": 
os.path.join( prefix, os.path.basename(tmp), "cb-test.tar.bz2" ), - "sha256": hashsum_file(tbz_name, "sha256"), + "sha256": compute_sum(tbz_name, "sha256"), } with TemporaryDirectory() as tmp2: download_to_cache(tmp2, "", source_dict) diff --git a/tests/test_variants.py b/tests/test_variants.py index 89ebb67999..50e9cea4f2 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -429,7 +429,7 @@ def test_build_run_exports_act_on_host(caplog): platform="win", arch="64", ) - assert "failed to get install actions, retrying" not in caplog.text + assert "failed to get package records, retrying" not in caplog.text def test_detect_variables_in_build_and_output_scripts():
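
A note on the checksum migration touched throughout this patch: the helpers dropped from conda_build.conda_interface (hashsum_file, md5_file) are replaced by conda.gateways.disk.read.compute_sum, which is available with the new conda >=23.5.0 floor and takes an explicit algorithm name. A minimal sketch of the equivalence (not part of the patch itself; the helper below is illustrative):

from conda.gateways.disk.read import compute_sum

def file_digests(path: str) -> dict[str, str]:
    # Former helpers: hashsum_file(path, "sha256") and md5_file(path).
    # compute_sum(path, algo) covers both with an explicit algorithm name,
    # which is how file_info() in conda_build.utils now builds its digests.
    return {
        "md5": compute_sum(path, "md5"),
        "sha256": compute_sum(path, "sha256"),
    }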
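
Similarly, the solver plumbing no longer passes around "actions" dictionaries keyed by LINK_ACTION: environ.get_package_records() returns a flat list of PackageRecord objects that render.py consumes directly. A hedged sketch of how a caller adapts; the helper name and the spec string format below are illustrative only, not conda-build API:

from conda.models.records import PackageRecord

def records_to_pins(precs: list[PackageRecord]) -> list[str]:
    # Previously the records to pin were fetched via actions.get(LINK_ACTION, []);
    # with get_package_records() the records come back as a plain list, so
    # callers iterate them directly, as get_env_dependencies() now does.
    return [f"{prec.name} {prec.version} {prec.build}" for prec in precs]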