Skip to content

Commit

Permalink
Add open_recipe context to simplify recipe handling (#5238)
Browse files Browse the repository at this point in the history
  • Loading branch information
kenodegard authored Mar 25, 2024
1 parent ab137d2 commit 51f81ce
Show file tree
Hide file tree
Showing 3 changed files with 124 additions and 83 deletions.
149 changes: 74 additions & 75 deletions conda_build/render.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
import tarfile
import tempfile
from collections import OrderedDict, defaultdict
from contextlib import contextmanager
from functools import lru_cache
from os.path import (
abspath,
dirname,
isabs,
isdir,
Expand All @@ -23,6 +23,7 @@
normpath,
)
from pathlib import Path
from typing import TYPE_CHECKING

import yaml
from conda.base.context import context
Expand All @@ -49,6 +50,11 @@
list_of_dicts_to_dict_of_lists,
)

if TYPE_CHECKING:
from typing import Iterator

from .config import Config


def odict_representer(dumper, data):
    """Serialize an OrderedDict as a plain YAML mapping, preserving key order."""
    items = data.items()
    return dumper.represent_dict(items)
Expand Down Expand Up @@ -929,90 +935,83 @@ def expand_outputs(metadata_tuples):
return list(expanded_outputs.values())


@contextmanager
def open_recipe(recipe: str | os.PathLike | Path) -> Iterator[Path]:
    """Open the recipe from a file (meta.yaml), directory (recipe), or tarball (package).

    :param recipe: path to a recipe directory, a ``meta.yaml`` file, or a
        ``.tar``/``.tar.gz``/``.tgz``/``.tar.bz2`` archive containing the recipe
    :raises SystemExit: if the path does not exist or is not a recognized recipe type
    :return: yields the directory containing the recipe (a temporary directory,
        removed on exit, when the recipe came from a tarball)
    """
    recipe = Path(recipe)

    if not recipe.exists():
        sys.exit(f"Error: non-existent: {recipe}")
    elif recipe.is_dir():
        # read the recipe from the current directory
        yield recipe
    elif recipe.name.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
        # NOTE: match on the file name, not Path.suffixes — dotted versions in
        # archive names (e.g. pkg-1.2.3.tar.bz2) yield extra pseudo-suffixes
        # ('.2', '.3', ...) that would make an exact suffixes comparison fail.
        # extract the recipe to a temporary directory
        with tempfile.TemporaryDirectory() as tmp, tarfile.open(recipe, "r:*") as tar:
            tar.extractall(path=tmp)
            yield Path(tmp)
    elif recipe.suffix == ".yaml":
        # read the recipe from the parent directory
        yield recipe.parent
    else:
        sys.exit(f"Error: non-recipe: {recipe}")


def render_recipe(
recipe_path,
config,
no_download_source=False,
variants=None,
permit_unsatisfiable_variants=True,
reset_build_id=True,
bypass_env_check=False,
):
recipe_dir: str | os.PathLike | Path,
config: Config,
no_download_source: bool = False,
variants: dict | None = None,
permit_unsatisfiable_variants: bool = True,
reset_build_id: bool = True,
bypass_env_check: bool = False,
) -> list[tuple[MetaData, bool, bool]]:
"""Returns a list of tuples, each consisting of
(metadata-object, needs_download, needs_render_in_env)
You get one tuple per variant. Outputs are not factored in here (subpackages won't affect these
results returned here.)
"""
arg = recipe_path
if isfile(arg):
if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
recipe_dir = tempfile.mkdtemp()
t = tarfile.open(arg, "r:*")
t.extractall(path=recipe_dir)
t.close()
need_cleanup = True
elif arg.endswith(".yaml"):
recipe_dir = dirname(arg)
need_cleanup = False
with open_recipe(recipe_dir) as recipe:
try:
m = MetaData(str(recipe), config=config)
except exceptions.YamlParsingError as e:
sys.exit(e.error_msg())

# important: set build id *before* downloading source. Otherwise source goes into a different
# build folder.
if config.set_build_id:
m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id)

# this source may go into a folder that doesn't match the eventual build folder.
# There's no way around it AFAICT. We must download the source to be able to render
# the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
# folder until rendering is complete, because package names can have variant jinja2 in them.
if m.needs_source_for_render and not m.source_provided:
try_download(m, no_download_source=no_download_source)

if m.final:
if not getattr(m.config, "variants", None):
m.config.ignore_system_variants = True
if isfile(cbc_yaml := join(m.path, "conda_build_config.yaml")):
m.config.variant_config_files = [cbc_yaml]
m.config.variants = get_package_variants(m, variants=variants)
m.config.variant = m.config.variants[0]
return [(m, False, False)]
else:
print("Ignoring non-recipe: %s" % arg)
return None, None
else:
recipe_dir = abspath(arg)
need_cleanup = False
# merge any passed-in variants with any files found
variants = get_package_variants(m, variants=variants)

if not isdir(recipe_dir):
sys.exit("Error: no such directory: %s" % recipe_dir)

try:
m = MetaData(recipe_dir, config=config)
except exceptions.YamlParsingError as e:
sys.stderr.write(e.error_msg())
sys.exit(1)

rendered_metadata = {}

# important: set build id *before* downloading source. Otherwise source goes into a different
# build folder.
if config.set_build_id:
m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id)

# this source may go into a folder that doesn't match the eventual build folder.
# There's no way around it AFAICT. We must download the source to be able to render
# the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
# folder until rendering is complete, because package names can have variant jinja2 in them.
if m.needs_source_for_render and not m.source_provided:
try_download(m, no_download_source=no_download_source)
if m.final:
if not hasattr(m.config, "variants") or not m.config.variant:
m.config.ignore_system_variants = True
if isfile(join(m.path, "conda_build_config.yaml")):
m.config.variant_config_files = [
join(m.path, "conda_build_config.yaml")
]
m.config.variants = get_package_variants(m, variants=variants)
m.config.variant = m.config.variants[0]
rendered_metadata = [
(m, False, False),
]
else:
# merge any passed-in variants with any files found
variants = get_package_variants(m, variants=variants)

# when building, we don't want to fully expand all outputs into metadata, only expand
# whatever variants we have (i.e. expand top-level variants, not output-only variants)
rendered_metadata = distribute_variants(
m,
variants,
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
allow_no_other_outputs=True,
bypass_env_check=bypass_env_check,
)
if need_cleanup:
utils.rm_rf(recipe_dir)
return rendered_metadata
# when building, we don't want to fully expand all outputs into metadata, only expand
# whatever variants we have (i.e. expand top-level variants, not output-only variants)
return distribute_variants(
m,
variants,
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
allow_no_other_outputs=True,
bypass_env_check=bypass_env_check,
)


# Keep this out of the function below so it can be imported by other modules.
Expand Down
19 changes: 19 additions & 0 deletions news/5238-open_recipe
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
### Enhancements

* Add `conda_build.render.open_recipe` context manager to detect the recipe type (file/`meta.yaml`, directory/recipe, or tarball/package) and properly handle any exit/close behavior. (#5238)

### Bug fixes

* <news item>

### Deprecations

* <news item>

### Docs

* <news item>

### Other

* <news item>
39 changes: 31 additions & 8 deletions tests/test_render.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,19 @@

import json
import os
import re
from typing import TYPE_CHECKING
from uuid import uuid4

import pytest

from conda_build import api, render
from conda_build.api import get_output_file_paths
from conda_build.render import (
_simplify_to_exact_constraints,
find_pkg_dir_or_file_in_pkgs_dirs,
get_pin_from_build,
open_recipe,
)
from conda_build.utils import CONDA_PACKAGE_EXTENSION_V1

if TYPE_CHECKING:
Expand All @@ -27,7 +34,7 @@
)
def test_noarch_output(build, testing_metadata):
testing_metadata.meta["build"].update(build)
output = api.get_output_file_paths(testing_metadata)
output = get_output_file_paths(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]


Expand All @@ -36,7 +43,7 @@ def test_reduce_duplicate_specs(testing_metadata):
"build": ["exact", "exact 1.2.3 1", "exact >1.0,<2"],
"host": ["exact", "exact 1.2.3 1"],
}
render._simplify_to_exact_constraints(testing_metadata)
_simplify_to_exact_constraints(testing_metadata)
simplified = testing_metadata.meta["requirements"]

assert simplified["build"] == simplified["host"]
Expand All @@ -47,9 +54,7 @@ def test_reduce_duplicate_specs(testing_metadata):
def test_pin_run_as_build_preserve_string(testing_metadata):
m = testing_metadata
m.config.variant["pin_run_as_build"]["pkg"] = {"max_pin": "x.x"}
dep = render.get_pin_from_build(
m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"}
)
dep = get_pin_from_build(m, "pkg * somestring*", {"pkg": "1.2.3 somestring_h1234"})
assert dep == "pkg >=1.2.3,<1.3.0a0 somestring*"


Expand All @@ -74,7 +79,7 @@ def test_find_package(
"""
Testing our ability to find the package directory or archive.
The render.find_pkg_dir_or_file_in_pkgs_dirs function will scan the various
The find_pkg_dir_or_file_in_pkgs_dirs function will scan the various
locations where packages may exist locally and returns the full package path
if found.
"""
Expand Down Expand Up @@ -105,9 +110,27 @@ def test_find_package(
package = other_cache / distribution

# attempt to find the package and check we found the expected path
found = render.find_pkg_dir_or_file_in_pkgs_dirs(
found = find_pkg_dir_or_file_in_pkgs_dirs(
distribution,
testing_metadata,
files_only=files_only,
)
assert package is found is None or package.samefile(found)


def test_open_recipe(tmp_path: Path):
    """open_recipe must exit for paths that are missing or not recipe-shaped."""
    missing = tmp_path / "missing"
    expected = rf"Error: non-existent: {re.escape(str(missing))}"
    with pytest.raises(SystemExit, match=expected):
        with open_recipe(missing):
            pass

    bogus = tmp_path / "bad.ext"
    bogus.touch()
    expected = rf"Error: non-recipe: {re.escape(str(bogus))}"
    with pytest.raises(SystemExit, match=expected):
        with open_recipe(bogus):
            pass

0 comments on commit 51f81ce

Please sign in to comment.