Skip to content

Commit

Permalink
Merge branch 'main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
tttc3 authored Jun 13, 2024
2 parents e5090af + 45be77d commit 56303cc
Show file tree
Hide file tree
Showing 13 changed files with 132 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ repos:
# auto format Python codes within docstrings
- id: blacken-docs
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.7
rev: v0.4.8
hooks:
# lint & attempt to correct failures (e.g. pyupgrade)
- id: ruff
Expand Down
19 changes: 11 additions & 8 deletions conda_build/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -1753,13 +1753,16 @@ def bundle_conda(
output["script"],
args[0],
)
if "system32" in args[0] and "bash" in args[0]:
print(
"ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n"
" use MSYS2 packages. Add `m2-base` and more (depending on what your"
" script needs) to `requirements/build` instead."
if (
# WSL bash is always the same path, it is an alias to the default
# distribution as configured by the user
on_win and Path("C:\\Windows\\System32\\bash.exe").samefile(args[0])
):
raise CondaBuildUserError(
"WSL bash.exe is not supported. Please use MSYS2 packages. Add "
"`m2-base` and more (depending on what your script needs) to "
"`requirements/build` instead."
)
sys.exit(1)
else:
args = interpreter.split(" ")

Expand Down Expand Up @@ -4073,11 +4076,11 @@ def handle_pypi_upload(wheels, config):
try:
utils.check_call_env(args + [f])
except:
utils.get_logger(__name__).warn(
utils.get_logger(__name__).warning(
"wheel upload failed - is twine installed?"
" Is this package registered?"
)
utils.get_logger(__name__).warn(f"Wheel file left in {f}")
utils.get_logger(__name__).warning(f"Wheel file left in {f}")

else:
print(f"anaconda_upload is not set. Not uploading wheels: {wheels}")
Expand Down
2 changes: 1 addition & 1 deletion conda_build/inspect_pkg.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ def inspect_linkages(
if relative:
precs = list(which_package(relative, prefix))
if len(precs) > 1:
get_logger(__name__).warn(
get_logger(__name__).warning(
"Warning: %s comes from multiple packages: %s",
path,
comma_join(map(str, precs)),
Expand Down
8 changes: 5 additions & 3 deletions conda_build/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def get_selectors(config: Config) -> dict[str, bool]:
if not np:
np = defaults["numpy"]
if config.verbose:
utils.get_logger(__name__).warn(
utils.get_logger(__name__).warning(
"No numpy version specified in conda_build_config.yaml. "
"Falling back to default numpy value of {}".format(defaults["numpy"])
)
Expand Down Expand Up @@ -998,7 +998,9 @@ def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]:
non_conda_outputs.append(output_tuple)
else:
# TODO: is it even possible to get here? and if so should we silently ignore or error?
utils.get_logger(__name__).warn("Found an output without a name, skipping")
utils.get_logger(__name__).warning(
"Found an output without a name, skipping"
)

# Iterate over conda packages, creating a mapping of package names to their
# dependencies to be used in toposort
Expand Down Expand Up @@ -2329,7 +2331,7 @@ def extract_single_output_text(
output = output_matches[output_index] if output_matches else ""
except ValueError:
if not self.path and self.meta.get("extra", {}).get("parent_recipe"):
utils.get_logger(__name__).warn(
utils.get_logger(__name__).warning(
f"Didn't match any output in raw metadata. Target value was: {output_name}"
)
output = ""
Expand Down
4 changes: 4 additions & 0 deletions conda_build/os_utils/liefldd.py
Original file line number Diff line number Diff line change
Expand Up @@ -1174,6 +1174,10 @@ def __call__(self, *args, **kw):
if not data:
break
sha1.update(data)
# update with the file name: if it's a different
# file with the same contents, we don't want
# to treat it as cached
sha1.update(os.path.realpath(arg).encode("utf-8"))
arg = sha1.hexdigest()
if isinstance(arg, list):
newargs.append(tuple(arg))
Expand Down
2 changes: 1 addition & 1 deletion conda_build/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1318,7 +1318,7 @@ def find_recipe(path: str) -> str:

metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))]
if len(metas) == 1:
get_logger(__name__).warn(
get_logger(__name__).warning(
"Multiple meta files found. "
f"The {metas[0]} file in the base directory ({path}) "
"will be used."
Expand Down
19 changes: 19 additions & 0 deletions news/4821-include-file-hash
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
### Enhancements

* <news item>

### Bug fixes

* Include file path in addition to the content when generating the file hash to avoid unwanted caching during linkage analysis. (#4821)

### Deprecations

* <news item>

### Docs

* <news item>

### Other

* <news item>
8 changes: 8 additions & 0 deletions tests/test-recipes/split-packages/_test-file-hash/build.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
echo "int main() {}" > main.c
mkdir -p $PREFIX/bin
$CC main.c -o $PREFIX/bin/_file_hash

echo "int foo() {return 2;}" > foo.c
echo "int foo(); int bar() {return foo()*2;}" > bar.c
$CC -shared foo.c -o libupstream.so
$CC -shared bar.c -o libdownstream.so -L$PWD -lupstream '-Wl,-rpath,$ORIGIN'
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
python:
- 3.10
- 3.11
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
mkdir -p $SP_DIR/_py_file_hash
cp libdownstream.so $SP_DIR/_py_file_hash/
cp libupstream.so $SP_DIR/_py_file_hash/

30 changes: 30 additions & 0 deletions tests/test-recipes/split-packages/_test-file-hash/meta.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package:
name: _file_hash-split
version: 0.0.1

build:
number: 0
skip: True # [not linux64]
error_overlinking: true

requirements:
build:
- {{ compiler('c') }}
host:
run:

outputs:
- name: py-file-hash
script: install-py.sh
requirements:
build:
- {{ compiler('c') }}
host:
- python
run:
- python

- name: _file_hash
requirements:
build:
- {{ compiler('c') }}
24 changes: 24 additions & 0 deletions tests/test_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,16 @@
from typing import TYPE_CHECKING

import pytest
from conda.common.compat import on_win

from conda_build import api, build
from conda_build.exceptions import CondaBuildUserError

from .utils import get_noarch_python_meta, metadata_dir

if TYPE_CHECKING:
from pytest_mock import MockerFixture

from conda_build.metadata import MetaData


Expand Down Expand Up @@ -345,3 +348,24 @@ def test_copy_readme(testing_metadata: MetaData, readme: str):
Path(testing_metadata.config.work_dir, readme).touch()
build.copy_readme(testing_metadata)
assert Path(testing_metadata.config.info_dir, readme).exists()


@pytest.mark.skipif(not on_win, reason="WSL is only on Windows")
def test_wsl_unsupported(
    testing_metadata: MetaData,
    mocker: MockerFixture,
    tmp_path: Path,
):
    """Bundling must fail with CondaBuildUserError when the resolved shell is WSL bash."""
    # Force interpreter resolution to the well-known WSL bash shim path.
    mocker.patch(
        "conda_build.os_utils.external.find_executable",
        return_value="C:\\Windows\\System32\\bash.exe",
    )

    script_path = tmp_path / "install.sh"
    script_path.touch()

    with pytest.raises(CondaBuildUserError):
        build.bundle_conda(
            output={"script": script_path},
            metadata=testing_metadata,
            env={},
            stats={},
        )
22 changes: 21 additions & 1 deletion tests/test_post.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
package_has_file,
)

from .utils import add_mangling, metadata_dir
from .utils import add_mangling, metadata_dir, subpackage_path


@pytest.mark.skipif(
Expand Down Expand Up @@ -156,6 +156,26 @@ def test_menuinst_validation_fails_bad_json(testing_config, caplog, tmp_path):
assert "JSONDecodeError" in captured_text


def test_file_hash(testing_config, caplog, tmp_path):
    """Check that post-link check caching takes the file path into consideration."""
    source_recipe = Path(subpackage_path, "_test-file-hash")
    working_recipe = tmp_path / "test-file-hash"
    shutil.copytree(source_recipe, working_recipe)

    build_variants = {"python": ["3.11", "3.12"]}
    testing_config.ignore_system_config = True
    testing_config.activate = True

    # Build both variants; the second must not reuse cached linkage results
    # for an identical-content file at a different path.
    with caplog.at_level(logging.INFO):
        api.build(
            str(working_recipe),
            config=testing_config,
            notest=True,
            variants=build_variants,
            activate=True,
        )


@pytest.mark.skipif(on_win, reason="rpath fixup not done on Windows.")
def test_rpath_symlink(mocker, testing_config):
if on_linux:
Expand Down

0 comments on commit 56303cc

Please sign in to comment.