Merge branch 'main' into deprecate-testing-recipes
beeankha authored Jul 9, 2024
2 parents 5ce12ed + 6d7805c commit 3f1ac30
Showing 24 changed files with 346 additions and 134 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/builds-review.yaml
@@ -48,7 +48,7 @@ jobs:
# Clean checkout of specific git ref needed for package metadata version
# which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.ref }}
clean: true
2 changes: 1 addition & 1 deletion .github/workflows/docs.yml
@@ -22,7 +22,7 @@ jobs:
if: '!github.event.repository.fork'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: Setup
2 changes: 1 addition & 1 deletion .github/workflows/labels.yml
@@ -19,7 +19,7 @@ jobs:
GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml
LOCAL: .github/labels.yml
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- id: has_local
uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 # v3.0.0
with:
2 changes: 1 addition & 1 deletion .github/workflows/project.yml
@@ -13,7 +13,7 @@ jobs:
if: '!github.event.repository.fork'
runs-on: ubuntu-latest
steps:
- uses: actions/add-to-project@9bfe908f2eaa7ba10340b31e314148fcfe6a2458 # v1.0.1
- uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2
with:
# issues are added to the Planning project
# PRs are added to the Review project
54 changes: 36 additions & 18 deletions .github/workflows/tests.yml
@@ -45,7 +45,7 @@ jobs:
code: ${{ steps.filter.outputs.code }}
steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
# dorny/paths-filter needs git clone for non-PR events
# https://github.com/dorny/paths-filter#supported-workflows
if: github.event_name != 'pull_request'
@@ -102,7 +102,7 @@ jobs:

steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0

@@ -150,7 +150,7 @@ jobs:
-m "${{ env.PYTEST_MARKER }}"
- name: Upload Coverage
uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c # v4.4.1
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
with:
flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}

@@ -182,7 +182,7 @@ jobs:

steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0

@@ -229,7 +229,7 @@ jobs:
run: conda list --show-channel-urls

- name: Run Benchmarks
uses: CodSpeedHQ/action@0b631f8998f2389eb5144632b6f9f8fabd33a86e
uses: CodSpeedHQ/action@f11c406b8c87cda176ff341ed4925bc98086f6d1
with:
token: ${{ secrets.CODSPEED_TOKEN }}
run: $CONDA/envs/test/bin/pytest --codspeed
@@ -262,7 +262,7 @@ jobs:

steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0

@@ -317,7 +317,7 @@ jobs:
-m "${{ env.PYTEST_MARKER }}"
- name: Upload Coverage
uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c # v4.4.1
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
with:
flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}

@@ -337,11 +337,10 @@ jobs:
needs: changes
if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true'

# Old macOS needed for old SDK (see xcode step below)
# This is needed for some MACOSX_DEPLOYMENT_TARGET tests
# We could also install SDKs from a external provider in the future
# if we want to update this runner to a non-deprecated version
runs-on: macos-11
# we still need intel macs so we are stuck on macos-13 (not -14 or -latest)
# the issue is that there are recipes that depend on packages
# that do not exist for osx-arm64 - see #5388
runs-on: macos-13
defaults:
run:
# https://github.com/conda-incubator/setup-miniconda#use-a-default-shell
@@ -366,7 +365,7 @@

steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0

@@ -376,7 +375,9 @@
- name: Cache Conda
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9
with:
path: ~/conda_pkgs_dir
path: |
~/conda_pkgs_dir
~/macosx_sdks
key: cache-${{ env.HASH }}

- name: Setup Miniconda
@@ -385,8 +386,23 @@
condarc-file: .github/condarc
run-post: false # skip post cleanup

- name: Xcode Install
run: sudo xcode-select --switch /Applications/Xcode_11.7.app
- name: SDK Download
run: |
echo "MACOSX_SDK_DIR=${HOME}/macosx_sdks" >> "$GITHUB_ENV"
export MACOSX_SDK_DIR=${HOME}/macosx_sdks
echo "MACOSX_SDK_VERSION=10.15" >> "$GITHUB_ENV"
export MACOSX_SDK_VERSION=10.15
echo "MACOSX_SDK_ROOT=${MACOSX_SDK_DIR}/MacOSX${MACOSX_SDK_VERSION}.sdk" >> "$GITHUB_ENV"
export MACOSX_SDK_ROOT=${MACOSX_SDK_DIR}/MacOSX${MACOSX_SDK_VERSION}.sdk
if [ ! -d ${MACOSX_SDK_DIR} ]; then mkdir ${MACOSX_SDK_DIR}; fi
if [ ! -d ${MACOSX_SDK_ROOT} ]; then
url="https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX${MACOSX_SDK_VERSION}.sdk.tar.xz"
curl -L --output MacOSX${MACOSX_SDK_VERSION}.sdk.tar.xz "${url}"
sdk_sha256=ac75d9e0eb619881f5aa6240689fce862dcb8e123f710032b7409ff5f4c3d18b
echo "${sdk_sha256} *MacOSX${MACOSX_SDK_VERSION}.sdk.tar.xz" | shasum -a 256 -c
tar -xf MacOSX${MACOSX_SDK_VERSION}.sdk.tar.xz -C "${MACOSX_SDK_DIR}"
fi
- name: Conda Install
run: >
@@ -415,9 +431,11 @@
--cov=conda_build
-n auto
-m "${{ env.PYTEST_MARKER }}"
env:
CONDA_BUILD_SYSROOT: ${{ env.MACOSX_SDK_ROOT }}

- name: Upload Coverage
uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c # v4.4.1
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
with:
flags: ${{ runner.os }},${{ runner.arch }},${{ matrix.python-version }},${{ matrix.test-type }}

@@ -504,7 +522,7 @@ jobs:
# Clean checkout of specific git ref needed for package metadata version
# which needs env vars GIT_DESCRIBE_TAG and GIT_BUILD_STR:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.ref }}
clean: true
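
The macOS job above now runs on macos-13 and, instead of selecting Xcode 11.7, downloads and caches the MacOSX 10.15 SDK itself, then points CONDA_BUILD_SYSROOT at it for the MACOSX_DEPLOYMENT_TARGET tests. As a rough illustration of what that SDK Download step does, here is a minimal Python sketch of the same download/verify/extract logic (the URL and checksum are copied from the workflow step above; everything else is illustrative and not code from this repository):

import hashlib
import tarfile
import urllib.request
from pathlib import Path

SDK_VERSION = "10.15"
SDK_DIR = Path.home() / "macosx_sdks"
SDK_ROOT = SDK_DIR / f"MacOSX{SDK_VERSION}.sdk"
SDK_URL = (
    "https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/"
    f"MacOSX{SDK_VERSION}.sdk.tar.xz"
)
SDK_SHA256 = "ac75d9e0eb619881f5aa6240689fce862dcb8e123f710032b7409ff5f4c3d18b"

def ensure_sdk() -> Path:
    """Download, verify, and unpack the SDK unless it is already cached."""
    if SDK_ROOT.is_dir():
        return SDK_ROOT  # already present, e.g. restored by actions/cache

    SDK_DIR.mkdir(parents=True, exist_ok=True)
    tarball = SDK_DIR / f"MacOSX{SDK_VERSION}.sdk.tar.xz"
    urllib.request.urlretrieve(SDK_URL, tarball)

    # Refuse to unpack anything that does not match the pinned checksum.
    digest = hashlib.sha256(tarball.read_bytes()).hexdigest()
    if digest != SDK_SHA256:
        raise RuntimeError(f"SDK checksum mismatch: {digest}")

    with tarfile.open(tarball) as tf:
        tf.extractall(SDK_DIR)
    return SDK_ROOT

if __name__ == "__main__":
    # The workflow exports this path as CONDA_BUILD_SYSROOT for the pytest step.
    print(f"CONDA_BUILD_SYSROOT={ensure_sdk()}")
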
2 changes: 1 addition & 1 deletion .github/workflows/upload.yml
@@ -27,7 +27,7 @@ jobs:
ARCHIVE_NAME: ${{ github.event.repository.name }}-${{ github.ref_name }}
steps:
- name: Checkout Source
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

- name: Create Release Directory
run: mkdir -p release
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -53,12 +53,12 @@ repos:
files: \.py$
args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
rev: 1.18.0
hooks:
# auto format Python codes within docstrings
- id: blacken-docs
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.7
rev: v0.5.1
hooks:
# lint & attempt to correct failures (e.g. pyupgrade)
- id: ruff
@@ -87,7 +87,7 @@ repos:
tests/
)
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.28.4
rev: 0.28.6
hooks:
# verify github syntaxes
- id: check-github-workflows
53 changes: 35 additions & 18 deletions conda_build/build.py
@@ -1753,13 +1753,16 @@ def bundle_conda(
output["script"],
args[0],
)
if "system32" in args[0] and "bash" in args[0]:
print(
"ERROR :: WSL bash.exe detected, this will not work (PRs welcome!). Please\n"
" use MSYS2 packages. Add `m2-base` and more (depending on what your"
" script needs) to `requirements/build` instead."
if (
# WSL bash is always the same path, it is an alias to the default
# distribution as configured by the user
on_win and Path("C:\\Windows\\System32\\bash.exe").samefile(args[0])
):
raise CondaBuildUserError(
"WSL bash.exe is not supported. Please use MSYS2 packages. Add "
"`m2-base` and more (depending on what your script needs) to "
"`requirements/build` instead."
)
sys.exit(1)
else:
args = interpreter.split(" ")
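
The rewritten guard above compares the interpreter against the canonical WSL launcher path with Path.samefile() instead of substring-matching "system32" and "bash", so only the real WSL alias is rejected rather than any path that happens to contain both strings. A standalone sketch of the same idea, with an OSError guard because samefile() raises when either path does not exist (illustrative only, not the repository's code):

from pathlib import Path

WSL_BASH = Path("C:\\Windows\\System32\\bash.exe")

def is_wsl_bash(interpreter: str) -> bool:
    """Return True if `interpreter` resolves to the WSL bash launcher."""
    try:
        # samefile() compares the underlying file, so aliases and
        # case or short-path differences are all handled.
        return WSL_BASH.samefile(interpreter)
    except OSError:
        # One of the paths does not exist, e.g. on non-Windows hosts.
        return False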

@@ -1772,6 +1775,7 @@ def bundle_conda(
env_output["RECIPE_DIR"] = metadata.path
env_output["MSYS2_PATH_TYPE"] = "inherit"
env_output["CHERE_INVOKING"] = "1"
_set_env_variables_for_build(metadata, env_output)
for var in utils.ensure_list(metadata.get_value("build/script_env")):
if "=" in var:
val = var.split("=", 1)[1]
@@ -3061,13 +3065,7 @@ def construct_metadata_for_test(recipedir_or_package, config):
return m, hash_input


def write_build_scripts(m, script, build_file):
# TODO: Prepending the prefixes here should probably be guarded by
# if not m.activate_build_script:
# Leaving it as is, for now, since we need a quick, non-disruptive patch release.
with utils.path_prepended(m.config.host_prefix, False):
with utils.path_prepended(m.config.build_prefix, False):
env = environ.get_dict(m=m)
def _set_env_variables_for_build(m, env):
env["CONDA_BUILD_STATE"] = "BUILD"

# hard-code this because we never want pip's build isolation
@@ -3099,6 +3097,17 @@ def write_build_scripts(m, script, build_file):
if "replacements" in env:
del env["replacements"]


def write_build_scripts(m, script, build_file):
# TODO: Prepending the prefixes here should probably be guarded by
# if not m.activate_build_script:
# Leaving it as is, for now, since we need a quick, non-disruptive patch release.
with utils.path_prepended(m.config.host_prefix, False):
with utils.path_prepended(m.config.build_prefix, False):
env = environ.get_dict(m=m)

_set_env_variables_for_build(m, env)

work_file = join(m.config.work_dir, "conda_build.sh")
env_file = join(m.config.work_dir, "build_env_setup.sh")
with open(env_file, "w") as bf:
@@ -3554,7 +3563,7 @@ def test(
return True


def tests_failed(package_or_metadata, move_broken, broken_dir, config):
def tests_failed(
package_or_metadata: str | os.PathLike | Path | MetaData,
move_broken: bool,
broken_dir: str | os.PathLike | Path,
config: Config,
) -> None:
"""
Causes conda to exit if any of the given package's tests failed.
@@ -3582,7 +3596,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config):
_delegated_update_index(
os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1
)
sys.exit("TESTS FAILED: " + os.path.basename(pkg))
raise CondaBuildUserError("TESTS FAILED: " + os.path.basename(pkg))


@deprecated(
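
tests_failed() above (and handle_anaconda_upload() further down) now raises CondaBuildUserError instead of calling sys.exit, which lets callers handle the failure rather than having the interpreter terminated from inside library code. A self-contained illustration of the difference, using a stand-in exception class rather than conda-build's actual one:

import sys

class BuildUserError(Exception):
    """Stand-in for conda-build's user-facing error type (illustrative)."""

def tests_failed_old(pkg: str) -> None:
    # Old style: unconditionally terminates the process for every caller.
    sys.exit(f"TESTS FAILED: {pkg}")

def tests_failed_new(pkg: str) -> None:
    # New style: signal the failure; the caller decides what to do with it.
    raise BuildUserError(f"TESTS FAILED: {pkg}")

def cli_entry_point(pkg: str) -> int:
    try:
        tests_failed_new(pkg)
    except BuildUserError as exc:
        print(exc, file=sys.stderr)  # report once, at the CLI boundary...
        return 1                     # ...and translate into an exit code
    return 0
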
@@ -3978,7 +3992,10 @@ def build_tree(
return list(built_packages.keys())


def handle_anaconda_upload(paths, config):
def handle_anaconda_upload(
paths: Iterable[str | os.PathLike | Path],
config: Config,
) -> None:
from .os_utils.external import find_executable

paths = utils.ensure_list(paths)
@@ -4012,15 +4029,15 @@ def handle_anaconda_upload(paths, config):
"# To have conda build upload to anaconda.org automatically, use\n"
f"# {prompter}conda config --set anaconda_upload yes\n"
)
no_upload_message += f"anaconda upload{joiner}" + joiner.join(paths)
no_upload_message += f"anaconda upload{joiner}" + joiner.join(map(str, paths))

if not upload:
print(no_upload_message)
return

if not anaconda:
print(no_upload_message)
sys.exit(
raise CondaBuildUserError(
"Error: cannot locate anaconda command (required for upload)\n"
"# Try:\n"
f"# {prompter}conda install anaconda-client"
8 changes: 6 additions & 2 deletions conda_build/config.py
@@ -9,6 +9,7 @@
import copy
import math
import os
import pickle
import re
import shutil
import time
@@ -820,9 +821,12 @@ def clean_pkgs(self):

def copy(self) -> Config:
new = copy.copy(self)
new.variant = copy.deepcopy(self.variant)
# Use pickle.loads(pickle.dumps(...)) as a faster copy.deepcopy alternative.
new.variant = pickle.loads(pickle.dumps(self.variant, pickle.HIGHEST_PROTOCOL))
if hasattr(self, "variants"):
new.variants = copy.deepcopy(self.variants)
new.variants = pickle.loads(
pickle.dumps(self.variants, pickle.HIGHEST_PROTOCOL)
)
return new

# context management - automatic cleanup if self.dirty or self.keep_old_work is not True
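
Config.copy() now round-trips the variant data through pickle rather than copy.deepcopy, trading generality for speed. A small self-contained comparison of the two approaches (the variant data below is made up for illustration; timings depend on the machine and the data):

import copy
import pickle
import time

variant = {"python": ["3.10", "3.11", "3.12"], "numpy": ["1.26"]}
variants = [dict(variant, build=i) for i in range(20_000)]

def pickle_copy(obj):
    # Typically much faster than copy.deepcopy for plain containers, but it
    # only works for picklable objects (no open files, locks, lambdas, etc.),
    # which variant dicts are.
    return pickle.loads(pickle.dumps(obj, pickle.HIGHEST_PROTOCOL))

t0 = time.perf_counter()
copy.deepcopy(variants)
t1 = time.perf_counter()
pickle_copy(variants)
t2 = time.perf_counter()
print(f"deepcopy: {t1 - t0:.3f}s   pickle round-trip: {t2 - t1:.3f}s")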
