diff --git a/.binstar.yml b/.binstar.yml index b34b4b7022..16e1960a67 100644 --- a/.binstar.yml +++ b/.binstar.yml @@ -1,31 +1,28 @@ -## The package attribure specifies a binstar package namespace to build the package to. -## This can be specified here or on the command line package: conda-build - -## You can also specify the account to upload to, -## you must be an admin of that account, this -## defaults to your user account -# user: USERNAME +user: conda-team #=============================================================================== # Build Matrix Options -# Thes options may be a single item, a list or empty -# The resulting number of builds is [platform * engine * env] +# These options may be a single item, a list or empty +# The resulting number of builds is [platform * engine * envvars] #=============================================================================== ## The platforms to build on. ## platform defaults to linux-64 platform: - - win-64 + - linux-32 + - linux-64 + - osx-64 - win-32 + - win-64 ## The engine are the inital conda packages you want to run with engine: - - python=2 - python=3 -## The env param is an environment variable list -# env: -# - MY_ENV=A CC=gcc -# - MY_ENV=B +## The envvars param is an environment variable list +env: + - PYTHON_VERSION=2.7 + - PYTHON_VERSION=3.4 + - PYTHON_VERSION=3.5 #=============================================================================== # Script options @@ -34,39 +31,18 @@ engine: #=============================================================================== ## Run before the script -# before_script: -# - echo "before_script!" +before_script: + - python -c "from platform import system as s; import sys; sys.exit(int(s()!='Windows'))" || export SOURCE_DIR=$SOURCE_DIR ## Put your main computations here! - -install: - # Use the provided conda and Python to run the install script. The order of these commands matters. - - conda install requests jinja2 - - set "CONDA_DEFAULT_ENV=" - - python tests\install_miniconda.py - -test: - - cd tests\test-recipes\metadata - # This will be effectively a no-op for recipes without bld.bat - - for /D %%f in (*) do (C:\Users\binstar\conda-build-miniconda\Scripts\conda-build.exe --no-binstar-upload %%~nf) - -# script: -# - echo "This is my binstar build!" - -## This will run after the script regardless of the result of script -## BINSTAR_BUILD_RESULT=[succcess|failure] -# after_script: -# - echo "The build was a $BINSTAR_BUILD_RESULT" | tee artifact1.txt -## This will be run only after a successfull build -# after_success: -# - echo "after_success!" -## This will be run only after a build failure -# after_failure: -# - echo "after_failure!" 
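A note on the per-OS guards used in the script section below: each `python -c` one-liner exits 0 on one platform and 1 on the other, so the shell's `||` runs the command that follows only on the intended OS. A minimal standalone sketch of the idiom, using only the standard library:

    # Per-OS guard sketch: exits 0 (success) on Windows, 1 elsewhere.
    # In `guard || cmd`, the shell runs cmd only when the guard "fails",
    # i.e. on the OS the guard does not match. Swapping != for == below
    # inverts the guard, which is how the two script lines split the work.
    from platform import system
    import sys
    sys.exit(int(system() != 'Windows'))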
+script: + # - echo 4.1.0.rc1 > conda/.version + - python -c "from platform import system as s; import sys; sys.exit(int(s()!='Windows'))" || conda build conda.recipe --python $PYTHON_VERSION + - python -c "from platform import system as s; import sys; sys.exit(int(s()=='Windows'))" || conda build conda.recipe --python %PYTHON_VERSION% #=============================================================================== # Build Results # Build results are split into two categories: artifacts and targets -# You may omit either key and still have a successfull build +# You may omit either key and still have a successful build # They may be a string, list and contain any bash glob #=============================================================================== @@ -75,5 +51,5 @@ test: ## The special build targets 'conda' and 'pypi' may be used to ## upload conda builds ## e.g. conda is an alias for /opt/anaconda/conda-bld/<platform>/*.tar.bz2 -# build_targets: -# - conda +build_targets: + - conda diff --git a/.gitignore b/.gitignore index 75df15407a..fa8b814181 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,6 @@ dist/ docs/build tags .idea/ +MANIFEST +ve +venv diff --git a/.travis.yml b/.travis.yml index db1cdb9373..3a5c95c3e6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,6 @@ language: python python: # We don't actually use the system Python but this keeps it organized. - "2.7" - - "3.3" - "3.4" - "3.5" env: @@ -10,26 +9,25 @@ env: install: # TODO: Use a "latest" url - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then - wget http://repo.continuum.io/miniconda/Miniconda-3.5.2-Linux-x86_64.sh -O miniconda.sh; - elif [[ "$TRAVIS_PYTHON_VERSION" == "3.3" ]]; then - wget http://repo.continuum.io/miniconda/Miniconda3-3.0.0-Linux-x86_64.sh -O miniconda.sh; + wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; else - wget http://repo.continuum.io/miniconda/Miniconda3-3.5.2-Linux-x86_64.sh -O miniconda.sh; + wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; fi - bash miniconda.sh -b -p $HOME/miniconda - export PATH="$HOME/miniconda/bin:$PATH" - hash -r - conda config --set always_yes yes - - conda install --force --no-deps conda requests - - conda install pytest requests jinja2 patchelf pyflakes python=$TRAVIS_PYTHON_VERSION + - conda update -q --all + - conda install -q --force --no-deps conda requests + - conda install -q pip pytest requests jinja2 patchelf flake8 python=$TRAVIS_PYTHON_VERSION pyflakes=1.1 + - conda install -q anaconda-client conda-build + - pip install pytest-cov - python setup.py install - conda info -a script: - - pyflakes conda_build tests bin/* bdist_conda.py setup.py example_packages - - py.test tests - - tests/test-recipes/build_recipes.sh - - tests/test-skeleton/test-skeleton.sh + - flake8 . + - py.test --cov conda_build --cov-report xml tests - conda build --help notifications: @@ -39,3 +37,7 @@ git: depth: 10000 sudo: false + +after_success: + - pip install codecov + - codecov --env TRAVIS_PYTHON_VERSION diff --git a/CHANGELOG.txt b/CHANGELOG.txt index 60eb8fb458..2126d236e5 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,4 +1,88 @@ -2016-XX-XX 1.19.1: +2016-05-13 1.20.3: +------------------ +Features: + * use posix metapackage for cran skeleton packaging (#956) + +Bug fixes: + * fix output of package paths (extra output was breaking tools). Add tests.
(#950) + * change default of no_download_source in build.py (for compatibility with conda-build-all) (#950) + * fix regression in [] being confused for selectors (#957) + +2016-05-13 1.20.2: +------------------ +Features: + * added --token and --user flags to pass corresponding information to anaconda upload (#921) + * added conda render command that outputs a fully-rendered meta.yaml to either stdout, or to file (with --file) (#908) + * support source checkout tools specified in meta.yaml. If source checkout fails at the rendering phase, source checkout and rendering are re-done after the build environment is created. (#843, #946) + * fn is now optional when a URL specifies a filename. (#942) + * CRAN skeleton generator now uses MSYS2 for Windows support (#942) + * conda build & conda render both recursively look for meta.yaml (support conda-forge feedstock submodules) (#908) + * Whitelist MAKEFLAGS environment variable. Setting this outside conda build should take effect in your build. Parallelize on *nix by adding -j here, instead of -j${CPU_COUNT} in your build.sh. This helps on CIs, where CPU_COUNT is not always well-behaved. (#917) + * Run python_d executable on Windows when debug feature is active (#724) + * add conda build flag --keep-old-work that temporarily moves your last build, then moves it back after completion. For debugging, when more than one package is involved. (#833) + * Allow selectors in imported jinja templates (#739) + +Bug fixes: + * fixed several instances wherein --skip-existing did not work (#897, #945) + * Fully render recipe before outputting build string; fixes empty spots where GIT_* info should have been (#923) + * Add MSYS2 path conversion filters to avoid issues with Win 7.1 SDK (#900) + * Address PyPI's change of URL format (#922, + * Fix invalid gcc "-m 32" flag (#916) + * Fix empty section (due to selectors) handling regression (#919) + * Fix regression in handling of VS2008 Pro (not Express + VC for Python 2.7). It is important to at least try to run vcvarsall.bat. (#913) + * Fix CPAN skeleton generator (handle missing sections better) (#912) + * Make test/requires versions match build/requires without additional pinning (#907) + * Remove hard-coded CYGWIN path from conda-build's custom PATH (#903) + * Source is downloaded before testing, fixing an issue where if build machine and some other test machine had different source, strange things happened. (#946) + * Fix regression with Python 3.x fixing shebangs (#892) + * Fix conda inspect crashes by using conda-meta info rather than filenames or dist names for package info (#947) + +Miscellany: + * restore AppVeyor testing for Windows builds (#864) + * Build py3.5 on AppVeyor (#938) + * PEP8 cleanup; use flake8 rather than pyflakes (#938) + * limited scope of project locking to avoid lock conflicts between build and rendering (#923) + * set up anaconda.org build infrastructure (#924) + * on Windows, environment variables are written to the temporary bld.bat in the source work folder.
(#933) + + +2016-04-21 1.20.1: +-------------------- + * fix source/path and GIT_* issues, #801 + * fix invalid assertion, #855 + * environ.py refactor/cleanup, #856 + * Better messaging for yaml parsing errors, #862 + * fix typo, #863 + * make CONDA_PY and CONDA_NPY available in build.sh, #837 + * execute source fetchers (e.g., git, hg) in the _build environment, #843 + * use memory map rather than read() to reduce memory usage, #866 + * fix svn url on Windows in checkout tool test, #867 + * fix empty files bug, #869 + * improve Visual Studio logic, #861 + * add files in order of increasing size to improve access times to tar, #870 + * VS_YEAR, VS_VERSION, VS_MAJOR and CMAKE_GENERATOR environment variables, #872 + + +2016-03-25 1.20.0: +-------------------- + * support for Lua as a built-in language (Alex Wiltschko), #719 + * allow additional keys in "about" section, #831 + * fix Examples directory in noarch_python, #838 + * revert OS X SIP fix, part of #808, #844 + * fixed race condition between removal and creation of tmp_dir on win, #847 + + +2016-03-10 1.19.2: +-------------------- + * silence some errors when subprocessing git #790 + * fixes conda skeleton cran under python3 #817 + * fixes some bugs introduced with the #808 otools refactor, #821, #825 + * fixes #818 conda-build 1.19.1 breaks C compilation, #825 + * actually fix #807 recursive builds after conda 4.0 release, #826 + * fixes #820 crash when building from a git repo on Windows, #824 + + +2016-03-09 1.19.1: -------------------- * Environment variables defined in the 'script_env' build section of the meta.yaml file were previously assigned the value '' @@ -6,6 +90,10 @@ warning is raised instead, #763. * fix printing of NumPy 1.10 in help message, #776 * add -m32 to CFLAGS and CXXFLAGS for multilib gcc, #775 + * fixes CYGWIN_PREFIX for drive letters other than C:, #788 + * fixes for noarch package building on Windows, #799 + * work-arounds for System Integrity Protection on OS X El Capitan, #808 + * fix recursive builds after conda 4.0 release, #813 2016-01-29 1.19.0: diff --git a/README.rst b/README.rst index 868131f588..38386ae35e 100644 --- a/README.rst +++ b/README.rst @@ -5,6 +5,19 @@ conda-build .. image:: https://travis-ci.org/conda/conda-build.svg?branch=master :target: https://travis-ci.org/conda/conda-build +.. image:: https://ci.appveyor.com/api/projects/status/wdis0rkcfdoeql3x?svg=true + :target: https://ci.appveyor.com/project/ContinuumAnalytics/conda-build + +.. image:: https://anaconda.org/conda-team/conda-build/badges/build.svg + :target: https://anaconda.org/conda-team/conda-build/ + +.. image:: https://www.quantifiedcode.com/api/v1/project/1960a96404aa431bab5d834edff1cf85/badge.svg + :target: https://www.quantifiedcode.com/app/project/1960a96404aa431bab5d834edff1cf85 + :alt: Code issues + +..
image:: https://codecov.io/gh/conda/conda-build/branch/master/graph/badge.svg + :target: https://codecov.io/gh/conda/conda-build + Installation -------------- diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000000..a5ef291591 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,89 @@ +environment: + global: + # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the + # /E:ON and /V:ON options are not enabled in the batch script interpreter + # See: http://stackoverflow.com/a/13751649/163740 + CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\tools\\appveyor\\run_with_env.cmd" + + matrix: + - PYTHON: "C:\\Python35_64" + PYTHON_VERSION: "3.5" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python27_64" + PYTHON_VERSION: "2.7" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python34_64" + PYTHON_VERSION: "3.4" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python27_32" + PYTHON_VERSION: "2.7" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python35_32" + PYTHON_VERSION: "3.5" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python34_32" + PYTHON_VERSION: "3.4" + PYTHON_ARCH: "32" + +init: + - ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH% %HOME% + + +install: + # If there is a newer build queued for the same PR, cancel this one. + # The AppVeyor 'rollout builds' option is supposed to serve the same + # purpose but it is problematic because it tends to cancel builds pushed + # directly to master instead of just PR builds (or the converse). + # credits: JuliaLang developers. + - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` throw "There are newer queued builds for this pull request, failing early." } + # these correspond to folder naming of miniconda installs on appveyor. See + # https://www.appveyor.com/docs/installed-software#python + - if "%PYTHON_VERSION%" == "3.4" set "BASE_PYTHON_VERSION=3" + - if "%PYTHON_VERSION%" == "3.5" set "BASE_PYTHON_VERSION=35" + - if "%PYTHON_ARCH%" == "64" set "ARCH_LABEL=-x64" + # These are already installed on appveyor. Update them.
+ - set "CONDA_ROOT=C:\Miniconda%BASE_PYTHON_VERSION%%ARCH_LABEL%" + - set "PATH=%CONDA_ROOT%;%CONDA_ROOT%\Scripts;%CONDA_ROOT%\Library\bin;%PATH%" + - conda config --set always_yes yes + - conda update -q conda + - git clone https://github.com/conda/conda + - cd conda + - git checkout 4.0.8 + - python setup.py install + - cd ../ + - conda info + - conda update -q --all + - python -c "import sys; print(sys.version)" + - python -c "import sys; print(sys.executable)" + - python -c "import sys; print(sys.prefix)" + - conda install -q pytest pytest-cov git anaconda-client + # this is to ensure dependencies + - conda install -q conda-build + - python --version + - python -c "import struct; print(struct.calcsize('P') * 8)" + - python setup.py install + - set PATH + - conda build --version + - call appveyor\setup_x64.bat + - copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\amd64\vcvarsamd64.bat" + + +# Not a .NET project, we build package in the install step instead +build: false + +test_script: + - set "PATH=%CONDA_ROOT%;%CONDA_ROOT%\Scripts;%CONDA_ROOT%\Library\bin;%PATH%" + - set PATH + - py.test --cov conda_build --cov-report xml tests + +on_success: + - pip install codecov + - codecov --env PYTHON_VERSION diff --git a/appveyor/setup_x64.bat b/appveyor/setup_x64.bat new file mode 100755 index 0000000000..4786b3c207 --- /dev/null +++ b/appveyor/setup_x64.bat @@ -0,0 +1,13 @@ +regedit /s x64\VC_OBJECTS_PLATFORM_INFO.reg + +regedit /s x64\600dd186-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\600dd187-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\600dd188-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\600dd189-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\656d875f-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\656d8760-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\656d8763-2429-11d7-8bf6-00b0d03daa06.reg +regedit /s x64\656d8766-2429-11d7-8bf6-00b0d03daa06.reg + +copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.config" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.Express.config" +copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.config" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.Express.config" \ No newline at end of file diff --git a/appveyor/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..ff97081e5b Binary files /dev/null and b/appveyor/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..6f218a5707 Binary files /dev/null and b/appveyor/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..8dd2bebd55 Binary files /dev/null and b/appveyor/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..26403d883e Binary files /dev/null and b/appveyor/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg 
b/appveyor/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..4e196d46f3 Binary files /dev/null and b/appveyor/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..d39caed6c0 Binary files /dev/null and b/appveyor/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..76ec9de540 Binary files /dev/null and b/appveyor/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg b/appveyor/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg new file mode 100755 index 0000000000..d945da4f4b Binary files /dev/null and b/appveyor/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg differ diff --git a/appveyor/x64/VC_OBJECTS_PLATFORM_INFO.reg b/appveyor/x64/VC_OBJECTS_PLATFORM_INFO.reg new file mode 100755 index 0000000000..b8282bb937 Binary files /dev/null and b/appveyor/x64/VC_OBJECTS_PLATFORM_INFO.reg differ diff --git a/bdist_conda.py b/bdist_conda.py index 54cd61cbc7..2ffed3aa25 100644 --- a/bdist_conda.py +++ b/bdist_conda.py @@ -16,12 +16,13 @@ import conda.config from conda.cli.common import spec_from_line from conda_build.metadata import MetaData -from conda_build import build, pypi +from conda_build import build, pypi, render from conda_build.config import config from conda_build.main_build import handle_binstar_upload # TODO: Add support for all the options that conda build has + class CondaDistribution(Distribution): """ Distribution subclass that supports bdist_conda options @@ -91,7 +92,7 @@ class CondaDistribution(Distribution): 'conda_preserve_egg_dir': None, 'conda_features': None, 'conda_track_features': None, - } + } def __init__(self, attrs=None): given_attrs = {} @@ -111,6 +112,7 @@ def __init__(self, attrs=None): for attr in self.conda_attrs: setattr(self.metadata, attr, given_attrs.get(attr, self.conda_attrs[attr])) + class bdist_conda(install): description = "create a conda package" @@ -201,7 +203,8 @@ def run(self): c.readfp(StringIO(newstr)) except Exception as err: # This seems to be the best error here - raise DistutilsGetoptError("ERROR: entry-points not understood: " + str(err) + "\nThe string was" + newstr) + raise DistutilsGetoptError("ERROR: entry-points not understood: " + + str(err) + "\nThe string was" + newstr) else: for section in config.sections(): if section in ['console_scripts', 'gui_scripts']: @@ -213,7 +216,8 @@ def run(self): entry_points[section] = None if not isinstance(entry_points, dict): - raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" + entry_points) + raise DistutilsGetoptError("ERROR: Could not add entry points. 
They were:\n" + + entry_points) else: rs = entry_points.get('scripts', []) cs = entry_points.get('console_scripts', []) @@ -229,21 +233,22 @@ def run(self): if len(cs + gs) != 0: d['build']['entry_points'] = entry_list if metadata.conda_command_tests is True: - d['test']['commands'] = list(map(unicode, pypi.make_entry_tests(entry_list))) + d['test']['commands'] = list(map(unicode, + pypi.make_entry_tests(entry_list))) if 'setuptools' in d['requirements']['run']: d['build']['preserve_egg_dir'] = True if metadata.conda_import_tests: if metadata.conda_import_tests is True: - d['test']['imports'] = ((self.distribution.packages or []) - + (self.distribution.py_modules or [])) + d['test']['imports'] = ((self.distribution.packages or []) + + (self.distribution.py_modules or [])) else: d['test']['imports'] = metadata.conda_import_tests if (metadata.conda_command_tests and not - isinstance(metadata.conda_command_tests, - bool)): + isinstance(metadata.conda_command_tests, + bool)): d['test']['commands'] = list(map(unicode, metadata.conda_command_tests)) d = dict(d) @@ -262,13 +267,13 @@ def run(self): if self.binstar_upload: class args: binstar_upload = self.binstar_upload - handle_binstar_upload(build.bldpkg_path(m), args) + handle_binstar_upload(render.bldpkg_path(m), args) else: no_upload_message = """\ # If you want to upload this package to anaconda.org later, type: # # $ anaconda upload %s -""" % build.bldpkg_path(m) +""" % render.bldpkg_path(m) print(no_upload_message) @@ -283,6 +288,6 @@ class args: setup() function. The command line flag overrides the option to setup().''')), (str('anaconda-upload'), None, ("""Upload the finished package to anaconda.org""")), - ]) +]) bdist_conda.boolean_options.extend([str('anaconda-upload')]) diff --git a/bin/conda-render b/bin/conda-render new file mode 100644 index 0000000000..c813e1c321 --- /dev/null +++ b/bin/conda-render @@ -0,0 +1,5 @@ +#!/usr/bin/env python +import sys +from conda_build.main_render import main + +sys.exit(main()) diff --git a/conda_build.recipe/bld.bat b/conda.recipe/bld.bat similarity index 100% rename from conda_build.recipe/bld.bat rename to conda.recipe/bld.bat diff --git a/conda_build.recipe/build.sh b/conda.recipe/build.sh similarity index 100% rename from conda_build.recipe/build.sh rename to conda.recipe/build.sh diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml new file mode 100644 index 0000000000..49ad5d5cb3 --- /dev/null +++ b/conda.recipe/meta.yaml @@ -0,0 +1,35 @@ +package: + name: conda-build + version: {{ GIT_DESCRIBE_TAG }} + +source: + git_url: https://github.com/conda/conda-build + +build: + number: 0 + {% if GIT_DESCRIBE_NUMBER|int > 0 %} + string: py{{ PY_VER }}_git_{{ GIT_BUILD_STR }} + {% endif %} + +requirements: + build: + - python + run: + - python + - psutil + - conda + - jinja2 + - patchelf [linux] + +test: + requires: + - pytest + - pytest-cov + commands: + - conda-build -h + imports: + - conda_build + +about: + home: https://github.com/conda/conda-build + license: BSD diff --git a/conda.recipe/run_test.py.backup b/conda.recipe/run_test.py.backup new file mode 100644 index 0000000000..a7eb3d23b0 --- /dev/null +++ b/conda.recipe/run_test.py.backup @@ -0,0 +1,7 @@ +import os + +import pytest +import conda_build + +print('conda_build.__version__: %s' % conda_build.__version__) +pytest.main([os.path.join(os.getenv("SOURCE_DIR"), 'tests')]) diff --git a/conda_build.recipe/meta.yaml b/conda_build.recipe/meta.yaml deleted file mode 100644 index 3644326b23..0000000000 --- 
a/conda_build.recipe/meta.yaml +++ /dev/null @@ -1,33 +0,0 @@ -package: - name: conda-build - version: 1.10alpha.0 - -source: - git_url: ../ - -build: -{% if 'CONDA_RELEASE' in environ %} - number: {{ environ.get('CONDA_BUILD_NUMBER', 0) }} -{% else %} - number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} - string: py{{ environ.get('PY_VER').replace('.', '') }}_{{ environ.get('GIT_BUILD_STR', 'GIT_STUB') }} -{% endif %} - -requirements: - build: - - python - run: - - python - - conda - - jinja2 - - patchelf [linux] - -test: - commands: - - conda-build -h - imports: - - conda_build - -about: - home: https://github.com/conda/conda-build - license: BSD diff --git a/conda_build.recipe/run_test.py b/conda_build.recipe/run_test.py deleted file mode 100644 index 62baa807eb..0000000000 --- a/conda_build.recipe/run_test.py +++ /dev/null @@ -1,3 +0,0 @@ -import conda_build - -print('conda_build.__version__: %s' % conda_build.__version__) diff --git a/conda_build/_version.py b/conda_build/_version.py index 543ebd3218..6bfc617046 100644 --- a/conda_build/_version.py +++ b/conda_build/_version.py @@ -114,7 +114,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose=False): # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: - print("discarding '%s', no digits" % ",".join(refs-tags)) + print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): @@ -144,13 +144,13 @@ def git_parse_vcs_describe(git_describe, tag_prefix, verbose=False): # now we have TAG-NUM-gHEX or HEX if "-" not in git_describe: # just HEX - return "0+untagged.g"+git_describe+dirty_suffix, dirty + return "0+untagged.g" + git_describe + dirty_suffix, dirty # just TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
- return "0+unparseable"+dirty_suffix, dirty + return "0+unparseable" + dirty_suffix, dirty # tag full_tag = mo.group(1) diff --git a/conda_build/build.py b/conda_build/build.py index e54f8d34a6..e4ddae7f34 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -10,9 +10,12 @@ import stat import subprocess import sys +import time import tarfile import fnmatch +import tempfile from os.path import exists, isdir, isfile, islink, join +import mmap import conda.config as cc import conda.plan as plan @@ -20,11 +23,14 @@ from conda.compat import PY3 from conda.fetch import fetch_index from conda.install import prefix_placeholder, linked, move_to_trash +from conda.lock import Locked from conda.utils import url_path from conda.resolve import Resolve, MatchSpec, NoPackagesFound +from conda_build import __version__ from conda_build import environ, source, tarcheck from conda_build.config import config +from conda_build.render import parse_or_try_download, output_yaml, bldpkg_path from conda_build.scripts import create_entry_points, prepend_bin_path from conda_build.post import (post_process, post_build, fix_permissions, get_build_metadata) @@ -33,6 +39,7 @@ from conda_build.create_test import (create_files, create_shell_files, create_py_files, create_pl_files) from conda_build.exceptions import indent +from conda_build.features import feature_list on_win = (sys.platform == 'win32') @@ -91,9 +98,13 @@ def have_prefix_files(files): ''' prefix = config.build_prefix prefix_bytes = prefix.encode('utf-8') - alt_prefix = prefix.replace('\\', '/') - alt_prefix_bytes = alt_prefix.encode('utf-8') prefix_placeholder_bytes = prefix_placeholder.encode('utf-8') + if on_win: + forward_slash_prefix = prefix.replace('\\', '/') + forward_slash_prefix_bytes = forward_slash_prefix.encode('utf-8') + double_backslash_prefix = prefix.replace('\\', '\\\\') + double_backslash_prefix_bytes = double_backslash_prefix.encode('utf-8') + for f in files: if f.endswith(('.pyc', '.pyo', '.a')): continue @@ -104,34 +115,46 @@ def have_prefix_files(files): # OSX does not allow hard-linking symbolic links, so we cannot # skip symbolic links (as we can on Linux) continue - with open(path, 'rb') as fi: - data = fi.read() - mode = 'binary' if b'\x00' in data else 'text' + + # dont try to mmap an empty file + if os.stat(path).st_size == 0: + continue + + fi = open(path, 'rb+') + mm = mmap.mmap(fi.fileno(), 0) + + mode = 'binary' if mm.find(b'\x00') != -1 else 'text' if mode == 'text': - if not (sys.platform == 'win32' and alt_prefix_bytes in data): + if sys.platform != 'win32' and mm.find(prefix_bytes) != -1: # Use the placeholder for maximal backwards compatibility, and # to minimize the occurrences of usernames appearing in built # packages. 
- data = rewrite_file_with_new_prefix(path, data, prefix_bytes, prefix_placeholder_bytes) - - if prefix_bytes in data: + rewrite_file_with_new_prefix(path, mm[:], prefix_bytes, prefix_placeholder_bytes) + mm.close() and fi.close() + fi = open(path, 'rb+') + mm = mmap.mmap(fi.fileno(), 0) + if mm.find(prefix_bytes) != -1: yield (prefix, mode, f) - if (sys.platform == 'win32') and (alt_prefix_bytes in data): + if on_win and mm.find(forward_slash_prefix_bytes) != -1: # some windows libraries use unix-style path separators - yield (alt_prefix, mode, f) - if prefix_placeholder_bytes in data: + yield (forward_slash_prefix, mode, f) + elif on_win and mm.find(double_backslash_prefix_bytes) != -1: + # some windows libraries have double backslashes as escaping + yield (double_backslash_prefix, mode, f) + if mm.find(prefix_placeholder_bytes) != -1: yield (prefix_placeholder, mode, f) + mm.close() and fi.close() def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix): # Old and new prefix should be bytes - data = data.replace(old_prefix, new_prefix) st = os.stat(path) + data = data.replace(old_prefix, new_prefix) # Save as with open(path, 'wb') as fo: fo.write(data) - os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w + os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w return data @@ -170,6 +193,16 @@ def create_info_files(m, files, include_recipe=True): else: shutil.copy(src_path, dst_path) + # store the rendered meta.yaml file, plus information about where it came from + # and what version of conda-build created it + metayaml = output_yaml(m) + with open(join(recipe_dir, "meta.yaml.rendered"), 'w') as f: + f.write("# This file created by conda-build {}\n".format(__version__)) + f.write("# meta.yaml template originally from:\n") + f.write("# " + source.get_repository_info(m.path) + "\n") + f.write("# ------------------------------------------------\n\n") + f.write(metayaml) + license_file = m.get_value('about/license_file') if license_file: shutil.copyfile(join(source.get_dir(), license_file), @@ -183,8 +216,7 @@ def create_info_files(m, files, include_recipe=True): dst = join(config.info_dir, readme) shutil.copyfile(src, dst) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: - print("WARNING: anaconda.org only recognizes about/readme as README.md and README.rst", - file=sys.stderr) + print("WARNING: anaconda.org only recognizes about/readme as README.md and README.rst", file=sys.stderr) # noqa info_index = m.info_index() pin_depends = m.get_value('build/pin_depends') @@ -210,11 +242,20 @@ def create_info_files(m, files, include_recipe=True): with open(join(config.info_dir, 'index.json'), **mode_dict) as fo: json.dump(info_index, fo, indent=2, sort_keys=True) + with open(join(config.info_dir, 'about.json'), 'w') as fo: + d = {} + for key in ('home', 'dev_url', 'doc_url', 'license_url', + 'license', 'summary', 'description', 'license_family'): + value = m.get_value('about/%s' % key) + if value: + d[key] = value + json.dump(d, fo, indent=2, sort_keys=True) + if sys.platform == 'win32': # make sure we use '/' path separators in metadata - files = [f.replace('\\', '/') for f in files] + files = [_f.replace('\\', '/') for _f in files] - with open(join(config.info_dir, 'files'), 'w') as fo: + with open(join(config.info_dir, 'files'), **mode_dict) as fo: if m.get_value('build/noarch_python'): fo.write('\n') else: @@ -224,6 +265,17 @@ def create_info_files(m, files, include_recipe=True): files_with_prefix = 
sorted(have_prefix_files(files)) binary_has_prefix_files = m.binary_has_prefix_files() text_has_prefix_files = m.has_prefix_files() + + ignore_files = m.ignore_prefix_files() + if ignore_files: + # do we have a list of files, or just ignore everything? + if hasattr(ignore_files, "__iter__"): + files_with_prefix = [f for f in files_with_prefix if f[2] not in ignore_files] + binary_has_prefix_files = [f for f in binary_has_prefix_files if f[2] not in ignore_files] # noqa + text_has_prefix_files = [f for f in text_has_prefix_files if f[2] not in ignore_files] + else: + files_with_prefix = [] + if files_with_prefix and not m.get_value('build/noarch_python'): auto_detect = m.get_value('build/detect_binary_files_with_prefix') if sys.platform == 'win32': @@ -278,6 +330,7 @@ def create_info_files(m, files, include_recipe=True): shutil.copyfile(join(m.path, m.get_value('app/icon')), join(config.info_dir, 'icon.png')) + def get_build_index(clear_cache=True): if clear_cache: # remove the cache such that a refetch is made, @@ -286,14 +339,21 @@ def get_build_index(clear_cache=True): return get_index(channel_urls=[url_path(config.croot)] + list(channel_urls), prepend=not override_channels) + def create_env(prefix, specs, clear_cache=True): ''' Create a conda envrionment for the given prefix and specs. ''' - if not isdir(config.bldpkgs_dir): - os.makedirs(config.bldpkgs_dir) - update_index(config.bldpkgs_dir) - if specs: # Don't waste time if there is nothing to do + specs = list(specs) + for feature, value in feature_list: + if value: + specs.append('%s@' % feature) + + for d in config.bldpkgs_dirs: + if not isdir(d): + os.makedirs(d) + update_index(d) + if specs: # Don't waste time if there is nothing to do index = get_build_index(clear_cache=True) warn_on_old_conda_build(index) @@ -306,6 +366,7 @@ def create_env(prefix, specs, clear_cache=True): if not isdir(prefix): os.makedirs(prefix) + def warn_on_old_conda_build(index): root_linked = linked(cc.root_dir) vers_inst = [dist.rsplit('-', 2)[1] for dist in root_linked @@ -317,7 +378,7 @@ def warn_on_old_conda_build(index): try: pkgs = sorted(r.get_pkgs(MatchSpec('conda-build'))) except NoPackagesFound: - print("WARNING: Could not find any versions of conda-build in the channels", file=sys.stderr) + print("WARNING: Could not find any versions of conda-build in the channels", file=sys.stderr) # noqa return if pkgs[-1].version != vers_inst[0]: print(""" @@ -339,26 +400,23 @@ def rm_pkgs_cache(dist): 'RM_EXTRACTED %s' % dist] plan.execute_plan(rmplan) -def bldpkg_path(m): - ''' - Returns path to built package's tarball given its ``Metadata``. - ''' - return join(config.bldpkgs_dir, '%s.tar.bz2' % m.dist()) -def build(m, get_src=True, post=None, include_recipe=True): +def build(m, post=None, include_recipe=True, keep_old_work=False, + need_source_download=True, verbose=True, dirty=False): ''' Build the package with the specified metadata. :param m: Package metadata :type m: Metadata - :param get_src: Should we download the source? - :type get_src: bool :type post: bool or None. None means run the whole build. True means run post only. False means stop just before the post. + :type keep_old_work: bool: Keep any previous work directory. 
+ :type need_source_download: bool: if rendering failed to download source + (due to missing tools), retry here after build env is populated ''' - if (m.get_value('build/detect_binary_files_with_prefix') - or m.binary_has_prefix_files()): + if (m.get_value('build/detect_binary_files_with_prefix') or + m.binary_has_prefix_files()) and not on_win: # We must use a long prefix here as the package will only be # installable into prefixes shorter than this one. config.use_long_build_prefix = True @@ -371,138 +429,189 @@ def build(m, get_src=True, post=None, include_recipe=True): "configuration." % m.dist()) return - if post in [False, None]: - print("Removing old build environment") - if on_win: - if isdir(config.short_build_prefix): - move_to_trash(config.short_build_prefix, '') - if isdir(config.long_build_prefix): - move_to_trash(config.long_build_prefix, '') - else: - rm_rf(config.short_build_prefix) - rm_rf(config.long_build_prefix) - print("Removing old work directory") - if on_win: - if isdir(source.WORK_DIR): - move_to_trash(source.WORK_DIR, '') - else: - rm_rf(source.WORK_DIR) - - # Display the name only - # Version number could be missing due to dependency on source info. - print("BUILD START:", m.dist()) - create_env(config.build_prefix, - [ms.spec for ms in m.ms_depends('build')]) - - if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]: - print("%s is installed as a build dependency. Removing." % - m.name()) - index = get_build_index(clear_cache=False) - actions = plan.remove_actions(config.build_prefix, [m.name()], index=index) - assert not plan.nothing_to_do(actions), actions - plan.display_actions(actions, index) - plan.execute_actions(actions, index) - - if get_src: - source.provide(m.path, m.get_section('source')) - - # Parse our metadata again because we did not initialize the source - # information before. - # By now, all jinja variables should be defined, so don't permit undefined vars. - m.parse_again(permit_undefined_jinja=False) - - print("Package:", m.dist()) - - assert isdir(source.WORK_DIR) - src_dir = source.get_dir() - contents = os.listdir(src_dir) - if contents: - print("source tree in:", src_dir) - else: - print("no source") - - rm_rf(config.info_dir) - files1 = prefix_files() - for pat in m.always_include_files(): - has_matches = False - for f in set(files1): - if fnmatch.fnmatch(f, pat): - print("Including in package existing file", f) - files1.discard(f) - has_matches = True - if not has_matches: - sys.exit("Error: Glob %s from always_include_files does not match any files" % pat) - # Save this for later - with open(join(config.croot, 'prefix_files.txt'), 'w') as f: - f.write(u'\n'.join(sorted(list(files1)))) - f.write(u'\n') - - if sys.platform == 'win32': - import conda_build.windows as windows - windows.build(m) - else: - env = environ.get_dict(m) - build_file = join(m.path, 'build.sh') - + with Locked(cc.root_dir): + + # If --keep-old-work, then move the contents of source.WORK_DIR to a + # temporary directory for the duration of the build. 
+ # The source unpacking procedure is too varied and complex + # to allow this to be written cleanly (see source.get_dir() for example) + if keep_old_work: + old_WORK_DIR = tempfile.mkdtemp() + old_sub_dirs = [name for name in os.listdir(source.WORK_DIR) + if os.path.isdir(os.path.join(source.WORK_DIR, name))] + if len(old_sub_dirs): + print("Keeping old work directory backup: %s => %s" + % (old_sub_dirs, old_WORK_DIR)) + for old_sub in old_sub_dirs: + shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR) + + if post in [False, None]: + print("Removing old build environment") + print("BUILD START:", m.dist()) + if on_win: + if isdir(config.short_build_prefix): + move_to_trash(config.short_build_prefix, '') + if isdir(config.long_build_prefix): + move_to_trash(config.long_build_prefix, '') + else: + rm_rf(config.short_build_prefix) + rm_rf(config.long_build_prefix) + + # Display the name only + # Version number could be missing due to dependency on source info. + create_env(config.build_prefix, + [ms.spec for ms in m.ms_depends('build')]) + + if need_source_download: + # Execute any commands fetching the source (e.g., git) in the _build environment. + # This makes it possible to provide source fetchers (eg. git, hg, svn) as build + # dependencies. + _old_path = os.environ['PATH'] + try: + os.environ['PATH'] = prepend_bin_path({'PATH': _old_path}, + config.build_prefix)['PATH'] + m, need_source_download = parse_or_try_download(m, + no_download_source=False, + force_download=True, + verbose=verbose, + dirty=dirty) + assert not need_source_download, "Source download failed. Please investigate." + finally: + os.environ['PATH'] = _old_path + + if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]: + print("%s is installed as a build dependency. Removing." % + m.name()) + index = get_build_index(clear_cache=False) + actions = plan.remove_actions(config.build_prefix, [m.name()], index=index) + assert not plan.nothing_to_do(actions), actions + plan.display_actions(actions, index) + plan.execute_actions(actions, index) + + print("Package:", m.dist()) + + assert isdir(source.WORK_DIR) + src_dir = source.get_dir() + contents = os.listdir(src_dir) + if contents: + print("source tree in:", src_dir) + else: + print("no source") + + rm_rf(config.info_dir) + files1 = prefix_files() + for pat in m.always_include_files(): + has_matches = False + for f in set(files1): + if fnmatch.fnmatch(f, pat): + print("Including in package existing file", f) + files1.discard(f) + has_matches = True + if not has_matches: + sys.exit("Error: Glob %s from always_include_files does not match any files" % + pat) + # Save this for later + with open(join(config.croot, 'prefix_files.txt'), 'w') as f: + f.write(u'\n'.join(sorted(list(files1)))) + f.write(u'\n') + + # Use script from recipe? 
script = m.get_value('build/script', None) if script: if isinstance(script, list): script = '\n'.join(script) - build_file = join(source.get_dir(), 'conda_build.sh') - with open(build_file, 'w') as bf: - bf.write(script) - os.chmod(build_file, 0o766) - - if isfile(build_file): - cmd = [shell_path, '-x', '-e', build_file] - _check_call(cmd, env=env, cwd=src_dir) - - if post in [True, None]: - if post == True: - with open(join(config.croot, 'prefix_files.txt'), 'r') as f: - files1 = set(f.read().splitlines()) - - get_build_metadata(m) - create_post_scripts(m) - create_entry_points(m.get_value('build/entry_points')) - assert not exists(config.info_dir) - files2 = prefix_files() - - post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir'))) - - # The post processing may have deleted some files (like easy-install.pth) - files2 = prefix_files() - if any(config.meta_dir in join(config.build_prefix, f) for f in - files2 - files1): - sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error -usually comes from using conda in the build script. Avoid doing this, as it -can lead to packages that include their dependencies.""" % - (tuple(f for f in files2 - files1 if config.meta_dir in - join(config.build_prefix, f)),))) - post_build(m, sorted(files2 - files1)) - create_info_files(m, sorted(files2 - files1), - include_recipe=bool(m.path) and include_recipe) - if m.get_value('build/noarch_python'): - import conda_build.noarch_python as noarch_python - noarch_python.transform(m, sorted(files2 - files1)) - - files3 = prefix_files() - fix_permissions(files3 - files1) - - path = bldpkg_path(m) - t = tarfile.open(path, 'w:bz2') - for f in sorted(files3 - files1): - t.add(join(config.build_prefix, f), f) - t.close() - - print("BUILD END:", m.dist()) - - # we're done building, perform some checks - tarcheck.check_all(path) - update_index(config.bldpkgs_dir) - else: - print("STOPPING BUILD BEFORE POST:", m.dist()) + if sys.platform == 'win32': + build_file = join(m.path, 'bld.bat') + if script: + build_file = join(source.get_dir(), 'bld.bat') + with open(join(source.get_dir(), 'bld.bat'), 'w') as bf: + bf.write(script) + import conda_build.windows as windows + windows.build(m, build_file, dirty=dirty) + else: + env = environ.get_dict(m, dirty=dirty) + build_file = join(m.path, 'build.sh') + + if script: + build_file = join(source.get_dir(), 'conda_build.sh') + with open(build_file, 'w') as bf: + bf.write(script) + os.chmod(build_file, 0o766) + + if isfile(build_file): + cmd = [shell_path, '-x', '-e', build_file] + + _check_call(cmd, env=env, cwd=src_dir) + + if post in [True, None]: + if post: + with open(join(config.croot, 'prefix_files.txt'), 'r') as f: + files1 = set(f.read().splitlines()) + + get_build_metadata(m) + create_post_scripts(m) + create_entry_points(m.get_value('build/entry_points')) + assert not exists(config.info_dir) + files2 = prefix_files() + + post_process(sorted(files2 - files1), + preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir'))) + + # The post processing may have deleted some files (like easy-install.pth) + files2 = prefix_files() + if any(config.meta_dir in join(config.build_prefix, f) for f in + files2 - files1): + sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory. This error + usually comes from using conda in the build script. 
Avoid doing this, as it + can lead to packages that include their dependencies.""" % + (tuple(f for f in files2 - files1 if config.meta_dir in + join(config.build_prefix, f)),))) + post_build(m, sorted(files2 - files1)) + create_info_files(m, sorted(files2 - files1), + include_recipe=bool(m.path) and include_recipe) + if m.get_value('build/noarch_python'): + import conda_build.noarch_python as noarch_python + noarch_python.transform(m, sorted(files2 - files1)) + + files3 = prefix_files() + fix_permissions(files3 - files1) + + path = bldpkg_path(m) + t = tarfile.open(path, 'w:bz2') + + def order(f): + # we don't care about empty files so send them back via 100000 + fsize = os.stat(join(config.build_prefix, f)).st_size or 100000 + # info/* records will be False == 0, others will be 1. + info_order = int(os.path.dirname(f) != 'info') + return info_order, fsize + + # add files in order of a) in info directory, b) increasing size so + # we can access small manifest or json files without decompressing + # possible large binary or data files + for f in sorted(files3 - files1, key=order): + t.add(join(config.build_prefix, f), f) + t.close() + + print("BUILD END:", m.dist()) + + # we're done building, perform some checks + tarcheck.check_all(path) + update_index(config.bldpkgs_dir) + else: + print("STOPPING BUILD BEFORE POST:", m.dist()) + + if keep_old_work and len(old_sub_dirs): + print("Restoring old work directory backup: %s :: %s => %s" + % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR)) + for old_sub in old_sub_dirs: + if os.path.exists(os.path.join(source.WORK_DIR, old_sub)): + print("Not restoring old source directory %s over new build's version" % + (old_sub)) + else: + shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR) + shutil.rmtree(old_WORK_DIR, ignore_errors=True) def test(m, move_broken=True): @@ -512,100 +621,121 @@ def test(m, move_broken=True): :param m: Package's metadata. 
:type m: Metadata ''' - # remove from package cache - rm_pkgs_cache(m.dist()) - - tmp_dir = join(config.croot, 'test-tmp_dir') - rm_rf(tmp_dir) - os.makedirs(tmp_dir) - create_files(tmp_dir, m) - # Make Perl or Python-specific test files - if m.name().startswith('perl-'): - pl_files = create_pl_files(tmp_dir, m) - py_files = False - else: - py_files = create_py_files(tmp_dir, m) - pl_files = False - shell_files = create_shell_files(tmp_dir, m) - if not (py_files or shell_files or pl_files): - print("Nothing to test for:", m.dist()) - return - print("TEST START:", m.dist()) - if on_win: - if isdir(config.build_prefix): - move_to_trash(config.build_prefix, '') - if isdir(config.test_prefix): - move_to_trash(config.test_prefix, '') - else: - rm_rf(config.build_prefix) - rm_rf(config.test_prefix) + with Locked(cc.root_dir): - get_build_metadata(m) - specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())] + # remove from package cache + rm_pkgs_cache(m.dist()) - # add packages listed in test/requires - specs += m.get_value('test/requires', []) + tmp_dir = join(config.croot, 'test-tmp_dir') + rm_rf(tmp_dir) + if on_win: + time.sleep(1) # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir + os.makedirs(tmp_dir) + create_files(tmp_dir, m) + # Make Perl or Python-specific test files + if m.name().startswith('perl-'): + pl_files = create_pl_files(tmp_dir, m) + py_files = False + lua_files = False + else: + py_files = create_py_files(tmp_dir, m) + pl_files = False + lua_files = False + shell_files = create_shell_files(tmp_dir, m) + if not (py_files or shell_files or pl_files or lua_files): + print("Nothing to test for:", m.dist()) + return + + print("TEST START:", m.dist()) + if on_win: + if isdir(config.build_prefix): + move_to_trash(config.build_prefix, '') + if isdir(config.test_prefix): + move_to_trash(config.test_prefix, '') + else: + rm_rf(config.build_prefix) + rm_rf(config.test_prefix) - if py_files: - # as the tests are run by python, ensure that python is installed. - # (If they already provided python as a run or test requirement, this won't hurt anything.) - specs += ['python %s*' % environ.get_py_ver()] - if pl_files: - # as the tests are run by perl, we need to specify it - specs += ['perl %s*' % environ.get_perl_ver()] + get_build_metadata(m) + specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())] - create_env(config.test_prefix, specs) + # add packages listed in the run environment and test/requires + specs.extend(ms.spec for ms in m.ms_depends('run')) + specs += m.get_value('test/requires', []) - env = dict(os.environ) - env.update(environ.get_dict(m, prefix=config.test_prefix)) + if py_files: + # as the tests are run by python, ensure that python is installed. + # (If they already provided python as a run or test requirement, + # this won't hurt anything.) 
+ specs += ['python %s*' % environ.get_py_ver()] + if pl_files: + # as the tests are run by perl, we need to specify it + specs += ['perl %s*' % environ.get_perl_ver()] + if lua_files: + # not sure how this shakes out + specs += ['lua %s*' % environ.get_lua_ver()] - # prepend bin (or Scripts) directory - env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True) + create_env(config.test_prefix, specs) + + env = dict(os.environ) + env.update(environ.get_dict(m, prefix=config.test_prefix)) + + # prepend bin (or Scripts) directory + env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True) - if sys.platform == 'win32': - env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] - for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL': - env[varname] = str(getattr(config, varname) or '') - env['PREFIX'] = config.test_prefix - - # Python 2 Windows requires that envs variables be string, not unicode - env = {str(key): str(value) for key, value in env.items()} - if py_files: - try: - subprocess.check_call([config.test_python, '-s', - join(tmp_dir, 'run_test.py')], - env=env, cwd=tmp_dir) - except subprocess.CalledProcessError: - tests_failed(m, move_broken=move_broken) - - if pl_files: - try: - subprocess.check_call([config.test_perl, - join(tmp_dir, 'run_test.pl')], - env=env, cwd=tmp_dir) - except subprocess.CalledProcessError: - tests_failed(m, move_broken=move_broken) - - if shell_files: if sys.platform == 'win32': - test_file = join(tmp_dir, 'run_test.bat') - cmd = [os.environ['COMSPEC'], '/c', 'call', test_file] + env['PATH'] = config.test_prefix + os.pathsep + env['PATH'] + for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA': + env[varname] = str(getattr(config, varname) or '') + env['PREFIX'] = config.test_prefix + + # Python 2 Windows requires that envs variables be string, not unicode + env = {str(key): str(value) for key, value in env.items()} + if py_files: try: - subprocess.check_call(cmd, env=env, cwd=tmp_dir) + subprocess.check_call([config.test_python, '-s', + join(tmp_dir, 'run_test.py')], + env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m, move_broken=move_broken) - else: - test_file = join(tmp_dir, 'run_test.sh') - # TODO: Run the test/commands here instead of in run_test.py - cmd = [shell_path, '-x', '-e', test_file] + + if pl_files: try: - subprocess.check_call(cmd, env=env, cwd=tmp_dir) + subprocess.check_call([config.test_perl, + join(tmp_dir, 'run_test.pl')], + env=env, cwd=tmp_dir) except subprocess.CalledProcessError: tests_failed(m, move_broken=move_broken) + if lua_files: + try: + subprocess.check_call([config.test_lua, + join(tmp_dir, 'run_test.lua')], + env=env, cwd=tmp_dir) + except subprocess.CalledProcessError: + tests_failed(m) + + if shell_files: + if sys.platform == 'win32': + test_file = join(tmp_dir, 'run_test.bat') + cmd = [os.environ['COMSPEC'], '/c', 'call', test_file] + try: + subprocess.check_call(cmd, env=env, cwd=tmp_dir) + except subprocess.CalledProcessError: + tests_failed(m, move_broken=move_broken) + else: + test_file = join(tmp_dir, 'run_test.sh') + # TODO: Run the test/commands here instead of in run_test.py + cmd = [shell_path, '-x', '-e', test_file] + try: + subprocess.check_call(cmd, env=env, cwd=tmp_dir) + except subprocess.CalledProcessError: + tests_failed(m, move_broken=move_broken) + print("TEST END:", m.dist()) + def tests_failed(m, move_broken): ''' Causes conda to exit if any of the given package's tests failed. 
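The `order` key function introduced in build.py above decides archive ordering: info/* metadata records first, then everything else by increasing size, so small manifest and JSON files can be read without decompressing large binaries. A standalone sketch of the same idea; the `add_ordered` helper and its arguments are hypothetical, for illustration only:

    import os
    import tarfile
    from os.path import dirname, join

    def add_ordered(tar_path, prefix, files):
        # Hypothetical helper mirroring the ordering used in build.py:
        # info/* records sort as 0 (first), all other files as 1, and
        # empty files are demoted by treating their size as 100000.
        def order(f):
            fsize = os.stat(join(prefix, f)).st_size or 100000
            info_order = int(dirname(f) != 'info')
            return info_order, fsize
        with tarfile.open(tar_path, 'w:bz2') as t:
            for f in sorted(files, key=order):
                t.add(join(prefix, f), f)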
diff --git a/conda_build/completers.py b/conda_build/completers.py new file mode 100644 index 0000000000..af333809ba --- /dev/null +++ b/conda_build/completers.py @@ -0,0 +1,54 @@ +import os +from os.path import isdir, isfile, join +from conda.cli.common import Completer + + +all_versions = { + 'python': [26, 27, 33, 34, 35], + 'numpy': [16, 17, 18, 19, 110], + 'perl': None, + 'R': None, + 'lua': ["2.0", "5.1", "5.2", "5.3"] +} + +conda_version = { + 'python': 'CONDA_PY', + 'numpy': 'CONDA_NPY', + 'perl': 'CONDA_PERL', + 'R': 'CONDA_R', + 'lua': 'CONDA_LUA', +} + + +class RecipeCompleter(Completer): + def _get_items(self): + completions = [] + for path in os.listdir('.'): + if isdir(path) and isfile(join(path, 'meta.yaml')): + completions.append(path) + if isfile('meta.yaml'): + completions.append('.') + return completions + +# These don't represent all supported versions. It's just for tab completion. + + +class PythonVersionCompleter(Completer): + def _get_items(self): + return ['all'] + [str(i / 10) for i in all_versions['python']] + + +class NumPyVersionCompleter(Completer): + def _get_items(self): + versions = [str(i) for i in all_versions['numpy']] + return ['all'] + ['%s.%s' % (ver[0], ver[1:]) for ver in versions] + + +class RVersionsCompleter(Completer): + def _get_items(self): + return ['3.1.2', '3.1.3', '3.2.0', '3.2.1', '3.2.2'] + + +class LuaVersionsCompleter(Completer): + def _get_items(self): + return ['all'] + [i for i in all_versions['lua']] diff --git a/conda_build/config.py b/conda_build/config.py index 3014b5a852..68c0e21168 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -13,12 +13,14 @@ # conda_build.config.config.build_prefix, as that won't reflect any mutated # changes. + class Config(object): __file__ = __path__ = __file__ __package__ = __package__ __doc__ = __doc__ CONDA_PERL = os.getenv('CONDA_PERL', '5.18.2') + CONDA_LUA = os.getenv('CONDA_LUA', '5.2') CONDA_PY = int(os.getenv('CONDA_PY', cc.default_python.replace('.', '')).replace('.', '')) CONDA_NPY = os.getenv("CONDA_NPY") @@ -56,7 +58,7 @@ def get_conda_py(self): croot = abspath(expanduser('~/conda-bld')) short_build_prefix = join(cc.envs_dirs[0], '_build') - long_build_prefix = max(short_build_prefix, (short_build_prefix + 8 * '_placehold')[:80]) + long_build_prefix = max(short_build_prefix, (short_build_prefix + 25 * '_placehold')[:255]) # XXX: Make this None to be more rigorous about requiring the build_prefix # to be known before it is used. 
use_long_build_prefix = False @@ -64,7 +66,13 @@ def get_conda_py(self): def _get_python(self, prefix): if sys.platform == 'win32': - res = join(prefix, 'python.exe') + import conda.install + packages = conda.install.linked(prefix) + packages_names = (pkg.split('-')[0] for pkg in packages) + if 'debug' in packages_names: + res = join(prefix, 'python_d.exe') + else: + res = join(prefix, 'python.exe') else: res = join(prefix, 'bin/python') return res @@ -76,6 +84,14 @@ def _get_perl(self, prefix): res = join(prefix, 'bin/perl') return res + def _get_lua(self, prefix): + binary_name = "luajit" if "2" == self.CONDA_LUA[0] else "lua" + if sys.platform == 'win32': + res = join(prefix, '{}.exe'.format(binary_name)) + else: + res = join(prefix, 'bin/{}'.format(binary_name)) + return res + @property def build_prefix(self): if self.use_long_build_prefix is None: @@ -100,6 +116,14 @@ def build_perl(self): def test_perl(self): return self._get_perl(self.test_prefix) + @property + def build_lua(self): + return self._get_lua(self.build_prefix) + + @property + def test_lua(self): + return self._get_lua(self.test_prefix) + @property def info_dir(self): return join(self.build_prefix, 'info') @@ -114,11 +138,17 @@ def broken_dir(self): @property def bldpkgs_dir(self): + """ Dir where the package is saved. """ if self.noarch: return join(self.croot, "noarch") else: return join(self.croot, cc.subdir) + @property + def bldpkgs_dirs(self): + """ Dirs where previous build packages might be. """ + return join(self.croot, cc.subdir), join(self.croot, "noarch") + config = Config() croot = config.croot diff --git a/conda_build/convert.py b/conda_build/convert.py index bb720bd23c..4858a3e9bd 100644 --- a/conda_build/convert.py +++ b/conda_build/convert.py @@ -34,6 +34,7 @@ r'(lib/python\d\.\d|Lib)' r'/(site-packages|lib-dynload)/(\S+?)(\.cpython-\d\dm)?\.(so|pyd)') + def has_cext(t, show=False): matched = False for m in t.getmembers(): @@ -47,6 +48,7 @@ def has_cext(t, show=False): return True return matched + def has_nonpy_entry_points(t, unix_to_win=True, show=False, quiet=False): """ If unix_to_win=True, assumes a Unix type package (i.e., entry points @@ -158,7 +160,7 @@ def tar_update(source, dest, file_map, verbose=True, quiet=False): path_mapping_bat_proxy = [ (re.compile(r'bin/(.*)(\.py)'), r'Scripts/\1.bat'), (re.compile(r'bin/(.*)'), r'Scripts/\1.bat'), - ] +] path_mapping_unix_windows = [ (r'lib/python{pyver}/', r'Lib/'), @@ -167,15 +169,16 @@ def tar_update(source, dest, file_map, verbose=True, quiet=False): # which seems unlikely (r'bin/(.*)(\.py)', r'Scripts/\1-script.py'), (r'bin/(.*)', r'Scripts/\1-script.py'), - ] +] path_mapping_windows_unix = [ (r'Lib/', r'lib/python{pyver}/'), - (r'Scripts/', r'bin/'), # Not supported right now anyway - ] + (r'Scripts/', r'bin/'), # Not supported right now anyway +] pyver_re = re.compile(r'python\s+(\d.\d)') + def get_pure_py_file_map(t, platform): info = json.loads(t.extractfile('info/index.json').read().decode('utf-8')) source_plat = info['platform'] diff --git a/conda_build/convert_gohlke.py b/conda_build/convert_gohlke.py index d5321765cf..9cfefceca6 100644 --- a/conda_build/convert_gohlke.py +++ b/conda_build/convert_gohlke.py @@ -33,7 +33,7 @@ def info_from_fn(fn): m = fn_pat.match(fn) if m is None: - return + return py_ver = m.group(4) return { "name": m.group(1).lower(), @@ -85,8 +85,8 @@ def convert(path, repo_dir='.', add_depends=None, verbose=False): fn1 = basename(path) info = info_from_fn(fn1) if info is None: - print("WARNING: Invalid .exe filename 
'%s', skipping" % fn1) - return + print("WARNING: Invalid .exe filename '%s', skipping" % fn1) + return fn2 = '%(name)s-%(version)s-%(build)s.tar.bz2' % info subdir = subdir_map[info['arch']] if verbose: diff --git a/conda_build/cpan.py b/conda_build/cpan.py index b1d0db7c2b..6b031bedd4 100644 --- a/conda_build/cpan.py +++ b/conda_build/cpan.py @@ -27,7 +27,7 @@ name: {packagename} version: "{version}" -source: +{source_comment}source: {useurl}fn: {filename} {useurl}url: {cpanurl} {usemd5}md5: {md5} @@ -47,7 +47,7 @@ run: - perl{run_depends} -test: +{import_comment}test: # Perl 'use' tests {import_comment}imports:{import_tests} @@ -192,6 +192,7 @@ def main(args, parser): 'test_commands': '', 'usemd5': '', 'useurl': '', + 'source_comment': '', 'summary': "''", 'import_tests': ''}) @@ -217,6 +218,7 @@ def main(args, parser): else: d['useurl'] = '#' d['usemd5'] = '#' + d['source_comment'] = '#' d['cpanurl'] = '' d['filename'] = '' d['md5'] = '' @@ -241,6 +243,7 @@ def main(args, parser): LooseVersion(args.version))): d['useurl'] = '#' d['usemd5'] = '#' + d['source_comment'] = '#' empty_recipe = True # Add dependencies to d if not in core, or newer than what's in core else: @@ -449,6 +452,7 @@ def deps_for_package(package, release_data, perl_version, args, output_dir, return build_deps, run_deps, packages_to_append + @memoized def dist_for_module(cpan_url, module, perl_version): ''' diff --git a/conda_build/cran.py b/conda_build/cran.py index b57bdae829..0abaa51e01 100644 --- a/conda_build/cran.py +++ b/conda_build/cran.py @@ -20,6 +20,7 @@ from os.path import join, exists, isfile, basename, isdir from itertools import chain import subprocess +from difflib import get_close_matches from conda.install import rm_rf from conda import compat @@ -27,6 +28,9 @@ from conda_build import source, metadata CRAN_META = """\ +{{% set posix = 'm2-' if win else '' %}} +{{% set native = 'm2w64-' if win else '' %}} + package: name: {packagename} # Note that conda versions cannot contain -, so any -'s in the version have @@ -77,6 +81,7 @@ {home_comment}home:{homeurl} license: {license} {summary_comment}summary:{summary} + license_family: {license_family} # The original CRAN metadata for this package was: @@ -188,6 +193,7 @@ r'?(\s*\[(?P[\s!\w\-]+)\])?\s*$' ) + def dict_from_cran_lines(lines): d = {} for line in lines: @@ -203,6 +209,7 @@ def dict_from_cran_lines(lines): d['orig_lines'] = lines return d + def remove_package_line_continuations(chunk): """ >>> chunk = [ @@ -223,7 +230,7 @@ def remove_package_line_continuations(chunk): 'Imports: MASS, R.methodsS3 (>= 1.5.2), R.oo (>= 1.15.8), R.utils (>= 1.27.1), matrixStats (>= 0.8.12), R.filesets (>= 2.3.0), sampleSelection, scatterplot3d, strucchange, systemfit, rgl,' 'License: GPL (>= 2)', 'NeedsCompilation: no'] - """ + """ # NOQA continuation = (' ', '\t') continued_ix = None continued_line = None @@ -241,7 +248,7 @@ def remove_package_line_continuations(chunk): chunk[i] = None else: accumulating_continuations = True - continued_ix = i-1 + continued_ix = i - 1 continued_line = chunk[continued_ix] + line had_continuation = True chunk[i] = None @@ -255,12 +262,13 @@ def remove_package_line_continuations(chunk): if had_continuation: # Remove the None(s). - chunk = [ c for c in chunk if c ] + chunk = [c for c in chunk if c] chunk.append('') return chunk + def yaml_quote_string(string): """ Quote a string for use in YAML. 
@@ -273,12 +281,14 @@ def yaml_quote_string(string): """ return yaml.dump(string, Dumper=SafeDumper).replace('\n...\n', '').replace('\n', '\n ') + def clear_trailing_whitespace(string): lines = [] for line in string.splitlines(): lines.append(line.rstrip()) return '\n'.join(lines) + def get_package_metadata(cran_url, package, session): url = cran_url + 'web/packages/' + package + '/DESCRIPTION' r = session.get(url) @@ -293,8 +303,10 @@ def get_package_metadata(cran_url, package, session): d['orig_description'] = DESCRIPTION return d + def get_latest_git_tag(): - p = subprocess.Popen(['git', 'tag'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=source.WORK_DIR) + p = subprocess.Popen(['git', 'describe', '--abbrev=0', '--tags'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=source.WORK_DIR) stdout, stderr = p.communicate() stdout = stdout.decode('utf-8') stderr = stderr.decode('utf-8') @@ -307,6 +319,7 @@ def get_latest_git_tag(): print("Using tag %s" % tags[-1]) return tags[-1] + def get_session(output_dir, verbose=True, cache=[]): if cache: return cache[0] @@ -325,6 +338,7 @@ def get_session(output_dir, verbose=True, cache=[]): cache.append(session) return session + def get_cran_metadata(cran_url, output_dir, verbose=True): session = get_session(output_dir, verbose=verbose) if verbose: @@ -332,10 +346,12 @@ def get_cran_metadata(cran_url, output_dir, verbose=True): r = session.get(cran_url + "src/contrib/PACKAGES") r.raise_for_status() PACKAGES = r.text - package_list = [remove_package_line_continuations(i.splitlines()) for i in PACKAGES.split('\n\n')] + package_list = [remove_package_line_continuations(i.splitlines()) + for i in PACKAGES.split('\n\n')] return {d['Package'].lower(): d for d in map(dict_from_cran_lines, package_list)} + def main(args, parser): if len(args.packages) > 1 and args.version_compare: parser.error("--version-compare only works with one package at a time") @@ -351,8 +367,7 @@ def main(args, parser): if args.update_outdated: args.packages = get_outdated(output_dir, cran_metadata, args.packages) for pkg in args.packages: - rm_rf(join(args.output_dir, 'r-' + pkg)) - + rm_rf(join(args.output_dir[0], 'r-' + pkg)) while args.packages: package = args.packages.pop() @@ -360,17 +375,18 @@ def main(args, parser): is_github_url = 'github.com' in package url = package - if is_github_url: rm_rf(source.WORK_DIR) source.git_source({'git_url': package}, '.') git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag() - p = subprocess.Popen(['git', 'checkout', git_tag], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=source.WORK_DIR) + p = subprocess.Popen(['git', 'checkout', git_tag], stdout=subprocess.PIPE, + stderr=subprocess.PIPE, cwd=source.WORK_DIR) stdout, stderr = p.communicate() stdout = stdout.decode('utf-8') stderr = stderr.decode('utf-8') if p.returncode: - sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" % (git_tag, stderr.strip())) + sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" 
% + (git_tag, stderr.strip())) if stdout: print(stdout, file=sys.stdout) if stderr: @@ -385,12 +401,15 @@ def main(args, parser): elif isfile(sub_description_name): DESCRIPTION = sub_description_name else: - sys.exit("%s does not appear to be a valid R package (no DESCRIPTION file)" % package) + sys.exit("%s does not appear to be a valid R package " + "(no DESCRIPTION file in %s, %s)" + % (package, sub_description_pkg, sub_description_name)) with open(DESCRIPTION) as f: description_text = clear_trailing_whitespace(f.read()) - d = dict_from_cran_lines(remove_package_line_continuations(description_text.splitlines())) + d = dict_from_cran_lines(remove_package_line_continuations( + description_text.splitlines())) d['orig_description'] = description_text package = d['Package'].lower() cran_metadata[package] = d @@ -471,6 +490,13 @@ def main(args, parser): # XXX: We should maybe normalize these d['license'] = cran_package.get("License", "None") + + # Tend towards the clearer GPL3 and away from the ambiguity of GPL2. + if 'GPL (>= 2)' in d['license'] or d['license'] == 'GPL': + d['license_family'] = 'GPL3' + else: + d['license_family'] = get_close_matches(d['license'], + metadata.allowed_license_families, 1, 0.0)[0] if 'License_is_FOSS' in cran_package: d['license'] += ' (FOSS)' if cran_package.get('License_restricts_use', None) == 'yes': @@ -529,49 +555,41 @@ def main(args, parser): continue if name == 'R': # Put R first - if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build': - # On Linux and OS X, r is a metapackage depending on - # r-base and r-recommended. Recommended packages cannot - # build depend on r as they would then build depend on - # themselves and the built package would end up being - # empty (because conda would find no new files) - r_name = 'r-base' - else: - r_name = 'r' + # Regardless of build or run, and whether this is a recommended package or not, + # it can only depend on 'r-base' since anything else can and will cause cycles + # in the dependency graph. The CRAN metadata lists all dependencies anyway, even + # those packages that are in the recommended group.
+ r_name = 'r-base' # We don't include any R version restrictions because we # always build R packages against an exact R version deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name)) else: conda_name = 'r-' + name.lower() - # The r package on Windows includes the recommended packages - if name in R_RECOMMENDED_PACKAGE_NAMES: - end = ' # [not win]' - else: - end = '' if dep_dict[name]: - deps.append('{indent}{name} {version}{end}'.format(name=conda_name, - version=dep_dict[name], end=end, indent=INDENT)) + deps.append('{indent}{name} {version}'.format(name=conda_name, + version=dep_dict[name], indent=INDENT)) else: - deps.append('{indent}{name}{end}'.format(name=conda_name, - indent=INDENT, end=end)) + deps.append('{indent}{name}'.format(name=conda_name, + indent=INDENT)) if args.recursive: if not exists(join(output_dir, conda_name)): args.packages.append(name) if cran_package.get("NeedsCompilation", 'no') == 'yes': if dep_type == 'build': - deps.append('{indent}gcc # [not win]'.format(indent=INDENT)) - else: - deps.append('{indent}libgcc # [not win]'.format(indent=INDENT)) + deps.append('{indent}posix # [win]'.format(indent=INDENT)) + deps.append('{indent}{{{{native}}}}toolchain # [win]'.format(indent=INDENT)) + deps.append('{indent}gcc # [not win]'.format(indent=INDENT)) d['%s_depends' % dep_type] = ''.join(deps) for package in package_dicts: d = package_dicts[package] name = d['packagename'] - #Normalize the metadata values - d = {k:unicodedata.normalize("NFKD", compat.text_type(v)).encode('ascii', 'ignore') for k, v in d.items()} + # Normalize the metadata values + d = {k: unicodedata.normalize("NFKD", compat.text_type(v)).encode('ascii', 'ignore') + .decode() for k, v in compat.iteritems(d)} makedirs(join(output_dir, name)) print("Writing recipe for %s" % package.lower()) @@ -584,6 +602,7 @@ def main(args, parser): print("Done") + def version_compare(recipe_dir, newest_conda_version): m = metadata.MetaData(recipe_dir) local_version = m.version() @@ -595,6 +614,7 @@ def version_compare(recipe_dir, newest_conda_version): return local_version == newest_conda_version + def get_outdated(output_dir, cran_metadata, packages=()): to_update = [] recipes = listdir(output_dir) diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 437324a7cf..b770fd3ba6 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -9,6 +9,7 @@ from os.path import dirname, join, isdir, exists + def create_files(dir_path, m): """ Create the test files for pkg in the directory given. 
The resulting diff --git a/conda_build/elf.py b/conda_build/elf.py index aa722544e3..a23d0da2d3 100644 --- a/conda_build/elf.py +++ b/conda_build/elf.py @@ -8,7 +8,7 @@ NO_EXT = ( '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', '.xml', '.png', '.jpg', '.gif', - '.o' # ELF but not what we are looking for + '.o' # ELF but not what we are looking for ) MAGIC = b'\x7fELF' @@ -19,7 +19,7 @@ def is_elf(path): return False with open(path, 'rb') as fi: head = fi.read(4) - return bool(head == MAGIC) + return bool(head == MAGIC) if __name__ == '__main__': diff --git a/conda_build/environ.py b/conda_build/environ.py index 64897975bd..55bbd3e41f 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -1,26 +1,36 @@ from __future__ import absolute_import, division, print_function +import logging +import multiprocessing import os import sys -from os.path import join, normpath, isabs -import subprocess -import multiprocessing import warnings +from collections import defaultdict +from os.path import join, normpath +from subprocess import STDOUT, check_output, CalledProcessError, Popen, PIPE import conda.config as cc +from conda.compat import text_type -from conda_build.config import config - +from conda_build import external from conda_build import source +from conda_build.config import config +from conda_build.features import feature_list from conda_build.scripts import prepend_bin_path def get_perl_ver(): return str(config.CONDA_PERL) + +def get_lua_ver(): + return str(config.CONDA_LUA) + + def get_py_ver(): return '.'.join(str(config.CONDA_PY)) + def get_npy_ver(): if config.CONDA_NPY: # Convert int -> string, e.g. @@ -30,182 +40,328 @@ def get_npy_ver(): return conda_npy[0] + '.' + conda_npy[1:] return '' + def get_stdlib_dir(): return join(config.build_prefix, 'Lib' if sys.platform == 'win32' else - 'lib/python%s' % get_py_ver()) + 'lib/python%s' % get_py_ver()) + + +def get_lua_include_dir(): + return join(config.build_prefix, "include") + def get_sp_dir(): return join(get_stdlib_dir(), 'site-packages') -def get_git_build_info(src_dir, git_url, expected_rev): - expected_rev = expected_rev or 'master' + +def verify_git_repo(git_dir, git_url, expected_rev='HEAD'): env = os.environ.copy() - d = {} - git_dir = join(src_dir, '.git') - if not os.path.exists(git_dir): - return d + + if not expected_rev: + return False env['GIT_DIR'] = git_dir try: # Verify current commit matches expected commit - current_commit = subprocess.check_output(["git", "log", "-n1", "--format=%H"], env=env) + current_commit = check_output(["git", "log", "-n1", "--format=%H"], + env=env, stderr=STDOUT) current_commit = current_commit.decode('utf-8') - expected_tag_commit = subprocess.check_output(["git", "log", "-n1", "--format=%H", expected_rev], env=env) + expected_tag_commit = check_output(["git", "log", "-n1", "--format=%H", + expected_rev], + env=env, stderr=STDOUT) expected_tag_commit = expected_tag_commit.decode('utf-8') - # Verify correct remote url. - # (Need to find the git cache directory, and check the remote from there.) - cache_details = subprocess.check_output(["git", "remote", "-v"], env=env) + if current_commit != expected_tag_commit: + return False + + # Verify correct remote url. Need to find the git cache directory, + # and check the remote from there. 
+ cache_details = check_output(["git", "remote", "-v"], env=env, + stderr=STDOUT) cache_details = cache_details.decode('utf-8') cache_dir = cache_details.split('\n')[0].split()[1] - assert "conda-bld/git_cache" in cache_dir - env['GIT_DIR'] = cache_dir - remote_details = subprocess.check_output(["git", "remote", "-v"], env=env) + if not isinstance(cache_dir, str): + # On Windows, subprocess env can't handle unicode. + cache_dir = cache_dir.encode(sys.getfilesystemencoding() or 'utf-8') + + remote_details = check_output(["git", "--git-dir", cache_dir, "remote", "-v"], env=env, + stderr=STDOUT) remote_details = remote_details.decode('utf-8') remote_url = remote_details.split('\n')[0].split()[1] - if '://' not in remote_url: + + # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't + # know how to normalize it. Need to convert it to a windows path. + if sys.platform == 'win32' and remote_url.startswith('/'): + remote_url = check_output(["cygpath", '-w', remote_url]).rstrip().rstrip("\\") + + if os.path.exists(remote_url): # Local filepaths are allowed, but make sure we normalize them remote_url = normpath(remote_url) # If the current source directory in conda-bld/work doesn't match the user's # metadata git_url or git_rev, then we aren't looking at the right source. - if remote_url != git_url or current_commit != expected_tag_commit: - return d - except subprocess.CalledProcessError: - return d - - env['GIT_DIR'] = git_dir + if remote_url.lower() != git_url.lower(): + logging.debug("\nremote does not match git_url\n") + logging.debug("Remote: " + remote_url.lower() + "\n") + logging.debug("git_url: " + git_url.lower() + "\n") + return False + except CalledProcessError as error: + logging.warn("Error obtaining git information. Error was: ") + logging.warn(error) + return False + return True + + +def get_git_info(repo): + """ + Given a repo to a git repo, return a dictionary of: + GIT_DESCRIBE_TAG + GIT_DESCRIBE_NUMBER + GIT_DESCRIBE_HASH + GIT_FULL_HASH + GIT_BUILD_STR + from the output of git describe. 
+ :return: + """ + d = {} # grab information from describe - key_name = lambda a: "GIT_DESCRIBE_{}".format(a) - keys = [key_name("TAG"), key_name("NUMBER"), key_name("HASH")] - env = {str(key): str(value) for key, value in env.items()} - process = subprocess.Popen(["git", "describe", "--tags", "--long", "HEAD"], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - env=env) + env = os.environ.copy() + env['GIT_DIR'] = repo + keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"] + + process = Popen(["git", "describe", "--tags", "--long", "HEAD"], + stdout=PIPE, stderr=PIPE, + env=env) output = process.communicate()[0].strip() output = output.decode('utf-8') + parts = output.rsplit('-', 2) - parts_length = len(parts) - if parts_length == 3: + if len(parts) == 3: d.update(dict(zip(keys, parts))) + # get the _full_ hash of the current HEAD - process = subprocess.Popen(["git", "rev-parse", "HEAD"], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - env=env) + process = Popen(["git", "rev-parse", "HEAD"], + stdout=PIPE, stderr=PIPE, env=env) output = process.communicate()[0].strip() output = output.decode('utf-8') + d['GIT_FULL_HASH'] = output # set up the build string - if key_name('NUMBER') in d and key_name('HASH') in d: - d['GIT_BUILD_STR'] = '{}_{}'.format(d[key_name('NUMBER')], - d[key_name('HASH')]) + if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d: + d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"], + d["GIT_DESCRIBE_HASH"]) return d -def get_dict(m=None, prefix=None): + +def get_dict(m=None, prefix=None, dirty=False): if not prefix: prefix = config.build_prefix - python = config.build_python - d = {'CONDA_BUILD': '1', 'PYTHONNOUSERSITE': '1'} - d['CONDA_DEFAULT_ENV'] = config.build_prefix - d['ARCH'] = str(cc.bits) - d['PREFIX'] = prefix - d['PYTHON'] = python - d['PY3K'] = str(config.PY3K) - d['STDLIB_DIR'] = get_stdlib_dir() - d['SP_DIR'] = get_sp_dir() - d['SYS_PREFIX'] = sys.prefix - d['SYS_PYTHON'] = sys.executable - d['PERL_VER'] = get_perl_ver() - d['PY_VER'] = get_py_ver() - if get_npy_ver(): - d['NPY_VER'] = get_npy_ver() - d['SRC_DIR'] = source.get_dir() - if "LANG" in os.environ: - d['LANG'] = os.environ['LANG'] - if "HTTPS_PROXY" in os.environ: - d['HTTPS_PROXY'] = os.environ['HTTPS_PROXY'] - if "HTTP_PROXY" in os.environ: - d['HTTP_PROXY'] = os.environ['HTTP_PROXY'] + # conda-build specific vars + d = conda_build_vars(prefix, dirty) + + # languages + d.update(python_vars()) + d.update(perl_vars()) + d.update(lua_vars()) if m: - for var_name in m.get_value('build/script_env', []): - value = os.getenv(var_name) - if value is None: - warnings.warn( - "The environment variable '%s' is undefined." 
% var_name, - UserWarning - ) - else: - d[var_name] = value + d.update(meta_vars(m)) + + # system + d.update(system_vars(d, prefix)) + + # features + d.update({feat.upper(): str(int(value)) for feat, value in + feature_list}) + + return d + + +def conda_build_vars(prefix, dirty): + return { + 'CONDA_BUILD': '1', + 'PYTHONNOUSERSITE': '1', + 'CONDA_DEFAULT_ENV': config.build_prefix, + 'ARCH': str(cc.bits), + 'PREFIX': prefix, + 'SYS_PREFIX': sys.prefix, + 'SYS_PYTHON': sys.executable, + 'SUBDIR': cc.subdir, + 'SRC_DIR': source.get_dir(), + 'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''), + 'HTTP_PROXY': os.getenv('HTTP_PROXY', ''), + 'DIRTY': '1' if dirty else '', + } + + +def python_vars(): + vars = { + 'PYTHON': config.build_python, + 'PY3K': str(config.PY3K), + 'STDLIB_DIR': get_stdlib_dir(), + 'SP_DIR': get_sp_dir(), + 'PY_VER': get_py_ver(), + 'CONDA_PY': str(config.CONDA_PY), + } + # Only define these variables if '--numpy=X.Y' was provided, + # otherwise any attempt to use them should be an error. + if get_npy_ver(): + vars['NPY_VER'] = get_npy_ver() + vars['CONDA_NPY'] = str(config.CONDA_NPY) + return vars + + +def perl_vars(): + return { + 'PERL_VER': get_perl_ver(), + } + +def lua_vars(): + lua = config.build_lua + if lua: + return { + 'LUA': lua, + 'LUA_INCLUDE_DIR': get_lua_include_dir(), + 'LUA_VER': get_lua_ver(), + } + else: + return {} + + +def meta_vars(meta): + d = {} + for var_name in meta.get_value('build/script_env', []): + value = os.getenv(var_name) + if value is None: + warnings.warn( + "The environment variable '%s' is undefined." % var_name, + UserWarning + ) + else: + d[var_name] = value + + git_dir = join(source.get_dir(), '.git') + if not isinstance(git_dir, str): + # On Windows, subprocess env can't handle unicode. + git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8') + + if external.find_executable('git') and os.path.exists(git_dir): + git_url = meta.get_value('source/git_url') + + if os.path.exists(git_url): + # If git_url is a relative path instead of a url, convert it to an abspath + git_url = normpath(join(meta.path, git_url)) + + _x = False + + if git_url: + _x = verify_git_repo(git_dir, + git_url, + meta.get_value('source/git_rev', 'HEAD')) + + if _x or meta.get_value('source/path'): + d.update(get_git_info(git_dir)) + + d['PKG_NAME'] = meta.name() + d['PKG_VERSION'] = meta.version() + d['PKG_BUILDNUM'] = str(meta.build_number()) + d['PKG_BUILD_STRING'] = str(meta.build_id()) + d['RECIPE_DIR'] = meta.path + return d + + +def get_cpu_count(): if sys.platform == "darwin": # multiprocessing.cpu_count() is not reliable on OSX # See issue #645 on github.com/conda/conda-build - out, err = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True, stdout=subprocess.PIPE).communicate() - d['CPU_COUNT'] = out.decode('utf-8').strip() + out, err = Popen('sysctl -n hw.logicalcpu', shell=True, + stdout=PIPE).communicate() + return out.decode('utf-8').strip() else: try: - d['CPU_COUNT'] = str(multiprocessing.cpu_count()) + return str(multiprocessing.cpu_count()) except NotImplementedError: - d['CPU_COUNT'] = "1" - - if m and m.get_value('source/git_url'): - git_url = m.get_value('source/git_url') - if '://' not in git_url: - # If git_url is a relative path instead of a url, convert it to an abspath - if not isabs(git_url): - git_url = join(m.path, git_url) - git_url = normpath(join(m.path, git_url)) - d.update(**get_git_build_info(d['SRC_DIR'], - git_url, - m.get_value('source/git_rev'))) - - d['PATH'] = dict(os.environ)['PATH'] + return "1" + + +def 
windows_vars(prefix): + library_prefix = join(prefix, 'Library') + drive, tail = prefix.split(':') + return { + 'SCRIPTS': join(prefix, 'Scripts'), + 'LIBRARY_PREFIX': library_prefix, + 'LIBRARY_BIN': join(library_prefix, 'bin'), + 'LIBRARY_INC': join(library_prefix, 'include'), + 'LIBRARY_LIB': join(library_prefix, 'lib'), + 'R': join(prefix, 'Scripts', 'R.exe'), + 'CYGWIN_PREFIX': ''.join(('/cygdrive/', drive.lower(), tail.replace('\\', '/'))) + } + + +def unix_vars(prefix): + return { + 'HOME': os.getenv('HOME', 'UNKNOWN'), + 'PKG_CONFIG_PATH': join(prefix, 'lib', 'pkgconfig'), + 'CMAKE_GENERATOR': 'Unix Makefiles', + 'R': join(prefix, 'bin', 'R'), + } + + +def osx_vars(compiler_vars): + OSX_ARCH = 'i386' if cc.bits == 32 else 'x86_64' + compiler_vars['CFLAGS'] += ' -arch {0}'.format(OSX_ARCH) + compiler_vars['CXXFLAGS'] += ' -arch {0}'.format(OSX_ARCH) + compiler_vars['LDFLAGS'] += ' -arch {0}'.format(OSX_ARCH) + # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. + # rpath = ' -Wl,-rpath,%(PREFIX)s/lib' % d # SIP workaround, DYLD_* no longer works. + # d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d + return { + 'OSX_ARCH': OSX_ARCH, + 'MACOSX_DEPLOYMENT_TARGET': '10.6', + } + + +def linux_vars(compiler_vars, prefix): + compiler_vars['LD_RUN_PATH'] = prefix + '/lib' + if cc.bits == 32: + compiler_vars['CFLAGS'] += ' -m32' + compiler_vars['CXXFLAGS'] += ' -m32' + return {} + + +def system_vars(env_dict, prefix): + d = dict() + compiler_vars = defaultdict(text_type) + + if 'MAKEFLAGS' in os.environ: + d['MAKEFLAGS'] = os.environ['MAKEFLAGS'] + + d['CPU_COUNT'] = get_cpu_count() + if "LANG" in os.environ: + d['LANG'] = os.environ['LANG'] + d['PATH'] = os.environ['PATH'] d = prepend_bin_path(d, prefix) - if sys.platform == 'win32': # -------- Windows - d['SCRIPTS'] = join(prefix, 'Scripts') - d['LIBRARY_PREFIX'] = join(prefix, 'Library') - d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin') - d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include') - d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib') - - drive, tail = prefix.split(':') - d['CYGWIN_PREFIX'] = ''.join(['/cygdrive/', drive.lower(), tail.replace('\\', '/')]) - - d['R'] = join(prefix, 'Scripts', 'R.exe') - else: # -------- Unix - d['HOME'] = os.getenv('HOME', 'UNKNOWN') - d['PKG_CONFIG_PATH'] = join(prefix, 'lib', 'pkgconfig') - d['R'] = join(prefix, 'bin', 'R') - - cflags = d.get('CFLAGS', '') # in case CFLAGS was added in the `script_env` section above - cxxflags = d.get('CXXFLAGS', '') - ldflags = d.get('LDFLAGS', '') - - if sys.platform == 'darwin': # -------- OSX - d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64' - d['CFLAGS'] = cflags + ' -arch %(OSX_ARCH)s' % d - d['CXXFLAGS'] = cxxflags + ' -arch %(OSX_ARCH)s' % d - d['LDFLAGS'] = ldflags + ' -arch %(OSX_ARCH)s' % d - d['MACOSX_DEPLOYMENT_TARGET'] = '10.6' - - elif sys.platform.startswith('linux'): # -------- Linux - d['LD_RUN_PATH'] = prefix + '/lib' - if cc.bits == 32: - d['CFLAGS'] = cflags + ' -m32' - d['CXXFLAGS'] = cxxflags + ' -m32' - - if m: - d['PKG_NAME'] = m.name() - d['PKG_VERSION'] = m.version() - d['PKG_BUILDNUM'] = str(m.build_number()) - d['PKG_BUILD_STRING'] = str(m.build_id()) - d['RECIPE_DIR'] = m.path + if sys.platform == 'win32': + d.update(windows_vars(prefix)) + else: + d.update(unix_vars(prefix)) + + if sys.platform == 'darwin': + d.update(osx_vars(compiler_vars)) + elif sys.platform.startswith('linux'): + d.update(linux_vars(compiler_vars, prefix)) + + # make sure compiler_vars get appended to 
anything already set, including build/script_env + for key in compiler_vars: + if key in env_dict: + compiler_vars[key] += env_dict[key] + d.update(compiler_vars) return d diff --git a/conda_build/external.py b/conda_build/external.py index 50d1d11f8f..76099cd8fb 100644 --- a/conda_build/external.py +++ b/conda_build/external.py @@ -7,25 +7,29 @@ import conda.config as cc from conda_build.config import config + def find_executable(executable): # dir_paths is referenced as a module-level variable # in other code global dir_paths if sys.platform == 'win32': dir_paths = [join(config.build_prefix, 'Scripts'), + join(config.build_prefix, 'Library\\mingw-w64\\bin'), + join(config.build_prefix, 'Library\\usr\\bin'), join(config.build_prefix, 'Library\\bin'), join(cc.root_dir, 'Scripts'), - join(cc.root_dir, 'Library\\bin'), - 'C:\\cygwin\\bin'] + join(cc.root_dir, 'Library\\mingw-w64\\bin'), + join(cc.root_dir, 'Library\\usr\\bin'), + join(cc.root_dir, 'Library\\bin'), ] else: dir_paths = [join(config.build_prefix, 'bin'), - join(cc.root_dir, 'bin'),] + join(cc.root_dir, 'bin'), ] dir_paths.extend(os.environ['PATH'].split(os.pathsep)) for dir_path in dir_paths: if sys.platform == 'win32': - for ext in '.exe', '.bat', '': + for ext in '.exe', '.bat', '': path = join(dir_path, executable + ext) if isfile(path): return path diff --git a/conda_build/features.py b/conda_build/features.py new file mode 100644 index 0000000000..7cdce34107 --- /dev/null +++ b/conda_build/features.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + +import os +import sys + +from conda.compat import iteritems + + +env_vars = [ + 'FEATURE_DEBUG', + 'FEATURE_NOMKL', + 'FEATURE_OPT', +] + +# list of features, where each element is a tuple(name, boolean), i.e. having +# FEATURE_DEBUG=1 and FEATURE_NOMKL=0 -> [('debug', True), ('nomkl', False)] +feature_list = [] +for key, value in iteritems(os.environ): + if key in env_vars: + if value not in ('0', '1'): + sys.exit("Error: did not expect environment variable '%s' " + "being set to '%s' (not '0' or '1')" % (key, value)) + feature_list.append((key[8:].lower(), bool(int(value)))) diff --git a/conda_build/header_test.py b/conda_build/header_test.py index 9711c2f0cf..174918fe2f 100644 --- a/conda_build/header_test.py +++ b/conda_build/header_test.py @@ -5,6 +5,7 @@ from distutils.spawn import find_executable import shlex + def call_args(string): args = shlex.split(string) arg0 = args[0] diff --git a/conda_build/index.py b/conda_build/index.py index 6f988c4a10..6e7a2c175d 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -30,6 +30,7 @@ def read_index_tar(tar_path): except tarfile.ReadError: raise RuntimeError("Could not extract metadata from %s. File probably corrupt." % tar_path) + def write_repodata(repodata, dir_path): """ Write updated repodata.json and repodata.json.bz2 """ data = json.dumps(repodata, indent=2, sort_keys=True) @@ -43,6 +44,7 @@ def write_repodata(repodata, dir_path): with open(join(dir_path, 'repodata.json.bz2'), 'wb') as fo: fo.write(bz2.compress(data.encode('utf-8'))) + def update_index(dir_path, verbose=False, force=False, check_md5=False, remove=True): """ Update all index files in dir_path with changed packages. 
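The new conda_build/features.py above reduces to a small, self-contained pattern: scan the environment for a fixed set of FEATURE_* variables, insist each is '0' or '1', and strip the 8-character 'FEATURE_' prefix to get the feature name; environ.py then folds the result back into the build environment as uppercase variables. A minimal sketch of that logic (the function wrapper and its name are illustrative, not part of the patch):

    import os
    import sys

    ENV_VARS = ['FEATURE_DEBUG', 'FEATURE_NOMKL', 'FEATURE_OPT']

    def parse_feature_list(environ=os.environ):
        # e.g. FEATURE_DEBUG=1 FEATURE_NOMKL=0 -> [('debug', True), ('nomkl', False)]
        features = []
        for key, value in environ.items():
            if key not in ENV_VARS:
                continue
            if value not in ('0', '1'):
                sys.exit("Error: expected '0' or '1' for %s, not %r" % (key, value))
            # 'FEATURE_' is 8 characters long, hence key[8:]
            features.append((key[8:].lower(), bool(int(value))))
        return features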
diff --git a/conda_build/jinja_context.py b/conda_build/jinja_context.py index 90529486f3..2a3da2dfdb 100644 --- a/conda_build/jinja_context.py +++ b/conda_build/jinja_context.py @@ -9,30 +9,93 @@ import os from functools import partial +import jinja2 + from conda.compat import PY3 from .environ import get_dict as get_environ +from .metadata import select_lines, ns_cfg _setuptools_data = None + +class UndefinedNeverFail(jinja2.Undefined): + """ + A class for Undefined jinja variables. + This is even less strict than the default jinja2.Undefined class, + because it permits things like {{ MY_UNDEFINED_VAR[:2] }} and + {{ MY_UNDEFINED_VAR|int }}. This can mask lots of errors in jinja templates, so it + should only be used for a first-pass parse, when you plan on running a 'strict' + second pass later. + """ + all_undefined_names = [] + + def __init__(self, hint=None, obj=jinja2.runtime.missing, name=None, + exc=jinja2.exceptions.UndefinedError): + UndefinedNeverFail.all_undefined_names.append(name) + jinja2.Undefined.__init__(self, hint, obj, name, exc) + + __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ + __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ + __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ + __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = \ + __complex__ = __pow__ = __rpow__ = \ + lambda self, *args, **kwargs: UndefinedNeverFail(hint=self._undefined_hint, + obj=self._undefined_obj, + name=self._undefined_name, + exc=self._undefined_exception) + + __str__ = __repr__ = \ + lambda *args, **kwargs: u'' + + __int__ = lambda _: 0 + __float__ = lambda _: 0.0 + + def __getattr__(self, k): + try: + return object.__getattr__(self, k) + except AttributeError: + return UndefinedNeverFail(hint=self._undefined_hint, + obj=self._undefined_obj, + name=self._undefined_name + '.' + k, + exc=self._undefined_exception) + + +class FilteredLoader(jinja2.BaseLoader): + """ + A pass-through for the given loader, except that the loaded source is + filtered according to any metadata selectors in the source text. + """ + + def __init__(self, unfiltered_loader): + self._unfiltered_loader = unfiltered_loader + self.list_templates = unfiltered_loader.list_templates + + def get_source(self, environment, template): + contents, filename, uptodate = self._unfiltered_loader.get_source(environment, + template) + return select_lines(contents, ns_cfg()), filename, uptodate + + def load_setuptools(setup_file='setup.py', from_recipe_dir=False, recipe_dir=None): global _setuptools_data if _setuptools_data is None: _setuptools_data = {} + def setup(**kw): _setuptools_data.update(kw) import setuptools import distutils.core - #Add current directory to path + # Add current directory to path import sys sys.path.append('.') if from_recipe_dir and recipe_dir: setup_file = os.path.abspath(os.path.join(recipe_dir, setup_file)) - #Patch setuptools, distutils + # Patch setuptools, distutils setuptools_setup = setuptools.setup distutils_setup = distutils.core.setup setuptools.setup = distutils.core.setup = setup @@ -49,12 +112,14 @@ def setup(**kw): del sys.path[-1] return _setuptools_data + def load_npm(): # json module expects bytes in Python 2 and str in Python 3. mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'} with open('package.json', **mode_dict) as pkg: return json.load(pkg) + def context_processor(initial_metadata, recipe_dir): """ Return a dictionary to use as context for jinja templates. 
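UndefinedNeverFail above enables a two-pass rendering strategy: a permissive first parse of the recipe succeeds even when values such as the GIT_* variables are not yet known, and a later strict pass surfaces real template errors. The stock jinja2.Undefined is not lenient enough for the first pass, since it renders bare references as empty strings but still raises on operations like slicing or filters; a quick illustration, assuming only that jinja2 is installed:

    import jinja2

    env = jinja2.Environment(undefined=jinja2.Undefined)
    # Bare references render as the empty string:
    print(env.from_string('pkg_{{ GIT_BUILD_STR }}').render())  # -> 'pkg_'
    # ...but operations on undefined values still raise:
    try:
        env.from_string('{{ GIT_BUILD_STR[:2] }}').render()
    except jinja2.exceptions.UndefinedError as exc:
        print('would fail on a first pass without UndefinedNeverFail:', exc)

UndefinedNeverFail additionally overloads the arithmetic, comparison, item, and attribute hooks to return another UndefinedNeverFail, so even such expressions survive the first pass.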
diff --git a/conda_build/ldd.py b/conda_build/ldd.py index 7501983ed2..324f5a9ecb 100644 --- a/conda_build/ldd.py +++ b/conda_build/ldd.py @@ -3,11 +3,11 @@ import sys import re import subprocess -import json from os.path import join, basename from conda.utils import memoized from conda.misc import untracked +from conda.install import linked_data from conda_build import post from conda_build.macho import otool @@ -15,6 +15,7 @@ LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') + def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() @@ -38,6 +39,7 @@ def ldd(path): return res + @memoized def get_linkages(obj_files, prefix): res = {} @@ -48,25 +50,25 @@ def get_linkages(obj_files, prefix): res[f] = ldd(path) elif sys.platform.startswith('darwin'): links = otool(path) - res[f] = [(basename(l), l) for l in links] + res[f] = [(basename(l['name']), l['name']) for l in links] return res + @memoized def get_package_obj_files(dist, prefix): - with open(join(prefix, 'conda-meta', dist + - '.json')) as f: - data = json.load(f) + data = linked_data(prefix).get(dist) res = [] - files = data['files'] - for f in files: - path = join(prefix, f) - if post.is_obj(path): - res.append(f) + if data: + for f in data.get('files', []): + path = join(prefix, f) + if post.is_obj(path): + res.append(f) return res + @memoized def get_untracked_obj_files(prefix): res = [] diff --git a/conda_build/luarocks.py b/conda_build/luarocks.py new file mode 100644 index 0000000000..43509df056 --- /dev/null +++ b/conda_build/luarocks.py @@ -0,0 +1,348 @@ +""" +Tools for converting luarocks packages to conda recipes. +""" + +# TODO: +# - mingw32 support (really any windows support, completely untested) +# - replace manual "luajit -e require 'blah'" with built-in entry-point testing + +import os +import subprocess +import tempfile +from glob import glob +import json +from sys import platform as _platform + +INDENT = '\n - ' + +rockspec_parser = """ +local ok,cjson = pcall(require, "cjson") +if not ok then + print("ERROR: lua-cjson not installed. Use conda to install luarocks, " .. + "then run 'luarocks install lua-cjson'.") + os.exit() +end + +local rockspecFile = "%s" +local origPackage = package +local ok, _ = pcall(dofile, rockspecFile) +if not ok then + print("ERROR: could not load rockspecFile " ..
tostring(rockspecFile)) + os.exit() +end + +-- Resolve name clash +if origPackage == package then + package = nil +end +local out = { + rockspec_format=rockspec_format, + package=package, + version=version, + description=description, + supported_platforms=supported_platforms, + dependencies=dependencies, + external_dependencies=external_dependencies, + source=source, + build=build, + modules=modules, +} +print(cjson.encode(out)) +""" + + +LUAROCKS_META = """\ +package: + name: {packagename} + version: "{version}" + +source: + {usefile}fn: {filename} + {usefile}url: {url} + {usegit}git_url: {url} + {usegittag}git_tag: {gittag} # can also be a branch, but that is highly discouraged + {usegitrev}git_rev: {gitrev} # prefer tags over commits, commits over branches + {usemd5}md5:{md5} +# patches: + # List any patch files here + # - fix.patch + +build: + {noarch_python_comment}noarch_python: True + # Useful to leave this on by default, will allow relocating + # packages that have hard-coded paths in them + detect_binary_files_with_prefix: true + # If this is a new build for the same version, increment the build + # number. If you do not include this key, it defaults to 0. + # number: 1 + +requirements: + build:{build_depends} + + run:{run_depends} + +{test_comment}test: + {entry_comment}commands: + # You can put test commands to be run here. Use this to test that the + # entry points work. +{test_commands} + + # You can also put a file called run_test.lua in the recipe that will be run + # at test time. + +about: + {home_comment}home: {homeurl} + license: {license} + {summary_comment}summary: {summary} + +# See +# http://docs.continuum.io/conda/build.html for +# more information about meta.yaml +""" + +LUAROCKS_BUILD_SH = """\ +#!/bin/bash + +# Make sure luarocks can see all local dependencies +$PREFIX/bin/luarocks-admin make_manifest --local-tree + +# Install +# Rocks aren't located in a standard location, although +# they tend to be top-level or in a rocks/ directory. +# NOTE: we're just picking the first rock we find. If there's +# more than one, specify it manually. +ROCK=$(find . -name "*.rockspec" | sort -n -r | head -n 1) +$PREFIX/bin/luarocks install "$ROCK" --local-tree + +# Add more build steps here, if they are necessary. + +# See +# http://docs.continuum.io/conda/build.html +# for a list of environment variables that are set during the build process. +""" + +LUAROCKS_POSTLINK_SH = """\ +# Let luarocks know that we've installed a new project +$PREFIX/bin/luarocks-admin make_manifest --local-tree +""" + +LUAROCKS_PREUNLINK_SH = """\ +# Tell luarocks we've removed the project +$PREFIX/bin/luarocks remove {rockname} +""" + + +def getval(spec, k): + if k not in spec: + raise Exception("Required key %s not in spec" % k) + else: + return spec[k] + + +def warn_against_branches(branch): + print("") + print("=========================================") + print("") + print("WARNING:") + print("Building a rock referenced to branch %s." % branch) + print("This is not a tag. This is dangerous, because rebuilding") + print("at a later date may produce a different package.") + print("Please replace with a tag, git commit, or tarball.") + print("") + print("=========================================") + + +def format_dep(dep): + name_without_ver = "".join([c for c in dep if c.isalpha()]) + if name_without_ver not in ["lua"]: + # Enforce conda naming convention. 
+ # lower case, no white-space, and prepended "lua-" + # (all languages other than Python prepend their language to package names) + if dep[:4] != "lua-": + dep = "lua-" + dep + dep = dep.replace(" ", "").lower() + + # Ensure a space between the first special-character that specifies version logic + # Not "-", because that's used in e.g. lua-penlight + special_char_test = [c in "<>=~" for c in dep] + for i, v in enumerate(special_char_test): + if v: + split_dep = [c for c in dep] + split_dep.insert(i, " ") + dep = "".join(split_dep) + break + return dep + + +def ensure_base_deps(deps): + basenames = ["".join([c for c in dep if c.isalpha()]) for dep in deps] + extra_deps = [] + if "lua" not in basenames: + extra_deps.append("lua") + if "luarocks" not in basenames: + extra_deps.append("luarocks") + if len(extra_deps): + deps = extra_deps + deps + return deps + + +def main(args, parser): + + # Check that we have Lua installed (any version) + + # Check that we have luarocks installed + + # Check that we have lua-cjson installed + + # Get the platform + platform = "linux" if _platform == "linux2" else _platform + + # Make temporary directory + cwd = os.getcwd() + temp_dir = tempfile.mkdtemp() + package_dicts = {} + + # Step into it + os.chdir(temp_dir) + + while args.packages: + [output_dir] = args.output_dir + package = args.packages.pop() + + packagename = "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() + d = package_dicts.setdefault(package, + { + 'packagename': packagename, + 'version': "0.0", + 'filename': "", + 'url': "", + 'md5': "", + 'usemd5': "# ", + 'usefile': "# ", + 'usegit': "# ", + 'usegittag': "# ", + 'usegitrev': "# ", + 'gittag': "", + 'gitrev': "", + 'noarch_python_comment': "# ", + 'build_depends': "", + 'run_depends': "", + 'test_comment': "", + 'entry_comment': "", + 'test_commands': "", + 'home_comment': "# ", + 'homeurl': "", + 'license': "Unknown", + 'summary_comment': "# ", + 'summary': "", + }) + + # Download rockspec + o = subprocess.call(["luarocks", "download", package, "--rockspec"]) + if o != 0: + raise Exception("Could not download rockspec for {}".format(package)) + + # Find the downloaded rockspec + fs = glob(package + "*.rockspec") + if len(fs) != 1: + raise Exception("Failed to download rockspec") + d['rockspec_file'] = fs[0] + + # Parse the rockspec into a dictionary + p = subprocess.Popen(["lua", "-e", rockspec_parser % d['rockspec_file']], + stdout=subprocess.PIPE) + out, err = p.communicate() + if "ERROR" in out: + raise Exception(out.replace("ERROR: ", "")) + spec = json.loads(out) + + # Gather the basic details + d['rockname'] = getval(spec, "package") + d['version'] = getval(spec, "version") + d['version'] = "".join([c for c in d['version'] if c.isalnum()]) + source = getval(spec, "source") + + # Figure out how to download the package, and from where + d['url'] = getval(source, "url") + ext = os.path.splitext(d['url'])[-1] + if ext in [".zip", ".tar", ".tar.bz2", ".tar.xz", ".tar.gz"]: + d['usefile'] = "" + d['filename'] = os.path.split(d['url'])[-1] + if "md5" in source: + md5 = getval(source, "md5") + if len(md5): + d['md5'] = md5 + d['usemd5'] = "" + elif ext in [".git"] or d['url'][:4] == "git:": + d['usegit'] = "" + # Check if we're using a tag or a commit + if "tag" in source: + d['usegittag'] = "" + d['gittag'] = getval(source, "tag") + elif "branch" in source: + d['usegittag'] = "" + d['gittag'] = getval(source, "branch") + warn_against_branches(d['gittag']) + else: + d['usegittag'] = "" + d['gittag'] = "master" + 
warn_against_branches(d['gittag']) + + # Gather the description + if "description" in spec: + desc = getval(spec, "description") + if "homepage" in desc: + d['homeurl'] = desc['homepage'] + d['home_comment'] = "" + if "summary" in desc: + d['summary'] = desc['summary'] + d['summary_comment'] = "" + if "license" in desc: + d['license'] = desc['license'] + + # Gather the dependencies + if "dependencies" in spec: + deps = getval(spec, "dependencies") + if len(deps): + deps = ensure_base_deps([format_dep(dep) for dep in deps]) + d['build_depends'] = INDENT.join([''] + deps) + d['run_depends'] = d['build_depends'] + + # Build some entry-point tests. + if "build" in spec: + if platform == "darwin": + our_plat = "macosx" + elif platform == "linux": + our_plat = "unix" + + modules = None + if "modules" in spec['build']: + modules = spec['build']["modules"] + elif "platforms" in spec['build']: + if our_plat in spec['build']['platforms']: + if "modules" in spec['build']['platforms'][our_plat]: + modules = spec['build']['platforms'][our_plat]["modules"] + if modules: + d['test_commands'] = INDENT.join([''] + + ["""lua -e "require '%s'\"""" % r + for r in modules.keys()]) + + # If we didn't find any modules to import, import the base name + if d['test_commands'] == "": + d['test_commands'] = INDENT.join([''] + ["""lua -e "require '%s'" """ % d['rockname']]) + + # Build the luarocks skeleton + os.chdir(cwd) + for package in package_dicts: + d = package_dicts[package] + name = d['packagename'] + os.makedirs(os.path.join(output_dir, name)) + print("Writing recipe for %s to %s" % (package.lower(), os.path.join(output_dir, name))) + with open(os.path.join(output_dir, name, 'meta.yaml'), 'w') as f: + f.write(LUAROCKS_META.format(**d)) + with open(os.path.join(output_dir, name, 'build.sh'), 'w') as f: + f.write(LUAROCKS_BUILD_SH.format(**d)) + with open(os.path.join(output_dir, name, 'post-link.sh'), 'w') as f: + f.write(LUAROCKS_POSTLINK_SH) + with open(os.path.join(output_dir, name, 'pre-unlink.sh'), 'w') as f: + f.write(LUAROCKS_PREUNLINK_SH.format(**d)) diff --git a/conda_build/macho.py b/conda_build/macho.py index 82c8dd6495..f51ab8b1ba 100644 --- a/conda_build/macho.py +++ b/conda_build/macho.py @@ -1,8 +1,9 @@ from __future__ import absolute_import, division, print_function import sys -import subprocess +from subprocess import Popen, check_output, PIPE from os.path import islink, isfile +from itertools import islice NO_EXT = ( '.py', '.pyc', '.pyo', '.h', '.a', '.c', '.txt', '.html', @@ -43,8 +44,9 @@ def is_macho(path): def is_dylib(path): return human_filetype(path) == 'DYLIB' + def human_filetype(path): - lines = subprocess.check_output(['otool', '-h', path]).decode('utf-8').splitlines() + lines = check_output(['otool', '-h', path]).decode('utf-8').splitlines() assert lines[0].startswith(path), path for line in lines: @@ -53,51 +55,196 @@ def human_filetype(path): filetype = int(header[4]) return FILETYPE[filetype][3:] -def otool(path): - "thin wrapper around otool -L" - lines = subprocess.check_output(['otool', '-L', path]).decode('utf-8').splitlines() - assert lines[0].startswith(path), path - res = [] - for line in lines[1:]: - assert line[0] == '\t', path - res.append(line.split()[0]) - return res -def get_rpaths(path): - lines = subprocess.check_output(['otool', '-l', - path]).decode('utf-8').splitlines() - check_for_rpath = False - rpaths = [] - for line in lines: - if 'cmd LC_RPATH' in line: - check_for_rpath = True - if check_for_rpath and 'path' in line: - _, rpath, _ = line.split(None, 2) 
- rpaths.append(rpath) - return rpaths - -def install_name_change(path, cb_func): +def is_dylib_info(lines): + dylib_info = ('LC_ID_DYLIB', 'LC_LOAD_DYLIB') + if len(lines) > 1 and lines[1].split()[1] in dylib_info: + return True + return False + + +def is_id_dylib(lines): + if len(lines) > 1 and lines[1].split()[1] == 'LC_ID_DYLIB': + return True + return False + + +def is_load_dylib(lines): + if len(lines) > 1 and lines[1].split()[1] == 'LC_LOAD_DYLIB': + return True + return False + + +def is_rpath(lines): + if len(lines) > 1 and lines[1].split()[1] == 'LC_RPATH': + return True + return False + + +def _get_load_commands(lines): + """yields each load command from the output of otool -l""" + a = 1 # first line is the filename. + for ln, line in enumerate(lines): + if line.startswith("Load command"): + if a < ln: + yield lines[a:ln] + a = ln + yield lines[a:] + + +def _get_matching_load_commands(lines, cb_filter): + """Workhorse function for otool + + Does the work of filtering load commands and making a list + of dicts. The logic for splitting the free-form lines into + keys and values is entirely encoded here. Values that can + be converted to ints are converted to ints. """ - change dynamic shared library install names of Mach-O binary `path`. + result = [] + for lcmds in _get_load_commands(lines): + if cb_filter(lcmds): + lcdict = {} + for line in islice(lcmds, 1, len(lcmds)): + listy = line.split() + # This could be prettier, but what we need it to handle + # is fairly simple so let's just hardcode it for speed. + if len(listy) == 2: + key, value = listy + elif listy[0] == 'name' or listy[0] == 'path': + # Create an entry for 'name offset' if there is one + # as that can be useful if we need to know if there + # is space to patch it for relocation purposes. + if listy[2] == '(offset': + key = listy[0] + ' offset' + value = int(listy[3][:-1]) + lcdict[key] = value + key, value = listy[0:2] + elif listy[0] == 'time': + key = ' '.join(listy[0:3]) + value = ' '.join(listy[3:]) + elif listy[0] in ('current', 'compatibility'): + key = ' '.join(listy[0:2]) + value = listy[2] + try: + value = int(value) + except: + pass + lcdict[key] = value + result.append(lcdict) + return result + + +def otool(path, cb_filter=is_dylib_info): + """A wrapper around otool -l + + Parse the output of the otool -l 'load commands', filtered by + cb_filter, returning a list of dictionaries for the records. - `cb_func` is a callback function which called for each shared library name. - It is called with `path` and the current shared library install name, - and return the new name (or None if the name should be unchanged). + cb_filter receives the whole load command record, including the + first line, the 'Load Command N' one. All the records have been + pre-stripped of white space. + + The output of otool -l is entirely freeform; delineation between + key and value doesn't formally exist, so that is hard coded. I + didn't want to use regexes to parse it for speed purposes. + + Any key values that can be converted to integers are converted + to integers, the rest are strings.
""" + lines = check_output(['otool', '-l', path]).decode('utf-8').splitlines() + return _get_matching_load_commands(lines, cb_filter) + + +def get_dylibs(path): + """Return a list of the loaded dylib pathnames""" + dylib_loads = otool(path, is_load_dylib) + return [dylib_load['name'] for dylib_load in dylib_loads] + + +def get_id(path): + """Returns the id name of the Mach-O file `path` or an empty string""" + dylib_loads = otool(path, is_id_dylib) + try: + return [dylib_load['name'] for dylib_load in dylib_loads][0] + except: + return '' + + +def get_rpaths(path): + """Return a list of the dylib rpaths""" + rpaths = otool(path, is_rpath) + return [rpath['path'] for rpath in rpaths] + + +def add_rpath(path, rpath, verbose=False): + """Add an `rpath` to the Mach-O file at `path`""" + args = ['install_name_tool', '-add_rpath', rpath, path] + if verbose: + print(' '.join(args)) + p = Popen(args, stderr=PIPE) + stdout, stderr = p.communicate() + stderr = stderr.decode('utf-8') + if "Mach-O dynamic shared library stub file" in stderr: + print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + return + elif "would duplicate path, file already has LC_RPATH for:" in stderr: + print("Skipping -add_rpath, file already has LC_RPATH set") + return + else: + print(stderr, file=sys.stderr) + if p.returncode: + raise RuntimeError("install_name_tool failed with exit status %d" + % p.returncode) + + +def delete_rpath(path, rpath, verbose=False): + """Delete an `rpath` from the Mach-O file at `path`""" + args = ['install_name_tool', '-delete_rpath', rpath, path] + if verbose: + print(' '.join(args)) + p = Popen(args, stderr=PIPE) + stdout, stderr = p.communicate() + stderr = stderr.decode('utf-8') + if "Mach-O dynamic shared library stub file" in stderr: + print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + return + elif "no LC_RPATH load command with path:" in stderr: + print("Skipping -delete_rpath, file doesn't contain that LC_RPATH") + return + else: + print(stderr, file=sys.stderr) + if p.returncode: + raise RuntimeError("install_name_tool failed with exit status %d" + % p.returncode) + + +def install_name_change(path, cb_func, verbose=False): + """Change dynamic shared library load name or id name of Mach-O Binary `path`. + + `cb_func` is called for each shared library load command. The dictionary of + the load command is passed in and the callback returns the new name or None + if the name should be unchanged. + + When dealing with id load commands, `install_name_tool -id` is used. + When dealing with dylib load commands `install_name_tool -change` is used. + """ + dylibs = otool(path) changes = [] - for link in otool(path): - # The first link may be the install name of the library itself, but - # this isn't a big deal because install_name_tool -change is a no-op - # if given a dependent install name that doesn't exist. 
- new_link = cb_func(path, link) - if new_link: - changes.append((link, new_link)) + for index, dylib in enumerate(dylibs): + new_name = cb_func(path, dylib) + if new_name: + changes.append((index, new_name)) ret = True - for old, new in changes: - args = ['install_name_tool', '-change', old, new, path] - print(' '.join(args)) - p = subprocess.Popen(args, stderr=subprocess.PIPE) + for index, new_name in changes: + args = ['install_name_tool'] + if dylibs[index]['cmd'] == 'LC_ID_DYLIB': + args.extend(('-id', new_name, path)) + else: + args.extend(('-change', dylibs[index]['name'], new_name, path)) + if verbose: + print(' '.join(args)) + p = Popen(args, stderr=PIPE) stdout, stderr = p.communicate() stderr = stderr.decode('utf-8') if "Mach-O dynamic shared library stub file" in stderr: @@ -111,6 +258,7 @@ def install_name_change(path, cb_func): % p.returncode) return ret + if __name__ == '__main__': if sys.platform == 'darwin': for path in '/bin/ls', '/etc/locate.rc': diff --git a/conda_build/main_build.py b/conda_build/main_build.py index df6d778fe0..88f2793aa5 100644 --- a/conda_build/main_build.py +++ b/conda_build/main_build.py @@ -7,72 +7,35 @@ from __future__ import absolute_import, division, print_function import argparse +import os import sys from collections import deque from glob import glob from locale import getpreferredencoding -from os import listdir -from os import environ as os_environ -from os.path import exists, isdir, isfile, join import warnings import conda.config as config from conda.compat import PY3 -from conda.cli.common import add_parser_channels, Completer -from conda.cli.conda_argparse import ArgumentParser +from conda.cli.common import add_parser_channels +from conda.install import delete_trash +from conda.resolve import NoPackagesFound, Unsatisfiable -from conda_build import __version__, exceptions +from conda_build.build import bldpkg_path from conda_build.index import update_index -from conda.install import delete_trash +from conda_build.main_render import get_render_parser +from conda_build.utils import find_recipe +from conda_build.main_render import (set_language_env_vars, RecipeCompleter, render_recipe) on_win = (sys.platform == 'win32') -all_versions = { - 'python': [26, 27, 33, 34, 35], - 'numpy': [16, 17, 18, 19, 110], - 'perl': None, - 'R': None, -} - -class RecipeCompleter(Completer): - def _get_items(self): - completions = [] - for path in listdir('.'): - if isdir(path) and isfile(join(path, 'meta.yaml')): - completions.append(path) - if isfile('meta.yaml'): - completions.append('.') - return completions - -# These don't represent all supported versions. It's just for tab completion. - -class PythonVersionCompleter(Completer): - def _get_items(self): - return ['all'] + [str(i/10) for i in all_versions['python']] - -class NumPyVersionCompleter(Completer): - def _get_items(self): - versions = [str(i) for i in all_versions['numpy']] - return ['all'] + ['%s.%s' % (ver[0], ver[1:]) for ver in versions] - -class RVersionsCompleter(Completer): - def _get_items(self): - return ['3.1.2', '3.1.3', '3.2.0', '3.2.1', '3.2.2'] def main(): - p = ArgumentParser( - description=""" + p = get_render_parser() + p.description = """ Tool for building conda packages. A conda package is a binary tarball containing system-level libraries, Python modules, executable programs, or other components. 
conda keeps track of dependencies between packages and platform specifics, making it simple to create working environments from different sets of packages.""" - ) - p.add_argument( - '-V', '--version', - action='version', - help='Show the conda-build version number and exit.', - version = 'conda-build %s' % __version__, - ) p.add_argument( "--check", action="store_true", @@ -99,12 +62,6 @@ def main(): dest='include_recipe', default=True, ) - p.add_argument( - "--output", - action="store_true", - help="Output the conda package filename which would have been " - "created and exit.", - ) p.add_argument( '-s', "--source", action="store_true", @@ -113,15 +70,9 @@ def main(): p.add_argument( '-t', "--test", action="store_true", - help="Test package (assumes package is already build).", - ) - p.add_argument( - 'recipe', - action="store", - metavar='RECIPE_PATH', - nargs='+', - choices=RecipeCompleter(), - help="Path to recipe directory.", + help="Test package (assumes package is already built). RECIPE_PATH argument can be either a " + "recipe directory, in which case source download may be necessary to resolve package " + "version, or a path to a built package .tar.bz2 file, in which case no source is necessary.", ) p.add_argument( '--no-test', @@ -140,52 +91,48 @@ def main(): action="store_true", help="Run the post-build logic. Implies --no-test and --no-anaconda-upload.", ) + p.add_argument( + 'recipe', + action="store", + metavar='RECIPE_PATH', + nargs='+', + choices=RecipeCompleter(), + help="Path to recipe directory.", + ) p.add_argument( '--skip-existing', action='store_true', help="""Skip recipes for which there already exists an existing build (locally or in the channels). """ - ) + ) p.add_argument( - '-q', "--quiet", - action="store_true", - help="do not display progress bar", + '--keep-old-work', + action='store_true', + help="""Keep any existing, old work directory. Useful if debugging across + callstacks involving multiple packages/recipes. """ ) p.add_argument( - '--python', - action="append", - help="""Set the Python version used by conda build. Can be passed - multiple times to build against multiple versions. Can be 'all' to - build against all known versions (%r)""" % [i for i in - PythonVersionCompleter() if '.' in i], - metavar="PYTHON_VER", - choices=PythonVersionCompleter(), + '--dirty', + action='store_true', + help='Do not remove work directory or _build environment, ' + 'to speed up debugging. Does not apply patches or download source.' ) p.add_argument( - '--perl', - action="append", - help="""Set the Perl version used by conda build. Can be passed - multiple times to build against multiple versions.""", - metavar="PERL_VER", + '-q', "--quiet", + action="store_true", + help="do not display progress bar", ) p.add_argument( - '--numpy', - action="append", - help="""Set the NumPy version used by conda build. Can be passed - multiple times to build against multiple versions. Can be 'all' to - build against all known versions (%r)""" % [i for i in - NumPyVersionCompleter() if '.' in i], - metavar="NUMPY_VER", - choices=NumPyVersionCompleter(), + '--token', + action="store", + help="Token to pass through to anaconda upload" ) p.add_argument( - '--R', - action="append", - help="""Set the R version used by conda build.
Can be passed - multiple times to build against multiple versions.""", - metavar="R_VER", - choices=RVersionsCompleter(), + '--user', + action='store', + help="User/organization to upload packages to on anaconda.org" ) + add_parser_channels(p) p.set_defaults(func=execute) @@ -229,9 +176,16 @@ def handle_binstar_upload(path, args): # $ conda install anaconda-client ''') print("Uploading to anaconda.org") - args = [binstar, 'upload', path] + cmd = [binstar, ] + + if hasattr(args, "token") and args.token: + cmd.extend(['--token', args.token]) + cmd.append('upload') + if hasattr(args, "user") and args.user: + cmd.extend(['--user', args.user]) + cmd.append(path) try: - subprocess.call(args) + subprocess.call(cmd) except: print(no_upload_message) raise @@ -261,11 +215,9 @@ def execute(args, parser): from os import makedirs from os.path import abspath, isdir, isfile - from conda.lock import Locked import conda_build.build as build import conda_build.source as source from conda_build.config import config - from conda_build.metadata import MetaData check_external() @@ -288,210 +240,140 @@ def execute(args, parser): "imported that is hard-linked by files in the trash. " "Will try again on next run.") - conda_version = { - 'python': 'CONDA_PY', - 'numpy': 'CONDA_NPY', - 'perl': 'CONDA_PERL', - 'R': 'CONDA_R', - } - - for lang in ['python', 'numpy', 'perl', 'R']: - versions = getattr(args, lang) - if not versions: - continue - if versions == ['all']: - if all_versions[lang]: - versions = all_versions[lang] - else: - parser.error("'all' is not supported for --%s" % lang) - if len(versions) > 1: - for ver in versions[:]: - setattr(args, lang, [str(ver)]) - execute(args, parser) - # This is necessary to make all combinations build. - setattr(args, lang, versions) - return - else: - version = versions[0] - if lang in ('python', 'numpy'): - version = int(version.replace('.', '')) - setattr(config, conda_version[lang], version) - if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']: - if all_versions[lang]: - raise RuntimeError("%s must be major.minor, like %s, not %s" % - (conda_version[lang], all_versions[lang][-1]/10, version)) - else: - raise RuntimeError("%s must be major.minor, not %s" % - (conda_version[lang], version)) - - # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc. - # Auto-set those env variables - for var in conda_version.values(): - if getattr(config, var): - # Set the env variable. 
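# --- Illustrative aside, not part of the patch ---------------------------
# The block being removed here mapped the --python/--numpy/--perl/--R
# flags onto CONDA_* environment variables. A minimal sketch of that
# mapping (simplified; after this refactor the real logic lives in
# set_language_env_vars):
import os

conda_version = {'python': 'CONDA_PY', 'numpy': 'CONDA_NPY',
                 'perl': 'CONDA_PERL', 'R': 'CONDA_R'}

def export_version(lang, version):
    # python/numpy use the digits-only form, e.g. '3.5' -> 35
    if lang in ('python', 'numpy'):
        version = int(str(version).replace('.', ''))
    os.environ[conda_version[lang]] = str(version)

export_version('python', '3.5')
assert os.environ['CONDA_PY'] == '35'
# -------------------------------------------------------------------------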
- os_environ[var] = str(getattr(config, var)) + set_language_env_vars(args, parser, execute=execute) if args.skip_existing: - if not isdir(config.bldpkgs_dir): - makedirs(config.bldpkgs_dir) - update_index(config.bldpkgs_dir) + for d in config.bldpkgs_dirs: + if not isdir(d): + makedirs(d) + update_index(d) index = build.get_build_index(clear_cache=True) - already_built = [] + already_built = set() to_build_recursive = [] - with Locked(config.croot): - recipes = deque(args.recipe) - while recipes: - arg = recipes.popleft() - try_again = False - # Don't use byte literals for paths in Python 2 - if not PY3: - arg = arg.decode(getpreferredencoding() or 'utf-8') - if isfile(arg): - if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): - recipe_dir = tempfile.mkdtemp() - t = tarfile.open(arg, 'r:*') - t.extractall(path=recipe_dir) - t.close() - need_cleanup = True - else: - print("Ignoring non-recipe: %s" % arg) - continue + recipes = deque(args.recipe) + while recipes: + arg = recipes.popleft() + try_again = False + # Don't use byte literals for paths in Python 2 + if not PY3: + arg = arg.decode(getpreferredencoding() or 'utf-8') + if isfile(arg): + if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): + recipe_dir = tempfile.mkdtemp() + t = tarfile.open(arg, 'r:*') + t.extractall(path=recipe_dir) + t.close() + need_cleanup = True else: - recipe_dir = abspath(arg) - need_cleanup = False - - if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) - - try: - m = MetaData(recipe_dir) - if m.get_value('build/noarch_python'): - config.noarch = True - except exceptions.YamlParsingError as e: - sys.stderr.write(e.error_msg()) - sys.exit(1) - binstar_upload = False - if args.check and len(args.recipe) > 1: - print(m.path) - m.check_fields() - if args.check: - continue - if args.skip_existing: - if m.pkg_fn() in index or m.pkg_fn() in already_built: - print("%s is already built, skipping." % m.dist()) - continue - if m.skip(): - print("Skipped: The %s recipe defines build/skip for this " - "configuration." % m.dist()) + print("Ignoring non-recipe: %s" % arg) continue - if args.output: - try: - m.parse_again(permit_undefined_jinja=False) - except SystemExit: - # Something went wrong; possibly due to undefined GIT_ jinja variables. - # Maybe we need to actually download the source in order to resolve the build_id. - source.provide(m.path, m.get_section('source')) - - # Parse our metadata again because we did not initialize the source - # information before. - m.parse_again(permit_undefined_jinja=False) - - print(build.bldpkg_path(m)) + else: + recipe_dir = abspath(arg) + need_cleanup = False + + # recurse looking for meta.yaml that is potentially not in immediate folder + recipe_dir = find_recipe(recipe_dir) + if not isdir(recipe_dir): + sys.exit("Error: no such directory: %s" % recipe_dir) + + # this fully renders any jinja templating, throwing an error if any data is missing + m, need_source_download = render_recipe(recipe_dir, no_download_source=False, + verbose=False, dirty=args.dirty) + if m.get_value('build/noarch_python'): + config.noarch = True + + if args.check and len(args.recipe) > 1: + print(m.path) + m.check_fields() + if args.check: + continue + if m.skip(): + print("Skipped: The %s recipe defines build/skip for this " + "configuration." % m.dist()) + continue + if args.skip_existing: + # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future. 
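# --- Illustrative aside, not part of the patch ---------------------------
# A minimal sketch of the skip-existing test introduced below, using
# hypothetical filenames: conda >=4.1 prefixes local packages with
# 'local::', so both spellings (and the in-session set) are checked.
index = {'local::foo-1.0-py35_0.tar.bz2': {}}  # hypothetical build index
already_built = set()
pkg_fn = 'foo-1.0-py35_0.tar.bz2'

if ('local::' + pkg_fn in index or
        pkg_fn in index or
        pkg_fn in already_built):
    print(pkg_fn, 'is already built, skipping.')
# -------------------------------------------------------------------------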
+ if ('local::' + m.pkg_fn() in index or + m.pkg_fn() in index or + m.pkg_fn() in already_built): + print(m.dist(), "is already built, skipping.") continue - elif args.test: - build.test(m, move_broken=False) - elif args.source: - source.provide(m.path, m.get_section('source')) - print('Source tree in:', source.get_dir()) + if args.output: + print(bldpkg_path(m)) + continue + elif args.test: + build.test(m, move_broken=False) + elif args.source: + source.provide(m.path, m.get_section('source'), verbose=build.verbose) + print('Source tree in:', source.get_dir()) + else: + # This loop recursively builds dependencies if recipes exist + if args.build_only: + post = False + args.notest = True + args.binstar_upload = False + elif args.post: + post = True + args.notest = True + args.binstar_upload = False else: - # This loop recursively builds dependencies if recipes exist - if args.build_only: - post = False - args.notest = True - args.binstar_upload = False - elif args.post: - post = True - args.notest = True - args.binstar_upload = False - else: - post = None - try: - build.build(m, post=post, - include_recipe=args.include_recipe) - except (RuntimeError, SystemExit) as e: - error_str = str(e) - if error_str.startswith('No packages found') or error_str.startswith('Could not find some'): - # Build dependency if recipe exists - dep_pkg = error_str.split(': ')[1] - # Handle package names that contain version deps. - if ' ' in dep_pkg: - dep_pkg = dep_pkg.split(' ')[0] - recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*') - if exists(dep_pkg): - recipe_glob.append(dep_pkg) - if recipe_glob: - recipes.appendleft(arg) - try_again = True - for recipe_dir in recipe_glob: - if dep_pkg in to_build_recursive: - sys.exit(str(e)) - print(("Missing dependency {0}, but found" + - " recipe directory, so building " + - "{0} first").format(dep_pkg)) - recipes.appendleft(recipe_dir) - to_build_recursive.append(dep_pkg) - else: - raise - elif error_str.strip().startswith("Hint:"): - lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')] - pkgs = [line.lstrip('- ') for line in lines] - # Typically if a conflict is with one of these - # packages, the other package needs to be rebuilt - # (e.g., a conflict with 'python 3.5*' and 'x' means - # 'x' isn't build for Python 3.5 and needs to be - # rebuilt). - skip_names = ['python', 'r'] - pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not - in skip_names] - for pkg in pkgs: - # Handle package names that contain version deps. - if ' ' in pkg: - pkg = pkg.split(' ')[0] - recipe_glob = glob(pkg + '-[v0-9][0-9.]*') - if exists(pkg): - recipe_glob.append(pkg) - if recipe_glob: - recipes.appendleft(arg) - try_again = True - for recipe_dir in recipe_glob: - if pkg in to_build_recursive: - sys.exit(str(e)) - print(error_str) - print(("Missing dependency {0}, but found" + - " recipe directory, so building " + - "{0} first").format(pkg)) - recipes.appendleft(recipe_dir) - to_build_recursive.append(pkg) - else: - raise + post = None + try: + build.build(m, post=post, + include_recipe=args.include_recipe, + keep_old_work=args.keep_old_work, + need_source_download=need_source_download, + dirty=args.dirty) + except (NoPackagesFound, Unsatisfiable) as e: + error_str = str(e) + # Typically if a conflict is with one of these + # packages, the other package needs to be rebuilt + # (e.g., a conflict with 'python 3.5*' and 'x' means + # 'x' isn't build for Python 3.5 and needs to be + # rebuilt). 
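# --- Illustrative aside, not part of the patch ---------------------------
# The except-handler continued below mines the solver error text for
# ' - ' dependency lines and extracts the missing package name so a
# sibling recipe can be built first. Hypothetical error text:
error_str = "Unsatisfiable dependencies:\n - bar 1.2* -> baz >=0.3"
skip_names = ['python', 'r']

for line in error_str.splitlines():
    if not line.startswith(' - '):
        continue
    pkg = line.lstrip(' - ').split(' -> ')[-1]   # 'baz >=0.3'
    pkg = pkg.strip().split(' ')[0]              # 'baz'
    if pkg not in skip_names:
        print('would look for a recipe directory matching', pkg)
# -------------------------------------------------------------------------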
+ skip_names = ['python', 'r'] + add_recipes = [] + for line in error_str.splitlines(): + if not line.startswith(' - '): + continue + pkg = line.lstrip(' - ').split(' -> ')[-1] + pkg = pkg.strip().split(' ')[0] + if pkg in skip_names: + continue + recipe_glob = glob(pkg + '-[v0-9][0-9.]*') + if os.path.exists(pkg): + recipe_glob.append(pkg) + if recipe_glob: + try_again = True + for recipe_dir in recipe_glob: + if pkg in to_build_recursive: + sys.exit(str(e)) + print(error_str) + print(("Missing dependency {0}, but found" + + " recipe directory, so building " + + "{0} first").format(pkg)) + add_recipes.append(recipe_dir) + to_build_recursive.append(pkg) else: raise - if try_again: - continue + recipes.appendleft(arg) + recipes.extendleft(reversed(add_recipes)) - if not args.notest: - build.test(m) + if try_again: + continue + + if not args.notest: + build.test(m) - binstar_upload = True + if need_cleanup: + shutil.rmtree(recipe_dir) - if need_cleanup: - shutil.rmtree(recipe_dir) + # outputs message, or does upload, depending on value of args.binstar_upload + handle_binstar_upload(build.bldpkg_path(m), args) - if binstar_upload: - handle_binstar_upload(build.bldpkg_path(m), args) + already_built.add(m.pkg_fn()) - already_built.append(m.pkg_fn()) def args_func(args, p): try: @@ -505,6 +387,7 @@ def args_func(args, p): print_issue_message(e) raise # as if we did not catch it + def print_issue_message(e): if e.__class__.__name__ not in ('ScannerError', 'ParserError'): message = """\ diff --git a/conda_build/main_convert.py b/conda_build/main_convert.py index 591d24ce5b..ed9969d6df 100644 --- a/conda_build/main_convert.py +++ b/conda_build/main_convert.py @@ -59,7 +59,7 @@ def main(): description=""" Various tools to convert conda packages. Takes a pure Python package build for one platform and converts it to work on one or more other platforms, or -all.""" , +all.""", epilog=epilog, ) @@ -126,7 +126,7 @@ def main(): args_func(args, p) -path_mapping = [# (unix, windows) +path_mapping = [ # (unix, windows) ('lib/python{pyver}', 'Lib'), ('bin', 'Scripts')] diff --git a/conda_build/main_develop.py b/conda_build/main_develop.py index c3f93b8fd9..72e2f4f204 100644 --- a/conda_build/main_develop.py +++ b/conda_build/main_develop.py @@ -8,15 +8,13 @@ import sys from os.path import join, isdir, abspath, expanduser, exists -from os import walk -import fnmatch import shutil from conda.cli.common import add_parser_prefix, get_prefix from conda.cli.conda_argparse import ArgumentParser from conda_build.main_build import args_func from conda_build.post import mk_relative_osx -from conda_build.utils import _check_call +from conda_build.utils import _check_call, rec_glob from conda.install import linked @@ -67,27 +65,6 @@ def main(): args_func(args, p) -def sharedobjects_list(pkg_path): - ''' - return list of shared objects (*.so) found in pkg_path. - - :param pkg_path: look for shared objects to relink in pkg_path - ''' - bin_files = [] - - # only relevant for mac/linux - pattern = '*.so' - - for d_f in walk(pkg_path): - m = fnmatch.filter(d_f[2], pattern) - if m: - # list is not empty, append full path to binary, then add it - # to bin_files list - bin_files.extend([join(d_f[0], f) for f in m]) - - return bin_files - - def relink_sharedobjects(pkg_path, build_prefix): ''' invokes functions in post module to relink to libraries in conda env @@ -101,7 +78,7 @@ def relink_sharedobjects(pkg_path, build_prefix): since runtime libraries should be loaded from environment's lib/. 
first ''' # find binaries in package dir and make them relocatable - bin_files = sharedobjects_list(pkg_path) + bin_files = rec_glob(pkg_path, ['.so']) for b_file in bin_files: if sys.platform == 'darwin': mk_relative_osx(b_file, build_prefix) diff --git a/conda_build/main_index.py b/conda_build/main_index.py index db061818c3..52001984ec 100644 --- a/conda_build/main_index.py +++ b/conda_build/main_index.py @@ -45,7 +45,7 @@ def main(): dest="remove", default=True, help="Don't remove entries for files that don't exist.", - ) + ) args = p.parse_args() diff --git a/conda_build/main_inspect.py b/conda_build/main_inspect.py index aeb098d7d3..651408ba94 100644 --- a/conda_build/main_inspect.py +++ b/conda_build/main_inspect.py @@ -14,19 +14,21 @@ from operator import itemgetter from conda.misc import which_package +from conda.compat import iteritems from conda.cli.common import add_parser_prefix, get_prefix, InstalledPackages from conda.cli.conda_argparse import ArgumentParser import conda.install as ci from conda.api import get_index from conda.cli.install import check_install -from conda.config import get_default_urls, normalize_urls +from conda.config import get_default_urls from conda_build.main_build import args_func from conda_build.ldd import get_linkages, get_package_obj_files, get_untracked_obj_files from conda_build.macho import get_rpaths, human_filetype from conda_build.utils import groupby, getter, comma_join + def main(): p = ArgumentParser( description='Tools for inspecting conda packages.', @@ -52,13 +54,13 @@ def main(): # inspect linkages -h help=linkages_help, description=linkages_help, - ) + ) linkages.add_argument( 'packages', action='store', nargs='*', help='Conda packages to inspect.', - ).completer=InstalledPackages + ).completer = InstalledPackages linkages.add_argument( '--untracked', action='store_true', @@ -95,13 +97,13 @@ def main(): "objects", help=objects_help, description=objects_help, - ) + ) objects.add_argument( 'packages', action='store', nargs='*', help='Conda packages to inspect.', - ).completer=InstalledPackages + ).completer = InstalledPackages objects.add_argument( '--untracked', action='store_true', @@ -130,13 +132,13 @@ def main(): "channels", help=channels_help, description=channels_help, - ) + ) channels.add_argument( '--verbose', action='store_true', help="""Show verbose output. Note that error output to stderr will always be shown regardless of this flag. """, - ) + ) channels.add_argument( '--test-installable', '-t', action='store_true', @@ -148,12 +150,13 @@ def main(): nargs='?', default="defaults", help="The channel to test. The default is %(default)s." 
- ) + ) p.set_defaults(func=execute) args = p.parse_args() args_func(args, p) + def print_linkages(depmap, show_files=False): # Print system and not found last k = sorted(set(depmap.keys()) - {'system', 'not found'}) @@ -168,6 +171,7 @@ def print_linkages(depmap, show_files=False): print(" %s (%s)" % (lib, path)) print() + def replace_path(binary, path, prefix): if sys.platform.startswith('linux'): return abspath(path) @@ -192,6 +196,7 @@ def replace_path(binary, path, prefix): return abspath(path) return 'not found' + def print_object_info(info, key): gb = groupby(key, info) for header in sorted(gb, key=str): @@ -207,12 +212,14 @@ def print_object_info(info, key): print() print() + class _untracked_package: def __str__(self): return "" untracked_package = _untracked_package() + def test_installable(channel='defaults', verbose=True): if not verbose: sys.stdout = open(os.devnull, 'w') @@ -223,19 +230,19 @@ def test_installable(channel='defaults', verbose=True): print("######## Testing platform %s ########" % platform) channels = [channel] + get_default_urls() index = get_index(channel_urls=channels, prepend=False, platform=platform) - for package in sorted(index): - if channel != 'defaults': - # If we give channels at the command line, only look at - # packages from those channels (not defaults). - if index[package]['channel'] not in normalize_urls([channel], platform=platform): - continue - name, version, build = package.rsplit('.tar.bz2', 1)[0].rsplit('-', 2) + for package, rec in iteritems(index): + # If we give channels at the command line, only look at + # packages from those channels (not defaults). + if channel != 'defaults' and rec.get('schannel', 'defaults') == 'defaults': + continue + name = rec['name'] if name in {'conda', 'conda-build'}: # conda can only be installed in the root environment continue # Don't fail just because the package is a different version of Python # than the default. We should probably check depends rather than the # build string. 
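# --- Illustrative aside, not part of the patch ---------------------------
# test_installable pins python from the 'pyXY' tag in the build string.
# A minimal sketch of that match; has_py is assumed to be a module-level
# pattern of roughly this shape (it is not shown in this hunk):
import re
has_py = re.compile(r'py(\d)(\d)')  # assumption about the real pattern

build = 'py35_0'                    # hypothetical build string
match = has_py.search(build)
if match:
    additional_packages = ['python=%s.%s' % (match.group(1), match.group(2))]
    print(additional_packages)      # ['python=3.5']
# -------------------------------------------------------------------------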
+ build = rec['build'] match = has_py.search(build) assert match if 'py' in build else True, build if match: @@ -243,6 +250,7 @@ def test_installable(channel='defaults', verbose=True): else: additional_packages = [] + version = rec['version'] print('Testing %s=%s' % (name, version)) # if additional_packages: # print("Including %s" % additional_packages[0]) @@ -261,6 +269,7 @@ def test_installable(channel='defaults', verbose=True): return success + def execute(args, parser): if not args.subcommand: parser.print_help() @@ -273,29 +282,27 @@ def execute(args, parser): sys.exit(not test_installable(channel=args.channel, verbose=args.verbose)) prefix = get_prefix(args) - installed = ci.linked(prefix) + installed = ci.linked_data(prefix) + installed = {rec['name']: dist for dist, rec in iteritems(installed)} if not args.packages and not args.untracked and not args.all: parser.error("At least one package or --untracked or --all must be provided") if args.all: - args.packages = sorted([i.rsplit('-', 2)[0] for i in installed]) + args.packages = sorted(installed.keys()) if args.untracked: args.packages.append(untracked_package) - if args.subcommand == 'linkages': pkgmap = {} for pkg in args.packages: if pkg == untracked_package: dist = untracked_package + elif pkg not in installed: + sys.exit("Package %s is not installed in %s" % (pkg, prefix)) else: - for dist in installed: - if pkg == dist.rsplit('-', 2)[0]: - break - else: - sys.exit("Package %s is not installed in %s" % (pkg, prefix)) + dist = installed[pkg] if not sys.platform.startswith(('linux', 'darwin')): sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X") @@ -310,18 +317,20 @@ def execute(args, parser): depmap['not found'] = [] for binary in linkages: for lib, path in linkages[binary]: - path = replace_path(binary, path, prefix) if path not in {'', 'not found'} else path + path = replace_path(binary, path, prefix) if path not in {'', + 'not found'} else path if path.startswith(prefix): deps = list(which_package(path)) if len(deps) > 1: - print("Warning: %s comes from multiple packages: %s" % (path, comma_join(deps)), file=sys.stderr) + print("Warning: %s comes from multiple packages: %s" % + (path, comma_join(deps)), file=sys.stderr) if not deps: if exists(path): - depmap['untracked'].append((lib, path.split(prefix - + '/', 1)[-1], binary)) + depmap['untracked'].append((lib, path.split(prefix + + '/', 1)[-1], binary)) else: - depmap['not found'].append((lib, path.split(prefix - + '/', 1)[-1], binary)) + depmap['not found'].append((lib, path.split(prefix + + '/', 1)[-1], binary)) for d in deps: depmap[d].append((lib, path.split(prefix + '/', 1)[-1], binary)) @@ -333,7 +342,7 @@ def execute(args, parser): if args.groupby == 'package': for pkg in args.packages: print(pkg) - print('-'*len(str(pkg))) + print('-' * len(str(pkg))) print() print_linkages(pkgmap[pkg], show_files=args.show_files) @@ -349,7 +358,7 @@ def execute(args, parser): k = sorted(set(inverted_map.keys()) - {'system', 'not found'}) for dep in k + ['system', 'not found']: print(dep) - print('-'*len(str(dep))) + print('-' * len(str(dep))) print() print_linkages(inverted_map[dep], show_files=args.show_files) @@ -361,15 +370,13 @@ def execute(args, parser): for pkg in args.packages: if pkg == untracked_package: dist = untracked_package + elif pkg not in installed: + sys.exit("Package %s is not installed in %s" % (pkg, prefix)) else: - for dist in installed: - if pkg == dist.rsplit('-', 2)[0]: - break - else: - sys.exit("Package %s is not installed in 
%s" % (pkg, prefix))
+            dist = installed[pkg]
 
         print(pkg)
-        print('-'*len(str(pkg)))
+        print('-' * len(str(pkg)))
         print()
 
         if not sys.platform.startswith('darwin'):
diff --git a/conda_build/main_metapackage.py b/conda_build/main_metapackage.py
index 56fe66cf67..ddb20baa51 100644
--- a/conda_build/main_metapackage.py
+++ b/conda_build/main_metapackage.py
@@ -17,6 +17,7 @@
 from conda_build.build import build, bldpkg_path
 from conda_build.main_build import handle_binstar_upload
 
+
 def main():
     p = ArgumentParser(
         description='''
@@ -106,6 +107,7 @@ def main():
     args = p.parse_args()
     args_func(args, p)
 
+
 def execute(args, parser):
     d = defaultdict(dict)
     d['package']['name'] = args.name
diff --git a/conda_build/main_pipbuild.py b/conda_build/main_pipbuild.py
index db196ff58b..1b4b505431 100644
--- a/conda_build/main_pipbuild.py
+++ b/conda_build/main_pipbuild.py
@@ -12,7 +12,6 @@
 import subprocess
 import yaml
 
-#from conda.cli import common
 import conda.config as cc
 from conda.cli.conda_argparse import ArgumentParser
 
@@ -333,7 +332,8 @@ def build_package(package, version=None, noarch_python=False):
 
 
 def execute(args, parser):
-    print("*** conda pipbuild is no longer supported. It will be removmed in a future release of conda-build. ***")
+    print("*** conda pipbuild is no longer supported. "
+          "It will be removed in a future release of conda-build. ***")
     print("Please use conda build instead.")
     global binstar_upload
 
@@ -351,7 +351,8 @@ def execute(args, parser):
 
     search = client.search({'name': package})
     if search:
-        r_name = list(filter(lambda x: ('name' in x and package.lower() == x['name'].lower()), search))
+        r_name = list(filter(lambda x: ('name' in x and
+                                        package.lower() == x['name'].lower()), search))
         if r_name:
             print('Package search: %s' % r_name[0])
             package = r_name[0]['name']
diff --git a/conda_build/main_render.py b/conda_build/main_render.py
new file mode 100644
index 0000000000..01c0c96487
--- /dev/null
+++ b/conda_build/main_render.py
@@ -0,0 +1,132 @@
+# (c) Continuum Analytics, Inc. / http://continuum.io
+# All Rights Reserved
+#
+# conda is distributed under the terms of the BSD 3-clause license.
+# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from conda.cli.common import add_parser_channels
+from conda.cli.conda_argparse import ArgumentParser
+
+from conda_build import __version__
+from conda_build.render import render_recipe, set_language_env_vars, bldpkg_path, output_yaml
+from conda_build.utils import find_recipe
+from conda_build.completers import (RecipeCompleter, PythonVersionCompleter, RVersionsCompleter,
+                                    LuaVersionsCompleter, NumPyVersionCompleter)
+
+on_win = (sys.platform == 'win32')
+
+
+def get_render_parser():
+    p = ArgumentParser(
+        description="""
+Tool for building conda packages. A conda package is a binary tarball
+containing system-level libraries, Python modules, executable programs, or
+other components. 
conda keeps track of dependencies between packages and
+platform specifics, making it simple to create working environments from
+ different sets of packages.""",
+        conflict_handler='resolve'
+    )
+    p.add_argument(
+        '-V', '--version',
+        action='version',
+        help='Show the conda-build version number and exit.',
+        version='conda-build %s' % __version__,
+    )
+    p.add_argument(
+        '-n', "--no-source",
+        action="store_true",
+        help="When templating can't be completed, do not obtain the \
+source to try to fill in related template variables.",
+    )
+    p.add_argument(
+        "--output",
+        action="store_true",
+        help="Output the conda package filename which would have been "
+             "created",
+    )
+    p.add_argument(
+        '--python',
+        action="append",
+        help="""Set the Python version used by conda build. Can be passed
+        multiple times to build against multiple versions. Can be 'all' to
+        build against all known versions (%r)""" % [i for i in
+        PythonVersionCompleter() if '.' in i],
+        metavar="PYTHON_VER",
+        choices=PythonVersionCompleter(),
+    )
+    p.add_argument(
+        '--perl',
+        action="append",
+        help="""Set the Perl version used by conda build. Can be passed
+        multiple times to build against multiple versions.""",
+        metavar="PERL_VER",
+    )
+    p.add_argument(
+        '--numpy',
+        action="append",
+        help="""Set the NumPy version used by conda build. Can be passed
+        multiple times to build against multiple versions. Can be 'all' to
+        build against all known versions (%r)""" % [i for i in
+        NumPyVersionCompleter() if '.' in i],
+        metavar="NUMPY_VER",
+        choices=NumPyVersionCompleter(),
+    )
+    p.add_argument(
+        '--R',
+        action="append",
+        help="""Set the R version used by conda build. Can be passed
+        multiple times to build against multiple versions.""",
+        metavar="R_VER",
+        choices=RVersionsCompleter(),
+    )
+    p.add_argument(
+        '--lua',
+        action="append",
+        help="Set the Lua version used by conda build. Can be passed "
+             "multiple times to build against multiple versions (%r)." %
+             [i for i in LuaVersionsCompleter()],
+        metavar="LUA_VER",
+        choices=LuaVersionsCompleter(),
+    )
+    add_parser_channels(p)
+    return p
+
+
+def main():
+    p = get_render_parser()
+    p.add_argument(
+        '-f', '--file',
+        action="store",
+        help="write YAML to file, given as argument here.\
+              Overwrites existing files."
+    )
+    # we do this one separately because we only allow one entry to conda render
+    p.add_argument(
+        'recipe',
+        action="store",
+        metavar='RECIPE_PATH',
+        choices=RecipeCompleter(),
+        help="Path to recipe directory.",
+    )
+    # this is here because we have a different default than build
+    p.add_argument(
+        '--verbose',
+        action='store_true',
+        help='Enable verbose output from download tools and progress updates',
+    )
+    args = p.parse_args()
+    set_language_env_vars(args, p)
+
+    metadata, _ = render_recipe(find_recipe(args.recipe), no_download_source=args.no_source,
+                                verbose=args.verbose)
+    if args.output:
+        print(bldpkg_path(metadata))
+    else:
+        print(output_yaml(metadata, args.file))
+
+if __name__ == '__main__':
+    main()
diff --git a/conda_build/main_sign.py b/conda_build/main_sign.py
index c2f921caea..abb37a506b 100644
--- a/conda_build/main_sign.py
+++ b/conda_build/main_sign.py
@@ -24,7 +24,6 @@
 from conda.signature import KEYS_DIR, hash_file, verify, SignatureError
 
-
 def keygen(name, size=2048):
     print("Generating public/private key pair (%d bits)..." 
% size) random_generator = Random.new().read @@ -72,8 +71,7 @@ def main(): p.add_argument('files', help="Files to sign.", nargs='*', - metavar="FILE", - ) + metavar="FILE",) p.add_argument('-k', '--keygen', action="store", help="Generate a public-private " diff --git a/conda_build/main_skeleton.py b/conda_build/main_skeleton.py index 4a6af6d0de..c3eb3aedbb 100644 --- a/conda_build/main_skeleton.py +++ b/conda_build/main_skeleton.py @@ -11,6 +11,7 @@ from conda.cli.conda_argparse import ArgumentParser from conda.cli.common import Completer + class PyPIPackagesCompleter(Completer): def __init__(self, prefix, parsed_args, **kwargs): self.prefix = prefix @@ -19,9 +20,10 @@ def __init__(self, prefix, parsed_args, **kwargs): def _get_items(self): from conda_build.pypi import get_xmlrpc_client args = self.parsed_args - client = get_xmlrpc_client(getattr(args, 'pypi_url', 'https://pypi.python.org/pypi')) + client = get_xmlrpc_client(getattr(args, 'pypi_url')) return [i.lower() for i in client.list_packages()] + class CRANPackagesCompleter(Completer): def __init__(self, prefix, parsed_args, **kwargs): self.prefix = prefix @@ -36,6 +38,7 @@ def _get_items(self): return [i.lower() for i in cran_metadata] + ['r-%s' % i.lower() for i in cran_metadata] + def main(): p = ArgumentParser( description=""" @@ -105,7 +108,7 @@ def main(): pypi.add_argument( "--pypi-url", action="store", - default='https://pypi.python.org/pypi', + default='https://pypi.io/pypi', help="URL to use for PyPI (default: %(default)s).", ) pypi.add_argument( @@ -132,14 +135,14 @@ def main(): action='store_true', help="""Compare the package version of the recipe with the one available on PyPI.""" - ) + ) pypi.add_argument( "--python-version", action='store', default=default_python, help="""Version of Python to use to run setup.py. Default is %(default)s.""", choices=['2.6', '2.7', '3.3', '3.4'], - ) + ) pypi.add_argument( "--manual-url", @@ -147,14 +150,14 @@ def main(): default=False, help="Manually choose source url when more than one urls are present." + "Default is the one with least source size." - ) + ) pypi.add_argument( "--noarch-python", action='store_true', default=False, help="Creates recipe as noarch python" - ) + ) cpan = repos.add_parser( "cpan", @@ -190,7 +193,6 @@ def main(): action='store_true', help='Create recipes for dependencies if they do not already exist.') - cran = repos.add_parser( "cran", help=""" @@ -266,6 +268,32 @@ def main(): --output-dir). If packages are given, they are updated; otherwise, all recipes in the output directory are updated.""", ) + luarocks = repos.add_parser( + "luarocks", + help=""" +Create recipe skeleton for luarocks, hosted at luarocks.org + """, + ) + luarocks.add_argument( + "packages", + action="store", + nargs='+', + help="luarocks packages to create recipe skeletons for.", + ) + luarocks.add_argument( + "--output-dir", + help="Directory to write recipes to (default: %(default)s).", + default=".", + ) + luarocks.add_argument( + "--version", + help="Version to use. 
Applies to all packages.", + ) + luarocks.add_argument( + "--recursive", + action='store_true', + help='Create recipes for dependencies if they do not already exist.') + p.set_defaults(func=execute) args = p.parse_args() @@ -276,6 +304,7 @@ def execute(args, parser): import conda_build.pypi as pypi import conda_build.cpan as cpan import conda_build.cran as cran + import conda_build.luarocks as luarocks from conda.lock import Locked from conda_build.config import config @@ -288,6 +317,8 @@ def execute(args, parser): cpan.main(args, parser) elif args.repo == 'cran': cran.main(args, parser) + elif args.repo == 'luarocks': + luarocks.main(args, parser) if __name__ == '__main__': main() diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 4e975bf7e0..6f4566aab1 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -11,7 +11,8 @@ from conda.resolve import MatchSpec from conda.cli.common import specs_from_url -from . import exceptions +from conda_build import exceptions +from conda_build.features import feature_list try: import yaml @@ -28,44 +29,62 @@ from conda_build.config import config from conda_build.utils import comma_join + def ns_cfg(): # Remember to update the docs of any of this changes plat = cc.subdir py = config.CONDA_PY np = config.CONDA_NPY pl = config.CONDA_PERL + lua = config.CONDA_LUA assert isinstance(py, int), py d = dict( - linux = plat.startswith('linux-'), - linux32 = bool(plat == 'linux-32'), - linux64 = bool(plat == 'linux-64'), - arm = plat.startswith('linux-arm'), - osx = plat.startswith('osx-'), - unix = plat.startswith(('linux-', 'osx-')), - win = plat.startswith('win-'), - win32 = bool(plat == 'win-32'), - win64 = bool(plat == 'win-64'), - pl = pl, - py = py, - py3k = bool(30 <= py < 40), - py2k = bool(20 <= py < 30), - py26 = bool(py == 26), - py27 = bool(py == 27), - py33 = bool(py == 33), - py34 = bool(py == 34), - py35 = bool(py == 35), - np = np, - os = os, - environ = os.environ, + linux=plat.startswith('linux-'), + linux32=bool(plat == 'linux-32'), + linux64=bool(plat == 'linux-64'), + arm=plat.startswith('linux-arm'), + osx=plat.startswith('osx-'), + unix=plat.startswith(('linux-', 'osx-')), + win=plat.startswith('win-'), + win32=bool(plat == 'win-32'), + win64=bool(plat == 'win-64'), + x86=plat.endswith(('-32', '-64')), + x86_64=plat.endswith('-64'), + pl=pl, + py=py, + lua=lua, + luajit=bool(lua[0] == "2"), + py3k=bool(30 <= py < 40), + py2k=bool(20 <= py < 30), + py26=bool(py == 26), + py27=bool(py == 27), + py33=bool(py == 33), + py34=bool(py == 34), + py35=bool(py == 35), + np=np, + os=os, + environ=os.environ, ) for machine in cc.non_x86_linux_machines: d[machine] = bool(plat == 'linux-%s' % machine) + for feature, value in feature_list: + d[feature] = value d.update(os.environ) return d -sel_pat = re.compile(r'(.+?)\s*(#.*)?\[(.+)\](?(2).*)$') +# Selectors must be either: +# - at end of the line +# - embedded (anywhere) within a comment +# +# Notes: +# - [([^\[\]]+)\] means "find a pair of brackets containing any +# NON-bracket chars, and capture the contents" +# - (?(2).*)$ means "allow trailing characters iff group 2 (#.*) was found." 
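# --- Illustrative aside, not part of the patch ---------------------------
# How the widened selector pattern defined just below behaves: the
# bracket pair may no longer contain brackets itself, and trailing text
# is only allowed when the selector sits inside a '#' comment.
import re
sel_pat = re.compile(r'(.+?)\s*(#.*)?\[([^\[\]]+)\](?(2).*)$')

for line in ('foo  # [win]', 'bar  # a comment [py3k] trailing', 'plain: 1'):
    m = sel_pat.match(line)
    print(line, '->', (m.group(1), m.group(3)) if m else None)
# ('foo', 'win'), ('bar', 'py3k'), then None for the selector-free line
# -------------------------------------------------------------------------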
+sel_pat = re.compile(r'(.+?)\s*(#.*)?\[([^\[\]]+)\](?(2).*)$') + + def select_lines(data, namespace): lines = [] for i, line in enumerate(data.splitlines()): @@ -94,11 +113,11 @@ def select_lines(data, namespace): def yamlize(data): try: return yaml.load(data, Loader=BaseLoader) - except yaml.parser.ParserError as e: + except yaml.error.YAMLError as e: if '{{' in data: try: import jinja2 - jinja2 # Avoid pyflakes failure: 'jinja2' imported but unused + jinja2 # Avoid pyflakes failure: 'jinja2' imported but unused except ImportError: raise exceptions.UnableToParseMissingJinja2(original=e) raise exceptions.UnableToParse(original=e) @@ -118,6 +137,7 @@ def yamlize(data): Public-Domain """.split()) + def ensure_valid_license_family(meta): try: license_family = meta['about']['license_family'] @@ -128,6 +148,7 @@ def ensure_valid_license_family(meta): "about/license_family '%s' not allowed. Allowed families are %s." % (license_family, comma_join(sorted(allowed_license_families))))) + def ensure_valid_fields(meta): try: pin_depends = meta['build']['pin_depends'] @@ -136,7 +157,8 @@ def ensure_valid_fields(meta): if pin_depends not in ('', 'record', 'strict'): raise RuntimeError("build/pin_depends cannot be '%s'" % pin_depends) -def parse(data): + +def parse(data, path=None): data = select_lines(data, ns_cfg()) res = yamlize(data) # ensure the result is a dict @@ -145,70 +167,61 @@ def parse(data): for field in FIELDS: if field not in res: continue + # ensure that empty fields are dicts (otherwise selectors can cause invalid fields) if not res[field]: res[field] = {} if not isinstance(res[field], dict): - raise RuntimeError("The %s field should be a dict, not %s" % - (field, res[field].__class__.__name__)) - # ensure those are lists - for field in ('source/patches', - 'build/entry_points', 'build/script_env', - 'build/features', 'build/track_features', - 'requirements/build', 'requirements/run', - 'requirements/conflicts', 'test/requires', - 'test/files', 'test/commands', 'test/imports'): - section, key = field.split('/') - if res.get(section) is None: - res[section] = {} - if res[section].get(key, None) is None: - res[section][key] = [] - - # ensure those are strings - for field in ('package/version', 'build/string', 'build/pin_depends', - 'source/svn_rev', 'source/git_tag', 'source/git_branch', - 'source/md5', 'source/git_rev', 'source/path'): - section, key = field.split('/') - if res.get(section) is None: - res[section] = {} - val = res[section].get(key, '') - if val is None: - val = '' - res[section][key] = text_type(val) - - # ensure these fields are booleans - trues = {'y', 'on', 'true', 'yes'} - falses = {'n', 'no', 'false', 'off'} - for field in ('build/osx_is_app', 'build/preserve_egg_dir', - 'build/binary_relocation', - 'build/detect_binary_files_with_prefix', - 'build/skip', 'app/own_environment'): - section, key = field.split('/') - if res.get(section) is None: - res[section] = {} - - try: - val = res[section].get(key, '').lower() - except AttributeError: - # val wasn't a string - continue - - if val in trues: - res[section][key] = True - elif val in falses: - res[section][key] = False + raise RuntimeError("The %s field should be a dict, not %s in file %s." 
% + (field, res[field].__class__.__name__, path)) ensure_valid_fields(res) ensure_valid_license_family(res) return sanitize(res) +trues = {'y', 'on', 'true', 'yes'} +falses = {'n', 'no', 'false', 'off'} + +default_structs = { + 'source/patches': list, + 'build/entry_points': list, + 'build/script_env': list, + 'build/features': list, + 'build/track_features': list, + 'requirements/build': list, + 'requirements/run': list, + 'requirements/conflicts': list, + 'test/requires': list, + 'test/files': list, + 'test/commands': list, + 'test/imports': list, + 'package/version': text_type, + 'build/string': text_type, + 'build/pin_depends': text_type, + 'source/svn_rev': text_type, + 'source/git_tag': text_type, + 'source/git_branch': text_type, + 'source/md5': text_type, + 'source/git_rev': text_type, + 'source/path': text_type, + 'source/git_url': text_type, + 'build/osx_is_app': bool, + 'build/preserve_egg_dir': bool, + 'build/binary_relocation': bool, + 'build/noarch_python': bool, + 'build/detect_binary_files_with_prefix': bool, + 'build/skip': bool, + 'app/own_environment': bool +} + + def sanitize(meta): """ Sanitize the meta-data to remove aliases/handle deprecation """ # make a copy to avoid side-effects - meta = dict(meta) + meta = meta.copy() sanitize_funs = [('source', _git_clean), ] for section, func in sanitize_funs: if section in meta: @@ -232,23 +245,23 @@ def _git_clean(source_meta): git_rev_tags = (git_rev,) + git_rev_tags_old - has_rev_tags = tuple(bool(source_meta[tag]) for + has_rev_tags = tuple(bool(source_meta.get(tag, text_type())) for tag in git_rev_tags) if sum(has_rev_tags) > 1: - msg = "Error: mulitple git_revs:" + msg = "Error: multiple git_revs:" msg += ', '.join("{}".format(key) for key, has in zip(git_rev_tags, has_rev_tags) if has) sys.exit(msg) # make a copy of the input so we have no side-effects - ret_meta = dict(source_meta) + ret_meta = source_meta.copy() # loop over the old versions for key, has in zip(git_rev_tags[1:], has_rev_tags[1:]): # update if needed if has: ret_meta[git_rev_tags[0]] = ret_meta[key] # and remove - del ret_meta[key] + ret_meta.pop(key, None) return ret_meta @@ -260,21 +273,24 @@ def _git_clean(source_meta): 'git_url', 'git_tag', 'git_branch', 'git_rev', 'git_depth', 'hg_url', 'hg_tag', 'svn_url', 'svn_rev', 'svn_ignore_externals', - 'patches'], + 'patches' + ], 'build': ['number', 'string', 'entry_points', 'osx_is_app', 'features', 'track_features', 'preserve_egg_dir', 'no_link', 'binary_relocation', 'script', 'noarch_python', - 'has_prefix_files', 'binary_has_prefix_files', 'script_env', - 'detect_binary_files_with_prefix', 'rpaths', + 'has_prefix_files', 'binary_has_prefix_files', 'ignore_prefix_files', + 'detect_binary_files_with_prefix', 'rpaths', 'script_env', 'always_include_files', 'skip', 'msvc_compiler', - 'pin_depends' # pin_depends is experimental still - ], + 'pin_depends' # pin_depends is experimental still + ], 'requirements': ['build', 'run', 'conflicts'], 'app': ['entry', 'icon', 'summary', 'type', 'cli_opts', 'own_environment'], 'test': ['requires', 'commands', 'files', 'imports'], - 'about': ['home', 'license', 'license_family', - 'summary', 'readme', 'license_file'], + 'about': ['home', 'dev_url', 'doc_url', 'license_url', # these are URLs + 'license', 'summary', 'description', 'license_family', # text + 'license_file', 'readme', # paths in source tree + ], } @@ -287,7 +303,7 @@ def check_bad_chrs(s, field): sys.exit("Error: bad character '%s' in %s: %s" % (c, field, s)) -def handle_config_version(ms, ver): +def 
handle_config_version(ms, ver, dep_type='run'): """ 'ms' is an instance of MatchSpec, and 'ver' is the version from the configuration, e.g. for ms.name == 'python', ver = 26 or None, @@ -301,10 +317,16 @@ def handle_config_version(ms, ver): if ver is None: raise RuntimeError("'%s' requires external setting" % ms.spec) # (no return here - proceeds below) - else: # regular version + else: # regular version return ms - if ver is None or (ms.strictness == 1 and ms.name == 'numpy'): + # If we don't have a configured version, or we are dealing with a simple + # numpy runtime dependency; just use "numpy"/the name of the package as + # the specification. In practice this means that a recipe which just + # defines numpy as a runtime dependency will match any version of numpy + # at install time. + if ver is None or (dep_type == 'run' and ms.strictness == 1 and + ms.name == 'numpy'): return MatchSpec(ms.name) ver = text_type(ver) @@ -331,12 +353,13 @@ def __init__(self, path): # Start with bare-minimum contents so we can call environ.get_dict() with impunity # We'll immediately replace these contents in parse_again() self.meta = parse("package:\n" - " name: uninitialized") + " name: uninitialized", path=self.meta_path) # This is the 'first pass' parse of meta.yaml, so not all variables are defined yet # (e.g. GIT_FULL_HASH, etc. are undefined) # Therefore, undefined jinja variables are permitted here # In the second pass, we'll be more strict. See build.build() + self.undefined_jinja_vars = [] self.parse_again(permit_undefined_jinja=True) def parse_again(self, permit_undefined_jinja=False): @@ -348,7 +371,7 @@ def parse_again(self, permit_undefined_jinja=False): """ if not self.meta_path: return - self.meta = parse(self._get_contents(permit_undefined_jinja)) + self.meta = parse(self._get_contents(permit_undefined_jinja), path=self.meta_path) if (isfile(self.requirements_path) and not self.meta['requirements']['run']): @@ -370,9 +393,30 @@ def fromdict(cls, metadata): def get_section(self, section): return self.meta.get(section, {}) - def get_value(self, field, default=None): + def get_value(self, field, default=None, autotype=True): + """ + Get a value from a meta.yaml. + :param field: Field to return + :param default: Default object to return if field doesn't exist + :param autotype: If True, return the default type of field if one exists. + False will return the default object. + :return: + """ section, key = field.split('/') + + # get correct default + if autotype and default is None and field in default_structs: + default = default_structs[field]() + value = self.get_section(section).get(key, default) + + # handle yaml 1.1 boolean values + if isinstance(value, text_type): + if value.lower() in trues: + value = True + elif value.lower() in falses: + value = False + return value def check_fields(self): @@ -412,7 +456,10 @@ def ms_depends(self, typ='run'): ('python', config.CONDA_PY), ('numpy', config.CONDA_NPY), ('perl', config.CONDA_PERL), + ('lua', config.CONDA_LUA), + # r is kept for legacy installations, r-base deprecates it. 
('r', config.CONDA_R), + ('r-base', config.CONDA_R), ] for spec in self.get_value('requirements/' + typ, []): try: @@ -425,7 +472,7 @@ def ms_depends(self, typ='run'): if ms.name == name: if self.get_value('build/noarch_python'): continue - ms = handle_config_version(ms, ver) + ms = handle_config_version(ms, ver, typ) for c in '=!@#$%^&*:;"\'\\|<>?/': if c in ms.name: @@ -451,7 +498,8 @@ def build_id(self): res = [] version_pat = re.compile(r'(?:==)?(\d+)\.(\d+)') for name, s in (('numpy', 'np'), ('python', 'py'), - ('perl', 'pl'), ('r', 'r')): + ('perl', 'pl'), ('lua', 'lua'), + ('r', 'r'), ('r-base', 'r')): for ms in self.ms_depends(): if ms.name == name: try: @@ -462,7 +510,7 @@ def build_id(self): break if any(i in v for i in ',|>!<'): break - if name not in ['perl', 'r']: + if name not in ['perl', 'lua', 'r', 'r-base']: match = version_pat.match(v) if match: res.append(s + match.group(1) + match.group(2)) @@ -491,7 +539,7 @@ def app_meta(self): d = {'type': 'app'} if self.get_value('app/icon'): d['icon'] = '%s.png' % md5_file(join( - self.path, self.get_value('app/icon'))) + self.path, self.get_value('app/icon'))) for field, key in [('app/entry', 'app_entry'), ('app/type', 'app_type'), @@ -505,14 +553,14 @@ def app_meta(self): def info_index(self): d = dict( - name = self.name(), - version = self.version(), - build = self.build_id(), - build_number = self.build_number(), - platform = cc.platform, - arch = cc.arch_name, - subdir = cc.subdir, - depends = sorted(' '.join(ms.spec.split()) + name=self.name(), + version=self.version(), + build=self.build_id(), + build_number=self.build_number(), + platform=cc.platform, + arch=cc.arch_name, + subdir=cc.subdir, + depends=sorted(' '.join(ms.spec.split()) for ms in self.ms_depends()), ) for key in ('license', 'license_family'): @@ -537,7 +585,18 @@ def has_prefix_files(self): raise RuntimeError('build/has_prefix_files should be a list of paths') if sys.platform == 'win32': if any('\\' in i for i in ret): - raise RuntimeError("build/has_prefix_files paths must use / as the path delimiter on Windows") + raise RuntimeError("build/has_prefix_files paths must use / " + "as the path delimiter on Windows") + return ret + + def ignore_prefix_files(self): + ret = self.get_value('build/ignore_prefix_files', False) + if type(ret) not in (list, bool): + raise RuntimeError('build/ignore_prefix_files should be boolean or a list of paths') + if sys.platform == 'win32': + if type(ret) is list and any('\\' in i for i in ret): + raise RuntimeError("build/ignore_prefix_files paths must use / " + "as the path delimiter on Windows") return ret def always_include_files(self): @@ -549,7 +608,8 @@ def binary_has_prefix_files(self): raise RuntimeError('build/binary_has_prefix_files should be a list of paths') if sys.platform == 'win32': if any('\\' in i for i in ret): - raise RuntimeError("build/binary_has_prefix_files paths must use / as the path delimiter on Windows") + raise RuntimeError("build/binary_has_prefix_files paths must use / " + "as the path delimiter on Windows") return ret def skip(self): @@ -568,14 +628,14 @@ def _get_contents(self, permit_undefined_jinja): import jinja2 except ImportError: print("There was an error importing jinja2.", file=sys.stderr) - print("Please run `conda install jinja2` to enable jinja template support", file=sys.stderr) + print("Please run `conda install jinja2` to enable jinja template support", file=sys.stderr) # noqa with open(self.meta_path) as fd: return fd.read() - from conda_build.jinja_context import context_processor + 
from conda_build.jinja_context import context_processor, UndefinedNeverFail, FilteredLoader path, filename = os.path.split(self.meta_path) - loaders = [# search relative to '/Lib/site-packages/conda_build/templates' + loaders = [ # search relative to '/Lib/site-packages/conda_build/templates' jinja2.PackageLoader('conda_build'), # search relative to RECIPE_DIR jinja2.FileSystemLoader(path) @@ -585,53 +645,36 @@ def _get_contents(self, permit_undefined_jinja): conda_env_path = os.environ.get('CONDA_DEFAULT_ENV') # path to current conda environment if conda_env_path and os.path.isdir(conda_env_path): conda_env_path = os.path.abspath(conda_env_path) - conda_env_path = conda_env_path.replace('\\', '/') # need unix-style path + conda_env_path = conda_env_path.replace('\\', '/') # need unix-style path env_loader = jinja2.FileSystemLoader(conda_env_path) loaders.append(jinja2.PrefixLoader({'$CONDA_DEFAULT_ENV': env_loader})) undefined_type = jinja2.StrictUndefined if permit_undefined_jinja: - class UndefinedNeverFail(jinja2.Undefined): - """ - A class for Undefined jinja variables. - This is even less strict than the default jinja2.Undefined class, - because it permits things like {{ MY_UNDEFINED_VAR[:2] }} and {{ MY_UNDEFINED_VAR|int }}. - This can mask lots of errors in jinja templates, so it should only be used for a first-pass - parse, when you plan on running a 'strict' second pass later. - """ - __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \ - __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \ - __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \ - __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = \ - __complex__ = __pow__ = __rpow__ = \ - lambda *args, **kwargs: UndefinedNeverFail() - - __str__ = __repr__ = \ - lambda *args, **kwargs: u'' - - __int__ = lambda _: 0 - __float__ = lambda _: 0.0 - - def __getattr__(self, k): - try: - return object.__getattr__(self, k) - except AttributeError: - return UndefinedNeverFail() - - def __setattr__(self, k, v): - pass - + # The UndefinedNeverFail class keeps a global list of all undefined names + # Clear any leftover names from the last parse. + UndefinedNeverFail.all_undefined_names = [] undefined_type = UndefinedNeverFail - env = jinja2.Environment(loader=jinja2.ChoiceLoader(loaders), undefined=undefined_type) + loader = FilteredLoader(jinja2.ChoiceLoader(loaders)) + env = jinja2.Environment(loader=loader, undefined=undefined_type) + env.globals.update(ns_cfg()) env.globals.update(context_processor(self, path)) try: template = env.get_or_select_template(filename) - return template.render(environment=env) + rendered = template.render(environment=env) + + if permit_undefined_jinja: + self.undefined_jinja_vars = UndefinedNeverFail.all_undefined_names + else: + self.undefined_jinja_vars = [] + + return rendered except jinja2.TemplateError as ex: - sys.exit("Error: Failed to render jinja template in {}:\n{}".format(self.meta_path, ex.message)) + sys.exit("Error: Failed to render jinja template in {}:\n{}" + .format(self.meta_path, ex.message)) def __unicode__(self): ''' @@ -650,11 +693,3 @@ def __repr__(self): String representation of the MetaData. 
         '''
         return self.__str__()
-
-
-if __name__ == '__main__':
-    from pprint import pprint
-    from os.path import expanduser
-
-    m = MetaData(expanduser('~/conda-recipes/pycosat'))
-    pprint(m.info_index())
diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py
index bf3d5a1907..ecad18d85f 100644
--- a/conda_build/noarch_python.py
+++ b/conda_build/noarch_python.py
@@ -4,11 +4,13 @@
 import json
 import shutil
 import locale
-from os.path import basename, dirname, isdir, join
+from os.path import basename, dirname, isdir, join, isfile
 
 from conda_build.config import config
 from conda_build.post import SHEBANG_PAT
 
+ISWIN = sys.platform.startswith('win')
+
 
 def _force_dir(dirname):
     if not isdir(dirname):
@@ -24,7 +26,7 @@ def rewrite_script(fn):
     directory after it passes some sanity checks for noarch packages"""
 
     # Load and check the source file for not being a binary
-    src = join(config.build_prefix, 'bin', fn)
+    src = join(config.build_prefix, 'Scripts' if ISWIN else 'bin', fn)
    with io.open(src, encoding=locale.getpreferredencoding()) as fi:
         try:
             data = fi.read()
@@ -32,17 +34,27 @@ def rewrite_script(fn):
             _error_exit("Noarch package contains binary script: %s" % fn)
     os.unlink(src)
 
-    # Check that it does have a #! python string
-    m = SHEBANG_PAT.match(data)
-    if not (m and 'python' in m.group()):
+    # Get rid of '-script.py' suffix on Windows
+    if ISWIN and fn.endswith('-script.py'):
+        fn = fn[:-10]
+
+    # Check that it does have a #! python string, and skip it
+    encoding = sys.stdout.encoding or 'utf8'
+
+    m = SHEBANG_PAT.match(data.encode(encoding))
+    if m and b'python' in m.group():
+        new_data = data[data.find('\n') + 1:]
+    elif ISWIN:
+        new_data = data
+    else:
         _error_exit("No python shebang in: %s" % fn)
 
-    # Rewrite the file to the python-scripts directory after skipping the #! 
line - new_data = data[data.find('\n') + 1:] + # Rewrite the file to the python-scripts directory dst_dir = join(config.build_prefix, 'python-scripts') _force_dir(dst_dir) with open(join(dst_dir, fn), 'w') as fo: fo.write(new_data) + return fn def handle_file(f, d): @@ -55,8 +67,12 @@ def handle_file(f, d): os.unlink(path) # The presence of .so indicated this is not a noarch package - elif f.endswith('.so'): - _error_exit("Error: Shared object file found: %s" % f) + elif f.endswith(('.so', '.dll', '.pyd', '.exe', '.dylib')): + if f.endswith('.exe') and (isfile(f[:-4] + '-script.py') or + basename(f[:-4]) in d['python-scripts']): + os.unlink(path) # this is an entry point with a matching xx-script.py + return + _error_exit("Error: Binary library or executable found: %s" % f) elif 'site-packages' in f: nsp = join(config.build_prefix, 'site-packages') @@ -70,44 +86,44 @@ def handle_file(f, d): d['site-packages'].append(g[14:]) # Treat scripts specially with the logic from above - elif f.startswith('bin/'): + elif f.startswith(('bin/', 'Scripts')): fn = basename(path) - rewrite_script(fn) + fn = rewrite_script(fn) d['python-scripts'].append(fn) # Include examples in the metadata doc - elif f.startswith('Examples/'): + elif f.startswith(('Examples/', 'Examples\\')): d['Examples'].append(f[9:]) - else: _error_exit("Error: Don't know how to handle file: %s" % f) def transform(m, files): assert 'py_' in m.dist() - if sys.platform == 'win32': - _error_exit("Error: Python noarch packages can currently " - "not be created on Windows systems.") prefix = config.build_prefix name = m.name() + bin_dir = join(prefix, 'bin') + _force_dir(bin_dir) + # Create *nix prelink script - with open(join(prefix, 'bin/.%s-pre-link.sh' % name), 'w') as fo: + # Note: it's important to use LF newlines or it wont work if we build on Win + with open(join(bin_dir, '.%s-pre-link.sh' % name), 'wb') as fo: fo.write('''\ #!/bin/bash $PREFIX/bin/python $SOURCE_DIR/link.py -''') +'''.encode('utf-8')) scripts_dir = join(prefix, 'Scripts') _force_dir(scripts_dir) - # Create windows prelink script - with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'w') as fo: + # Create windows prelink script (be nice and use Windows newlines) + with open(join(scripts_dir, '.%s-pre-link.bat' % name), 'wb') as fo: fo.write('''\ @echo off "%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py" -''') +'''.replace('\n', '\r\n').encode('utf-8')) d = {'dist': m.dist(), 'site-packages': [], @@ -118,6 +134,12 @@ def transform(m, files): for f in files: handle_file(f, d) + # Windows path conversion + if ISWIN: + for fns in (d['site-packages'], d['Examples']): + for i in range(len(fns)): + fns[i] = fns[i].replace('\\', '/') + # Find our way to this directory this_dir = dirname(__file__) diff --git a/conda_build/post.py b/conda_build/post.py index 9566dcfd5b..2e579e623c 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -13,8 +13,9 @@ except ImportError: readlink = False import io -from subprocess import call, Popen, PIPE +from subprocess import call from collections import defaultdict +import mmap from conda_build.config import config from conda_build import external @@ -30,7 +31,7 @@ elif sys.platform == 'darwin': from conda_build import macho -SHEBANG_PAT = re.compile(r'^#!.+$', re.M) +SHEBANG_PAT = re.compile(br'^#!.+$', re.M) def is_obj(path): @@ -45,24 +46,37 @@ def fix_shebang(f, osx_is_app=False): return elif os.path.islink(path): return - with io.open(path, encoding=locale.getpreferredencoding()) as fi: + + if os.stat(path).st_size == 0: + 
return + + with io.open(path, encoding=locale.getpreferredencoding(), mode='r+') as fi: try: - data = fi.read() - except UnicodeDecodeError: # file is binary + data = fi.read(100) + except UnicodeDecodeError: # file is binary return - m = SHEBANG_PAT.match(data) - if not (m and 'python' in m.group()): - return + + # regexp on the memory mapped file so we only read it into + # memory if the regexp matches. + mm = mmap.mmap(fi.fileno(), 0) + m = SHEBANG_PAT.match(mm) + + if not (m and b'python' in m.group()): + return + + data = mm[:] + + encoding = sys.stdout.encoding or 'utf8' py_exec = ('/bin/bash ' + config.build_prefix + '/bin/python.app' if sys.platform == 'darwin' and osx_is_app else config.build_prefix + '/bin/' + basename(config.build_python)) - new_data = SHEBANG_PAT.sub('#!' + py_exec, data, count=1) + new_data = SHEBANG_PAT.sub(b'#!' + py_exec.encode(encoding), data, count=1) if new_data == data: return print("updating shebang:", f) with io.open(path, 'w', encoding=locale.getpreferredencoding()) as fo: - fo.write(new_data) + fo.write(new_data.decode(encoding)) os.chmod(path, int('755', 8)) @@ -83,7 +97,7 @@ def remove_easy_install_pth(files, preserve_egg_dir=False): for egg_path in glob(join(sp_dir, '*-py*.egg')): if isdir(egg_path): if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i - in walk_prefix(egg_path, False, windows_forward_slashes=False)): + in walk_prefix(egg_path, False, windows_forward_slashes=False)): write_pth(egg_path) continue @@ -108,7 +122,7 @@ def remove_easy_install_pth(files, preserve_egg_dir=False): os.rename(join(egg_path, fn), join(sp_dir, fn)) elif isfile(egg_path): - if not egg_path in absfiles: + if egg_path not in absfiles: continue print('found egg:', egg_path) write_pth(egg_path) @@ -159,7 +173,7 @@ def find_lib(link, path=None): if link not in files: sys.exit("Error: Could not find %s" % link) return link - if link.startswith('/'): # but doesn't start with the build prefix + if link.startswith('/'): # but doesn't start with the build prefix return if link.startswith('@rpath/'): # Assume the rpath already points to lib, so there is no need to @@ -190,7 +204,9 @@ def find_lib(link, path=None): return file_names[link][0] print("Don't know how to find %s, skipping" % link) -def osx_ch_link(path, link): + +def osx_ch_link(path, link_dict): + link = link_dict['name'] print("Fixing linking of %s in %s" % (link, path)) link_loc = find_lib(link, path) if not link_loc: @@ -216,7 +232,7 @@ def osx_ch_link(path, link): # @loader_path/path_to_lib/lib_to_link/basename(link), like # @loader_path/../../things/libthings.dylib. - ret = '@rpath/%s/%s' % (lib_to_link, basename(link)) + ret = '@rpath/%s/%s' % (lib_to_link, basename(link)) # XXX: IF the above fails for whatever reason, the below can be used # TODO: This might contain redundant ..'s if link and path are both in @@ -224,8 +240,10 @@ def osx_ch_link(path, link): # ret = '@loader_path/%s/%s/%s' % (path_to_lib, lib_to_link, basename(link)) ret = ret.replace('/./', '/') + return ret + def mk_relative_osx(path, build_prefix=None): ''' if build_prefix is None, then this is a standard conda build. The path @@ -245,63 +263,24 @@ def mk_relative_osx(path, build_prefix=None): names = macho.otool(path) if names: - # Strictly speaking, not all object files have install names (e.g., - # bundles and executables do not). 
In that case, the first name here - # will not be the install name (i.e., the id), but it isn't a problem, - # because in that case it will be a no-op (with the exception of stub - # files, which give an error, which is handled below). - args = [ - 'install_name_tool', - '-id', - join('@rpath', relpath(dirname(path), - join(config.build_prefix, 'lib')), - basename(names[0])), - path, - ] - print(' '.join(args)) - p = Popen(args, stderr=PIPE) - stdout, stderr = p.communicate() - stderr = stderr.decode('utf-8') - if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s" % path) - return - else: - print(stderr, file=sys.stderr) - if p.returncode: - raise RuntimeError("install_name_tool failed with exit status %d" - % p.returncode) - # Add an rpath to every executable to increase the chances of it # being found. - args = [ - 'install_name_tool', - '-add_rpath', - join('@loader_path', - relpath(join(config.build_prefix, 'lib'), - dirname(path)), '').replace('/./', '/'), - path, - ] - print(' '.join(args)) - p = Popen(args, stderr=PIPE) - stdout, stderr = p.communicate() - stderr = stderr.decode('utf-8') - if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) - return - elif "would duplicate path, file already has LC_RPATH for:" in stderr: - print("Skipping -add_rpath, file already has LC_RPATH set") - return - else: - print(stderr, file=sys.stderr) - if p.returncode: - raise RuntimeError("install_name_tool failed with exit status %d" - % p.returncode) + rpath = join('@loader_path', + relpath(join(config.build_prefix, 'lib'), + dirname(path)), '').replace('/./', '/') + macho.add_rpath(path, rpath, verbose=True) + + # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. + # .. and remove config.build_prefix/lib which was added in-place of + # DYLD_FALLBACK_LIBRARY_PATH since El Capitan's SIP. + # macho.delete_rpath(path, config.build_prefix + '/lib', verbose = True) if s: # Skip for stub files, which have to use binary_has_prefix_files to be # made relocatable. assert_relative_osx(path) + def mk_relative_linux(f, rpaths=('lib',)): path = join(config.build_prefix, f) rpath = ':'.join('$ORIGIN/' + utils.relative(f, d) if not @@ -310,10 +289,12 @@ def mk_relative_linux(f, rpaths=('lib',)): print('patchelf: file: %s\n setting rpath to: %s' % (path, rpath)) call([patchelf, '--force-rpath', '--set-rpath', rpath, path]) + def assert_relative_osx(path): - for name in macho.otool(path): + for name in macho.get_dylibs(path): assert not name.startswith(config.build_prefix), path + def mk_relative(m, f): assert sys.platform != 'win32' path = join(config.build_prefix, f) @@ -335,7 +316,7 @@ def fix_permissions(files): for f in files: path = join(config.build_prefix, f) st = os.lstat(path) - lchmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w + lchmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w def post_build(m, files): @@ -391,20 +372,32 @@ def check_symlinks(files): print("Error: %s" % msg, file=sys.stderr) sys.exit(1) + def get_build_metadata(m): src_dir = source.get_dir() + if "build" not in m.meta: + m.meta["build"] = {} if exists(join(src_dir, '__conda_version__.txt')): + print("Deprecation warning: support for __conda_version__ will be removed in Conda build 2.0." 
# noqa + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables") # noqa with open(join(src_dir, '__conda_version__.txt')) as f: version = f.read().strip() print("Setting version from __conda_version__.txt: %s" % version) m.meta['package']['version'] = version if exists(join(src_dir, '__conda_buildnum__.txt')): + print("Deprecation warning: support for __conda_buildnum__ will be removed in Conda build 2.0." # noqa + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables") # noqa with open(join(src_dir, '__conda_buildnum__.txt')) as f: build_number = f.read().strip() print("Setting build number from __conda_buildnum__.txt: %s" % build_number) m.meta['build']['number'] = build_number if exists(join(src_dir, '__conda_buildstr__.txt')): + print("Deprecation warning: support for __conda_buildstr__ will be removed in Conda build 2.0." # noqa + "Try Jinja templates instead: " + "http://conda.pydata.org/docs/building/environment-vars.html#git-environment-variables") # noqa with open(join(src_dir, '__conda_buildstr__.txt')) as f: buildstr = f.read().strip() print("Setting version from __conda_buildstr__.txt: %s" % buildstr) diff --git a/conda_build/pypi.py b/conda_build/pypi.py index 4be696e52f..e93358d6e9 100644 --- a/conda_build/pypi.py +++ b/conda_build/pypi.py @@ -13,28 +13,32 @@ import sys from collections import defaultdict from os import makedirs, listdir, getcwd, chdir -from os.path import join, isdir, exists, isfile +from os.path import join, isdir, exists, isfile, abspath from tempfile import mkdtemp from shutil import copy2 -if sys.version_info < (3,): - from xmlrpclib import ServerProxy, Transport, ProtocolError -else: - from xmlrpc.client import ServerProxy, Transport, ProtocolError +from requests.packages.urllib3.util.url import parse_url +import yaml -from conda.fetch import (download, handle_proxy_407) +from conda.cli.common import spec_from_line +from conda.compat import input, configparser, StringIO, string_types, PY3 from conda.connection import CondaSession -from conda.utils import human_bytes, hashsum_file +from conda.fetch import (download, handle_proxy_407) from conda.install import rm_rf -from conda.compat import input, configparser, StringIO, string_types, PY3 -from conda.config import get_proxy_servers -from conda.cli.common import spec_from_line +from conda.resolve import normalized_version +from conda.utils import human_bytes, hashsum_file + from conda_build.utils import tar_xf, unzip from conda_build.source import SRC_CACHE, apply_patch from conda_build.build import create_env from conda_build.config import config +from conda_build.metadata import MetaData + +if sys.version_info < (3,): + from xmlrpclib import ServerProxy, Transport, ProtocolError +else: + from xmlrpc.client import ServerProxy, Transport, ProtocolError -from requests.packages.urllib3.util.url import parse_url PYPI_META = """\ package: @@ -171,6 +175,8 @@ def run_setup (script_name, script_args=None, stop_after="run"): INDENT = '\n - ' # https://gist.github.com/chrisguitarguy/2354951 + + class RequestsTransport(Transport): """ Drop in Transport for xmlrpclib that uses Requests instead of httplib @@ -194,11 +200,14 @@ def request(self, host, handler, request_body, verbose): url = self._build_url(host, handler) try: - resp = self.session.post(url, data=request_body, headers=headers, proxies=self.session.proxies) + resp = self.session.post(url, + data=request_body, + 
headers=headers, + proxies=self.session.proxies) resp.raise_for_status() except requests.exceptions.HTTPError as e: - if e.response.status_code == 407: # Proxy Authentication Required + if e.response.status_code == 407: # Proxy Authentication Required handle_proxy_407(url, self.session) # Try again return self.request(host, handler, request_body, verbose) @@ -210,7 +219,7 @@ def request(self, host, handler, request_body, verbose): # error and http gives the above error. Also, there is no status_code # attribute here. We have to just check if it looks like 407. See # https://github.com/kennethreitz/requests/issues/2061. - if "407" in str(e): # Proxy Authentication Required + if "407" in str(e): # Proxy Authentication Required handle_proxy_407(url, self.session) # Try again return self.request(host, handler, request_body, verbose) @@ -240,21 +249,18 @@ def _build_url(self, host, handler): scheme = 'https' if self.use_https else 'http' return '%s://%s/%s' % (scheme, host, handler) + def get_xmlrpc_client(pypi_url): - proxies = get_proxy_servers() + return ServerProxy(pypi_url, transport=RequestsTransport()) - if proxies: - transport = RequestsTransport() - else: - transport = None - return ServerProxy(pypi_url, transport=transport) def main(args, parser): client = get_xmlrpc_client(args.pypi_url) package_dicts = {} [output_dir] = args.output_dir - all_packages = client.list_packages() + # searching is faster than listing all packages + all_packages = [match["name"] for match in client.search({"name": args.packages}, "or")] all_packages_lower = [i.lower() for i in all_packages] args.created_recipes = [] @@ -437,9 +443,6 @@ def version_compare(args, package, versions): # to a method in main() to take care of that. return - from os.path import abspath, isdir - from conda_build.metadata import MetaData - from conda.resolve import normalized_version nv = normalized_version norm_versions = [nv(ver) for ver in versions] @@ -519,10 +522,8 @@ def get_package_metadata(args, package, d, data): if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}: setuptools_build = True setuptools_run = True - entry_list = ( - cs - # TODO: Use pythonw for these - + gs) + # TODO: Use pythonw for gui scripts + entry_list = (cs + gs) if len(cs + gs) != 0: d['entry_points'] = INDENT.join([''] + entry_list) d['entry_comment'] = '' @@ -541,9 +542,9 @@ def get_package_metadata(args, package, d, data): # Every item may be a single requirement # or a multiline requirements string... for dep in deptext: - #... and may also contain comments... + # ... and may also contain comments... dep = dep.split('#')[0].strip() - if dep: #... and empty (or comment only) lines + if dep: # ... and empty (or comment only) lines spec = spec_from_line(dep) if spec is None: sys.exit("Error: Could not parse: %s" % dep) @@ -653,8 +654,7 @@ def get_package_metadata(args, package, d, data): def valid(name): - if (re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) - and not keyword.iskeyword(name)): + if (re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) and not keyword.iskeyword(name)): return name else: return '' @@ -717,7 +717,6 @@ def get_pkginfo(package, filename, pypiurl, md5, python_version): # and "fake" distribute/setuptools's setup() function to get this # information from setup.py. If this sounds evil, keep in mind that # distribute itself already works by monkeypatching distutils. 
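# --- editor's aside --------------------------------------------------------
# DISTUTILS_PATCH, written out below, captures a package's metadata by
# replacing distutils' setup() before setup.py runs. A toy illustration of
# that interception idea, on a Python that still ships distutils (the
# `captured` dict and recording function are illustrative, not conda-build's):
import distutils.core

captured = {}

def _recording_setup(**kwargs):
    captured.update(kwargs)  # grabs name, version, install_requires, ...

distutils.core.setup = _recording_setup
# exec()-ing a setup.py now fills `captured` instead of running a build
# ----------------------------------------------------------------------------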
- import yaml tempdir = mkdtemp('conda_skeleton_' + filename) if not isdir(SRC_CACHE): makedirs(SRC_CACHE) @@ -768,11 +767,12 @@ def run_setuppy(src_dir, temp_dir, python_version): patch = join(temp_dir, 'pypi-distutils.patch') with open(patch, 'w') as f: - f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\','\\\\'))) + f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\'))) if exists(join(stdlib_dir, 'distutils', 'core.py-copy')): rm_rf(join(stdlib_dir, 'distutils', 'core.py')) - copy2(join(stdlib_dir, 'distutils', 'core.py-copy'), join(stdlib_dir, 'distutils', 'core.py')) + copy2(join(stdlib_dir, 'distutils', 'core.py-copy'), + join(stdlib_dir, 'distutils', 'core.py')) # Avoid race conditions. Invalidate the cache. if PY3: rm_rf(join(stdlib_dir, 'distutils', '__pycache__', @@ -804,6 +804,7 @@ def run_setuppy(src_dir, temp_dir, python_version): finally: chdir(cwd) + def make_entry_tests(entry_list): tests = [] for entry_point in entry_list: diff --git a/conda_build/render.py b/conda_build/render.py new file mode 100644 index 0000000000..8e767e06df --- /dev/null +++ b/conda_build/render.py @@ -0,0 +1,190 @@ +# (c) Continuum Analytics, Inc. / http://continuum.io +# All Rights Reserved +# +# conda is distributed under the terms of the BSD 3-clause license. +# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause. + +from __future__ import absolute_import, division, print_function + +from locale import getpreferredencoding +import shutil +import sys +import tarfile +import tempfile +import os +from os.path import isdir, isfile, abspath +import re +import subprocess + +import yaml + +from conda.compat import PY3 +from conda.lock import Locked + +from conda_build import exceptions +from conda_build.config import config +from conda_build.metadata import MetaData +import conda_build.source as source +from conda_build.completers import all_versions, conda_version + + +def set_language_env_vars(args, parser, execute=None): + """Given args passed into conda command, set language env vars""" + for lang in all_versions: + versions = getattr(args, lang) + if not versions: + continue + if versions == ['all']: + if all_versions[lang]: + versions = all_versions[lang] + else: + parser.error("'all' is not supported for --%s" % lang) + if len(versions) > 1: + for ver in versions[:]: + setattr(args, lang, [str(ver)]) + if execute: + execute(args, parser) + # This is necessary to make all combinations build. + setattr(args, lang, versions) + return + else: + version = versions[0] + if lang in ('python', 'numpy'): + version = int(version.replace('.', '')) + setattr(config, conda_version[lang], version) + if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']: + if all_versions[lang]: + raise RuntimeError("%s must be major.minor, like %s, not %s" % + (conda_version[lang], all_versions[lang][-1] / 10, version)) + else: + raise RuntimeError("%s must be major.minor, not %s" % + (conda_version[lang], version)) + + # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc. + # Auto-set those env variables + for var in conda_version.values(): + if hasattr(config, var) and getattr(config, var): + # Set the env variable. + os.environ[var] = str(getattr(config, var)) + + +def bldpkg_path(m): + ''' + Returns path to built package's tarball given its ``Metadata``. + ''' + return os.path.join(config.bldpkgs_dir, '%s.tar.bz2' % m.dist()) + + +def has_vcs_metadata(metadata): + """Returns True if the recipe contains metadata associated with version control systems.
+ If this metadata is present, a download/copy will be forced in parse_or_try_download. + """ + with open(metadata.meta_path) as f: + matches = re.findall(r"GIT_[^\.\s\'\"]+", f.read()) + # TODO: extend with other VCS systems (SVN, hg, anything else?) + return len(matches) > 0 + + +def parse_or_try_download(metadata, no_download_source, verbose, + force_download=False, dirty=False): + + if (force_download or (not no_download_source and has_vcs_metadata(metadata))): + # this try/catch is for when the tool to download source is actually in + # meta.yaml, and not previously installed in builder env. + try: + if not dirty: + source.provide(metadata.path, metadata.get_section('source'), + verbose=verbose) + metadata.parse_again(permit_undefined_jinja=False) + need_source_download = False + except subprocess.CalledProcessError as error: + print("Warning: failed to download source. If building, will try " + "again after downloading recipe dependencies.") + print("Error was: ") + print(error) + need_source_download = True + else: + # we have not downloaded source in the render phase. Download it in + # the build phase + need_source_download = True + metadata.parse_again(permit_undefined_jinja=False) + return metadata, need_source_download + + +def render_recipe(recipe_path, no_download_source, verbose, dirty=False): + with Locked(config.croot): + arg = recipe_path + # Don't use byte literals for paths in Python 2 + if not PY3: + arg = arg.decode(getpreferredencoding() or 'utf-8') + if isfile(arg): + if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')): + recipe_dir = tempfile.mkdtemp() + t = tarfile.open(arg, 'r:*') + t.extractall(path=recipe_dir) + t.close() + need_cleanup = True + else: + print("Ignoring non-recipe: %s" % arg) + return + else: + recipe_dir = abspath(arg) + need_cleanup = False + + if not isdir(recipe_dir): + sys.exit("Error: no such directory: %s" % recipe_dir) + + try: + m = MetaData(recipe_dir) + except exceptions.YamlParsingError as e: + sys.stderr.write(e.error_msg()) + sys.exit(1) + + m = parse_or_try_download(m, no_download_source=no_download_source, + verbose=verbose, dirty=dirty) + + if need_cleanup: + shutil.rmtree(recipe_dir) + + return m + + +# Next bit of stuff is to support YAML output in the order we expect. 
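# --- editor's aside --------------------------------------------------------
# A minimal standalone demo of the ordered-YAML trick implemented just below:
# PyYAML sorts dict keys, but a representer fed a list of (key, value) pairs
# preserves their order. Field names here are trimmed for illustration:
import yaml

class OMap(dict):
    FIELDS = ["package", "source", "build"]
    def to_omap(self):
        return [(k, self[k]) for k in self.FIELDS if k in self]

yaml.add_representer(
    OMap, lambda dmp, data: dmp.represent_mapping('tag:yaml.org,2002:map',
                                                  data.to_omap()))
print(yaml.dump(OMap(build={'number': 0}, package={'name': 'foo'}),
                default_flow_style=False))
# -> 'package:' is emitted before 'build:' regardless of insertion order
# ----------------------------------------------------------------------------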
+# http://stackoverflow.com/a/17310199/1170370 +class _MetaYaml(dict): + fields = ["package", "source", "build", "requirements", "test", "about", "extra"] + + def to_omap(self): + return [(field, self[field]) for field in _MetaYaml.fields if field in self] + + +def _represent_omap(dumper, data): + return dumper.represent_mapping(u'tag:yaml.org,2002:map', data.to_omap()) + + +def _unicode_representer(dumper, uni): + node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni) + return node + + +class _IndentDumper(yaml.Dumper): + def increase_indent(self, flow=False, indentless=False): + return super(_IndentDumper, self).increase_indent(flow, False) + +yaml.add_representer(_MetaYaml, _represent_omap) +if PY3: + yaml.add_representer(str, _unicode_representer) + unicode = None # silence pyflakes about unicode not existing in py3 +else: + yaml.add_representer(unicode, _unicode_representer) + + +def output_yaml(metadata, filename=None): + output = yaml.dump(_MetaYaml(metadata.meta), Dumper=_IndentDumper, + default_flow_style=False, indent=4) + if filename: + with open(filename, "w") as f: + f.write(output) + return("Wrote yaml to %s" % filename) + else: + return(output) diff --git a/conda_build/scripts.py b/conda_build/scripts.py index 3ffd42274e..78fd52ff5f 100644 --- a/conda_build/scripts.py +++ b/conda_build/scripts.py @@ -10,6 +10,7 @@ import shutil from os.path import dirname, isdir, join +import conda.install import conda.config as cc from conda_build.config import config @@ -40,6 +41,10 @@ def create_entry_point(path, module, func): pyscript = PY_TMPL % {'module': module, 'func': func} if sys.platform == 'win32': with open(path + '-script.py', 'w') as fo: + packages = conda.install.linked(config.build_prefix) + packages_names = (pkg.split('-')[0] for pkg in packages) + if 'debug' in packages_names: + fo.write('#!python_d\n') fo.write(pyscript) shutil.copyfile(join(dirname(__file__), 'cli-%d.exe' % cc.bits), path + '.exe') @@ -61,9 +66,14 @@ def create_entry_points(items): def prepend_bin_path(env, prefix, prepend_prefix=False): + # bin_dirname takes care of bin on *nix, Scripts on win env['PATH'] = join(prefix, bin_dirname) + os.pathsep + env['PATH'] if sys.platform == "win32": - env['PATH'] = join(prefix, "Library", "bin") + os.pathsep + env['PATH'] + env['PATH'] = join(prefix, "Library", "mingw-w64", "bin") + os.pathsep + \ + join(prefix, "Library", "usr", "bin") + os.pathsep + os.pathsep + \ + join(prefix, "Library", "bin") + os.pathsep + \ + env['PATH'] + prepend_prefix = True # windows has Python in the prefix. Use it. 
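# --- editor's aside --------------------------------------------------------
# Distilled: the effective PATH search order that prepend_bin_path assembles
# on Windows after the changes around here (the prefix value is illustrative,
# and the doubled separator in the original is normalized away):
import os
from os.path import join

def effective_win_path(prefix, old_path):
    dirs = [prefix,                                      # python.exe at the root
            join(prefix, 'Library', 'mingw-w64', 'bin'),
            join(prefix, 'Library', 'usr', 'bin'),
            join(prefix, 'Library', 'bin'),
            join(prefix, 'Scripts')]                     # entry-point scripts
    return os.pathsep.join(dirs + [old_path])
# ----------------------------------------------------------------------------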
if prepend_prefix: env['PATH'] = prefix + os.pathsep + env['PATH'] return env diff --git a/conda_build/source.py b/conda_build/source.py index e56055ed83..d1050837d0 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -1,13 +1,16 @@ from __future__ import absolute_import, division, print_function import os +import re import sys -from os.path import join, isdir, isfile, abspath, expanduser +from os.path import join, isdir, isfile, abspath, expanduser, basename from shutil import copytree, copy2 -from subprocess import check_call, Popen, PIPE, CalledProcessError +from subprocess import check_call, Popen, PIPE, check_output import locale +import time from conda.fetch import download +from conda.install import move_to_trash from conda.utils import hashsum_file from conda_build import external @@ -39,9 +42,8 @@ def download_to_cache(meta): if not isdir(SRC_CACHE): os.makedirs(SRC_CACHE) - fn = meta['fn'] + fn = meta['fn'] if 'fn' in meta else basename(meta['url']) path = join(SRC_CACHE, fn) - if isfile(path): print('Found source in cache: %s' % fn) else: @@ -58,7 +60,7 @@ def download_to_cache(meta): else: print("Success") break - else: # no break + else: # no break raise RuntimeError("Could not download %s" % fn) for tp in 'md5', 'sha1', 'sha256': @@ -69,14 +71,16 @@ def download_to_cache(meta): return path -def unpack(meta): +def unpack(meta, verbose=False): ''' Uncompress a downloaded source. ''' src_path = download_to_cache(meta) - os.makedirs(WORK_DIR) - print("Extracting download") + if not isdir(WORK_DIR): + os.makedirs(WORK_DIR) + if verbose: + print("Extracting download") if src_path.lower().endswith(('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz', - '.tar', 'tar.z')): + '.tar', 'tar.z')): tar_xf(src_path, WORK_DIR) elif src_path.lower().endswith('.zip'): unzip(src_path, WORK_DIR) @@ -86,8 +90,14 @@ def unpack(meta): copy2(src_path, WORK_DIR) -def git_source(meta, recipe_dir): +def git_source(meta, recipe_dir, verbose=False): ''' Download a source from Git repo. ''' + if verbose: + stdout = None + else: + FNULL = open(os.devnull, 'w') + stdout = FNULL + if not isdir(GIT_CACHE): os.makedirs(GIT_CACHE) @@ -112,13 +122,25 @@ def git_source(meta, recipe_dir): # update (or create) the cache repo if isdir(cache_repo): - check_call([git, 'fetch'], cwd=cache_repo) + if meta.get('git_rev', 'HEAD') != 'HEAD': + check_call([git, 'fetch'], cwd=cache_repo, stdout=stdout) + else: + # Unlike 'git clone', fetch doesn't automatically update the cache's HEAD, + # So here we explicitly store the remote HEAD in the cache's local refs/heads, + # and then explicitly set the cache's HEAD. + # This is important when the git repo is a local path like "git_url: ../", + # but the user is working with a branch other than 'master' without + # explicitly providing git_rev. 
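# --- editor's aside --------------------------------------------------------
# The two git calls below, distilled into a reusable helper: a mirror clone's
# HEAD does not move on fetch, so the remote's current HEAD is first copied
# into a throwaway local branch and the cache's HEAD is then re-pointed at it
# (the cache path is illustrative):
from subprocess import check_call

def refresh_mirror_head(cache_repo):
    check_call(['git', 'fetch', 'origin', '+HEAD:_conda_cache_origin_head'],
               cwd=cache_repo)
    check_call(['git', 'symbolic-ref', 'HEAD',
                'refs/heads/_conda_cache_origin_head'], cwd=cache_repo)
# ----------------------------------------------------------------------------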
+ check_call([git, 'fetch', 'origin', '+HEAD:_conda_cache_origin_head'], + cwd=cache_repo, stdout=stdout) + check_call([git, 'symbolic-ref', 'HEAD', 'refs/heads/_conda_cache_origin_head'], + cwd=cache_repo, stdout=stdout) else: args = [git, 'clone', '--mirror'] if git_depth > 0: - args += ['--depth', git_depth] + args += ['--depth', str(git_depth)] - check_call(args + [git_url, cache_repo_arg], cwd=recipe_dir) + check_call(args + [git_url, cache_repo_arg], stdout=stdout) assert isdir(cache_repo) # now clone into the work directory @@ -127,22 +149,25 @@ def git_source(meta, recipe_dir): # assume the user wants the current HEAD if not checkout and git_url.startswith('.'): process = Popen(["git", "rev-parse", "HEAD"], - stdout=PIPE, stderr=PIPE, - cwd=git_url) + stdout=PIPE, cwd=git_url) output = process.communicate()[0].strip() checkout = output.decode('utf-8') - if checkout: + if checkout and verbose: print('checkout: %r' % checkout) - check_call([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR]) + check_call([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR], stdout=stdout) if checkout: - check_call([git, 'checkout', checkout], cwd=WORK_DIR) + check_call([git, 'checkout', checkout], cwd=WORK_DIR, stdout=stdout) + + git_info(verbose=verbose) + + if not verbose: + FNULL.close() - git_info() return WORK_DIR -def git_info(fo=None): +def git_info(fo=None, verbose=False): ''' Print info about a Git repo. ''' assert isdir(WORK_DIR) @@ -152,9 +177,9 @@ def git_info(fo=None): env['GIT_DIR'] = join(WORK_DIR, '.git') env = {str(key): str(value) for key, value in env.items()} for cmd, check_error in [ - ('git log -n1', True), - ('git describe --tags --dirty', False), - ('git status', True)]: + ('git log -n1', True), + ('git describe --tags --dirty', False), + ('git status', True)]: p = Popen(cmd.split(), stdout=PIPE, stderr=PIPE, cwd=WORK_DIR, env=env) stdout, stderr = p.communicate() encoding = locale.getpreferredencoding() @@ -167,14 +192,24 @@ def git_info(fo=None): raise Exception("git error: %s" % stderr) if fo: fo.write(u'==> %s <==\n' % cmd) - fo.write(stdout + u'\n') + if verbose: + fo.write(stdout + u'\n') else: - print(u'==> %s <==\n' % cmd) - safe_print_unicode(stdout + u'\n') + if verbose: + print(u'==> %s <==\n' % cmd) + safe_print_unicode(stdout + u'\n') -def hg_source(meta): +def hg_source(meta, verbose=False): ''' Download a source from Mercurial repo. 
''' + if verbose: + stdout = None + stderr = None + else: + FNULL = open(os.devnull, 'w') + stdout = FNULL + stderr = FNULL + hg = external.find_executable('hg') if not hg: sys.exit('Error: hg not installed') @@ -184,23 +219,35 @@ def hg_source(meta): hg_dn = hg_url.split(':')[-1].replace('/', '_') cache_repo = join(HG_CACHE, hg_dn) if isdir(cache_repo): - check_call([hg, 'pull'], cwd=cache_repo) + check_call([hg, 'pull'], cwd=cache_repo, stdout=stdout, stderr=stderr) else: - check_call([hg, 'clone', hg_url, cache_repo]) + check_call([hg, 'clone', hg_url, cache_repo], stdout=stdout, stderr=stderr) assert isdir(cache_repo) # now clone in to work directory update = meta.get('hg_tag') or 'tip' - print('checkout: %r' % update) + if verbose: + print('checkout: %r' % update) - check_call([hg, 'clone', cache_repo, WORK_DIR]) - check_call([hg, 'update', '-C', update], cwd=WORK_DIR) - return WORK_DIR + check_call([hg, 'clone', cache_repo, WORK_DIR], stdout=stdout, stderr=stderr) + check_call([hg, 'update', '-C', update], cwd=WORK_DIR, stdout=stdout, stderr=stderr) + if not verbose: + FNULL.close() + + return WORK_DIR -def svn_source(meta): +def svn_source(meta, verbose=False): ''' Download a source from SVN repo. ''' + if verbose: + stdout = None + stderr = None + else: + FNULL = open(os.devnull, 'w') + stdout = FNULL + stderr = FNULL + def parse_bool(s): return str(s).lower().strip() in ('yes', 'true', '1', 'on') @@ -219,17 +266,44 @@ def parse_bool(s): else: extra_args = [] if isdir(cache_repo): - check_call([svn, 'up', '-r', svn_revision] + extra_args, cwd=cache_repo) + check_call([svn, 'up', '-r', svn_revision] + extra_args, cwd=cache_repo, + stdout=stdout, stderr=stderr) else: - check_call([svn, 'co', '-r', svn_revision] + extra_args + [svn_url, - cache_repo]) + check_call([svn, 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo], + stdout=stdout, stderr=stderr) assert isdir(cache_repo) # now copy into work directory copytree(cache_repo, WORK_DIR, symlinks=True) + + if not verbose: + FNULL.close() + return WORK_DIR +def get_repository_info(recipe_path): + """This tries to get information about where a recipe came from. This is different + from the source - you can have a recipe in svn that gets source via git.""" + if isdir(join(recipe_path, ".git")): + origin = check_output(["git", "config", "--get", "remote.origin.url"], cwd=recipe_path) + rev = check_output(["git", "rev-parse", "HEAD"], cwd=recipe_path) + return "Origin {}, commit {}".format(origin, rev) + elif isdir(join(recipe_path, ".hg")): + origin = check_output(["hg", "paths", "default"], cwd=recipe_path) + rev = check_output(["hg", "id"], cwd=recipe_path).split()[0] + return "Origin {}, commit {}".format(origin, rev) + elif isdir(join(recipe_path, ".svn")): + info = check_output(["svn", "info"], cwd=recipe_path) + server = re.search("Repository Root: (.*)$", info, flags=re.M).group(1) + revision = re.search("Revision: (.*)$", info, flags=re.M).group(1) + return "{}, Revision {}".format(server, revision) + else: + return "{}, last modified {}".format(recipe_path, + time.ctime(os.path.getmtime( + join(recipe_path, "meta.yaml")))) + + def _ensure_unix_line_endings(path): """Replace windows line endings with Unix. 
Return path to modified file.""" out_path = path + "_unix" @@ -239,6 +313,63 @@ def _ensure_unix_line_endings(path): outputfile.write(line.replace("\r\n", "\n")) return out_path + +def _commonpath(paths): + """Python 2 doesn't have os.path.commonpath(), so roll our own""" + folders = [path.split(b'/') for path in paths] + minfolders = min(folders) + maxfolders = max(folders) + common = [] + for minf, maxf in zip(minfolders, maxfolders[:len(minfolders)]): + if minf != maxf: + break + common.append(minf) + if len(common): + return b'/'.join(common) + b'/' + return b'' + + +def _guess_patch_strip_level(filesstr, src_dir): + """ Determine the patch strip level automatically. """ + maxlevel = None + files = {filestr.encode(errors='ignore') for filestr in filesstr} + src_dir = src_dir.encode(errors='ignore') + for file in files: + numslash = file.count(b'/') + maxlevel = numslash if not maxlevel else min(maxlevel, numslash) + if maxlevel == 0: + patchlevel = 0 + else: + histo = dict() + histo = {i: 0 for i in range(maxlevel + 1)} + if len(files) == 1: + (common,) = files + else: + common = _commonpath(files) + maxlevel = common.count(b'/') + for file in files: + parts = file.split(b'/') + for level in range(maxlevel + 1): + if os.path.exists(join(src_dir, *parts[-len(parts) + level:])): + histo[level] += 1 + order = sorted(histo, key=histo.get, reverse=True) + if histo[order[0]] == histo[order[1]]: + print("Patch level ambiguous, selecting least deep") + patchlevel = min([key for key, value + in histo.items() if value == histo[order[0]]]) + return patchlevel + + +def _source_files_from_patch_file(path): + re_source_files = re.compile('^--- ([^\n\t]+)') + files = set() + with open(path) as f: + files = {m.group(1) for l in f.readlines() + for m in [re_source_files.search(l)] + if m and m.group(1) != '/dev/null'} + return files + + def apply_patch(src_dir, path): print('Applying patch: %r' % path) if not isfile(path): @@ -250,42 +381,49 @@ def apply_patch(src_dir, path): Error: Did not find 'patch' in: %s You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX), - or conda, cygwin (Windows), + or conda, m2-patch (Windows), """ % (os.pathsep.join(external.dir_paths))) - patch_args = ['-p0', '-i', path] + files = _source_files_from_patch_file(path) + patch_strip_level = _guess_patch_strip_level(files, src_dir) + patch_args = ['-p%d' % patch_strip_level, '-i', path] if sys.platform == 'win32': - patch_args[-1] = _ensure_unix_line_endings(path) - try: - check_call([patch] + patch_args, cwd=src_dir) - except CalledProcessError: - sys.exit(1) + patch_args[-1] = _ensure_unix_line_endings(path) + check_call([patch] + patch_args, cwd=src_dir) if sys.platform == 'win32' and os.path.exists(patch_args[-1]): os.remove(patch_args[-1]) # clean up .patch_unix file - -def provide(recipe_dir, meta, patch=True): +def provide(recipe_dir, meta, verbose=False, patch=True): """ given a recipe_dir: - download (if necessary) - unpack - apply patches (if any) """ - print("Removing old work directory") - rm_rf(WORK_DIR) - if 'fn' in meta: - unpack(meta) + + if sys.platform == 'win32': + if isdir(WORK_DIR): + move_to_trash(WORK_DIR, '') + else: + rm_rf(WORK_DIR) + + if any(k in meta for k in ('fn', 'url')): + unpack(meta, verbose=verbose) elif 'git_url' in meta: - git_source(meta, recipe_dir) + git_source(meta, recipe_dir, verbose=verbose) + # build to make sure we have a work directory with source in it. We want to make sure that + # whatever version that is does not interfere with the test we run next. 
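# --- editor's aside --------------------------------------------------------
# Intended use of the strip-level guessing added above, as a sketch assuming
# this version of conda_build.source (the patch path is illustrative and the
# underscore names are module-private):
from subprocess import check_call
from conda_build.source import (_guess_patch_strip_level,
                                _source_files_from_patch_file)

def apply_with_guessed_level(src_dir, patch_path):
    files = _source_files_from_patch_file(patch_path)  # from '--- ' headers
    level = _guess_patch_strip_level(files, src_dir)   # leading dirs to strip
    check_call(['patch', '-p%d' % level, '-i', patch_path], cwd=src_dir)
# ----------------------------------------------------------------------------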
elif 'hg_url' in meta: - hg_source(meta) + hg_source(meta, verbose=verbose) elif 'svn_url' in meta: - svn_source(meta) + svn_source(meta, verbose=verbose) elif 'path' in meta: - print("Copying %s to %s" % (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)) + if verbose: + print("Copying %s to %s" % (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)) copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR) - else: # no source - os.makedirs(WORK_DIR) + else: # no source + if not isdir(WORK_DIR): + os.makedirs(WORK_DIR) if patch: src_dir = get_dir() diff --git a/conda_build/utils.py b/conda_build/utils.py index 7fed5904f2..0148204f20 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function +import fnmatch import os import sys import shutil @@ -19,6 +20,25 @@ from conda.install import rm_rf rm_rf + +def find_recipe(path): + """recurse through a folder, locating meta.yaml. Raises error if more than one is found. + + Returns folder containing meta.yaml, to be built. + + If we have a base level meta.yaml and other supplemental ones, use that first""" + results = rec_glob(path, ["meta.yaml", "conda.yaml"]) + if len(results) > 1: + base_recipe = os.path.join(path, "meta.yaml") + if base_recipe in results: + return os.path.dirname(base_recipe) + else: + raise IOError("More than one meta.yaml files found in %s" % path) + elif not results: + raise IOError("No meta.yaml files found in %s" % path) + return os.path.dirname(results[0]) + + def copy_into(src, dst): "Copy all the files and directories in src to the directory dst" @@ -104,6 +124,7 @@ def file_info(path): # Taken from toolz + def groupby(key, seq): """ Group a collection by a key function >>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank'] @@ -132,6 +153,7 @@ def groupby(key, seq): rv[k] = v.__self__ return rv + def getter(index): if isinstance(index, list): if len(index) == 1: @@ -144,6 +166,7 @@ def getter(index): else: return operator.itemgetter(index) + def comma_join(items): """ Like ', '.join(items) but with and @@ -180,3 +203,14 @@ def safe_print_unicode(*args, **kwargs): line = sep.join(args) + end encoding = sys.stdout.encoding or 'utf8' func(line.encode(encoding, errors)) + + +def rec_glob(path, patterns): + result = [] + for d_f in os.walk(path): + m = [] + for pattern in patterns: + m.extend(fnmatch.filter(d_f[2], pattern)) + if m: + result.extend([os.path.join(d_f[0], f) for f in m]) + return result diff --git a/conda_build/windows.py b/conda_build/windows.py index 8ef7ff4b2c..df334d636c 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -3,7 +3,7 @@ import os import sys import shutil -from os.path import dirname, isdir, isfile, join, exists +from os.path import dirname, isdir, isfile, join import conda.config as cc @@ -14,6 +14,51 @@ assert sys.platform == 'win32' +# Set up a load of paths that can be imported from the tests +if 'ProgramFiles(x86)' in os.environ: + PROGRAM_FILES_PATH = os.environ['ProgramFiles(x86)'] +else: + PROGRAM_FILES_PATH = os.environ['ProgramFiles'] + +# Note that we explicitly want "Program Files" and not "Program Files (x86)" +WIN_SDK_BAT_PATH = os.path.join(PROGRAM_FILES_PATH.replace(" (x86)", ""), + 'Microsoft SDKs', 'Windows', 'v7.1', 'Bin', 'SetEnv.cmd') +VS_TOOLS_PY_LOCAL_PATH = os.path.join( + os.getenv('localappdata', os.path.abspath(os.sep)), + 'Programs', 'Common', 'Microsoft', 'Visual C++ for Python', '9.0', + 'vcvarsall.bat') +VS_TOOLS_PY_COMMON_PATH = 
os.path.join(PROGRAM_FILES_PATH, 'Common Files', + 'Microsoft', 'Visual C++ for Python', + '9.0', 'vcvarsall.bat') +VCVARS64_VS9_BAT_PATH = os.path.join(PROGRAM_FILES_PATH, + 'Microsoft Visual Studio 9.0', 'VC', 'bin', + 'vcvars64.bat') +VS_VERSION_STRING = { + '8.0': 'Visual Studio 8 2005', + '9.0': 'Visual Studio 9 2008', + '10.0': 'Visual Studio 10 2010', + '11.0': 'Visual Studio 11 2012', + '12.0': 'Visual Studio 12 2013', + '14.0': 'Visual Studio 14 2015' +} + + +def build_vcvarsall_vs_path(version): + """ + Given the Visual Studio version, returns the default path to the + Microsoft Visual Studio vcvarsall.bat file. + + Expected versions are of the form {9, 10, 12, 14} + """ + vstools = "VS{0}0COMNTOOLS".format(version) + if vstools in os.environ: + return os.path.join(os.environ[vstools], '..\\..\\VC\\vcvarsall.bat') + else: + # prefer looking at env var; fall back to program files defaults + return os.path.join(PROGRAM_FILES_PATH, + 'Microsoft Visual Studio {}'.format(version), 'VC', + 'vcvarsall.bat') + def fix_staged_scripts(): """ @@ -46,44 +91,75 @@ def fix_staged_scripts(): os.remove(join(scripts_dir, fn)) -def msvc_env_cmd(override=None): - if 'ProgramFiles(x86)' in os.environ: - program_files = os.environ['ProgramFiles(x86)'] - else: - program_files = os.environ['ProgramFiles'] +def msvc_env_cmd(bits, override=None): + arch_selector = 'x86' if bits == 32 else 'amd64' msvc_env_lines = [] - if config.PY3K and config.use_MSVC2015: - version = '14.0' - elif config.PY3K: - version = '10.0' - else: - version = '9.0' - + version = None if override is not None: version = override msvc_env_lines.append('set DISTUTILS_USE_SDK=1') msvc_env_lines.append('set MSSdk=1') - vcvarsall = os.path.join(program_files, - r'Microsoft Visual Studio {version}'.format(version=version), - 'VC', 'vcvarsall.bat') - - # Try the Microsoft Visual C++ Compiler for Python 2.7 - localappdata = os.environ.get("localappdata") - not_vcvars = not isfile(vcvarsall) - if not_vcvars and localappdata and not config.PY3K: - vcvarsall = os.path.join(localappdata, "Programs", "Common", - "Microsoft", "Visual C++ for Python", "9.0", "vcvarsall.bat") - if not_vcvars and program_files and not config.PY3K: - vcvarsall = os.path.join(program_files, 'Common Files', - 'Microsoft', 'Visual C++ for Python', "9.0", "vcvarsall.bat") - if not_vcvars: - print("Warning: Couldn't find Visual Studio: %r" % vcvarsall) - return '' - - msvc_env_lines.append('call "%s" %s' % (vcvarsall, 'x86' if cc.bits == 32 else 'amd64')) + if not version: + if config.PY3K and config.use_MSVC2015: + version = '14.0' + elif config.PY3K: + version = '10.0' + else: + version = '9.0' + + vcvarsall_vs_path = build_vcvarsall_vs_path(version) + + def build_vcvarsall_cmd(cmd, arch=arch_selector): + return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch) + + msvc_env_lines.append('set "VS_VERSION={}"'.format(version)) + msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0])) + msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:])) + msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] + + {64: ' Win64', 32: ''}[bits])) + # tell msys2 to ignore path conversions for issue-causing windows-style flags in build + # See https://github.com/conda-forge/icu-feedstock/pull/5 + msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out;%MSYS2_ARG_CONV_EXCL%"') + msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"') + if version == '10.0': + win_sdk_arch = '/Release /x86' if bits == 32 else 
'/Release /x64' + win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_BAT_PATH, arch=win_sdk_arch) + + # Always call the Windows SDK first - if VS 2010 exists but was + # installed using the broken installer then it will try and call the + # vcvars script, which will fail but NOT EXIT 1. To work around this, + # we always call the Windows SDK, and then try calling VS 2010 which + # will overwrite any environment variables it needs, if necessary. + msvc_env_lines.append(win_sdk_cmd) + msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) + + elif version == '9.0': + error1 = 'if errorlevel 1 {}' + + # First, check for Microsoft Visual C++ Compiler for Python 2.7 + msvc_env_lines.append(build_vcvarsall_cmd(VS_TOOLS_PY_LOCAL_PATH)) + msvc_env_lines.append(error1.format( + build_vcvarsall_cmd(VS_TOOLS_PY_COMMON_PATH))) + # The Visual Studio 2008 Express edition does not properly contain + # the amd64 build files, so we call the vcvars64.bat manually, + # rather than using the vcvarsall.bat which would try and call the + # missing bat file. + if arch_selector == 'amd64': + msvc_env_lines.append(error1.format( + build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH))) + msvc_env_lines.append(error1.format( + build_vcvarsall_cmd(vcvarsall_vs_path))) + else: + msvc_env_lines.append(error1.format( + build_vcvarsall_cmd(vcvarsall_vs_path))) + + else: + # Visual Studio 14 or otherwise + msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path)) + return '\n'.join(msvc_env_lines) @@ -110,9 +186,9 @@ def kill_processes(process_names=["msbuild.exe"]): continue -def build(m): +def build(m, bld_bat, dirty=False): env = dict(os.environ) - env.update(environ.get_dict(m)) + env.update(environ.get_dict(m, dirty=dirty)) env = environ.prepend_bin_path(env, config.build_prefix, True) for name in 'BIN', 'INC', 'LIB': @@ -121,21 +197,22 @@ def build(m): os.makedirs(path) src_dir = source.get_dir() - bld_bat = join(m.path, 'bld.bat') - if exists(bld_bat): + if os.path.isfile(bld_bat): with open(bld_bat) as fi: data = fi.read() with open(join(src_dir, 'bld.bat'), 'w') as fo: - fo.write(msvc_env_cmd(override=m.get_value('build/msvc_compiler', None))) - fo.write('\n') # more debuggable with echo on fo.write('@echo on\n') + for key, value in env.items(): + fo.write('set "{key}={value}"\n'.format(key=key, value=value)) fo.write("set INCLUDE={};%INCLUDE%\n".format(env["LIBRARY_INC"])) fo.write("set LIB={};%LIB%\n".format(env["LIBRARY_LIB"])) + fo.write(msvc_env_cmd(bits=cc.bits, override=m.get_value('build/msvc_compiler', None))) + fo.write('\n') fo.write("REM ===== end generated header =====\n") fo.write(data) cmd = [os.environ['COMSPEC'], '/c', 'call', 'bld.bat'] - _check_call(cmd, cwd=src_dir, env={str(k): str(v) for k, v in env.items()}) + _check_call(cmd, cwd=src_dir) kill_processes() fix_staged_scripts() diff --git a/example_packages/noarch_python/setup.py b/example_packages/noarch_python/setup.py index 0b1fcd7485..a5cce40b91 100644 --- a/example_packages/noarch_python/setup.py +++ b/example_packages/noarch_python/setup.py @@ -1,11 +1,11 @@ from distutils.core import setup setup( - name = "foo", - version = '1.2.3', - author = "Ilan Schnell", - py_modules = ["foo"], - entry_points = { + name="foo", + version='1.2.3', + author="Ilan Schnell", + py_modules=["foo"], + entry_points={ 'console_scripts': ['foo = foo:main'], }, ) diff --git a/example_packages/setuptools/package1/scripts.py b/example_packages/setuptools/package1/scripts.py index 96ef209329..4848e8e8ca 100644 --- a/example_packages/setuptools/package1/scripts.py +++ 
b/example_packages/setuptools/package1/scripts.py @@ -5,5 +5,6 @@ ''' from __future__ import print_function + def main(): print('hello!') diff --git a/example_packages/setuptools/setup.py b/example_packages/setuptools/setup.py index 31da63a923..4ea5552c94 100644 --- a/example_packages/setuptools/setup.py +++ b/example_packages/setuptools/setup.py @@ -9,8 +9,8 @@ packages=find_packages(), install_requires=['Flask', 'werkzeug'], entry_points={ - 'console_scripts' : [ + 'console_scripts': [ 'script1 = package1.scripts:main' - ] + ] } ) diff --git a/setup.cfg b/setup.cfg index bc1bb21e05..02ed3db507 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,14 @@ +[flake8] +max-line-length = 100 +ignore = E122,E123,E126,E127,E128,E731 +exclude = build,conda_build/_version.py + [pytest] -norecursedirs= tests/test-recipes \ No newline at end of file +norecursedirs= tests/test-recipes .* *.egg* build dist +addopts = + --junitxml=junit.xml + --ignore setup.py + --ignore run_test.py + --cov-report term-missing + --tb native + --strict diff --git a/setup.py b/setup.py index f8911e72bb..e6d3945455 100755 --- a/setup.py +++ b/setup.py @@ -2,14 +2,13 @@ import sys from glob import glob +import versioneer + if 'develop' in sys.argv: from setuptools import setup else: from distutils.core import setup -import versioneer - - if sys.version_info[:2] < (2, 7): sys.exit("conda-build is only meant for Python >=2.7" "Current Python version: %d.%d" % sys.version_info[:2]) diff --git a/tests/install_miniconda.py b/tests/install_miniconda.py index 475980ebc3..b669f8a09c 100644 --- a/tests/install_miniconda.py +++ b/tests/install_miniconda.py @@ -59,6 +59,7 @@ def remove_readonly(func, path, excinfo): # Final time. pass exceptions to caller. shutil.rmtree(path) + def download_file(url, md5): urlparts = requests.packages.urllib3.util.url.parse_url(url) local_filename = urlparts.path.split('/')[-1] @@ -84,8 +85,8 @@ def download_file(url, md5): size = int(r.headers.get('Content-Length')) with open(file_path, 'wb') as f: for i, chunk in enumerate(r.iter_content(chunk_size=2**20)): - if chunk: # filter out keep-alive new chunks - print("writing %s/%s MB" % (r.raw.tell()/2**20, size/2**20)) + if chunk: # filter out keep-alive new chunks + print("writing %s/%s MB" % (r.raw.tell() / 2**20, size / 2**20)) f.write(chunk) f.flush() return file_path @@ -101,6 +102,7 @@ def hashsum_file(path, mode='md5'): h.update(chunk) return h.hexdigest() + def install_miniconda(path): prefix = os.path.join(tempdir, 'conda-build-miniconda') print("Installing Miniconda %s to %s" % (path, prefix)) @@ -126,15 +128,15 @@ def main(): arch = os.environ['BINSTAR_PLATFORM'] pyver = str(sys.version_info[0]) for url, md5, plat in [ - ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86_64.exe', - 'b6285db92cc042a44b2afaaf1a99b8cc', 'win-64-2'), - ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86.exe', - '67a6efb324491928f9aaa447ab5491ac', 'win-32-2'), - ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86_64.exe', - '6c6643ae90028d89e3ef72889bf8bb36', 'win-64-3'), - ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86.exe', - '2aae7daffbbd4a3f2b775c85a1500a47', 'win-32-3'), - ]: + ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86_64.exe', + 'b6285db92cc042a44b2afaaf1a99b8cc', 'win-64-2'), + ('http://repo.continuum.io/miniconda/Miniconda-3.5.5-Windows-x86.exe', + '67a6efb324491928f9aaa447ab5491ac', 'win-32-2'), + ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86_64.exe', + 
'6c6643ae90028d89e3ef72889bf8bb36', 'win-64-3'), + ('http://repo.continuum.io/miniconda/Miniconda3-3.5.5-Windows-x86.exe', + '2aae7daffbbd4a3f2b775c85a1500a47', 'win-32-3'), + ]: if plat == '%s-%s' % (arch, pyver): f = download_file(url, md5) install_miniconda(f) diff --git a/tests/test-recipes/build_recipes.sh b/tests/test-recipes/build_recipes.sh deleted file mode 100755 index e32ce6b924..0000000000 --- a/tests/test-recipes/build_recipes.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -set -e -set -x - -cd "$(dirname "${BASH_SOURCE[0]}")" - -# These variables are defined solely for testing purposes, -# so they can be checked within build scripts -export CONDA_TEST_VAR="conda_test" -export CONDA_TEST_VAR_2="conda_test_2" - -for recipe in metadata/*/; do - if [[ $(ls -A "$recipe") ]]; then - if [[ $recipe =~ .*osx_is_app.* && $(uname) != "Darwin" ]]; then - continue - fi - conda build --no-anaconda-upload $recipe - fi -done - -# Recipes that should fail and give some error -cd fail - -# We use 2>&1 as the error is printed to stderr. We could do >/dev/null to -# ensure it is printed to stderr, but then we would hide the output of the -# command from the test output. The ! ensures that the command fails. -! OUTPUT=$(conda build --no-anaconda-upload symlinks/ 2>&1) -echo "$OUTPUT" | grep "Error" | wc -l | grep 6 - -! OUTPUT=$(conda build --no-anaconda-upload conda-meta/ 2>&1) -echo "$OUTPUT" | grep 'Error: Untracked file(s) ('\''conda-meta/nope'\'',)' - -! OUTPUT=$(conda build --no-anaconda-upload recursive-build/ 2>&1) -echo "$OUTPUT" | grep 'No packages found in current .* channels matching: recursive-build2 2\.0' - -! OUTPUT=$(conda build --no-anaconda-upload source_git_jinja2_oops/ 2>&1) -echo "$OUTPUT" | grep '\''GIT_DSECRIBE_TAG'\'' is undefined' - -echo "TESTS PASSED" diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/README.txt b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/README.txt new file mode 100644 index 0000000000..a692a6a27a --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/README.txt @@ -0,0 +1,5 @@ +This is a Subversion repository; use the 'svnadmin' tool to examine +it. Do not add, delete, or modify files here unless you know how +to avoid corrupting the repository. + +Visit http://subversion.apache.org/ for more information. diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/authz b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/authz new file mode 100644 index 0000000000..0b9a41074e --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/authz @@ -0,0 +1,32 @@ +### This file is an example authorization file for svnserve. +### Its format is identical to that of mod_authz_svn authorization +### files. +### As shown below each section defines authorizations for the path and +### (optional) repository specified by the section name. +### The authorizations follow. An authorization line can refer to: +### - a single user, +### - a group of users defined in a special [groups] section, +### - an alias defined in a special [aliases] section, +### - all authenticated users, using the '$authenticated' token, +### - only anonymous users, using the '$anonymous' token, +### - anyone, using the '*' wildcard. +### +### A match can be inverted by prefixing the rule with '~'. Rules can +### grant read ('r') access, read-write ('rw') access, or no access +### (''). 
+ +[aliases] +# joe = /C=XZ/ST=Dessert/L=Snake City/O=Snake Oil, Ltd./OU=Research Institute/CN=Joe Average + +[groups] +# harry_and_sally = harry,sally +# harry_sally_and_joe = harry,sally,&joe + +# [/foo/bar] +# harry = rw +# &joe = r +# * = + +# [repository:/baz/fuz] +# @harry_and_sally = rw +# * = r diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/passwd b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/passwd new file mode 100644 index 0000000000..ecaa08dcec --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/passwd @@ -0,0 +1,8 @@ +### This file is an example password file for svnserve. +### Its format is similar to that of svnserve.conf. As shown in the +### example below it contains one section labelled [users]. +### The name and password for each user follow, one account per line. + +[users] +# harry = harryssecret +# sally = sallyssecret diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/svnserve.conf b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/svnserve.conf new file mode 100644 index 0000000000..c198b0ddb3 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/conf/svnserve.conf @@ -0,0 +1,61 @@ +### This file controls the configuration of the svnserve daemon, if you +### use it to allow access to this repository. (If you only allow +### access through http: and/or file: URLs, then this file is +### irrelevant.) + +### Visit http://subversion.apache.org/ for more information. + +[general] +### The anon-access and auth-access options control access to the +### repository for unauthenticated (a.k.a. anonymous) users and +### authenticated users, respectively. +### Valid values are "write", "read", and "none". +### Setting the value to "none" prohibits both reading and writing; +### "read" allows read-only access, and "write" allows complete +### read/write access to the repository. +### The sample settings below are the defaults and specify that anonymous +### users have read-only access to the repository, while authenticated +### users have read and write access to the repository. +# anon-access = read +# auth-access = write +### The password-db option controls the location of the password +### database file. Unless you specify a path starting with a /, +### the file's location is relative to the directory containing +### this configuration file. +### If SASL is enabled (see below), this file will NOT be used. +### Uncomment the line below to use the default password file. +# password-db = passwd +### The authz-db option controls the location of the authorization +### rules for path-based access control. Unless you specify a path +### starting with a /, the file's location is relative to the the +### directory containing this file. If you don't specify an +### authz-db, no path-based access control is done. +### Uncomment the line below to use the default authorization file. +# authz-db = authz +### This option specifies the authentication realm of the repository. +### If two repositories have the same authentication realm, they should +### have the same password database, and vice versa. The default realm +### is repository's uuid. +# realm = My First Repository +### The force-username-case option causes svnserve to case-normalize +### usernames before comparing them against the authorization rules in the +### authz-db file configured above. 
Valid values are "upper" (to upper- +### case the usernames), "lower" (to lowercase the usernames), and +### "none" (to compare usernames as-is without case conversion, which +### is the default behavior). +# force-username-case = none + +[sasl] +### This option specifies whether you want to use the Cyrus SASL +### library for authentication. Default is false. +### This section will be ignored if svnserve is not built with Cyrus +### SASL support; to check, run 'svnserve --version' and look for a line +### reading 'Cyrus SASL authentication is available.' +# use-sasl = true +### These options specify the desired strength of the security layer +### that you want SASL to provide. 0 means no encryption, 1 means +### integrity-checking only, values larger than 1 are correlated +### to the effective key length for encryption (e.g. 128 means 128-bit +### encryption). The values below are the defaults. +# min-encryption = 0 +# max-encryption = 256 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/current b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/current new file mode 100644 index 0000000000..d00491fd7e --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/current @@ -0,0 +1 @@ +1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/format b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/format new file mode 100644 index 0000000000..db06890e25 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/format @@ -0,0 +1,2 @@ +4 +layout sharded 1000 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fs-type b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fs-type new file mode 100644 index 0000000000..4fdd95313f --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fs-type @@ -0,0 +1 @@ +fsfs diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fsfs.conf b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fsfs.conf new file mode 100644 index 0000000000..76e581b64f --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/fsfs.conf @@ -0,0 +1,38 @@ +### This file controls the configuration of the FSFS filesystem. + +[memcached-servers] +### These options name memcached servers used to cache internal FSFS +### data. See http://www.danga.com/memcached/ for more information on +### memcached. To use memcached with FSFS, run one or more memcached +### servers, and specify each of them as an option like so: +# first-server = 127.0.0.1:11211 +# remote-memcached = mymemcached.corp.example.com:11212 +### The option name is ignored; the value is of the form HOST:PORT. +### memcached servers can be shared between multiple repositories; +### however, if you do this, you *must* ensure that repositories have +### distinct UUIDs and paths, or else cached data from one repository +### might be used by another accidentally. Note also that memcached has +### no authentication for reads or writes, so you must ensure that your +### memcached servers are only accessible by trusted users. + +[caches] +### When a cache-related error occurs, normally Subversion ignores it +### and continues, logging an error if the server is appropriately +### configured (and ignoring it with file:// access). 
To make +### Subversion never ignore cache errors, uncomment this line. +# fail-stop = true + +[rep-sharing] +### To conserve space, the filesystem can optionally avoid storing +### duplicate representations. This comes at a slight cost in +### performance, as maintaining a database of shared representations can +### increase commit times. The space savings are dependent upon the size +### of the repository, the number of objects it contains and the amount of +### duplication between them, usually a function of the branching and +### merging process. +### +### The following parameter enables rep-sharing in the repository. It can +### be switched on and off at will, but for best space-saving results +### should be enabled consistently over the life of the repository. +### rep-sharing is enabled by default. +# enable-rep-sharing = true diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/min-unpacked-rev b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/min-unpacked-rev new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/min-unpacked-rev @@ -0,0 +1 @@ +0 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/rep-cache.db b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/rep-cache.db new file mode 100644 index 0000000000..6fe88b7624 Binary files /dev/null and b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/rep-cache.db differ diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/0 b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/0 new file mode 100644 index 0000000000..00338becd9 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/0 @@ -0,0 +1,5 @@ +K 8 +svn:date +V 27 +2016-04-04T05:59:22.204357Z +END diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/1 b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/1 new file mode 100644 index 0000000000..0424bb7ca0 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revprops/0/1 @@ -0,0 +1,13 @@ +K 10 +svn:author +V 6 +mjuric +K 8 +svn:date +V 27 +2016-04-04T06:00:56.867615Z +K 7 +svn:log +V 7 +Initial +END diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/0 b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/0 new file mode 100644 index 0000000000..10f5c45f9d --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/0 @@ -0,0 +1,11 @@ +PLAIN +END +ENDREP +id: 0.0.r0/17 +type: dir +count: 0 +text: 0 0 4 4 2d2977d1c96f487abe4a1e202dd03b4e +cpath: / + + +17 107 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/1 b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/1 new file mode 100644 index 0000000000..012e81878b Binary files /dev/null and b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/revs/0/1 differ diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/txn-current b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/txn-current new file mode 100644 
index 0000000000..d00491fd7e --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/txn-current @@ -0,0 +1 @@ +1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/txn-current-lock b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/txn-current-lock new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/uuid b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/uuid new file mode 100644 index 0000000000..6ff81bd5a5 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/uuid @@ -0,0 +1 @@ +5dded66a-0f9e-4a3a-a16a-00ca1a50f001 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/write-lock b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/db/write-lock new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/format b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/format new file mode 100644 index 0000000000..7ed6ff82de --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/format @@ -0,0 +1 @@ +5 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-commit.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-commit.tmpl new file mode 100644 index 0000000000..98e4f8031b --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-commit.tmpl @@ -0,0 +1,50 @@ +#!/bin/sh + +# POST-COMMIT HOOK +# +# The post-commit hook is invoked after a commit. Subversion runs +# this hook by invoking a program (script, executable, binary, etc.) +# named 'post-commit' (for which this file is a template) with the +# following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] REV (the number of the revision just committed) +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# Because the commit has already completed and cannot be undone, +# the exit code of the hook program is ignored. The hook program +# can use the 'svnlook' utility to help it examine the +# newly-committed tree. +# +# On a Unix system, the normal procedure is to have 'post-commit' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'post-commit' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'post-commit.bat' or 'post-commit.exe', +# but the basic idea is the same. +# +# The hook program typically does not inherit the environment of +# its parent process. For example, a common problem is for the +# PATH environment variable to not be set to its usual value, so +# that subprograms fail to launch unless invoked via absolute path. +# If you're having unexpected problems with a hook program, the +# culprit may be unusual (or missing) environment variables. +# +# Here is an example hook script, for a Unix /bin/sh interpreter. 
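Before the template's own mailer.py example below, a smaller self-contained sketch of the same contract may help; the commit.log destination is an assumption for illustration, not part of the template:

#!/bin/sh
# Minimal post-commit sketch: two positional arguments, exit code ignored.
REPOS="$1"   # [1] the path to this repository
REV="$2"     # [2] the revision just committed
AUTHOR=$(svnlook author -r "$REV" "$REPOS")   # read-only inspection, as recommended
echo "r$REV committed by $AUTHOR" >> "$REPOS/hooks/commit.log"
exit 0       # ignored anyway; the commit is already final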
+# For more examples and pre-written hooks, see those in +# the Subversion repository at +# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and +# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ + + +REPOS="$1" +REV="$2" + +mailer.py commit "$REPOS" "$REV" /path/to/mailer.conf diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-lock.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-lock.tmpl new file mode 100644 index 0000000000..c779f11d6b --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-lock.tmpl @@ -0,0 +1,44 @@ +#!/bin/sh + +# POST-LOCK HOOK +# +# The post-lock hook is run after a path is locked. Subversion runs +# this hook by invoking a program (script, executable, binary, etc.) +# named 'post-lock' (for which this file is a template) with the +# following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] USER (the user who created the lock) +# +# The paths that were just locked are passed to the hook via STDIN (as +# of Subversion 1.2, only one path is passed per invocation, but the +# plan is to pass all locked paths at once, so the hook program +# should be written accordingly). +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# Because the lock has already been created and cannot be undone, +# the exit code of the hook program is ignored. The hook program +# can use the 'svnlook' utility to help it examine the +# newly-created lock. +# +# On a Unix system, the normal procedure is to have 'post-lock' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'post-lock' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'post-lock.bat' or 'post-lock.exe', +# but the basic idea is the same. +# +# Here is an example hook script, for a Unix /bin/sh interpreter: + +REPOS="$1" +USER="$2" + +# Send email to interested parties, let them know a lock was created: +mailer.py lock "$REPOS" "$USER" /path/to/mailer.conf diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-revprop-change.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-revprop-change.tmpl new file mode 100644 index 0000000000..3254f5e685 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-revprop-change.tmpl @@ -0,0 +1,56 @@ +#!/bin/sh + +# POST-REVPROP-CHANGE HOOK +# +# The post-revprop-change hook is invoked after a revision property +# has been added, modified or deleted. Subversion runs this hook by +# invoking a program (script, executable, binary, etc.) named +# 'post-revprop-change' (for which this file is a template), with the +# following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] REV (the revision that was tweaked) +# [3] USER (the username of the person tweaking the property) +# [4] PROPNAME (the property that was changed) +# [5] ACTION (the property was 'A'dded, 'M'odified, or 'D'eleted) +# +# [STDIN] PROPVAL ** the old property value is passed via STDIN. 
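Because a completed propchange cannot be undone, a post-revprop-change hook may want to archive the old value it receives on STDIN; a minimal sketch (the backup path is an arbitrary choice):

#!/bin/sh
# Sketch: archive the old property value delivered on STDIN before it is lost for good.
REPOS="$1"; REV="$2"; USER="$3"; PROPNAME="$4"; ACTION="$5"
cat > "$REPOS/hooks/revprop-old.r$REV.$PROPNAME"
exit 0   # post-* hook exit codes are ignored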
+# +# Because the propchange has already completed and cannot be undone, +# the exit code of the hook program is ignored. The hook program +# can use the 'svnlook' utility to help it examine the +# new property value. +# +# On a Unix system, the normal procedure is to have 'post-revprop-change' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'post-revprop-change' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'post-revprop-change.bat' or 'post-revprop-change.exe', +# but the basic idea is the same. +# +# The hook program typically does not inherit the environment of +# its parent process. For example, a common problem is for the +# PATH environment variable to not be set to its usual value, so +# that subprograms fail to launch unless invoked via absolute path. +# If you're having unexpected problems with a hook program, the +# culprit may be unusual (or missing) environment variables. +# +# Here is an example hook script, for a Unix /bin/sh interpreter. +# For more examples and pre-written hooks, see those in +# the Subversion repository at +# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and +# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ + + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +mailer.py propchange2 "$REPOS" "$REV" "$USER" "$PROPNAME" "$ACTION" /path/to/mailer.conf diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-unlock.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-unlock.tmpl new file mode 100644 index 0000000000..ae95c4bdb6 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/post-unlock.tmpl @@ -0,0 +1,42 @@ +#!/bin/sh + +# POST-UNLOCK HOOK +# +# The post-unlock hook runs after a path is unlocked. Subversion runs +# this hook by invoking a program (script, executable, binary, etc.) +# named 'post-unlock' (for which this file is a template) with the +# following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] USER (the user who destroyed the lock) +# +# The paths that were just unlocked are passed to the hook via STDIN +# (as of Subversion 1.2, only one path is passed per invocation, but +# the plan is to pass all unlocked paths at once, so the hook program +# should be written accordingly). +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# Because the lock has already been destroyed and cannot be undone, +# the exit code of the hook program is ignored. +# +# On a Unix system, the normal procedure is to have 'post-unlock' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'post-unlock' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'post-unlock.bat' or 'post-unlock.exe', +# but the basic idea is the same. 
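As with post-lock above, the affected paths arrive on STDIN, one per line as of Subversion 1.2; a sketch that merely logs them (the log destination is illustrative):

#!/bin/sh
# Sketch: record each just-unlocked path delivered on STDIN (one per line).
# Written as a loop so it keeps working if several paths arrive at once.
REPOS="$1"
USER="$2"
while IFS= read -r UNLOCKED_PATH; do
    echo "$USER unlocked $UNLOCKED_PATH" >> "$REPOS/hooks/unlock.log"
done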
+# +# Here is an example hook script, for a Unix /bin/sh interpreter: + +REPOS="$1" +USER="$2" + +# Send email to interested parties, let them know a lock was removed: +mailer.py unlock "$REPOS" "$USER" /path/to/mailer.conf diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-commit.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-commit.tmpl new file mode 100644 index 0000000000..80c7afb6b9 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-commit.tmpl @@ -0,0 +1,81 @@ +#!/bin/sh + +# PRE-COMMIT HOOK +# +# The pre-commit hook is invoked before a Subversion txn is +# committed. Subversion runs this hook by invoking a program +# (script, executable, binary, etc.) named 'pre-commit' (for which +# this file is a template), with the following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] TXN-NAME (the name of the txn about to be committed) +# +# [STDIN] LOCK-TOKENS ** the lock tokens are passed via STDIN. +# +# If STDIN contains the line "LOCK-TOKENS:\n" (the "\n" denotes a +# single newline), the lines following it are the lock tokens for +# this commit. The end of the list is marked by a line containing +# only a newline character. +# +# Each lock token line consists of a URI-escaped path, followed +# by the separator character '|', followed by the lock token string, +# followed by a newline. +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# If the hook program exits with success, the txn is committed; but +# if it exits with failure (non-zero), the txn is aborted, no commit +# takes place, and STDERR is returned to the client. The hook +# program can use the 'svnlook' utility to help it examine the txn. +# +# On a Unix system, the normal procedure is to have 'pre-commit' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# *** NOTE: THE HOOK PROGRAM MUST NOT MODIFY THE TXN, EXCEPT *** +# *** FOR REVISION PROPERTIES (like svn:log or svn:author). *** +# +# This is why we recommend using the read-only 'svnlook' utility. +# In the future, Subversion may enforce the rule that pre-commit +# hooks should not modify the versioned data in txns, or else come +# up with a mechanism to make it safe to do so (by informing the +# committing client of the changes). However, right now neither +# mechanism is implemented, so hook writers just have to be careful. +# +# Note that 'pre-commit' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'pre-commit.bat' or 'pre-commit.exe', +# but the basic idea is the same. +# +# The hook program typically does not inherit the environment of +# its parent process. For example, a common problem is for the +# PATH environment variable to not be set to its usual value, so +# that subprograms fail to launch unless invoked via absolute path. +# If you're having unexpected problems with a hook program, the +# culprit may be unusual (or missing) environment variables. +# +# Here is an example hook script, for a Unix /bin/sh interpreter. 
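Besides the template's log-message check below, a pre-commit hook can also consume the LOCK-TOKENS framing just described; a parsing sketch, purely illustrative:

#!/bin/sh
# Sketch: consume the LOCK-TOKENS framing described above, then allow the commit.
REPOS="$1"
TXN="$2"
if IFS= read -r header && [ "$header" = "LOCK-TOKENS:" ]; then
    # each line: URI-escaped path, '|' separator, lock token; a blank line ends the list
    while IFS='|' read -r locked_path token && [ -n "$locked_path" ]; do
        echo "txn $TXN: token $token for $locked_path" >&2
    done
fi
exit 0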
+# For more examples and pre-written hooks, see those in +# the Subversion repository at +# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and +# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ + + +REPOS="$1" +TXN="$2" + +# Make sure that the log message contains some text. +SVNLOOK=/Applications/Xcode.app/Contents/Developer/usr/bin/svnlook +$SVNLOOK log -t "$TXN" "$REPOS" | \ + grep "[a-zA-Z0-9]" > /dev/null || exit 1 + +# Check that the author of this commit has the rights to perform +# the commit on the files and directories being modified. +commit-access-control.pl "$REPOS" "$TXN" commit-access-control.cfg || exit 1 + +# All checks passed, so allow the commit. +exit 0 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-lock.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-lock.tmpl new file mode 100644 index 0000000000..50c7944d70 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-lock.tmpl @@ -0,0 +1,73 @@ +#!/bin/sh + +# PRE-LOCK HOOK +# +# The pre-lock hook is invoked before an exclusive lock is +# created. Subversion runs this hook by invoking a program +# (script, executable, binary, etc.) named 'pre-lock' (for which +# this file is a template), with the following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] PATH (the path in the repository about to be locked) +# [3] USER (the user creating the lock) +# [4] COMMENT (the comment of the lock) +# [5] STEAL-LOCK (1 if the user is trying to steal the lock, else 0) +# +# If the hook program outputs anything on stdout, the output string will +# be used as the lock token for this lock operation. If you choose to use +# this feature, you must guarantee the tokens generated are unique across +# the repository each time. +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# If the hook program exits with success, the lock is created; but +# if it exits with failure (non-zero), the lock action is aborted +# and STDERR is returned to the client. + +# On a Unix system, the normal procedure is to have 'pre-lock' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'pre-lock' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'pre-lock.bat' or 'pre-lock.exe', +# but the basic idea is the same. +# +# Here is an example hook script, for a Unix /bin/sh interpreter: + +REPOS="$1" +PATH="$2" +USER="$3" +COMMENT="$4" +STEAL="$5" + +# If a lock exists and is owned by a different person, don't allow it +# to be stolen (e.g., with 'svn lock --force ...'). + +# (Maybe this script could send email to the lock owner?) 
+SVNLOOK=/Applications/Xcode.app/Contents/Developer/usr/bin/svnlook +GREP=/bin/grep +SED=/bin/sed + +LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \ + $GREP '^Owner: ' | $SED 's/Owner: //'` + +# If we get no result from svnlook, there's no lock, allow the lock to +# happen: +if [ "$LOCK_OWNER" = "" ]; then + exit 0 +fi + +# If the person locking matches the lock's owner, allow the lock to +# happen: +if [ "$LOCK_OWNER" = "$USER" ]; then + exit 0 +fi + +# Otherwise, we've got an owner mismatch, so return failure: +echo "Error: $PATH already locked by ${LOCK_OWNER}." 1>&2 +exit 1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-revprop-change.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-revprop-change.tmpl new file mode 100644 index 0000000000..af127ad6ae --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-revprop-change.tmpl @@ -0,0 +1,66 @@ +#!/bin/sh + +# PRE-REVPROP-CHANGE HOOK +# +# The pre-revprop-change hook is invoked before a revision property +# is added, modified or deleted. Subversion runs this hook by invoking +# a program (script, executable, binary, etc.) named 'pre-revprop-change' +# (for which this file is a template), with the following ordered +# arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] REV (the revision being tweaked) +# [3] USER (the username of the person tweaking the property) +# [4] PROPNAME (the property being set on the revision) +# [5] ACTION (the property is being 'A'dded, 'M'odified, or 'D'eleted) +# +# [STDIN] PROPVAL ** the new property value is passed via STDIN. +# +# If the hook program exits with success, the propchange happens; but +# if it exits with failure (non-zero), the propchange doesn't happen. +# The hook program can use the 'svnlook' utility to examine the +# existing value of the revision property. +# +# WARNING: unlike other hooks, this hook MUST exist for revision +# properties to be changed. If the hook does not exist, Subversion +# will behave as if the hook were present, but failed. The reason +# for this is that revision properties are UNVERSIONED, meaning that +# a successful propchange is destructive; the old value is gone +# forever. We recommend the hook back up the old value somewhere. +# +# On a Unix system, the normal procedure is to have 'pre-revprop-change' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'pre-revprop-change' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'pre-revprop-change.bat' or 'pre-revprop-change.exe', +# but the basic idea is the same. +# +# The hook program typically does not inherit the environment of +# its parent process. For example, a common problem is for the +# PATH environment variable to not be set to its usual value, so +# that subprograms fail to launch unless invoked via absolute path. +# If you're having unexpected problems with a hook program, the +# culprit may be unusual (or missing) environment variables. +# +# Here is an example hook script, for a Unix /bin/sh interpreter. 
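The warning above recommends backing up the old value, since revision properties are unversioned and a successful change destroys the previous value; one possible sketch using svnlook (the backup path is arbitrary):

#!/bin/sh
# Sketch: save the old revision property value before permitting the change.
# A real hook would likely also filter on PROPNAME, as the template's example does.
REPOS="$1"; REV="$2"; USER="$3"; PROPNAME="$4"; ACTION="$5"
if [ "$ACTION" != "A" ]; then   # an 'A'dd has no old value to preserve
    svnlook propget --revprop -r "$REV" "$REPOS" "$PROPNAME" \
        > "$REPOS/hooks/revprop-backup.r$REV.$PROPNAME" || exit 1
fi
exit 0   # success: the propchange is allowed to happen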
+# For more examples and pre-written hooks, see those in +# the Subversion repository at +# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and +# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ + + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi + +echo "Changing revision properties other than svn:log is prohibited" >&2 +exit 1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-unlock.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-unlock.tmpl new file mode 100644 index 0000000000..aacea17413 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/pre-unlock.tmpl @@ -0,0 +1,65 @@ +#!/bin/sh + +# PRE-UNLOCK HOOK +# +# The pre-unlock hook is invoked before an exclusive lock is +# destroyed. Subversion runs this hook by invoking a program +# (script, executable, binary, etc.) named 'pre-unlock' (for which +# this file is a template), with the following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] PATH (the path in the repository about to be unlocked) +# [3] USER (the user destroying the lock) +# [4] TOKEN (the lock token to be destroyed) +# [5] BREAK-UNLOCK (1 if the user is breaking the lock, else 0) +# +# The default working directory for the invocation is undefined, so +# the program should set one explicitly if it cares. +# +# If the hook program exits with success, the lock is destroyed; but +# if it exits with failure (non-zero), the unlock action is aborted +# and STDERR is returned to the client. + +# On a Unix system, the normal procedure is to have 'pre-unlock' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'pre-unlock' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'pre-unlock.bat' or 'pre-unlock.exe', +# but the basic idea is the same. +# +# Here is an example hook script, for a Unix /bin/sh interpreter: + +REPOS="$1" +PATH="$2" +USER="$3" +TOKEN="$4" +BREAK="$5" + +# If a lock is owned by a different person, don't allow it be broken. +# (Maybe this script could send email to the lock owner?) + +SVNLOOK=/Applications/Xcode.app/Contents/Developer/usr/bin/svnlook +GREP=/bin/grep +SED=/bin/sed + +LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \ + $GREP '^Owner: ' | $SED 's/Owner: //'` + +# If we get no result from svnlook, there's no lock, return success: +if [ "$LOCK_OWNER" = "" ]; then + exit 0 +fi + +# If the person unlocking matches the lock's owner, return success: +if [ "$LOCK_OWNER" = "$USER" ]; then + exit 0 +fi + +# Otherwise, we've got an owner mismatch, so return failure: +echo "Error: $PATH locked by ${LOCK_OWNER}." 
1>&2 +exit 1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/start-commit.tmpl b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/start-commit.tmpl new file mode 100644 index 0000000000..53690d12fb --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/hooks/start-commit.tmpl @@ -0,0 +1,65 @@ +#!/bin/sh + +# START-COMMIT HOOK +# +# The start-commit hook is invoked before a Subversion txn is created +# in the process of doing a commit. Subversion runs this hook +# by invoking a program (script, executable, binary, etc.) named +# 'start-commit' (for which this file is a template) +# with the following ordered arguments: +# +# [1] REPOS-PATH (the path to this repository) +# [2] USER (the authenticated user attempting to commit) +# [3] CAPABILITIES (a colon-separated list of capabilities reported +# by the client; see note below) +# +# Note: The CAPABILITIES parameter is new in Subversion 1.5, and 1.5 +# clients will typically report at least the "mergeinfo" capability. +# If there are other capabilities, then the list is colon-separated, +# e.g.: "mergeinfo:some-other-capability" (the order is undefined). +# +# The list is self-reported by the client. Therefore, you should not +# make security assumptions based on the capabilities list, nor should +# you assume that clients reliably report every capability they have. +# +# The working directory for this hook program's invocation is undefined, +# so the program should set one explicitly if it cares. +# +# If the hook program exits with success, the commit continues; but +# if it exits with failure (non-zero), the commit is stopped before +# a Subversion txn is created, and STDERR is returned to the client. +# +# On a Unix system, the normal procedure is to have 'start-commit' +# invoke other programs to do the real work, though it may do the +# work itself too. +# +# Note that 'start-commit' must be executable by the user(s) who will +# invoke it (typically the user httpd runs as), and that user must +# have filesystem-level permission to access the repository. +# +# On a Windows system, you should name the hook program +# 'start-commit.bat' or 'start-commit.exe', +# but the basic idea is the same. +# +# The hook program typically does not inherit the environment of +# its parent process. For example, a common problem is for the +# PATH environment variable to not be set to its usual value, so +# that subprograms fail to launch unless invoked via absolute path. +# If you're having unexpected problems with a hook program, the +# culprit may be unusual (or missing) environment variables. +# +# Here is an example hook script, for a Unix /bin/sh interpreter. +# For more examples and pre-written hooks, see those in +# the Subversion repository at +# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and +# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ + + +REPOS="$1" +USER="$2" + +commit-allower.pl --repository "$REPOS" --user "$USER" || exit 1 +special-auth-check.py --user "$USER" --auth-level 3 || exit 1 + +# All checks passed, so allow the commit. 
+exit 0 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db-logs.lock b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db-logs.lock new file mode 100644 index 0000000000..20dd6369be --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db-logs.lock @@ -0,0 +1,3 @@ +This file is not used by Subversion 1.3.x or later. +However, its existence is required for compatibility with +Subversion 1.2.x or earlier. diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db.lock b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db.lock new file mode 100644 index 0000000000..20dd6369be --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/_svn_repo/dummy/locks/db.lock @@ -0,0 +1,3 @@ +This file is not used by Subversion 1.3.x or later. +However, its existence is required for compatibility with +Subversion 1.2.x or earlier. diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/bld.bat b/tests/test-recipes/metadata/_checkout_tool_as_dependency/bld.bat new file mode 100644 index 0000000000..3d7336781e --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/bld.bat @@ -0,0 +1,6 @@ +svn info +if errorlevel 1 exit 1 +for /f "delims=" %%i in ('svn info ^| grep "Revision"') do set svnrev=%%i +if errorlevel 1 exit 1 +echo %svnrev% +if not "%svnrev%"=="Revision: 1" exit 1 diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/build.sh b/tests/test-recipes/metadata/_checkout_tool_as_dependency/build.sh new file mode 100644 index 0000000000..4e6fba82f2 --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/build.sh @@ -0,0 +1,3 @@ +# Ensure the checkout went well +svn info +[ "$(svn info | grep "Revision")" = "Revision: 1" ] diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/meta.yaml b/tests/test-recipes/metadata/_checkout_tool_as_dependency/meta.yaml new file mode 100644 index 0000000000..fd636c8ebc --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/meta.yaml @@ -0,0 +1,14 @@ +{% set recipe_dir = RECIPE_DIR if unix else "/" ~ RECIPE_DIR.replace("\\", "/") %} + +package: + name: test-checkout-tool-as-dependency + version: 1.0 + +source: + svn_url: file://{{ recipe_dir }}/_svn_repo/dummy + svn_rev: 1 + +requirements: + build: + # svn, the checkout tool itself, is the build dependency under test + - svn diff --git a/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt b/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt new file mode 100644 index 0000000000..4d7db4f64b --- /dev/null +++ b/tests/test-recipes/metadata/_checkout_tool_as_dependency/readme.txt @@ -0,0 +1 @@ +This recipe is built explicitly by test_build_recipes, in the test_checkout_tool_as_dependency function.
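The jinja2 header of the meta.yaml above rewrites RECIPE_DIR so it can be embedded in a file:// URL on both platforms: Unix paths pass through, while a Windows path has its backslashes flipped and a leading slash prepended. A rough shell equivalent, purely for illustration (the sample input path is hypothetical):

# Illustrative only: mimic the jinja2 recipe_dir computation from meta.yaml above.
RECIPE_DIR='C:\work\recipe'                               # example Windows input
recipe_dir="/$(printf '%s' "$RECIPE_DIR" | tr '\\' '/')"  # -> /C:/work/recipe
echo "svn_url: file://${recipe_dir}/_svn_repo/dummy"      # -> file:///C:/work/recipe/_svn_repo/dummy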
diff --git a/tests/test-recipes/metadata/_cmake_generator/CMakeLists.txt b/tests/test-recipes/metadata/_cmake_generator/CMakeLists.txt new file mode 100644 index 0000000000..33670a0b7d --- /dev/null +++ b/tests/test-recipes/metadata/_cmake_generator/CMakeLists.txt @@ -0,0 +1,5 @@ +PROJECT(HELLO) + +CMAKE_MINIMUM_REQUIRED(VERSION 2.6) + +add_executable(hello hello.c) diff --git a/tests/test-recipes/metadata/_cmake_generator/bld.bat b/tests/test-recipes/metadata/_cmake_generator/bld.bat new file mode 100644 index 0000000000..c4346a1d0d --- /dev/null +++ b/tests/test-recipes/metadata/_cmake_generator/bld.bat @@ -0,0 +1,8 @@ +where cl.exe +where link.exe +:: maybe informative for MinGW? +where gcc.exe + +:: the %VAR:"=% expansion strips any literal double quotes from the generator name before re-quoting it +cmake -G "%CMAKE_GENERATOR:"=%" "%RECIPE_DIR%" +cmake --build . --config Release diff --git a/tests/test-recipes/metadata/_cmake_generator/build.sh b/tests/test-recipes/metadata/_cmake_generator/build.sh new file mode 100644 index 0000000000..d1e4539764 --- /dev/null +++ b/tests/test-recipes/metadata/_cmake_generator/build.sh @@ -0,0 +1,2 @@ +cmake -G "$CMAKE_GENERATOR" $RECIPE_DIR +cmake --build . --config Release diff --git a/tests/test-recipes/metadata/_cmake_generator/hello.c b/tests/test-recipes/metadata/_cmake_generator/hello.c new file mode 100644 index 0000000000..7c1035f664 --- /dev/null +++ b/tests/test-recipes/metadata/_cmake_generator/hello.c @@ -0,0 +1,5 @@ +#include <stdio.h> + +int main() { + printf("Hello world!\n"); +} diff --git a/tests/test-recipes/metadata/_cmake_generator/meta.yaml b/tests/test-recipes/metadata/_cmake_generator/meta.yaml new file mode 100644 index 0000000000..09ebfd4f32 --- /dev/null +++ b/tests/test-recipes/metadata/_cmake_generator/meta.yaml @@ -0,0 +1,7 @@ +package: + name: conda-build-test-cmake-generator + version: 1.0 + +requirements: + build: + - cmake diff --git a/tests/test-recipes/metadata/_dirty_skip_section/bld.bat b/tests/test-recipes/metadata/_dirty_skip_section/bld.bat new file mode 100644 index 0000000000..2100201c67 --- /dev/null +++ b/tests/test-recipes/metadata/_dirty_skip_section/bld.bat @@ -0,0 +1,3 @@ +:: ensure that the DIRTY environment variable is available for logic in build scripts +IF "%DIRTY%" == "1" exit 0 +exit 1 \ No newline at end of file diff --git a/tests/test-recipes/metadata/_dirty_skip_section/build.sh b/tests/test-recipes/metadata/_dirty_skip_section/build.sh new file mode 100644 index 0000000000..289cdc4b50 --- /dev/null +++ b/tests/test-recipes/metadata/_dirty_skip_section/build.sh @@ -0,0 +1,3 @@ +# ensure that the DIRTY environment variable is available for logic in build scripts +[ -n "$DIRTY" ] && exit 0 +exit 1 diff --git a/tests/test-recipes/metadata/_dirty_skip_section/meta.yaml b/tests/test-recipes/metadata/_dirty_skip_section/meta.yaml new file mode 100644 index 0000000000..82352c683b --- /dev/null +++ b/tests/test-recipes/metadata/_dirty_skip_section/meta.yaml @@ -0,0 +1,6 @@ +package: + name: conda-build-test-dirty-skip-section + version: 1.0 + +about: + summary: ensure that the DIRTY environment variable is available to bld.bat and build.sh diff --git a/tests/test-recipes/metadata/_pyyaml_find_header/meta.yaml b/tests/test-recipes/metadata/_pyyaml_find_header/meta.yaml new file mode 100644 index 0000000000..d9c04895c6 --- /dev/null +++ b/tests/test-recipes/metadata/_pyyaml_find_header/meta.yaml @@ -0,0 +1,25 @@ +# recipe from conda forge, downloaded 2016/06/03 +# problem report was https://github.com/conda/conda-build/issues/993 + +{% set version = "3.11" %} + + +package: + name: pyyaml + version: {{ version }} + 
+source: + fn: PyYAML-{{ version }}.tar.gz + url: https://pypi.io/packages/source/P/PyYAML/PyYAML-{{ version }}.tar.gz + md5: f50e08ef0fe55178479d3a618efe21db + +build: + number: 0 + script: + - python setup.py --with-libyaml build_ext --include-dirs=%LIBRARY_INC% --library-dirs=%LIBRARY_LIB% # [win] + - if errorlevel 1 exit 1 # [win] + +requirements: + build: + - python + - yaml diff --git a/tests/test-recipes/metadata/_recursive-build-a/meta.yaml b/tests/test-recipes/metadata/_recursive-build-a/meta.yaml new file mode 100644 index 0000000000..e713ebd0fc --- /dev/null +++ b/tests/test-recipes/metadata/_recursive-build-a/meta.yaml @@ -0,0 +1,9 @@ +# this is a utility package for recursive-build-two-layer + +package: + name: _recursive-build-a + version: 1.0 + +requirements: + build: + - _recursive-build-b diff --git a/tests/test-recipes/metadata/_recursive-build-b/meta.yaml b/tests/test-recipes/metadata/_recursive-build-b/meta.yaml new file mode 100644 index 0000000000..df284cb631 --- /dev/null +++ b/tests/test-recipes/metadata/_recursive-build-b/meta.yaml @@ -0,0 +1,5 @@ +# this is a utility package for recursive-build-two-layer + +package: + name: _recursive-build-b + version: 1.0 diff --git a/tests/test-recipes/metadata/_recursive-build-c/meta.yaml b/tests/test-recipes/metadata/_recursive-build-c/meta.yaml new file mode 100644 index 0000000000..a124b13a7b --- /dev/null +++ b/tests/test-recipes/metadata/_recursive-build-c/meta.yaml @@ -0,0 +1,5 @@ +# this is a utility package for recursive-build-two-packages + +package: + name: _recursive-build-c + version: 1.0 diff --git a/tests/test-recipes/metadata/_recursive-build-d/meta.yaml b/tests/test-recipes/metadata/_recursive-build-d/meta.yaml new file mode 100644 index 0000000000..e453eca157 --- /dev/null +++ b/tests/test-recipes/metadata/_recursive-build-d/meta.yaml @@ -0,0 +1,5 @@ +# this is a utility package for recursive-build-two-packages + +package: + name: _recursive-build-d + version: 1.0 diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml new file mode 100644 index 0000000000..5a2834c513 --- /dev/null +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_git_url/meta.yaml @@ -0,0 +1,10 @@ +package: + name: flex + version: {{ environ.get('GIT_DESCRIBE_TAG', '0.0') }} + +build: + number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} + string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} + +source: + git_url: ../../../../ diff --git a/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml new file mode 100644 index 0000000000..a807594584 --- /dev/null +++ b/tests/test-recipes/metadata/_source_git_jinja2_relative_path/meta.yaml @@ -0,0 +1,10 @@ +package: + name: flex + version: {{ environ.get('GIT_DESCRIBE_TAG', '0.0') }} + +build: + number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} + string: {{ environ.get('GIT_DESCRIBE_NUMBER', '0') + '_JPMC' }} + +source: + path: ../../../../ diff --git a/tests/test-recipes/metadata/always_include_files_glob/meta.yaml b/tests/test-recipes/metadata/always_include_files_glob/meta.yaml index df202a080e..4dd418d64d 100644 --- a/tests/test-recipes/metadata/always_include_files_glob/meta.yaml +++ b/tests/test-recipes/metadata/always_include_files_glob/meta.yaml @@ -11,4 +11,4 @@ build: requirements: build: - - libpng + - libpng 1.6.17 diff --git 
a/tests/test-recipes/metadata/always_include_files_glob/run_test.py b/tests/test-recipes/metadata/always_include_files_glob/run_test.py index 6fb0e9bca8..fb195c4e74 100644 --- a/tests/test-recipes/metadata/always_include_files_glob/run_test.py +++ b/tests/test-recipes/metadata/always_include_files_glob/run_test.py @@ -11,11 +11,19 @@ def main(): info = json.load(fh) if sys.platform == 'darwin': - assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files'] + assert set(info['files']) == {'lib/libpng.dylib', + 'lib/libpng16.16.dylib', + 'lib/libpng16.dylib'}, info['files'] elif sys.platform.startswith('linux'): - assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files'] + assert set(info['files']) == {'lib/libpng.so', + 'lib/libpng16.so', + 'lib/libpng16.so.16', + 'lib/libpng16.so.16.17.0'}, info['files'] elif sys.platform == 'win32': - assert sorted(info['files']) == ['Library/lib/libpng.lib', 'Library/lib/libpng16.lib', 'Library/lib/libpng16_static.lib', 'Library/lib/libpng_static.lib'] + assert sorted(info['files']) == ['Library/lib/libpng.lib', + 'Library/lib/libpng16.lib', + 'Library/lib/libpng16_static.lib', + 'Library/lib/libpng_static.lib'] if __name__ == '__main__': main() diff --git a/tests/test-recipes/metadata/binary_has_prefix_files/run_test.py b/tests/test-recipes/metadata/binary_has_prefix_files/run_test.py index 694e2d99e5..4d9683871b 100644 --- a/tests/test-recipes/metadata/binary_has_prefix_files/run_test.py +++ b/tests/test-recipes/metadata/binary_has_prefix_files/run_test.py @@ -1,5 +1,6 @@ import os + def main(): prefix = os.environ['PREFIX'] fn = os.path.join(prefix, 'binary-has-prefix') diff --git a/tests/test-recipes/metadata/build_env/run_test.py b/tests/test-recipes/metadata/build_env/run_test.py index 5f6ac2922d..ca08e3f397 100644 --- a/tests/test-recipes/metadata/build_env/run_test.py +++ b/tests/test-recipes/metadata/build_env/run_test.py @@ -1,5 +1,6 @@ import os + def main(): undef_var = os.environ.get("UNDEF_VAR") diff --git a/tests/test-recipes/metadata/build_number/run_test.sh b/tests/test-recipes/metadata/build_number/run_test.sh index e0c3296e85..1981471ced 100644 --- a/tests/test-recipes/metadata/build_number/run_test.sh +++ b/tests/test-recipes/metadata/build_number/run_test.sh @@ -1,6 +1,8 @@ conda list -p $PREFIX --canonical -# This is actually the build string. We test the build number below -[ "$(conda list -p $PREFIX --canonical)" = "conda-build-test-build-number-1.0-1" ] +# This is actually the build string. We test the build number below; the variant without +# 'local::' is for conda <4.1 and could be removed in the future.
+[ "$(conda list -p $PREFIX --canonical)" = "conda-build-test-build-number-1.0-1" ] || \ +[ "$(conda list -p $PREFIX --canonical)" = "local::conda-build-test-build-number-1.0-1" ] cat $PREFIX/conda-meta/conda-build-test-build-number-1.0-1.json cat $PREFIX/conda-meta/conda-build-test-build-number-1.0-1.json | grep '"build_number": 1' diff --git a/tests/test-recipes/metadata/empty_sections/meta.yaml b/tests/test-recipes/metadata/empty_sections/meta.yaml new file mode 100644 index 0000000000..6b66309925 --- /dev/null +++ b/tests/test-recipes/metadata/empty_sections/meta.yaml @@ -0,0 +1,10 @@ +package: + name: empty_sections + version: 0.0 + + +build: + +requirements: + build: + diff --git a/tests/test-recipes/metadata/entry_points/meta.yaml b/tests/test-recipes/metadata/entry_points/meta.yaml index 63df28c035..6b9df5e160 100644 --- a/tests/test-recipes/metadata/entry_points/meta.yaml +++ b/tests/test-recipes/metadata/entry_points/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - git_url: ../../../../ + path: ../../../../ build: entry_points: diff --git a/tests/test-recipes/metadata/has_prefix_files/run_test.py b/tests/test-recipes/metadata/has_prefix_files/run_test.py index adb58ea26e..cf0f861d04 100644 --- a/tests/test-recipes/metadata/has_prefix_files/run_test.py +++ b/tests/test-recipes/metadata/has_prefix_files/run_test.py @@ -2,6 +2,7 @@ import os from os.path import join + def main(): prefix = os.environ['PREFIX'] diff --git a/tests/test-recipes/metadata/ignore_prefix_files/meta.yaml b/tests/test-recipes/metadata/ignore_prefix_files/meta.yaml new file mode 100644 index 0000000000..011f41d113 --- /dev/null +++ b/tests/test-recipes/metadata/ignore_prefix_files/meta.yaml @@ -0,0 +1,14 @@ +package: + name: conda-build-test-ignore-prefix-files + version: 1.0 + +build: + ignore_prefix_files: True + script: + - echo %PREFIX%\\test.txt > %PREFIX%\\test.bat # [win] + - echo %PREFIX%\\test2.txt > %PREFIX%\\test2.bat # [win] + - echo ${PREFIX}/bin/test.txt > ${PREFIX}/test.sh # [unix] + - echo ${PREFIX}/bin/test2.txt > ${PREFIX}/test2.sh # [unix] + +about: + summary: test that ignore_prefix_files with boolean setting ignores all files diff --git a/tests/test-recipes/metadata/ignore_prefix_files/run_test.py b/tests/test-recipes/metadata/ignore_prefix_files/run_test.py new file mode 100644 index 0000000000..435bd6589e --- /dev/null +++ b/tests/test-recipes/metadata/ignore_prefix_files/run_test.py @@ -0,0 +1,8 @@ +import os +import sys + +# assumes that sys.prefix is /envs/_test +pkgs = os.path.join(sys.prefix, "..", "..", "pkgs") +info_dir = os.path.join(pkgs, "conda-build-test-ignore-prefix-files-1.0-0", "info") +assert os.path.isdir(info_dir) +assert not os.path.isfile(os.path.join(info_dir, "has_prefix")) diff --git a/tests/test-recipes/metadata/ignore_some_prefix_files/meta.yaml b/tests/test-recipes/metadata/ignore_some_prefix_files/meta.yaml new file mode 100644 index 0000000000..71272da1f5 --- /dev/null +++ b/tests/test-recipes/metadata/ignore_some_prefix_files/meta.yaml @@ -0,0 +1,16 @@ +package: + name: conda-build-test-ignore-some-prefix-files + version: 1.0 + +build: + ignore_prefix_files: + - test2.bat # [win] + - test2.sh # [unix] + script: + - echo %PREFIX%\\test.txt > %PREFIX%\\test.bat # [win] + - echo %PREFIX%\\test2.txt > %PREFIX%\\test2.bat # [win] + - echo ${PREFIX}/bin/test.txt > ${PREFIX}/test.sh # [unix] + - echo ${PREFIX}/bin/test2.txt > ${PREFIX}/test2.sh # [unix] + +about: + summary: test that ignore_prefix_files with list setting ignores specified files diff --git 
a/tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py b/tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py new file mode 100644 index 0000000000..c60bbfe0d1 --- /dev/null +++ b/tests/test-recipes/metadata/ignore_some_prefix_files/run_test.py @@ -0,0 +1,11 @@ +import os +import sys + +# assumes that sys.prefix is /envs/_test +pkgs = os.path.normpath(os.path.join(sys.prefix, "..", "..", "pkgs")) +info_dir = os.path.join(pkgs, "conda-build-test-ignore-some-prefix-files-1.0-0", "info") +has_prefix_file = os.path.join(info_dir, "has_prefix") +print(info_dir) +assert os.path.isfile(has_prefix_file) +with open(has_prefix_file) as f: + assert "test2" not in f.read() diff --git a/tests/test-recipes/metadata/jinja2_build_str_template_only/bld.bat b/tests/test-recipes/metadata/jinja2_build_str_template_only/bld.bat new file mode 100644 index 0000000000..ca1489a42c --- /dev/null +++ b/tests/test-recipes/metadata/jinja2_build_str_template_only/bld.bat @@ -0,0 +1,26 @@ +if not exist .git exit 1 +git config core.fileMode false +if errorlevel 1 exit 1 +git describe --tags --dirty +if errorlevel 1 exit 1 +for /f "delims=" %%i in ('git describe') do set gitdesc=%%i +if errorlevel 1 exit 1 +echo "%gitdesc%" +if not "%gitdesc%"=="1.8.1" exit 1 +git status +if errorlevel 1 exit 1 +git diff +if errorlevel 1 exit 1 +set PYTHONPATH=. +python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__" +if errorlevel 1 exit 1 + + +rem check that GIT_* tags are present +for %%i in (GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH) DO ( + if defined %%i ( + echo %%i + ) else ( + exit 1 + ) +) diff --git a/tests/test-recipes/metadata/jinja2_build_str_template_only/build.sh b/tests/test-recipes/metadata/jinja2_build_str_template_only/build.sh new file mode 100644 index 0000000000..74571375c8 --- /dev/null +++ b/tests/test-recipes/metadata/jinja2_build_str_template_only/build.sh @@ -0,0 +1,17 @@ +# We test the environment variables in a different recipe + +# Ensure we are in a git repo +[ -d .git ] +git describe +[ "$(git describe)" = 1.8.1 ] +PYTHONPATH=. 
python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__" + +# check if GIT_* variables are defined +for i in GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH +do + if eval "[ -n \"\$$i\" ]"; then + eval echo \$$i + else + exit 1 + fi +done diff --git a/tests/test-recipes/metadata/jinja2_build_str_template_only/meta.yaml b/tests/test-recipes/metadata/jinja2_build_str_template_only/meta.yaml new file mode 100644 index 0000000000..032b759b2f --- /dev/null +++ b/tests/test-recipes/metadata/jinja2_build_str_template_only/meta.yaml @@ -0,0 +1,15 @@ +package: + name: conda-build-test-source-git-jinja2-2 + version: 1.0 + +source: + git_url: ../../../../ + git_tag: 1.8.1 + +build: + string: {{ PKG_BUILDNUM }}_g{{ GIT_FULL_HASH[:7] }} + +requirements: + build: + # To test the conda_build version + - python diff --git a/tests/test-recipes/metadata/jinja_vars/meta.yaml b/tests/test-recipes/metadata/jinja_vars/meta.yaml index 786c4f210e..33eeb5f679 100644 --- a/tests/test-recipes/metadata/jinja_vars/meta.yaml +++ b/tests/test-recipes/metadata/jinja_vars/meta.yaml @@ -4,7 +4,7 @@ package: build: number: 123 - string: {{CONDA_TEST_VAR}}_{{PKG_BUILDNUM}} + string: {{CONDA_TEST_VAR[:]}}_{{PKG_BUILDNUM}} # [True] script_env: - CONDA_TEST_VAR diff --git a/tests/test-recipes/metadata/nested_recipe/build_number/meta.yaml b/tests/test-recipes/metadata/nested_recipe/build_number/meta.yaml new file mode 100644 index 0000000000..6211c1fc37 --- /dev/null +++ b/tests/test-recipes/metadata/nested_recipe/build_number/meta.yaml @@ -0,0 +1,6 @@ +package: + name: conda-build-test-build-number + version: 1.0 + +build: + number: 1 diff --git a/tests/test-recipes/metadata/nested_recipe/build_number/run_test.bat b/tests/test-recipes/metadata/nested_recipe/build_number/run_test.bat new file mode 100644 index 0000000000..03efa65f8f --- /dev/null +++ b/tests/test-recipes/metadata/nested_recipe/build_number/run_test.bat @@ -0,0 +1,10 @@ +conda list -p "%PREFIX%" --canonical +if errorlevel 1 exit 1 +for /f "delims=" %%i in ('conda list -p "%PREFIX%" --canonical') do set condalist=%%i +if errorlevel 1 exit 1 +echo "%condalist%" +if not "%condalist%"=="conda-build-test-build-number-1.0-1" exit 1 +cat "%PREFIX%\conda-meta\conda-build-test-build-number-1.0-1.json" +if errorlevel 1 exit 1 +cat "%PREFIX%\conda-meta\conda-build-test-build-number-1.0-1.json" | grep '"build_number": 1' +if errorlevel 1 exit 1 diff --git a/tests/test-recipes/metadata/nested_recipe/build_number/run_test.sh b/tests/test-recipes/metadata/nested_recipe/build_number/run_test.sh new file mode 100644 index 0000000000..e0c3296e85 --- /dev/null +++ b/tests/test-recipes/metadata/nested_recipe/build_number/run_test.sh @@ -0,0 +1,6 @@ +conda list -p $PREFIX --canonical +# This is actually the build string.
We test the build number below +[ "$(conda list -p $PREFIX --canonical)" = "conda-build-test-build-number-1.0-1" ] + +cat $PREFIX/conda-meta/conda-build-test-build-number-1.0-1.json +cat $PREFIX/conda-meta/conda-build-test-build-number-1.0-1.json | grep '"build_number": 1' diff --git a/tests/test-recipes/metadata/noarch/meta.yaml b/tests/test-recipes/metadata/noarch/meta.yaml new file mode 100644 index 0000000000..32e4b741e2 --- /dev/null +++ b/tests/test-recipes/metadata/noarch/meta.yaml @@ -0,0 +1,27 @@ +package: + name: noarch_test_package + version: "1.0" + +source: + path: noarch_test_package + +build: + script: python setup.py install + noarch_python: True + entry_points: + - noarch_test_package_script = noarch_test_package:main + +requirements: + build: + - python + - setuptools + + run: + - python + +test: + # Python imports + imports: + - noarch_test_package + commands: + - noarch_test_package_script diff --git a/tests/test-recipes/metadata/noarch/noarch_test_package/README b/tests/test-recipes/metadata/noarch/noarch_test_package/README new file mode 100644 index 0000000000..52d2f1286c --- /dev/null +++ b/tests/test-recipes/metadata/noarch/noarch_test_package/README @@ -0,0 +1 @@ +Simple package to test noarch package building. diff --git a/tests/test-recipes/metadata/noarch/noarch_test_package/noarch_test_package.py b/tests/test-recipes/metadata/noarch/noarch_test_package/noarch_test_package.py new file mode 100644 index 0000000000..1373380e34 --- /dev/null +++ b/tests/test-recipes/metadata/noarch/noarch_test_package/noarch_test_package.py @@ -0,0 +1,11 @@ +""" This functions as a module but also as entry point. +""" + +answer = 142 + + +def main(): + print(answer + 100) + +if __name__ == '__main__': + main() diff --git a/tests/test-recipes/metadata/noarch/noarch_test_package/setup.py b/tests/test-recipes/metadata/noarch/noarch_test_package/setup.py new file mode 100644 index 0000000000..ba673ea897 --- /dev/null +++ b/tests/test-recipes/metadata/noarch/noarch_test_package/setup.py @@ -0,0 +1,20 @@ +import setuptools +from distutils.core import setup + +setuptools # fool pyflakes + +name = 'noarch_test_package' + +setup( + name=name, + version='1.0', + author='Almar', + author_email='almar@notmyemail.com', + url='http://continuum.io', + license='(new) BSD', + description='testing noarch package building', + platforms='any', + provides=[name], + py_modules=[name], + entry_points={'console_scripts': ['%s_script = %s:main' % (name, name)], }, +) diff --git a/tests/test-recipes/metadata/noarch/run_test.py b/tests/test-recipes/metadata/noarch/run_test.py new file mode 100644 index 0000000000..b4dbbbe260 --- /dev/null +++ b/tests/test-recipes/metadata/noarch/run_test.py @@ -0,0 +1,31 @@ +import os +import sys +import subprocess + +import noarch_test_package + +pkgs_dir = os.path.abspath(os.path.join(sys.prefix, '..', '..', 'pkgs')) +pkg_dir = os.path.join(pkgs_dir, 'noarch_test_package-1.0-py_0') + +assert os.path.isdir(pkg_dir) + +# Check newlines in prelink scripts +# The one for the .sh is crucial, the one for the .bat is just good behavior + +fname_prelink_unix = os.path.join(pkg_dir, 'bin', '.noarch_test_package-pre-link.sh') +fname_prelink_win = os.path.join(pkg_dir, 'Scripts', '.noarch_test_package-pre-link.bat') + +prelink_unix = open(fname_prelink_unix, 'rb').read().decode('utf-8') +prelink_win = open(fname_prelink_win, 'rb').read().decode('utf-8') + +assert prelink_unix.count('\n') and not prelink_unix.count('\r') +assert prelink_win.count('\n') == prelink_win.count('\r') + +# 
Check module + +assert noarch_test_package.answer == 142 + +# Check entry point + +res = subprocess.check_output(['noarch_test_package_script']).decode('utf-8').strip() +assert res == '242' diff --git a/tests/test-recipes/metadata/numpy_build_run/run_test.py b/tests/test-recipes/metadata/numpy_build_run/run_test.py index b67ff33173..3b3045b596 100644 --- a/tests/test-recipes/metadata/numpy_build_run/run_test.py +++ b/tests/test-recipes/metadata/numpy_build_run/run_test.py @@ -2,6 +2,7 @@ import json import glob + def main(): prefix = os.environ['PREFIX'] diff --git a/tests/test-recipes/metadata/numpy_run/run_test.py b/tests/test-recipes/metadata/numpy_run/run_test.py index 16f11ff711..a60cabdeef 100644 --- a/tests/test-recipes/metadata/numpy_run/run_test.py +++ b/tests/test-recipes/metadata/numpy_run/run_test.py @@ -2,6 +2,7 @@ import json import glob + def main(): prefix = os.environ['PREFIX'] info_files = glob.glob(os.path.join(prefix, 'conda-meta', diff --git a/tests/test-recipes/metadata/osx_is_app/meta.yaml b/tests/test-recipes/metadata/osx_is_app/meta.yaml index 6fb1e64229..480f21f626 100644 --- a/tests/test-recipes/metadata/osx_is_app/meta.yaml +++ b/tests/test-recipes/metadata/osx_is_app/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - git_url: ../../../../ + path: ../../../../ build: entry_points: diff --git a/tests/test-recipes/metadata/python_build_run/run_test.py b/tests/test-recipes/metadata/python_build_run/run_test.py index 7fdf7bdd2e..3a3802812a 100644 --- a/tests/test-recipes/metadata/python_build_run/run_test.py +++ b/tests/test-recipes/metadata/python_build_run/run_test.py @@ -2,6 +2,7 @@ import json import glob + def main(): prefix = os.environ['PREFIX'] info_files = glob.glob(os.path.join(prefix, 'conda-meta', diff --git a/tests/test-recipes/metadata/python_run/run_test.py b/tests/test-recipes/metadata/python_run/run_test.py index a480b5c1ee..97391b1eef 100644 --- a/tests/test-recipes/metadata/python_run/run_test.py +++ b/tests/test-recipes/metadata/python_run/run_test.py @@ -2,6 +2,7 @@ import json import glob + def main(): prefix = os.environ['PREFIX'] info_files = glob.glob(os.path.join(prefix, 'conda-meta', diff --git a/tests/test-recipes/metadata/recursive-build-two-layers/meta.yaml b/tests/test-recipes/metadata/recursive-build-two-layers/meta.yaml new file mode 100644 index 0000000000..98526265ad --- /dev/null +++ b/tests/test-recipes/metadata/recursive-build-two-layers/meta.yaml @@ -0,0 +1,9 @@ +package: + name: conda-build-test-recursive-build-two-layers + version: 1.0 + +# This test should recursively build _recursive-build-a, +# which depends on _recursive-build-b +requirements: + build: + - _recursive-build-a diff --git a/tests/test-recipes/metadata/recursive-build-two-packages/meta.yaml b/tests/test-recipes/metadata/recursive-build-two-packages/meta.yaml new file mode 100644 index 0000000000..13a6e05f2d --- /dev/null +++ b/tests/test-recipes/metadata/recursive-build-two-packages/meta.yaml @@ -0,0 +1,10 @@ +package: + name: conda-build-test-recursive-build-two-layers + version: 1.0 + + # This test should build both _recursive-build-c and _recursive-build-d + # so that it can then build the current package +requirements: + build: + - _recursive-build-c + - _recursive-build-d diff --git a/tests/test-recipes/metadata/selectors_in_imported_templates/build.sh b/tests/test-recipes/metadata/selectors_in_imported_templates/build.sh new file mode 100644 index 0000000000..6f5e6f9743 --- /dev/null +++ 
b/tests/test-recipes/metadata/selectors_in_imported_templates/build.sh @@ -0,0 +1,2 @@ +# The selectors in variables.jinja make build_num == 2 +[ "${PKG_BUILDNUM}" == "2" ] diff --git a/tests/test-recipes/metadata/selectors_in_imported_templates/meta.yaml b/tests/test-recipes/metadata/selectors_in_imported_templates/meta.yaml new file mode 100644 index 0000000000..f4a01561a6 --- /dev/null +++ b/tests/test-recipes/metadata/selectors_in_imported_templates/meta.yaml @@ -0,0 +1,7 @@ +{% import 'variables.jinja' as variables %} +package: + name: selectors-in-imported-templates + version: 1.0 + +build: + number: {{ variables.build_num }} diff --git a/tests/test-recipes/metadata/selectors_in_imported_templates/variables.jinja b/tests/test-recipes/metadata/selectors_in_imported_templates/variables.jinja new file mode 100644 index 0000000000..d600b43a28 --- /dev/null +++ b/tests/test-recipes/metadata/selectors_in_imported_templates/variables.jinja @@ -0,0 +1,4 @@ + +{% set build_num = 1 %} # [False] +{% set build_num = 2 %} # [True] +{% set build_num = 3 %} # [False] diff --git a/tests/test-recipes/metadata/source_git/bld.bat b/tests/test-recipes/metadata/source_git/bld.bat index bc002292ab..ca1489a42c 100644 --- a/tests/test-recipes/metadata/source_git/bld.bat +++ b/tests/test-recipes/metadata/source_git/bld.bat @@ -14,3 +14,13 @@ if errorlevel 1 exit 1 set PYTHONPATH=. python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__" if errorlevel 1 exit 1 + + +rem check that GIT_* tags are present +for %%i in (GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH) DO ( + if defined %%i ( + echo %%i + ) else ( + exit 1 + ) +) diff --git a/tests/test-recipes/metadata/source_git/build.sh b/tests/test-recipes/metadata/source_git/build.sh index efca936212..74571375c8 100644 --- a/tests/test-recipes/metadata/source_git/build.sh +++ b/tests/test-recipes/metadata/source_git/build.sh @@ -5,3 +5,13 @@ git describe [ "$(git describe)" = 1.8.1 ] PYTHONPATH=. python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__" + +# check if GIT_* variables are defined +for i in GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH +do + if eval "[ -n \"\$$i\" ]"; then + eval echo \$$i + else + exit 1 + fi +done diff --git a/tests/test-recipes/metadata/source_git/meta.yaml b/tests/test-recipes/metadata/source_git/meta.yaml index 7b67126930..3bd01178bb 100644 --- a/tests/test-recipes/metadata/source_git/meta.yaml +++ b/tests/test-recipes/metadata/source_git/meta.yaml @@ -3,7 +3,7 @@ package: version: 1.0 source: - git_url: https://github.com/conda/conda-build + git_url: ../../../../ git_tag: 1.8.1 requirements: diff --git a/tests/test-recipes/metadata/source_git_jinja2/meta.yaml b/tests/test-recipes/metadata/source_git_jinja2/meta.yaml index e6bd0d99fe..9a92b2cf29 100644 --- a/tests/test-recipes/metadata/source_git_jinja2/meta.yaml +++ b/tests/test-recipes/metadata/source_git_jinja2/meta.yaml @@ -2,10 +2,10 @@ # including use cases involving expressions such as FOO[:7] or FOO.replace(...)
diff --git a/tests/test-recipes/metadata/source_git/bld.bat b/tests/test-recipes/metadata/source_git/bld.bat
index bc002292ab..ca1489a42c 100644
--- a/tests/test-recipes/metadata/source_git/bld.bat
+++ b/tests/test-recipes/metadata/source_git/bld.bat
@@ -14,3 +14,13 @@ if errorlevel 1 exit 1
 set PYTHONPATH=.
 python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__"
 if errorlevel 1 exit 1
+
+
+rem check that GIT_* tags are present
+for %%i in (GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH) DO (
+    if defined %%i (
+        echo %%i
+    ) else (
+        exit 1
+    )
+)
diff --git a/tests/test-recipes/metadata/source_git/build.sh b/tests/test-recipes/metadata/source_git/build.sh
index efca936212..74571375c8 100644
--- a/tests/test-recipes/metadata/source_git/build.sh
+++ b/tests/test-recipes/metadata/source_git/build.sh
@@ -5,3 +5,13 @@ git describe
 [ "$(git describe)" = 1.8.1 ]
 PYTHONPATH=. python -c "import conda_build; assert conda_build.__version__ == '1.8.1', conda_build.__version__"
+
+# check that the GIT_* variables are defined and non-empty
+for i in GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH
+do
+    if [ -n "$(eval echo \$$i)" ]; then
+        eval echo \$$i
+    else
+        exit 1
+    fi
+done
diff --git a/tests/test-recipes/metadata/source_git/meta.yaml b/tests/test-recipes/metadata/source_git/meta.yaml
index 7b67126930..3bd01178bb 100644
--- a/tests/test-recipes/metadata/source_git/meta.yaml
+++ b/tests/test-recipes/metadata/source_git/meta.yaml
@@ -3,7 +3,7 @@ package:
   version: 1.0
 
 source:
-  git_url: https://github.com/conda/conda-build
+  git_url: ../../../../
   git_tag: 1.8.1
 
 requirements:
diff --git a/tests/test-recipes/metadata/source_git_jinja2/meta.yaml b/tests/test-recipes/metadata/source_git_jinja2/meta.yaml
index e6bd0d99fe..9a92b2cf29 100644
--- a/tests/test-recipes/metadata/source_git_jinja2/meta.yaml
+++ b/tests/test-recipes/metadata/source_git_jinja2/meta.yaml
@@ -2,10 +2,10 @@
 # including use cases involving expressions such as FOO[:7] or FOO.replace(...)
 package:
   name: conda-build-test-source-git-jinja2
-  version: {{ GIT_DESCRIBE_TAG.replace('v', '') }}
+  version: {{ environ.get("GIT_DESCRIBE_TAG", "WRONG_ANSWER").replace('v', '') }}
 
 source:
-  git_url: https://github.com/conda/conda-build
+  git_url: ../../../../
   git_tag: 1.8.1
 
 build:
diff --git a/tests/test-recipes/metadata/source_path/bld.bat b/tests/test-recipes/metadata/source_path/bld.bat
index 3d9611cb95..28fa9812b0 100644
--- a/tests/test-recipes/metadata/source_path/bld.bat
+++ b/tests/test-recipes/metadata/source_path/bld.bat
@@ -1,3 +1,13 @@
 rem If directory exists, we did it right
 cd tests/test-recipes/test-package
 if errorlevel 1 exit 1
+
+
+rem check that GIT_* tags are present
+for %%i in (GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH) DO (
+    if defined %%i (
+        echo %%i
+    ) else (
+        exit 1
+    )
+)
diff --git a/tests/test-recipes/metadata/source_path/build.sh b/tests/test-recipes/metadata/source_path/build.sh
index 59287e39cb..004a333479 100644
--- a/tests/test-recipes/metadata/source_path/build.sh
+++ b/tests/test-recipes/metadata/source_path/build.sh
@@ -1,2 +1,12 @@
 # If directory exists, we did it right
 cd tests/test-recipes/test-package
+
+# check that the GIT_* variables are defined and non-empty
+for i in GIT_DESCRIBE_TAG GIT_DESCRIBE_NUMBER GIT_DESCRIBE_HASH GIT_FULL_HASH
+do
+    if [ -n "$(eval echo \$$i)" ]; then
+        eval echo \$$i
+    else
+        exit 1
+    fi
+done
diff --git a/tests/test-recipes/metadata/source_svn/bld.bat b/tests/test-recipes/skip/source_svn/bld.bat
similarity index 100%
rename from tests/test-recipes/metadata/source_svn/bld.bat
rename to tests/test-recipes/skip/source_svn/bld.bat
diff --git a/tests/test-recipes/metadata/source_svn/build.sh b/tests/test-recipes/skip/source_svn/build.sh
similarity index 100%
rename from tests/test-recipes/metadata/source_svn/build.sh
rename to tests/test-recipes/skip/source_svn/build.sh
diff --git a/tests/test-recipes/metadata/source_svn/meta.yaml b/tests/test-recipes/skip/source_svn/meta.yaml
similarity index 100%
rename from tests/test-recipes/metadata/source_svn/meta.yaml
rename to tests/test-recipes/skip/source_svn/meta.yaml
diff --git a/tests/test-recipes/test-package/conda_build_test/empty.py b/tests/test-recipes/test-package/conda_build_test/empty.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/test-skeleton/sympy-0.7.5-url/meta.yaml b/tests/test-skeleton/sympy-0.7.5-url/meta.yaml
index b71bd77777..885a725c94 100644
--- a/tests/test-skeleton/sympy-0.7.5-url/meta.yaml
+++ b/tests/test-skeleton/sympy-0.7.5-url/meta.yaml
@@ -4,7 +4,7 @@ package:
 
 source:
   fn: sympy-0.7.5.tar.gz
-  url: https://pypi.python.org/packages/source/s/sympy/sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9
+  url: https://pypi.io/packages/source/s/sympy/sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9
   md5: 7de1adb49972a15a3dd975e879a2bea9
 #  patches:
    # List any patch files here
diff --git a/tests/test-skeleton/sympy-0.7.5/meta.yaml b/tests/test-skeleton/sympy-0.7.5/meta.yaml
index 99f4defc63..e798c09923 100644
--- a/tests/test-skeleton/sympy-0.7.5/meta.yaml
+++ b/tests/test-skeleton/sympy-0.7.5/meta.yaml
@@ -4,7 +4,7 @@ package:
 
 source:
   fn: sympy-0.7.5.tar.gz
-  url: https://pypi.python.org/packages/source/s/sympy/sympy-0.7.5.tar.gz
+  url: https://files.pythonhosted.org/packages/8c/a5/5fa8adee81837687f7315122769fc0b0e8b042c69e2fe5809c41191c7183/sympy-0.7.5.tar.gz
   md5: 7de1adb49972a15a3dd975e879a2bea9
 #  patches:
    # List any patch files here
diff --git a/tests/test-skeleton/test-skeleton.sh b/tests/test-skeleton/test-skeleton.sh
deleted file mode 100755
index 7c9d273fc2..0000000000
--- a/tests/test-skeleton/test-skeleton.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -x
-set -e
-DIR=`mktemp -d -t sympy-0.7.5-XXXX`
-conda skeleton pypi --output-dir $DIR --version=0.7.5 sympy
-python -c "
-import yaml
-with open('"$(dirname "${BASH_SOURCE[0]}")"/sympy-0.7.5/meta.yaml') as f:
-    expected = yaml.load(f)
-with open('$DIR/sympy/meta.yaml') as f:
-    actual = yaml.load(f)
-assert expected == actual, (expected, actual)
-"
-# XXX: This won't run if the test fails.
-rm -rf $DIR
-echo passed
-
-DIR=`mktemp -d -t sympy-0.7.5-url-XXXX`
-conda skeleton pypi --output-dir $DIR https://pypi.python.org/packages/source/s/sympy/sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9
-python -c "
-import yaml
-with open('"$(dirname "${BASH_SOURCE[0]}")"/sympy-0.7.5-url/meta.yaml') as f:
-    expected = yaml.load(f)
-with open('$DIR/sympy/meta.yaml') as f:
-    actual = yaml.load(f)
-assert expected == actual, (expected, actual)
-"
-# XXX: This won't run if the test fails.
-rm -rf $DIR
-echo passed
diff --git a/tests/test-skeleton/test_skeleton.py b/tests/test-skeleton/test_skeleton.py
new file mode 100644
index 0000000000..7f7c5f27c6
--- /dev/null
+++ b/tests/test-skeleton/test_skeleton.py
@@ -0,0 +1,47 @@
+import os
+import shutil
+import subprocess
+import tempfile
+
+import pytest
+import yaml
+
+thisdir = os.path.dirname(os.path.realpath(__file__))
+
+
+@pytest.fixture(scope="function")
+def tmpdir(request):
+    tmpdir = tempfile.mkdtemp()
+
+    def fin():
+        shutil.rmtree(tmpdir)
+    request.addfinalizer(fin)
+    return tmpdir
+
+
+def test_skeleton_by_name(tmpdir):
+    cmd = "conda skeleton pypi --output-dir {} conda".format(tmpdir)
+    subprocess.check_call(cmd.split())
+
+
+def test_name_with_version_specified(tmpdir):
+    cmd = "conda skeleton pypi --output-dir {} --version=0.7.5 sympy".format(tmpdir)
+    subprocess.check_call(cmd.split())
+    with open('{}/sympy-0.7.5/meta.yaml'.format(thisdir)) as f:
+        expected = yaml.load(f)
+    with open('{}/sympy/meta.yaml'.format(tmpdir)) as f:
+        actual = yaml.load(f)
+    assert expected == actual, (expected, actual)
+
+
+def test_url(tmpdir):
+    cmd = "conda skeleton pypi --output-dir {} \
+https://pypi.io/packages/source/s/sympy/\
+sympy-0.7.5.tar.gz#md5=7de1adb49972a15a3dd975e879a2bea9".format(tmpdir)
+    subprocess.check_call(cmd.split())
+    with open('{}/sympy-0.7.5-url/meta.yaml'.format(thisdir)) as f:
+        expected = yaml.load(f)
+    with open('{}/sympy/meta.yaml'.format(tmpdir)) as f:
+        actual = yaml.load(f)
+    assert expected == actual, (expected, actual)
+ """ + # create a temporary folder + prefix = os.path.join(sys.prefix, "envs", "_build") + if not os.path.isdir(prefix): + os.makedirs(prefix) + with TemporaryDirectory(prefix=prefix + os.path.sep) as tmpdir: + # create text files to be replaced + files = [] + for slash_style in prefix_tests: + filename = os.path.join(tmpdir, "%s.txt" % slash_style) + _write_prefix(filename, prefix, prefix_tests[slash_style]) + files.append(filename) + + assert len(list(build.have_prefix_files(files))) == len(files) diff --git a/tests/test_build_recipes.py b/tests/test_build_recipes.py new file mode 100644 index 0000000000..f0b475aa41 --- /dev/null +++ b/tests/test_build_recipes.py @@ -0,0 +1,340 @@ +import os +import subprocess +import shutil +import sys +import tempfile + +import pytest + +from conda.compat import PY3, TemporaryDirectory +from conda.config import subdir +from conda.fetch import download +from conda_build.source import _guess_patch_strip_level, apply_patch + +thisdir = os.path.dirname(os.path.realpath(__file__)) +metadata_dir = os.path.join(thisdir, "test-recipes/metadata") +fail_dir = os.path.join(thisdir, "test-recipes/fail") + + +def is_valid_dir(parent_dir, dirname): + valid = os.path.isdir(os.path.join(parent_dir, dirname)) + valid &= not dirname.startswith("_") + valid &= ('osx_is_app' != dirname or sys.platform == "darwin") + return valid + + +@pytest.mark.skipif(sys.platform != "win32", + reason="Problem only observed on Windows with win7 sdk") +def test_header_finding(): + """ + Windows sometimes very strangely cannot find headers in %LIBRARY_INC%. This has so far + only been a problem with the recipes that use the Win 7 SDK (python 3.4 builds) + """ + cmd = 'conda build --no-anaconda-upload {}/_pyyaml_find_header'.format(metadata_dir) + try: + output = subprocess.check_output(cmd.split()) + except subprocess.CalledProcessError as error: + print(error.output) + print(os.listdir(os.path.join(sys.prefix, "envs", "_build", "Library", "include"))) + raise + if PY3: + output = output.decode("UTF-8") + assert "forcing --without-libyaml" not in output + + +def test_CONDA_BLD_PATH(): + env = dict(os.environ) + cmd = 'conda build --no-anaconda-upload {}/source_git_jinja2'.format(metadata_dir) + with TemporaryDirectory() as tmp: + env["CONDA_BLD_PATH"] = tmp + subprocess.check_call(cmd.split(), env=env) + # trick is actually a second pass, to make sure that deletion/trash moving is working OK. + subprocess.check_call(cmd.split(), env=env) + + +# TODO: this does not currently take into account post-build versioning changes with __conda_? 
diff --git a/tests/test_build_recipes.py b/tests/test_build_recipes.py
new file mode 100644
index 0000000000..f0b475aa41
--- /dev/null
+++ b/tests/test_build_recipes.py
@@ -0,0 +1,340 @@
+import os
+import subprocess
+import shutil
+import sys
+import tempfile
+
+import pytest
+
+from conda.compat import PY3, TemporaryDirectory
+from conda.config import subdir
+from conda.fetch import download
+from conda_build.source import _guess_patch_strip_level, apply_patch
+
+thisdir = os.path.dirname(os.path.realpath(__file__))
+metadata_dir = os.path.join(thisdir, "test-recipes/metadata")
+fail_dir = os.path.join(thisdir, "test-recipes/fail")
+
+
+def is_valid_dir(parent_dir, dirname):
+    valid = os.path.isdir(os.path.join(parent_dir, dirname))
+    valid &= not dirname.startswith("_")
+    valid &= ('osx_is_app' != dirname or sys.platform == "darwin")
+    return valid
+
+
+@pytest.mark.skipif(sys.platform != "win32",
+                    reason="Problem only observed on Windows with win7 sdk")
+def test_header_finding():
+    """
+    Windows sometimes, very strangely, cannot find headers in %LIBRARY_INC%. This has so far
+    only been a problem with the recipes that use the Win 7 SDK (python 3.4 builds).
+    """
+    cmd = 'conda build --no-anaconda-upload {}/_pyyaml_find_header'.format(metadata_dir)
+    try:
+        output = subprocess.check_output(cmd.split())
+    except subprocess.CalledProcessError as error:
+        print(error.output)
+        print(os.listdir(os.path.join(sys.prefix, "envs", "_build", "Library", "include")))
+        raise
+    if PY3:
+        output = output.decode("UTF-8")
+    assert "forcing --without-libyaml" not in output
+
+
+def test_CONDA_BLD_PATH():
+    env = dict(os.environ)
+    cmd = 'conda build --no-anaconda-upload {}/source_git_jinja2'.format(metadata_dir)
+    with TemporaryDirectory() as tmp:
+        env["CONDA_BLD_PATH"] = tmp
+        subprocess.check_call(cmd.split(), env=env)
+        # the second pass makes sure that deletion/trash moving of the
+        # previous build works OK.
+        subprocess.check_call(cmd.split(), env=env)
+
+
+# TODO: this does not currently take into account post-build versioning changes with __conda_? files
+def test_output_build_path_git_source():
+    cmd = 'conda build --output {}'.format(os.path.join(metadata_dir, "source_git_jinja2"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    test_path = os.path.join(sys.prefix, "conda-bld", subdir,
+                             "conda-build-test-source-git-jinja2-1.8.1-py{}{}_0_gf3d51ae.tar.bz2".format(
+                                 sys.version_info.major, sys.version_info.minor))
+    if PY3:
+        output = output.decode("UTF-8")
+    assert output.rstrip() == test_path
+
+
+@pytest.mark.skipif(sys.platform == "win32",
+                    reason="Windows permission errors w/ git when removing repo files on cleanup.")
+def test_cached_source_not_interfere_with_versioning():
+    """Test that a cached work dir does not cause an inaccurate version to be tested"""
+    basedir = os.getcwd()
+    try:
+        with TemporaryDirectory() as tmp:
+            os.chdir(tmp)
+            subprocess.check_call(['git', 'clone',
+                                   'https://github.com/conda/conda_build_test_recipe'])
+            # build to make sure we have a work directory with source in it.
+            # We want to make sure that whatever version that is does not
+            # interfere with the test we run next.
+            subprocess.check_call(['conda', 'build', '--no-test',
+                                   '--no-anaconda-upload',
+                                   'conda_build_test_recipe'])
+
+            os.chdir('conda_build_test_recipe')
+            subprocess.check_call(['git', 'checkout', '1.20.0'])
+            os.chdir('..')
+
+            # the printed path must reflect the 1.20.0 checkout. If it reflects
+            # the version built above instead, the cached master checkout was
+            # used to determine which version to test.
+            cmd = 'conda build --output conda_build_test_recipe'
+            output = subprocess.check_output(cmd.split())
+            if PY3:
+                output = output.decode("UTF-8")
+            assert ("conda-build-test-source-git-jinja2-1.20.0" in output)
+    finally:
+        os.chdir(basedir)
+
+
+def test_relative_path_git_versioning():
+    tag = subprocess.check_output(["git", "describe", "--abbrev=0"]).rstrip()
+    cmd = 'conda build --output {}'.format(os.path.join(metadata_dir,
+                                                        "_source_git_jinja2_relative_path"))
+    output = subprocess.check_output(cmd.split())
+    assert tag in output
+
+
+def test_relative_git_url_git_versioning():
+    tag = subprocess.check_output(["git", "describe", "--abbrev=0"]).rstrip()
+    cmd = 'conda build --output {}'.format(os.path.join(metadata_dir,
+                                                        "_source_git_jinja2_relative_git_url"))
+    output = subprocess.check_output(cmd.split())
+    assert tag in output
+
+
+def test_package_test():
+    """Test calling conda build -t <package file> rather than <recipe dir>"""
+    filename = "jinja2-2.8-py{}{}_0.tar.bz2".format(sys.version_info.major, sys.version_info.minor)
+    downloaded_file = os.path.join(sys.prefix, 'conda-bld', subdir, filename)
+    if not os.path.isfile(downloaded_file):
+        download('https://anaconda.org/conda-forge/jinja2/2.8/download/{}/{}'.format(subdir, filename),  # noqa
+                 downloaded_file)
+    subprocess.check_call(["conda", "build", "--test", downloaded_file])
+
+
+@pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir)
+                        if is_valid_dir(metadata_dir, dirname)])
+def recipe(request):
+    cwd = os.getcwd()
+    os.chdir(metadata_dir)
+
+    def fin():
+        os.chdir(cwd)
+    request.addfinalizer(fin)
+    return os.path.join(metadata_dir, request.param)
+
+
+def test_recipe_builds(recipe):
+    env = dict(os.environ)
+    # These variables are defined solely for testing purposes,
+    # so they can be checked within build scripts
+    env["CONDA_TEST_VAR"] = "conda_test"
+    env["CONDA_TEST_VAR_2"] = "conda_test_2"
+
+    cmd = 'conda build --no-anaconda-upload {}'.format(recipe)
+
+    # allow the recipe to customize its build
+    driver = os.path.join(recipe, '_driver.sh')
+    if os.access(driver, os.X_OK):
+        cmd = "{} {}".format(driver, cmd)
+    subprocess.check_call(cmd.split(), env=env)
+
+
+def test_dirty_variable_available_in_build_scripts():
+    cmd = 'conda build --no-anaconda-upload --dirty {}'.format(os.path.join(metadata_dir,
+                                                                            "_dirty_skip_section"))
+    subprocess.check_call(cmd.split())
+    with pytest.raises(subprocess.CalledProcessError):
+        cmd = cmd.replace(" --dirty", "")
+        subprocess.check_call(cmd.split())
+
+
+def test_checkout_tool_as_dependency():
+    # "hide" svn by putting a known bad one on PATH
+    tmpdir = tempfile.mkdtemp()
+    dummyfile = os.path.join(tmpdir, "svn")
+    # the prefix is empty by default; on *nix it prepends a shebang and a
+    # redirect of stdout to stderr at the beginning of the file
+    prefix = ""
+    if sys.platform != "win32":
+        prefix = "#!/bin/bash\nexec 1>&2\n"
+    with open(dummyfile, 'w') as f:
+        f.write(prefix + """
+echo
+echo " ******* You've reached the dummy svn. It's likely there's a bug in conda *******"
+echo " ******* that makes it not add the _build/bin directory onto the PATH *******"
+echo " ******* before running the source checkout tool *******"
+echo
+exit -1
+""")
+    if sys.platform == "win32":
+        os.rename(dummyfile, dummyfile + ".bat")
+    else:
+        import stat
+        st = os.stat(dummyfile)
+        os.chmod(dummyfile, st.st_mode | stat.S_IEXEC)
+    env = dict(os.environ)
+    env["PATH"] = os.pathsep.join([tmpdir, env["PATH"]])
+    cmd = 'conda build --no-anaconda-upload {}/_checkout_tool_as_dependency'.format(metadata_dir)
+    try:
+        subprocess.check_call(cmd.split(), env=env)
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+platforms = ["64" if sys.maxsize > 2**32 else "32"]
+if sys.platform == "win32":
+    platforms = set(["32", ] + platforms)
+    compilers = ["2.7", "3.4", "3.5"]
+else:
+    compilers = [".".join([str(sys.version_info.major), str(sys.version_info.minor)])]
+
+
+@pytest.mark.parametrize("platform", platforms)
+@pytest.mark.parametrize("target_compiler", compilers)
+def test_cmake_generator(platform, target_compiler):
+    # TODO: need a better way to specify compiler more directly on win
+    cmd = 'conda build --no-anaconda-upload {}/_cmake_generator --python={}'.\
+        format(metadata_dir, target_compiler)
+    subprocess.check_call(cmd.split())
+
+
+@pytest.mark.skipif(sys.platform == "win32",
+                    reason="No windows symlinks")
+def test_symlink_fail():
+    cmd = 'conda build --no-anaconda-upload {}'.format(os.path.join(fail_dir, "symlinks"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    error = error.decode('utf-8')
+    assert error.count("Error") == 6
+
+
+@pytest.mark.skipif(sys.platform == "win32",
+                    reason="Windows doesn't show this error")
+def test_broken_conda_meta():
+    cmd = 'conda build --no-anaconda-upload {}'.format(os.path.join(fail_dir, "conda-meta"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    error = error.decode('utf-8')
+    assert "Error: Untracked file(s) ('conda-meta/nope',)" in error
+
+
+def test_recursive_fail():
+    cmd = 'conda build --no-anaconda-upload {}'.format(os.path.join(fail_dir, "recursive-build"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    error = error.decode('utf-8')
+    assert "recursive-build2" in error
+
+
+def test_jinja_typo():
+    cmd = 'conda build --no-anaconda-upload {}'.format(os.path.join(fail_dir,
+                                                                    "source_git_jinja2_oops"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    error = error.decode('utf-8')
+    assert "'GIT_DSECRIBE_TAG' is undefined" in error
+
+
+def test_skip_existing():
+    # build the recipe first
+    cmd = 'conda build --no-anaconda-upload {}'.format(os.path.join(metadata_dir, "build_number"))
+    subprocess.check_call(cmd.split())
+    cmd = 'conda build --no-anaconda-upload --skip-existing {}'.format(os.path.join(metadata_dir,
+                                                                                    "build_number"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, _ = process.communicate()
+    output = output.decode('utf-8')
+    assert "is already built, skipping." in output
+
+
+def test_token_upload():
+    # generated with conda_test_account user, command:
+    #    anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
+    token = "co-79de533f-926f-4e5e-a766-d393e33ae98f"
+    # the folder with the test recipe to upload
+    cmd = 'conda build --token {} {}'.format(token, os.path.join(metadata_dir, "empty_sections"))
+    subprocess.check_call(cmd.split())
+    # clean up - we don't actually want this package to exist
+    cmd = 'anaconda --token {} remove --force conda_test_account/conda-build-test-empty_sections'\
+        .format(token)
+    subprocess.check_call(cmd.split())
+
+
+def test_patch_strip_level():
+    patchfiles = set(('some/common/prefix/one.txt',
+                      'some/common/prefix/two.txt',
+                      'some/common/prefix/three.txt'))
+    folders = ('some', 'common', 'prefix')
+    files = ('one.txt', 'two.txt', 'three.txt')
+    basedir = os.getcwd()
+    with TemporaryDirectory() as tmp:
+        os.chdir(tmp)
+        os.makedirs(os.path.join(tmp, *folders))
+        for file in files:
+            with open(os.path.join(os.path.join(tmp, *folders), file), 'w') as f:
+                f.write('hello\n')
+        assert _guess_patch_strip_level(patchfiles, os.getcwd()) == 0
+        os.chdir(folders[0])
+        assert _guess_patch_strip_level(patchfiles, os.getcwd()) == 1
+        os.chdir(folders[1])
+        assert _guess_patch_strip_level(patchfiles, os.getcwd()) == 2
+        os.chdir(folders[2])
+        assert _guess_patch_strip_level(patchfiles, os.getcwd()) == 3
+        os.chdir(basedir)
+
+
+def test_patch():
+    basedir = os.getcwd()
+    with TemporaryDirectory() as tmp:
+        os.chdir(tmp)
+        with open(os.path.join(tmp, 'file-deletion.txt'), 'w') as f:
+            f.write('hello\n')
+        with open(os.path.join(tmp, 'file-modification.txt'), 'w') as f:
+            f.write('hello\n')
+        patchfile = os.path.join(tmp, 'patch')
+        with open(patchfile, 'w') as f:
+            f.write('diff file-deletion.txt file-deletion.txt\n')
+            f.write('--- file-deletion.txt 2016-06-07 21:55:59.549798700 +0100\n')
+            f.write('+++ file-deletion.txt 1970-01-01 01:00:00.000000000 +0100\n')
+            f.write('@@ -1 +0,0 @@\n')
+            f.write('-hello\n')
+            f.write('diff file-creation.txt file-creation.txt\n')
+            f.write('--- file-creation.txt 1970-01-01 01:00:00.000000000 +0100\n')
+            f.write('+++ file-creation.txt 2016-06-07 21:55:59.549798700 +0100\n')
+            f.write('@@ -0,0 +1 @@\n')
+            f.write('+hello\n')
+            f.write('diff file-modification.txt file-modification.txt.new\n')
+            f.write('--- file-modification.txt 2016-06-08 18:23:08.384136600 +0100\n')
+            f.write('+++ file-modification.txt.new 2016-06-08 18:23:37.565136200 +0100\n')
+            f.write('@@ -1 +1 @@\n')
+            f.write('-hello\n')
+            f.write('+43770\n')
+        # the with block closed the patch file, so it is safe to apply now
+        apply_patch(tmp, patchfile)
+        assert not os.path.exists(os.path.join(tmp, 'file-deletion.txt'))
+        assert os.path.exists(os.path.join(tmp, 'file-creation.txt'))
+        assert os.path.exists(os.path.join(tmp, 'file-modification.txt'))
+        with open('file-modification.txt', 'r') as modified:
+            lines = modified.readlines()
+        assert lines[0] == '43770\n'
+        os.chdir(basedir)
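`_guess_patch_strip_level` has to infer how many leading path components (`-p0`, `-p1`, ...) must be stripped for the paths named in a patch to line up with the source tree; the test above walks down `some/common/prefix` and expects the guessed level to grow by one per directory. One simple way to make that guess, sketched here under the assumption that every patched file already exists on disk (this is not conda-build's exact algorithm):

import os

def guess_strip_level(patch_paths, src_dir, max_level=4):
    # Return the smallest strip level at which every path named in the
    # patch resolves to an existing file under src_dir.
    for level in range(max_level + 1):
        parts = [p.split('/')[level:] for p in patch_paths]
        if any(not pieces for pieces in parts):
            break  # stripped past the filename; give up
        if all(os.path.exists(os.path.join(src_dir, *pieces)) for pieces in parts):
            return level
    return 0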
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 619ba337ed..42d9844aac 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -9,41 +9,56 @@ def test_select_lines():
     lines = """
 test
 test [abc] no
+test [abc] # no
 test [abc]
 test # [abc]
 test # [abc] yes
 test # stuff [abc] yes
+test {{ JINJA_VAR[:2] }}
+test {{ JINJA_VAR[:2] }} # stuff [abc] yes
+test {{ JINJA_VAR[:2] }} # stuff yes [abc]
+test {{ JINJA_VAR[:2] }} # [abc] stuff yes
+{{ environ["test"] }} # [abc]
 """
 
     assert select_lines(lines, {'abc': True}) == """
 test
 test [abc] no
+test [abc] # no
 test
 test
 test
 test
+test {{ JINJA_VAR[:2] }}
+test {{ JINJA_VAR[:2] }}
+test {{ JINJA_VAR[:2] }}
+test {{ JINJA_VAR[:2] }}
+{{ environ["test"] }}
 """
     assert select_lines(lines, {'abc': False}) == """
 test
 test [abc] no
+test [abc] # no
+test {{ JINJA_VAR[:2] }}
 """
 
+
 class HandleConfigVersionTests(unittest.TestCase):
 
     def test_python(self):
         for spec, ver, res_spec in [
-            ('python', '3.4', 'python 3.4*'),
-            ('python 2.7.8', '2.7', 'python 2.7.8'),
-            ('python 2.7.8', '3.5', 'python 2.7.8'),
-            ('python 2.7.8', None, 'python 2.7.8'),
-            ('python', None, 'python'),
-            ('python x.x', '2.7', 'python 2.7*'),
-            ('python', '27', 'python 2.7*'),
-            ('python', 27, 'python 2.7*'),
-        ]:
+                ('python', '3.4', 'python 3.4*'),
+                ('python 2.7.8', '2.7', 'python 2.7.8'),
+                ('python 2.7.8', '3.5', 'python 2.7.8'),
+                ('python 2.7.8', None, 'python 2.7.8'),
+                ('python', None, 'python'),
+                ('python x.x', '2.7', 'python 2.7*'),
+                ('python', '27', 'python 2.7*'),
+                ('python', 27, 'python 2.7*'),
+        ]:
             ms = MatchSpec(spec)
             self.assertEqual(handle_config_version(ms, ver),
                              MatchSpec(res_spec))
@@ -53,17 +68,18 @@ def test_python(self):
                           MatchSpec('python x.x'), None)
 
     def test_numpy(self):
-        for spec, ver, res_spec in [
-            ('numpy', None, 'numpy'),
-            ('numpy', 18, 'numpy'),
-            ('numpy', 110, 'numpy'),
-            ('numpy x.x', 17, 'numpy 1.7*'),
-            ('numpy x.x', 110, 'numpy 1.10*'),
-            ('numpy 1.9.1', 18, 'numpy 1.9.1'),
-            ('numpy 1.9.0 py27_2', None, 'numpy 1.9.0 py27_2'),
-        ]:
+        for spec, ver, res_spec, kwargs in [
+                ('numpy', None, 'numpy', {}),
+                ('numpy', 18, 'numpy 1.8*', {'dep_type': 'build'}),
+                ('numpy', 18, 'numpy', {'dep_type': 'run'}),
+                ('numpy', 110, 'numpy', {}),
+                ('numpy x.x', 17, 'numpy 1.7*', {}),
+                ('numpy x.x', 110, 'numpy 1.10*', {}),
+                ('numpy 1.9.1', 18, 'numpy 1.9.1', {}),
+                ('numpy 1.9.0 py27_2', None, 'numpy 1.9.0 py27_2', {}),
+        ]:
             ms = MatchSpec(spec)
-            self.assertEqual(handle_config_version(ms, ver),
+            self.assertEqual(handle_config_version(ms, ver, **kwargs),
                              MatchSpec(res_spec))
 
         self.assertRaises(RuntimeError,
diff --git a/tests/test_misc.py b/tests/test_misc.py
index 30d78c5af8..4291d3584d 100644
--- a/tests/test_misc.py
+++ b/tests/test_misc.py
@@ -3,7 +3,6 @@
 
 import conda_build._link as _link
 
-
 class TestLink(unittest.TestCase):
 
     def test_pyc_f_2(self):
@@ -12,9 +11,9 @@ def test_pyc_f_2(self):
 
     def test_pyc_f_3(self):
         for f, r in [
-            ('sp/utils.py',
-             'sp/__pycache__/utils.cpython-34.pyc'),
-            ('sp/foo/utils.py',
-             'sp/foo/__pycache__/utils.cpython-34.pyc'),
-        ]:
+                ('sp/utils.py',
+                 'sp/__pycache__/utils.cpython-34.pyc'),
+                ('sp/foo/utils.py',
+                 'sp/foo/__pycache__/utils.cpython-34.pyc'),
+        ]:
             self.assertEqual(_link.pyc_f(f, (3, 4, 2)), r)
diff --git a/tests/test_render.py b/tests/test_render.py
new file mode 100644
index 0000000000..bae26e33cd
--- /dev/null
+++ b/tests/test_render.py
@@ -0,0 +1,22 @@
+import os
+import subprocess
+import sys
+
+from conda.compat import PY3
+from conda.config import subdir
+
+thisdir = os.path.dirname(os.path.realpath(__file__))
+metadata_dir = os.path.join(thisdir, "test-recipes/metadata")
+
+
+def test_output_build_path():
+    cmd = 'conda render --output {}'.format(os.path.join(metadata_dir, "python_run"))
+    process = subprocess.Popen(cmd.split(),
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    test_path = os.path.join(sys.prefix, "conda-bld", subdir,
+                             "conda-build-test-python-run-1.0-py{}{}_0.tar.bz2".format(
+                                 sys.version_info.major, sys.version_info.minor))
+    if PY3:
+        output = output.decode("UTF-8")
+    assert output.rstrip() == test_path, error
diff --git a/tests/test_utils.py b/tests/test_utils.py
index fe589a5ba2..1940f3971d 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -3,89 +3,88 @@
 
 import conda_build.utils as utils
 
-
 class TestUtils(unittest.TestCase):
 
     def test_relative_default(self):
         for f, r in [
-            ('bin/python', '../lib'),
-            ('lib/libhdf5.so', '.'),
-            ('lib/python2.6/foobar.so', '..'),
-            ('lib/python2.6/lib-dynload/zlib.so', '../..'),
-            ('lib/python2.6/site-packages/pyodbc.so', '../..'),
-            ('lib/python2.6/site-packages/bsdiff4/core.so', '../../..'),
-            ('xyz', './lib'),
-            ('bin/somedir/cmd', '../../lib'),
-        ]:
+                ('bin/python', '../lib'),
+                ('lib/libhdf5.so', '.'),
+                ('lib/python2.6/foobar.so', '..'),
+                ('lib/python2.6/lib-dynload/zlib.so', '../..'),
+                ('lib/python2.6/site-packages/pyodbc.so', '../..'),
+                ('lib/python2.6/site-packages/bsdiff4/core.so', '../../..'),
+                ('xyz', './lib'),
+                ('bin/somedir/cmd', '../../lib'),
+        ]:
             self.assertEqual(utils.relative(f), r)
 
     def test_relative_lib(self):
         for f, r in [
-            ('bin/python', '../lib'),
-            ('lib/libhdf5.so', '.'),
-            ('lib/python2.6/foobar.so', '..'),
-            ('lib/python2.6/lib-dynload/zlib.so', '../..'),
-            ('lib/python2.6/site-packages/pyodbc.so', '../..'),
-            ('lib/python2.6/site-packages/bsdiff3/core.so', '../../..'),
-            ('xyz', './lib'),
-            ('bin/somedir/cmd', '../../lib'),
-            ('bin/somedir/somedir2/cmd', '../../../lib'),
-        ]:
+                ('bin/python', '../lib'),
+                ('lib/libhdf5.so', '.'),
+                ('lib/python2.6/foobar.so', '..'),
+                ('lib/python2.6/lib-dynload/zlib.so', '../..'),
+                ('lib/python2.6/site-packages/pyodbc.so', '../..'),
+                ('lib/python2.6/site-packages/bsdiff3/core.so', '../../..'),
+                ('xyz', './lib'),
+                ('bin/somedir/cmd', '../../lib'),
+                ('bin/somedir/somedir2/cmd', '../../../lib'),
+        ]:
             self.assertEqual(utils.relative(f, 'lib'), r)
 
     def test_relative_subdir(self):
         for f, r in [
-            ('lib/libhdf5.so', './sub'),
-            ('lib/sub/libhdf5.so', '.'),
-            ('bin/python', '../lib/sub'),
-            ('bin/somedir/cmd', '../../lib/sub'),
-        ]:
+                ('lib/libhdf5.so', './sub'),
+                ('lib/sub/libhdf5.so', '.'),
+                ('bin/python', '../lib/sub'),
+                ('bin/somedir/cmd', '../../lib/sub'),
+        ]:
             self.assertEqual(utils.relative(f, 'lib/sub'), r)
 
     def test_relative_prefix(self):
         for f, r in [
-            ('xyz', '.'),
-            ('a/xyz', '..'),
-            ('a/b/xyz', '../..'),
-            ('a/b/c/xyz', '../../..'),
-            ('a/b/c/d/xyz', '../../../..'),
-        ]:
+                ('xyz', '.'),
+                ('a/xyz', '..'),
+                ('a/b/xyz', '../..'),
+                ('a/b/c/xyz', '../../..'),
+                ('a/b/c/d/xyz', '../../../..'),
+        ]:
             self.assertEqual(utils.relative(f, '.'), r)
 
     def test_relative_2(self):
         for f, r in [
-            ('a/b/c/d/libhdf5.so', '../..'),
-            ('a/b/c/libhdf5.so', '..'),
-            ('a/b/libhdf5.so', '.'),
-            ('a/libhdf5.so', './b'),
-            ('x/x/libhdf5.so', '../../a/b'),
-            ('x/b/libhdf5.so', '../../a/b'),
-            ('x/libhdf5.so', '../a/b'),
-            ('libhdf5.so', './a/b'),
-        ]:
+                ('a/b/c/d/libhdf5.so', '../..'),
+                ('a/b/c/libhdf5.so', '..'),
+                ('a/b/libhdf5.so', '.'),
+                ('a/libhdf5.so', './b'),
+                ('x/x/libhdf5.so', '../../a/b'),
+                ('x/b/libhdf5.so', '../../a/b'),
+                ('x/libhdf5.so', '../a/b'),
+                ('libhdf5.so', './a/b'),
+        ]:
             self.assertEqual(utils.relative(f, 'a/b'), r)
 
     def test_relative_3(self):
         for f, r in [
-            ('a/b/c/d/libhdf5.so', '..'),
-            ('a/b/c/libhdf5.so', '.'),
-            ('a/b/libhdf5.so', './c'),
-            ('a/libhdf5.so', './b/c'),
-            ('libhdf5.so', './a/b/c'),
-            ('a/b/x/libhdf5.so', '../c'),
-            ('a/x/x/libhdf5.so', '../../b/c'),
-            ('x/x/x/libhdf5.so', '../../../a/b/c'),
-            ('x/x/libhdf5.so', '../../a/b/c'),
-            ('x/libhdf5.so', '../a/b/c'),
-        ]:
+                ('a/b/c/d/libhdf5.so', '..'),
+                ('a/b/c/libhdf5.so', '.'),
+                ('a/b/libhdf5.so', './c'),
+                ('a/libhdf5.so', './b/c'),
+                ('libhdf5.so', './a/b/c'),
+                ('a/b/x/libhdf5.so', '../c'),
+                ('a/x/x/libhdf5.so', '../../b/c'),
+                ('x/x/x/libhdf5.so', '../../../a/b/c'),
+                ('x/x/libhdf5.so', '../../a/b/c'),
+                ('x/libhdf5.so', '../a/b/c'),
+        ]:
             self.assertEqual(utils.relative(f, 'a/b/c'), r)
 
     def test_relative_4(self):
         for f, r in [
-            ('a/b/c/d/libhdf5.so', '.'),
-            ('a/b/c/x/libhdf5.so', '../d'),
-            ('a/b/x/x/libhdf5.so', '../../c/d'),
-            ('a/x/x/x/libhdf5.so', '../../../b/c/d'),
-            ('x/x/x/x/libhdf5.so', '../../../../a/b/c/d'),
-        ]:
+                ('a/b/c/d/libhdf5.so', '.'),
+                ('a/b/c/x/libhdf5.so', '../d'),
+                ('a/b/x/x/libhdf5.so', '../../c/d'),
+                ('a/x/x/x/libhdf5.so', '../../../b/c/d'),
+                ('x/x/x/x/libhdf5.so', '../../../../a/b/c/d'),
+        ]:
             self.assertEqual(utils.relative(f, 'a/b/c/d'), r)
diff --git a/tests/test_win_vs_activate.py b/tests/test_win_vs_activate.py
new file mode 100644
index 0000000000..1f255f41f3
--- /dev/null
+++ b/tests/test_win_vs_activate.py
@@ -0,0 +1,115 @@
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+import pytest
+
+vcvars_backup_files = {}
+if sys.platform == "win32":
+    from conda_build.windows import (build_vcvarsall_vs_path,
+                                     VCVARS64_VS9_BAT_PATH,
+                                     VS_TOOLS_PY_LOCAL_PATH,
+                                     VS_TOOLS_PY_COMMON_PATH)
+
+    vcvars_backup_files = {"vs{}".format(version): [build_vcvarsall_vs_path(version)]
+                           for version in ["9.0", "10.0", "14.0"]}
+    vcvars_backup_files['vs9.0'].append(VCVARS64_VS9_BAT_PATH)
+    # VC9 compiler for python - local user install
+    vcvars_backup_files["python_local"] = [VS_TOOLS_PY_LOCAL_PATH]
+    # VC9 compiler for python - common files
+    vcvars_backup_files["python_system"] = [VS_TOOLS_PY_COMMON_PATH]
+
+    vs9 = {key: vcvars_backup_files[key] for key in ['vs9.0', 'python_local', 'python_system']}
+    vs10 = {key: vcvars_backup_files[key] for key in ['vs10.0']}
+    vs14 = {key: vcvars_backup_files[key] for key in ['vs14.0']}
+
+    vcs = {"9.0": vs9, "10.0": vs10, "14.0": vs14}
+
+
+def write_bat_files(good_locations):
+    for label, locations in vcvars_backup_files.items():
+        for location in locations:
+            # these should all have been moved by the fixture backup; it would be bad to overwrite them!
+            assert not os.path.exists(location)
+            if not os.path.isdir(os.path.dirname(location)):
+                # if any of these are made, they are not currently cleaned up. Sorry.
+                os.makedirs(os.path.dirname(location))
+            with open(location, "w") as f:
+                print("writing {} (exit /b {})".format(location, int(label not in good_locations)))
+                f.write(":: NOTE: exit code of 1 here means incorrect VS version activated. "
" + "check logic.\n") + f.write("exit /b {}\n".format(int(label not in good_locations))) + + +@pytest.fixture(scope="function") +def setup_teardown(request): + def fin(): + for locations in vcvars_backup_files.values(): + for location in locations: + # clean up any of the custom scripts we wrote to test + if os.path.exists(location): + os.remove(location) + # restore the backups + if os.path.exists(location[:-1] + 'k'): + os.rename(location[:-1] + 'k', location) + request.addfinalizer(fin) + + # backup known files + for locations in vcvars_backup_files.values(): + for location in locations: + if os.path.exists(location): + os.rename(location, location[:-1] + 'k') + + return request + + +@pytest.fixture(scope="function", params=vcvars_backup_files.keys()) +def compiler(request, setup_teardown): + return request.param + + +@pytest.fixture(params=[32, 64]) +def bits(request): + return request.param + + +@pytest.mark.skipif(sys.platform != "win32", reason="windows-only test") +@pytest.mark.xfail(reason="verification of test logic", strict=True) +def test_activation_logic(bits, compiler): + from conda_build.windows import msvc_env_cmd + # empty list here means no configuration is valid. We should get a + # failure. + write_bat_files([]) + # look up which VS version we're forcing here + compiler_version = [key for key in vcs if compiler in vcs[key]][0] + with open('tmp_call.bat', "w") as f: + f.write(msvc_env_cmd(bits, compiler_version)) + subprocess.check_call(['cmd.exe', '/C', 'tmp_call.bat'], shell=True) + + +@pytest.mark.skipif(sys.platform != "win32", reason="windows-only test") +def test_activation(bits, compiler): + write_bat_files([compiler]) + from conda_build.windows import msvc_env_cmd, VS_VERSION_STRING + # look up which VS version we're forcing here + compiler_version = [key for key in vcs if compiler in vcs[key]][0] + # this will throw an exception if the subprocess return code is not 0 + # this is effectively the test condition for all below tests. + with open('tmp_call.bat', "w") as f: + f.write(msvc_env_cmd(bits, compiler_version)) + f.write('\nif not "%VS_VERSION%" == "{}" exit /b 1'.format(compiler_version)) + f.write('\nif not "%VS_MAJOR%" == "{}" exit /b 1'.format(compiler_version.split('.')[0])) + f.write('\nif not "%VS_YEAR%" == "{}" exit /b 1' + .format(VS_VERSION_STRING[compiler_version][-4:])) + f.write('\nif not "%CMAKE_GENERATOR%" == "{}" exit /b 1' + .format(VS_VERSION_STRING[compiler_version] + + {64: ' Win64', 32: ''}[bits])) + try: + subprocess.check_call(['cmd.exe', '/C', 'tmp_call.bat'], shell=True) + except subprocess.CalledProcessError: + print("failed activation: {}, {}".format(bits, compiler)) + raise + finally: + os.remove('tmp_call.bat') diff --git a/versioneer.py b/versioneer.py index c00770fe4f..7a333fbecf 100644 --- a/versioneer.py +++ b/versioneer.py @@ -620,7 +620,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose=False): # "stabilization", as well as "HEAD" and "master". 
diff --git a/versioneer.py b/versioneer.py
index c00770fe4f..7a333fbecf 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -620,7 +620,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
     # "stabilization", as well as "HEAD" and "master".
     tags = set([r for r in refs if re.search(r'\d', r)])
     if verbose:
-        print("discarding '%s', no digits" % ",".join(refs-tags))
+        print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
         print("likely tags: %s" % ",".join(sorted(tags)))
     for ref in sorted(tags):
@@ -650,13 +650,13 @@ def git_parse_vcs_describe(git_describe, tag_prefix, verbose=False):
 
     # now we have TAG-NUM-gHEX or HEX
     if "-" not in git_describe:
         # just HEX
-        return "0+untagged.g"+git_describe+dirty_suffix, dirty
+        return "0+untagged.g" + git_describe + dirty_suffix, dirty
 
     # just TAG-NUM-gHEX
     mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
     if not mo:
         # unparseable. Maybe git-describe is misbehaving?
-        return "0+unparseable"+dirty_suffix, dirty
+        return "0+unparseable" + dirty_suffix, dirty
 
     # tag
     full_tag = mo.group(1)