diff --git a/.appveyor.yml b/.appveyor.yml
index b5bb25f..b95086c 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -1,169 +1,10 @@
-# As config was originally based on an example by Olivier Grisel. Thanks!
-# https://github.com/ogrisel/python-appveyor-demo/blob/master/appveyor.yml
-clone_depth: 50
+image: Ubuntu
 
-# No reason for us to restrict the number concurrent jobs
-max_jobs: 100
-
-cache:
-  - '%LOCALAPPDATA%\pip\Cache'
-
-environment:
-  global:
-    MINGW_32: C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin
-    MINGW_64: C:\mingw-w64\x86_64-6.3.0-posix-seh-rt_v5-rev1\mingw64\bin
-
-    CYTHON_BUILD_DEP: Cython
-    TEST_MODE: fast
-    APPVEYOR_SAVE_CACHE_ON_ERROR: true
-    APPVEYOR_SKIP_FINALIZE_ON_EXIT: true
-    WHEELHOUSE_UPLOADER_USERNAME: travis-worker
-    WHEELHOUSE_UPLOADER_SECRET:
-      secure:
-        9s0gdDGnNnTt7hvyNpn0/ZzOMGPdwPp2SewFTfGzYk7uI+rdAN9rFq2D1gAP4NQh
-    BUILD_COMMIT: e94cec800304a6a467cf90ce4e7d3e207770b4b4
-    DAILY_COMMIT: master
-
-  matrix:
-    - PYTHON: C:\Python36
-      PYTHON_VERSION: 3.6
-      PYTHON_ARCH: 32
-
-    - PYTHON: C:\Python36-x64
-      PYTHON_VERSION: 3.6
-      PYTHON_ARCH: 64
-
-    - PYTHON: C:\Python37
-      PYTHON_VERSION: 3.7
-      PYTHON_ARCH: 32
-
-    - PYTHON: C:\Python37-x64
-      PYTHON_VERSION: 3.7
-      PYTHON_ARCH: 64
-
-    - PYTHON: C:\Python38
-      PYTHON_VERSION: 3.8
-      PYTHON_ARCH: 32
-
-    - PYTHON: C:\Python38-x64
-      PYTHON_VERSION: 3.8
-      PYTHON_ARCH: 64
-
-
-init:
-  - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%"
-  - "ECHO \"%APPVEYOR_SCHEDULED_BUILD%\""
-  - ps: >-
-      if ($env:APPVEYOR_REPO_BRANCH -eq "master") {
-        $env:CONTAINER = "pre-release"
-        if ($env:DAILY_COMMIT) { $env:BUILD_COMMIT = $env:DAILY_COMMIT }
-        $env:NPY_RELAXED_STRIDES_DEBUG = 1
-      } else {
-        $env:CONTAINER = "wheels"
-        $env:UPLOAD_ARGS = "--no-update-index"
-      }
 install:
-  - cmd: echo "Filesystem root:"
-  - dir C:\
-
-  - echo "Installed SDKs:"
-  - dir "C:/Program Files/Microsoft SDKs/Windows"
-
-  # Get needed submodules
-  - git submodule update --init
-
-  # Install new Python if necessary
-  - ps: .\multibuild\install_python.ps1
-
-  # Prepend required Python to the PATH of this build (this cannot be
-  # done from inside the powershell script as it would require to restart
-  # the parent CMD process).
-  - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH%
-
-  # Check that we have the expected version and architecture for Python
-  - python --version
-  - >-
-    python -c "import sys,platform,struct;
-    print(sys.platform, platform.machine(), struct.calcsize('P') * 8, )"
-
-  # Upgrade to the latest pip, setuptools, and wheel.
-  - python -m pip install -U pip setuptools wheel urllib3
-
-  # Download and install static "openblas.a" to PYTHON\lib
-  - ps: |
-      $PYTHON = $env:PYTHON
-      $lib = python numpy/tools/openblas_support.py
-      $destination = "$PYTHON\lib\openblas.a"
-      echo $lib
-      cp $lib $destination
-      ls $destination
-
-  # Install build requirements.
-  - pip install "%CYTHON_BUILD_DEP%" --install-option="--no-cython-compile"
-
-build_script:
-  - cd numpy
-  - git checkout %BUILD_COMMIT%
-  # Create _distributor_init.py
-  - cd ..
-  - python -c "import openblas_support; openblas_support.make_init('numpy/numpy')"
-  - cd numpy
-  # Append license text relevant for the built wheel
-  - type ..\LICENSE_win32.txt >> LICENSE.txt
-  - ps: |
-      $PYTHON_ARCH = $env:PYTHON_ARCH
-      If ($PYTHON_ARCH -eq 32) {
-        $MINGW = $env:MINGW_32
-      } Else {
-        $MINGW = $env:MINGW_64
-      }
-      $env:Path += ";$MINGW"
-      $env:NPY_NUM_BUILD_JOBS = "4"
-  - python setup.py bdist_wheel
-  - ps: |
-      # Upload artifact to Appveyor immediately after build
-      ls dist -r | Foreach-Object {
-        appveyor PushArtifact $_.FullName
-        pip install $_.FullName
-      }
+- echo done
 
 test_script:
-  - pip install pytest hypothesis
-  - cd ..
-  - python check_license.py
-  - mkdir tmp_for_test
-  - cd tmp_for_test
-  - pytest --pyargs numpy
-  - cd ..
+- echo done
 
-after_test:
-  # Upload test results to Appveyor
-  - ps: |
-      If (Test-Path .\junit-results.xml) {
-        (new-object net.webclient).UploadFile(
-          "https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)",
-          (Resolve-Path .\junit-results.xml)
-        )
-      }
-      $LastExitCode = 0
-  # Remove old or huge cache files to hopefully not exceed the 1GB cache limit.
-  #
-  # If the cache limit is reached, the cache will not be updated (of not even
-  # created in the first run). So this is a trade of between keeping the cache
-  # current and having a cache at all.
-  # NB: This is done only `on_success` since the cache in uploaded only on
-  # success anyway.
-  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -type f -mtime +360 -delete
-  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -type f -size +10M -delete
-  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -empty -delete
-  # Show size of cache
-  - C:\cygwin\bin\du -hs "%LOCALAPPDATA%\pip\Cache"
+build: off
 
-on_success:
-  # Upload the generated wheel package to Rackspace
-  - cd numpy
-  - pip install wheelhouse-uploader certifi
-  - python -m wheelhouse_uploader upload
-    --local-folder=dist
-    %UPLOAD_ARGS%
-    %CONTAINER%
diff --git a/.appveyor.yml.back b/.appveyor.yml.back
new file mode 100644
index 0000000..b5bb25f
--- /dev/null
+++ b/.appveyor.yml.back
@@ -0,0 +1,169 @@
+# As config was originally based on an example by Olivier Grisel. Thanks!
+# https://github.com/ogrisel/python-appveyor-demo/blob/master/appveyor.yml
+clone_depth: 50
+
+# No reason for us to restrict the number concurrent jobs
+max_jobs: 100
+
+cache:
+  - '%LOCALAPPDATA%\pip\Cache'
+
+environment:
+  global:
+    MINGW_32: C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin
+    MINGW_64: C:\mingw-w64\x86_64-6.3.0-posix-seh-rt_v5-rev1\mingw64\bin
+
+    CYTHON_BUILD_DEP: Cython
+    TEST_MODE: fast
+    APPVEYOR_SAVE_CACHE_ON_ERROR: true
+    APPVEYOR_SKIP_FINALIZE_ON_EXIT: true
+    WHEELHOUSE_UPLOADER_USERNAME: travis-worker
+    WHEELHOUSE_UPLOADER_SECRET:
+      secure:
+        9s0gdDGnNnTt7hvyNpn0/ZzOMGPdwPp2SewFTfGzYk7uI+rdAN9rFq2D1gAP4NQh
+    BUILD_COMMIT: e94cec800304a6a467cf90ce4e7d3e207770b4b4
+    DAILY_COMMIT: master
+
+  matrix:
+    - PYTHON: C:\Python36
+      PYTHON_VERSION: 3.6
+      PYTHON_ARCH: 32
+
+    - PYTHON: C:\Python36-x64
+      PYTHON_VERSION: 3.6
+      PYTHON_ARCH: 64
+
+    - PYTHON: C:\Python37
+      PYTHON_VERSION: 3.7
+      PYTHON_ARCH: 32
+
+    - PYTHON: C:\Python37-x64
+      PYTHON_VERSION: 3.7
+      PYTHON_ARCH: 64
+
+    - PYTHON: C:\Python38
+      PYTHON_VERSION: 3.8
+      PYTHON_ARCH: 32
+
+    - PYTHON: C:\Python38-x64
+      PYTHON_VERSION: 3.8
+      PYTHON_ARCH: 64
+
+
+init:
+  - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%"
+  - "ECHO \"%APPVEYOR_SCHEDULED_BUILD%\""
+  - ps: >-
+      if ($env:APPVEYOR_REPO_BRANCH -eq "master") {
+        $env:CONTAINER = "pre-release"
+        if ($env:DAILY_COMMIT) { $env:BUILD_COMMIT = $env:DAILY_COMMIT }
+        $env:NPY_RELAXED_STRIDES_DEBUG = 1
+      } else {
+        $env:CONTAINER = "wheels"
+        $env:UPLOAD_ARGS = "--no-update-index"
+      }
+install:
+  - cmd: echo "Filesystem root:"
+  - dir C:\
+
+  - echo "Installed SDKs:"
+  - dir "C:/Program Files/Microsoft SDKs/Windows"
+
+  # Get needed submodules
+  - git submodule update --init
+
+  # Install new Python if necessary
+  - ps: .\multibuild\install_python.ps1
+
+  # Prepend required Python to the PATH of this build (this cannot be
+  # done from inside the powershell script as it would require to restart
+  # the parent CMD process).
+  - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PATH%
+
+  # Check that we have the expected version and architecture for Python
+  - python --version
+  - >-
+    python -c "import sys,platform,struct;
+    print(sys.platform, platform.machine(), struct.calcsize('P') * 8, )"
+
+  # Upgrade to the latest pip, setuptools, and wheel.
+  - python -m pip install -U pip setuptools wheel urllib3
+
+  # Download and install static "openblas.a" to PYTHON\lib
+  - ps: |
+      $PYTHON = $env:PYTHON
+      $lib = python numpy/tools/openblas_support.py
+      $destination = "$PYTHON\lib\openblas.a"
+      echo $lib
+      cp $lib $destination
+      ls $destination
+
+  # Install build requirements.
+  - pip install "%CYTHON_BUILD_DEP%" --install-option="--no-cython-compile"
+
+build_script:
+  - cd numpy
+  - git checkout %BUILD_COMMIT%
+  # Create _distributor_init.py
+  - cd ..
+  - python -c "import openblas_support; openblas_support.make_init('numpy/numpy')"
+  - cd numpy
+  # Append license text relevant for the built wheel
+  - type ..\LICENSE_win32.txt >> LICENSE.txt
+  - ps: |
+      $PYTHON_ARCH = $env:PYTHON_ARCH
+      If ($PYTHON_ARCH -eq 32) {
+        $MINGW = $env:MINGW_32
+      } Else {
+        $MINGW = $env:MINGW_64
+      }
+      $env:Path += ";$MINGW"
+      $env:NPY_NUM_BUILD_JOBS = "4"
+  - python setup.py bdist_wheel
+  - ps: |
+      # Upload artifact to Appveyor immediately after build
+      ls dist -r | Foreach-Object {
+        appveyor PushArtifact $_.FullName
+        pip install $_.FullName
+      }
+
+test_script:
+  - pip install pytest hypothesis
+  - cd ..
+  - python check_license.py
+  - mkdir tmp_for_test
+  - cd tmp_for_test
+  - pytest --pyargs numpy
+  - cd ..
+
+after_test:
+  # Upload test results to Appveyor
+  - ps: |
+      If (Test-Path .\junit-results.xml) {
+        (new-object net.webclient).UploadFile(
+          "https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)",
+          (Resolve-Path .\junit-results.xml)
+        )
+      }
+      $LastExitCode = 0
+  # Remove old or huge cache files to hopefully not exceed the 1GB cache limit.
+  #
+  # If the cache limit is reached, the cache will not be updated (of not even
+  # created in the first run). So this is a trade of between keeping the cache
+  # current and having a cache at all.
+  # NB: This is done only `on_success` since the cache in uploaded only on
+  # success anyway.
+  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -type f -mtime +360 -delete
+  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -type f -size +10M -delete
+  - C:\cygwin\bin\find "%LOCALAPPDATA%\pip" -empty -delete
+  # Show size of cache
+  - C:\cygwin\bin\du -hs "%LOCALAPPDATA%\pip\Cache"
+
+on_success:
+  # Upload the generated wheel package to Rackspace
+  - cd numpy
+  - pip install wheelhouse-uploader certifi
+  - python -m wheelhouse_uploader upload
+    --local-folder=dist
+    %UPLOAD_ARGS%
+    %CONTAINER%
diff --git a/.travis.yml b/.travis.yml.back
similarity index 100%
rename from .travis.yml
rename to .travis.yml.back
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 0000000..be7193c
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,90 @@
+schedules:
+- cron: "27 3 * * 0"
+  # 3:27am UTC every Sunday
+  displayName: Weekly build
+  branches:
+    include:
+    - master
+  always: true
+
+trigger:
+- master
+
+pr:
+- master
+
+jobs:
+  - template: azure/windows.yml
+    parameters:
+      name: windows
+      vmImage: vs2017-win2016
+      matrix:
+        py_3.6_32:
+          PYTHON_VERSION: "3.6.x"
+          PYTHON_ARCH: "x86"
+          BITS: 32
+        py_3.6_64:
+          PYTHON_VERSION: "3.6.x"
+          PYTHON_ARCH: 'x64'
+          BITS: 64
+        py_3.7_32:
+          PYTHON_VERSION: "3.7.x"
+          PYTHON_ARCH: "x86"
+          BITS: 32
+        py_3.7_64:
+          PYTHON_VERSION: "3.7.x"
+          PYTHON_ARCH: 'x64'
+          BITS: 64
+        py_3.8_32:
+          PYTHON_VERSION: "3.8.x"
+          PYTHON_ARCH: "x86"
+          BITS: 32
+        py_3.8_64:
+          PYTHON_VERSION: "3.8.x"
+          PYTHON_ARCH: 'x64'
+          BITS: 64
+
+  - template: azure/posix.yml
+    parameters:
+      name: linux
+      vmImage: ubuntu-18.04
+      matrix:
+        py_3.6_32:
+          MB_PYTHON_VERSION: "3.6"
+          PLAT: "i686"
+          MB_ML_VER: "2010"
+          ENV_VARS_PATH: "env_vars_32.sh"
+          DOCKER_TEST_IMAGE: "multibuild/xenial_{PLAT}"
+        py_3.6_64:
+          MB_PYTHON_VERSION: "3.6"
+          MB_ML_VER: "2010"
+        py_3.7_32:
+          MB_PYTHON_VERSION: "3.7"
+          PLAT: "i686"
+          MB_ML_VER: "2010"
+          ENV_VARS_PATH: "env_vars_32.sh"
+          DOCKER_TEST_IMAGE: "multibuild/xenial_{PLAT}"
+        py_3.7_64:
+          MB_PYTHON_VERSION: "3.7"
+          MB_ML_VER: "2010"
+        py_3.8_32:
+          MB_PYTHON_VERSION: "3.8"
+          PLAT: "i686"
+          MB_ML_VER: "2010"
+          ENV_VARS_PATH: "env_vars_32.sh"
+          DOCKER_TEST_IMAGE: "multibuild/xenial_{PLAT}"
+        py_3.8_64:
+          MB_PYTHON_VERSION: "3.8"
+          MB_ML_VER: "2010"
+
+  - template: azure/posix.yml
+    parameters:
+      name: macOS
+      vmImage: macOS-10.14
+      matrix:
+        py_3.6_64:
+          MB_PYTHON_VERSION: "3.6"
+        py_3.7_64:
+          MB_PYTHON_VERSION: "3.7"
+        py_3.8_64:
+          MB_PYTHON_VERSION: "3.8"
diff --git a/azure/posix.yml b/azure/posix.yml
new file mode 100644
index 0000000..01d8253
--- /dev/null
+++ b/azure/posix.yml
@@ -0,0 +1,119 @@
+parameters:
+  name: ""
+  vmImage: ""
+  matrix: []
+
+jobs:
+  - job: ${{ parameters.name }}
+    pool:
+      vmImage: ${{ parameters.vmImage }}
+    variables:
+      REPO_DIR: "numpy"
+      BUILD_COMMIT: 'v1.18.3'
+      PLAT: "x86_64"
+      CYTHON_BUILD_DEP: "cython==0.29.16"
+      NIGHTLY_BUILD_COMMIT: "master"
+      TEST_DEPENDS: "pytest hypothesis cffi pytz"
+      JUNITXML: "test-data.xml"
+      TEST_DIR: "tmp_for_test"
+    strategy:
+      matrix:
+        ${{ insert }}: ${{ parameters.matrix }}
+
+    steps:
+      - checkout: self
+        submodules: true
+
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(MB_PYTHON_VERSION)
+        displayName: Set python version
+
+      - bash: |
+          set -e
+
+          if [ "$BUILD_REASON" == "Schedule" ]; then
+            BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT
+          fi
+          echo "Building numpy@$BUILD_COMMIT"
+          echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT"
+
+          # Platform variables used in multibuild scripts
+          if [ `uname` == 'Darwin' ]; then
+            echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]osx"
+            echo "##vso[task.setvariable variable=MACOSX_DEPLOYMENT_TARGET]10.9"
+          else
+            echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]linux"
+          fi
+
+          # Store original Python path to be able to create test_venv pointing
+          # to same Python version.
+          PYTHON_EXE=`which python`
+          echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE"
+        displayName: Define build env variables
+
+      - bash: |
+          set -e
+          echo $BUILD_COMMIT
+          pip install virtualenv wheel
+          BUILD_DEPENDS="$CYTHON_BUILD_DEP"
+
+          source multibuild/common_utils.sh
+          source multibuild/travis_steps.sh
+          source extra_functions.sh
+
+          # Setup build dependencies
+          before_install
+
+          clean_code $REPO_DIR $BUILD_COMMIT
+          ./patch_code.sh $REPO_DIR
+          build_wheel $REPO_DIR $PLAT
+        displayName: Build wheel
+
+      - bash: |
+          set -e
+          source multibuild/common_utils.sh
+          source multibuild/travis_steps.sh
+          source extra_functions.sh
+          setup_test_venv
+          install_run $PLAT
+          teardown_test_venv
+        displayName: Install wheel and test
+
+      - bash: |
+          echo "##vso[task.prependpath]$CONDA/bin"
+          sudo chown -R $USER $CONDA
+        displayName: Add conda to PATH
+
+      - bash: conda install -q -y anaconda-client
+        displayName: Install anaconda-client
+
+      - bash: |
+          set -e
+          if [ "$BUILD_REASON" == "Schedule" ]; then
+            ANACONDA_ORG="scipy-wheels-nightly"
+            TOKEN="$MAPPED_NUMPY_NIGHTLY_UPLOAD_TOKEN"
+          else
+            ANACONDA_ORG="multibuild-wheels-staging"
+            TOKEN="$MAPPED_NUMPY_STAGING_UPLOAD_TOKEN"
+          fi
+          if [ "$TOKEN" == "" ]; then
+            echo "##[warning] Could not find anaconda.org upload token in secret variables"
+          fi
+          echo "##vso[task.setvariable variable=TOKEN]$TOKEN"
+          echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG"
+        displayName: Retrieve secret upload token
+        env:
+          # Secret variables need to be mapped to env variables explicitly:
+          MAPPED_NUMPY_NIGHTLY_UPLOAD_TOKEN: $(NUMPY_NIGHTLY_UPLOAD_TOKEN)
+          MAPPED_NUMPY_STAGING_UPLOAD_TOKEN: $(NUMPY_STAGING_UPLOAD_TOKEN)
+
+      - bash: |
+          set -e
+          # The --force option forces a replacement if the remote file already
+          # exists.
+          echo uploading wheelhouse/*.whl
+          anaconda -t $TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl
+          echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple"
+        displayName: Upload to anaconda.org (only if secret token is retrieved)
+        condition: ne(variables['TOKEN'], '')
diff --git a/azure/windows.yml b/azure/windows.yml
new file mode 100644
index 0000000..4ffee1b
--- /dev/null
+++ b/azure/windows.yml
@@ -0,0 +1,147 @@
+parameters:
+  name: ""
+  vmImage: ""
+  matrix: []
+
+jobs:
+  - job: ${{ parameters.name }}
+    pool:
+      vmImage: ${{ parameters.vmImage }}
+    variables:
+      BUILD_COMMIT: "v1.18.3"
+      NIGHTLY_BUILD_COMMIT: "master"
+      JUNITXML: "test-data.xml"
+      TEST_DIR: '$(Agent.WorkFolder)/tmp_for_test'
+    strategy:
+      matrix:
+        ${{ insert }}: ${{ parameters.matrix }}
+    steps:
+      - checkout: self
+        submodules: true
+
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(PYTHON_VERSION)
+          addToPath: true
+          architecture: $(PYTHON_ARCH)
+        displayName: Set python version
+
+      - bash: |
+          set -e
+          echo PYTHON $PYTHON_VERSION $PYTHON_ARCH
+          echo Build Reason: $BUILD_REASON
+          python --version
+          python -c "import struct; print(struct.calcsize('P') * 8)"
+          pip --version
+        displayName: Check that we have the expected version and architecture for Python
+      - bash: |
+          set -e
+          if [ "$BUILD_REASON" == "Schedule" ]; then
+            BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT
+          fi
+          echo "Building numpy@$BUILD_COMMIT"
+          echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT"
+          # Store original Python path to be able to create test_venv pointing
+          # to same Python version.
+          PYTHON_EXE=`which python`
+          echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE"
+        displayName: Define build env variables
+      - bash: |
+          set -e
+          cd numpy
+          git fetch origin HEAD
+          git checkout $BUILD_COMMIT
+          git clean -fxd
+          git reset --hard
+        displayName: Checkout numpy commit
+
+      - powershell: |
+          choco install -y mingw --forcex86 --force --version=5.3.0
+        displayName: 'Install 32-bit mingw for 32-bit builds'
+        condition: eq(variables['BITS'], 32)
+      - bash: |
+          set -xe
+          pushd numpy
+          pip install twine wheel urllib3
+          # a bit overkill, all we really need is cython
+          pip install --timeout=60 -r test_requirements.txt
+
+          # handle license
+          cp ../LICENSE_win32.txt LICENSE.txt
+
+          # handle _distributor_init.py
+          PYTHONPATH=tools python -c "import openblas_support; openblas_support.make_init('numpy')"
+
+          # Download and get the path to "openblas.a". We cannot copy it
+          # to $PYTHON_EXE's directory since that is on a different drive which
+          # mingw does not like. Instead copy it to a directory and set OPENBLAS
+          target=$(python tools/openblas_support.py)
+          mkdir -p openblas
+          echo Copying $target to openblas
+          cp $target openblas
+          echo "##vso[task.setvariable variable=OPENBLAS]openblas"
+        displayName: Prepare the build
+
+      - powershell: |
+          If ($(BITS) -eq 32) {
+            $env:CFLAGS = "-m32"
+            $env:LDFLAGS = "-m32"
+            $env:PATH = "C:\\tools\\mingw32\\bin;" + $env:PATH
+            refreshenv
+          }
+          # Build the wheel
+          pushd numpy
+          python setup.py build
+          python setup.py bdist_wheel
+          twine check dist/*
+          popd
+        displayName: Build wheel
+      - bash: |
+          set -ex
+          source extra_functions.sh
+          source config.sh
+          setup_test_venv
+          python -m pip install -r numpy/test_requirements.txt
+          python -m pip install numpy/dist/numpy-*.whl
+          mkdir -p for_test
+          pushd for_test
+          run_tests
+          popd
+          teardown_test_venv
+        displayName: Install wheel and test
+
+      - bash: echo "##vso[task.prependpath]$CONDA/Scripts"
+        displayName: Add conda to PATH
+
+      - bash: conda install -q -y anaconda-client
+        displayName: Install anaconda-client
+
+      - bash: |
+          set -e
+          if [ "$BUILD_REASON" == "Schedule" ]; then
+            ANACONDA_ORG="scipy-wheels-nightly"
+            TOKEN="$MAPPED_NUMPY_NIGHTLY_UPLOAD_TOKEN"
+          else
+            ANACONDA_ORG="multibuild-wheels-staging"
+            TOKEN="$MAPPED_NUMPY_STAGING_UPLOAD_TOKEN"
+          fi
+          if [ "$TOKEN" == "" ]; then
+            echo "##[warning] Could not find anaconda.org upload token in secret variables"
+          fi
+          echo "##vso[task.setvariable variable=TOKEN]$TOKEN"
+          echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG"
+        displayName: Retrieve secret upload token
+        env:
+          # Secret variables need to be mapped to env variables explicitly:
+          MAPPED_NUMPY_NIGHTLY_UPLOAD_TOKEN: $(NUMPY_NIGHTLY_UPLOAD_TOKEN)
+          MAPPED_NUMPY_STAGING_UPLOAD_TOKEN: $(NUMPY_STAGING_UPLOAD_TOKEN)
+
+      - bash: |
+          set -e
+          # The --force option forces a replacement if the remote file already
+          # exists.
+          echo uploading numpy/dist/numpy-*.whl
+          anaconda -t $TOKEN upload --force -u $ANACONDA_ORG numpy/dist/numpy-*.whl
+          echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple"
+        displayName: Upload to anaconda.org (only if secret token is retrieved)
+        condition: ne(variables['TOKEN'], '')
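Note: the Azure templates above hand values such as BUILD_COMMIT, PYTHON_EXE, OPENBLAS and TOKEN from one step to the next with Azure Pipelines "logging commands" rather than plain `export`, because each `- bash:` / `- powershell:` step runs in its own process. A minimal sketch of the pattern, with the hypothetical variable name EXAMPLE_VAR used purely for illustration:

```bash
#!/bin/bash
# In one step: a plain `export` dies with this shell, so emit a logging
# command on stdout that the Azure Pipelines agent parses instead.
EXAMPLE_VAR=$(which python)
echo "##vso[task.setvariable variable=EXAMPLE_VAR]$EXAMPLE_VAR"

# In a later step of the same job the value is then available both as an
# environment variable ($EXAMPLE_VAR) and as the $(EXAMPLE_VAR) macro in YAML.
echo "EXAMPLE_VAR is $EXAMPLE_VAR"
```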
diff --git a/config.sh b/config.sh
index e515500..bc65dfe 100644
--- a/config.sh
+++ b/config.sh
@@ -1,6 +1,7 @@
 # Define custom utilities
 # Test for OSX with [ -n "$IS_OSX" ]
 # See env_vars.sh for extra environment variables
+if [ $(uname) == "Linux" ]; then IS_LINUX=1; fi
 source gfortran-install/gfortran_utils.sh
 
 function build_wheel {
@@ -22,6 +23,7 @@ function build_libs {
     # the un-tar root directory, then the files are copied into /usr/local.
     # Could utilize a site.cfg instead to prevent the copy.
     python -mpip install urllib3
+    python -c"import platform; print('platform.uname().machine', platform.uname().machine)"
     basedir=$(python numpy/tools/openblas_support.py)
     $use_sudo cp -r $basedir/lib/* /usr/local/lib
     $use_sudo cp $basedir/include/* /usr/local/include
@@ -36,13 +38,13 @@ function get_test_cmd {
 
 function run_tests {
     # Runs tests on installed distribution from an empty directory
-    if [ -z "$IS_OSX" ]; then
+    if [ -n "$IS_LINUX" ]; then
         apt-get -y update && apt-get install -y gfortran
     fi
-    python -c "$(get_test_cmd)"
+    $PYTHON_EXE -c "$(get_test_cmd)"
     # Check bundled license file
-    python ../check_license.py
+    $PYTHON_EXE ../check_license.py
     # Show BLAS / LAPACK used. Since this uses a wheel we cannot use
     # tools/openblas_config.py; tools is not part of what is shipped
-    python -c 'import numpy; numpy.show_config()'
+    $PYTHON_EXE -c 'import numpy; numpy.show_config()'
 }
diff --git a/env_vars_32.sh b/env_vars_32.sh
index 46b1aaa..8c1969b 100644
--- a/env_vars_32.sh
+++ b/env_vars_32.sh
@@ -1,6 +1,7 @@
 # Environment variables for 32-bit build.
 # The important difference from the 64-bit build is `-msse2` to
 # compile sse loops for ufuncs.
+set -x
 OPENBLAS_VERSION="v0.3.7"
 MACOSX_DEPLOYMENT_TARGET=10.9
 CFLAGS="-msse2 -std=c99 -fno-strict-aliasing"
diff --git a/extra_functions.sh b/extra_functions.sh
new file mode 100644
index 0000000..1a58c66
--- /dev/null
+++ b/extra_functions.sh
@@ -0,0 +1,34 @@
+function setup_test_venv {
+    # Create a new empty venv dedicated to testing for non-Linux platforms. On
+    # Linux the tests are run in a Docker container.
+    if [ $(uname) != "Linux" ]; then
+        if type -t deactivate ; then deactivate; fi
+        $PYTHON_EXE -m venv test_venv
+        if [ $(uname) == "Darwin" ]; then
+            source test_venv/bin/activate
+        else
+            mkdir -p test_venv/libs
+            source test_venv/Scripts/activate
+        fi
+        # Note: the idiom "python -m pip install ..." is necessary to upgrade
+        # pip itself on Windows. Otherwise one would get a permission error on
+        # pip.exe.
+        PYTHON_EXE=python
+        PIP_CMD="$PYTHON_EXE -m pip"
+        python -m pip install --upgrade pip wheel
+        if [ "$TEST_DEPENDS" != "" ]; then
+            pip install $TEST_DEPENDS
+        fi
+    fi
+}
+
+function teardown_test_venv {
+    if [ $(uname) != "Linux" ]; then
+        if type -t deactivate ; then deactivate; fi
+        if [ $(uname) == "Darwin" ]; then
+            source venv/bin/activate
+        fi
+    fi
+}
+# Work around bug in multibuild
+if [ ! -o PIP_CMD ]; then PIP_CMD="$PYTHON_EXE -m pip"; fi
diff --git a/multibuild b/multibuild
index 3bd75ee..da36cd5 160000
--- a/multibuild
+++ b/multibuild
@@ -1 +1 @@
-Subproject commit 3bd75eee8fa98ee70e29d59f4ec5a15291362cc2
+Subproject commit da36cd5408e03cad504217de4a21bb8f19f2fb2f
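For reference, a wheel staged by these jobs can be smoke-tested locally from the PyPI-style index that the upload step prints. A rough sketch, assuming a non-scheduled build (i.e. the multibuild-wheels-staging channel) and that the checks mirror what run_tests does against an installed wheel:

```bash
#!/bin/bash
set -e
# Test dependencies come from PyPI; numpy.test() needs pytest (and hypothesis
# for recent numpy versions).
python -m pip install pytest hypothesis
# Install the staged wheel from the anaconda.org index printed by the upload
# step above.
python -m pip install \
    --index-url https://pypi.anaconda.org/multibuild-wheels-staging/simple numpy
# Same sanity checks that run_tests performs.
python -c 'import numpy; numpy.show_config()'
python -c 'import numpy; numpy.test()'
```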