# azure-pipelines.yml (forked from numpy/numpy)
trigger:
  # start a new build for every push
  batch: False
  branches:
    include:
    - main
    - maintenance/*

pr:
  branches:
    include:
    - '*'  # must quote since "*" is a YAML reserved character; we want a string

stages:

- stage: Check
  jobs:
  - job: Skip
    pool:
      vmImage: 'ubuntu-20.04'
    variables:
      DECODE_PERCENTS: 'false'
      RET: 'true'
    steps:
    - bash: |
        git_log=`git log --max-count=1 --skip=1 --pretty=format:"%B" | tr "\n" " "`
        echo "##vso[task.setvariable variable=log]$git_log"
    - bash: echo "##vso[task.setvariable variable=RET]false"
      condition: or(contains(variables.log, '[skip azp]'), contains(variables.log, '[azp skip]'), contains(variables.log, '[skip ci]'), contains(variables.log, '[ci skip]'))
    - bash: echo "##vso[task.setvariable variable=start_main;isOutput=true]$RET"
      name: result
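# How the skip check propagates: the `##vso[task.setvariable ...]` lines above are
# Azure DevOps logging commands that set pipeline variables. Because the final step
# is named `result` and sets `start_main` with `isOutput=true`, later stages can
# gate on dependencies.Check.outputs['Skip.result.start_main'] in their `condition:`
# expressions, as the stages below do. The `git log --skip=1` presumably looks past
# the merge commit Azure creates for PR validation builds.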
- stage: InitialTests
  condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true'))
  dependsOn: Check
  jobs:

  # Native build is based on gcc flag `-march=native`
  - job: Linux_baseline_native
    pool:
      vmImage: 'ubuntu-20.04'
    steps:
    - script: |
        git submodule update --init
      displayName: 'Fetch submodules'
    - script: |
        if ! command -v gcc > /dev/null 2>&1; then
          sudo apt install gcc
        fi
        sudo add-apt-repository ppa:deadsnakes/ppa -y
        sudo apt install python3.9
        sudo apt install python3.9-dev
        sudo apt install python3.9-distutils
        # python3 has no setuptools, so install one to get us going
        python3.9 -m pip install --user --upgrade pip 'setuptools<49.2.0'
        python3.9 -m pip install --user -r test_requirements.txt
      displayName: 'install python/requirements'
    - script: |
        python3.9 runtests.py --show-build-log --cpu-baseline=native --cpu-dispatch=none \
            --debug-info --mode=full -- -rsx --junitxml=junit/test-results.xml
      displayName: 'Run native baseline Build / Tests'
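    # Note: `--cpu-baseline=native` builds the SIMD baseline for the host CPU
    # (the `-march=native` mentioned above) and `--cpu-dispatch=none` disables
    # runtime dispatch of wider SIMD paths, so the resulting binary is tuned to,
    # and only guaranteed to run on, the build machine -- presumably to mirror a
    # from-source build tuned to the local machine.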
    - task: PublishTestResults@2
      condition: succeededOrFailed()
      inputs:
        testResultsFiles: '**/test-*.xml'
        failTaskOnFailedTests: true
        testRunTitle: 'Publish test results for baseline/native'

- stage: ComprehensiveTests
  condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true'))
  dependsOn: Check
  jobs:

  - job: Lint
    condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
    pool:
      vmImage: 'ubuntu-20.04'
    steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.9'
        addToPath: true
        architecture: 'x64'
    - script: >-
        python -m pip install -r linter_requirements.txt
      displayName: 'Install tools'
      # pip 21.1 emits a pile of garbage messages to annoy users :)
      # failOnStderr: true
    - script: |
        python tools/linter.py --branch origin/$(System.PullRequest.TargetBranch)
      displayName: 'Run Lint Checks'
      failOnStderr: true
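  # Note on the Lint job above: passing `--branch origin/$(System.PullRequest.TargetBranch)`
  # to tools/linter.py appears to restrict the style checks to the diff against the
  # PR's target branch, which is consistent with the job-level condition that runs
  # this job only for PullRequest builds.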
  - job: Linux_Python_39_32bit_full_with_asserts
    pool:
      vmImage: 'ubuntu-20.04'
    steps:
    - script: |
        git submodule update --init
      displayName: 'Fetch submodules'
    - script: |
        docker run -v $(pwd):/numpy -e CFLAGS="-msse2 -std=c99 -UNDEBUG" \
            -e F77=gfortran-5 -e F90=gfortran-5 quay.io/pypa/manylinux2014_i686 \
            /bin/bash -xc " \
            git config --global --add safe.directory /numpy && \
            cd /numpy && \
            /opt/python/cp39-cp39/bin/python -mvenv venv && \
            source venv/bin/activate && \
            target=\$(python3 tools/openblas_support.py) && \
            cp -r \$target/lib/* /usr/lib && \
            cp \$target/include/* /usr/include && \
            python3 -m pip install -r test_requirements.txt && \
            echo CFLAGS \$CFLAGS && \
            python3 -m pip install -v . && \
            cd tools && \
            python3 -m pytest --pyargs numpy"
      displayName: 'Run 32-bit manylinux2014 Docker Build / Tests'
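  # Note on the 32-bit job above: the quay.io/pypa/manylinux2014_i686 image supplies
  # a 32-bit (i686) toolchain and CPython builds, which is what makes this a 32-bit
  # run. In the CFLAGS, `-msse2` pins the SIMD baseline for 32-bit x86 and `-UNDEBUG`
  # keeps C `assert()` statements enabled -- hence the "with_asserts" in the job name.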
  - job: macOS
    pool:
      vmImage: 'macOS-11'
    strategy:
      maxParallel: 3
      matrix:
        Python39:
          PYTHON_VERSION: '3.9'
          USE_OPENBLAS: '1'
        Python39-ILP64:
          PYTHON_VERSION: '3.9'
          NPY_USE_BLAS_ILP64: '1'
          USE_OPENBLAS: '1'
    steps:
    - script: |
        git submodule update --init
      displayName: 'Fetch submodules'
    # the @0 refers to the (major) version of the *task* on Microsoft's
    # end, not the order in the build matrix nor anything to do
    # with the version of Python selected
    - task: UsePythonVersion@0
      inputs:
        versionSpec: $(PYTHON_VERSION)
        addToPath: true
        architecture: 'x64'
    - script: |
        set -xe
        [ -n "$USE_XCODE_10" ] && /bin/bash -c "sudo xcode-select -s /Applications/Xcode_10.app/Contents/Developer"
        clang --version
      displayName: 'report clang version'
    - script: |
        if [[ $PLATFORM == "macosx-arm64" ]]; then
          PLAT="arm64"
        fi
        source tools/wheels/gfortran_utils.sh
        install_gfortran
      displayName: 'install gfortran'
    # use the pre-built openblas binary that most closely
    # matches our macOS wheel builds -- currently based
    # primarily on file size / name details
    - script: |
        set -xe
        target=$(python tools/openblas_support.py)
        ls -lR $target
        # manually link to appropriate system paths
        cp $target/lib/lib* /usr/local/lib/
        cp $target/include/* /usr/local/include/
        otool -L /usr/local/lib/libopenblas*
      displayName: 'install pre-built openblas'
      condition: eq(variables['USE_OPENBLAS'], '1')
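    # Note: copying the downloaded OpenBLAS (above) into /usr/local/lib and
    # /usr/local/include puts it on the compiler's standard search paths, so the
    # NumPy build further down should be able to find it without extra
    # BLAS/LAPACK configuration.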
    - script: python -m pip install --upgrade pip 'setuptools<49.2.0' wheel
      displayName: 'Install tools'
    - script: |
        python -m pip install -r test_requirements.txt
        # Don't use doc_requirements.txt since that messes up tests
        python -m pip install vulture sphinx==4.3.0 numpydoc==1.4.0
      displayName: 'Install dependencies; some are optional to avoid test skips'
    - script: /bin/bash -c "! vulture . --min-confidence 100 --exclude doc/,numpy/distutils/ | grep 'unreachable'"
      displayName: 'Check for unreachable code paths in Python modules'
    - script: git submodule update --init
      displayName: 'Fetch submodules'
    # prefer clang over gcc proper to match the likely
    # scenario on many users' Mac machines
    - script: python setup.py build -j 4 build_src --verbose-cfg install
      displayName: 'Build NumPy'
      env:
        BLAS: None
        LAPACK: None
        ATLAS: None
        CC: /usr/bin/clang
    # wait until after the dev build of NumPy to pip install matplotlib,
    # to avoid pip installing an older numpy
    - script: python -m pip install matplotlib
      displayName: 'Install matplotlib before refguide run'
    - script: python runtests.py -g --refguide-check
      displayName: 'Run Refguide Check'
      condition: eq(variables['USE_OPENBLAS'], '1')
    - script: |
        echo LIBRARY_PATH ${LIBRARY_PATH}
        python runtests.py -n --mode=full -- -rsx --junitxml=junit/test-results.xml
      displayName: 'Run Full NumPy Test Suite'
      condition: eq(variables['USE_OPENBLAS'], '1')
      env:
        # gfortran installed above adds -lSystem, so this is needed to find it (gh-22043)
        LIBRARY_PATH: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib
    - bash: |
        python -m pip install threadpoolctl
        python tools/openblas_support.py --check_version
      displayName: 'Verify OpenBLAS version'
      condition: eq(variables['USE_OPENBLAS'], '1')
    # import doesn't work when in the numpy src directory, so do a pip dev install
    # of the built lib to test
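    # Note: the next step expects the import to fail. It passes only when
    # `python -c "import numpy as np"` exits with code 1 AND the captured output
    # contains the "buggy Accelerate backend" message; any other combination makes
    # the step exit 1 and fail. It is gated on matrix entries with USE_OPENBLAS == '0'.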
    - script: |
        #!/bin/bash -v
        set +e
        python -c "import numpy as np" > test_output.log 2>&1
        check_output_code=$?
        cat test_output.log
        grep "buggy Accelerate backend" test_output.log
        check_message=$?
        if [ $check_output_code == 1 ] && [ $check_message == 0 ]; then exit 0; else exit 1; fi
      displayName: "Check if numpy import fails with accelerate"
      condition: eq(variables['USE_OPENBLAS'], '0')
    - task: PublishTestResults@2
      condition: succeededOrFailed()
      inputs:
        testResultsFiles: '**/test-*.xml'
        failTaskOnFailedTests: true
        testRunTitle: 'Publish test results for Python 3.9 64-bit full Mac OS'
  - job: Windows
    pool:
      vmImage: 'windows-2019'
    strategy:
      maxParallel: 3
      matrix:
        Python310-64bit-fast:
          PYTHON_VERSION: '3.10'
          PYTHON_ARCH: 'x64'
          TEST_MODE: fast
          BITS: 64
        Python311-64bit-full:
          PYTHON_VERSION: '3.11'
          PYTHON_ARCH: 'x64'
          TEST_MODE: full
          BITS: 64
          # NPY_USE_BLAS_ILP64: '1'
          # TODO: failing because of an issue with ILP64 BLAS being downloaded,
          # and that not being handled automatically yet
        # PyPy39-64bit-fast:
        #   PYTHON_VERSION: 'pypy3.9'
        #   PYTHON_ARCH: 'x64'
        #   TEST_MODE: fast
        #   BITS: 64
        #   NPY_USE_BLAS_ILP64: '1'
    steps:
    - template: azure-steps-windows.yml
  - job: Linux_conda
    pool:
      vmImage: 'ubuntu-20.04'
    steps:
    - script: |
        git submodule update --init
      displayName: 'Fetch submodules'
    - script: |
        # create the conda environment (it is activated in the next step)
        conda env create -f environment.yml
      displayName: 'Create conda environment.'
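    # Note: CI steps run in non-interactive shells that do not source the user's
    # .bashrc, so `conda activate` is not available by default. The block below
    # reproduces what `conda init` would add by eval-ing the output of
    # `conda shell.bash hook` (see the linked conda issue).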
    - script: |
        # >>> conda initialize >>>
        # !! Contents within this block are 'conda init' !!
        # see https://github.com/conda/conda/issues/7980
        __conda_setup="$('conda' 'shell.bash' 'hook' 2> /dev/null)"
        eval "$__conda_setup"
        unset __conda_setup
        # <<< conda initialize <<<
        conda activate numpy-dev
        # Run native baseline Build / Tests
        python runtests.py --show-build-log --cpu-baseline=native --cpu-dispatch=none \
            --debug-info --mode=full -- -rsx --junitxml=junit/test-results.xml
      displayName: 'Run native baseline Build / Tests in conda.'
    - task: PublishTestResults@2
      condition: succeededOrFailed()
      inputs:
        testResultsFiles: '**/test-*.xml'
        failTaskOnFailedTests: true
        testRunTitle: 'Publish test results for conda installation'