variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20240808-21Aug2024
  # Add the '-u 0' option for Azure pipelines, otherwise we get a "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u
  # We define all of these as variables so we can easily reference them twice
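  # Each value is a set of buildman board-selection terms; the '-x' option
  # excludes boards matching the terms that follow it.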
  am33xx_kirkwood_ls1_mvebu_omap: "am33xx kirkwood ls1 mvebu omap -x siemens,freescale"
  amlogic_bcm_boundary_engicam_siemens_technexion_oradex: "amlogic bcm boundary engicam siemens technexion toradex -x mips"
  arm_nxp_minus_imx_and_at91: "at91 freescale -x powerpc,m68k,imx,mx"
  imx: "mx imx -x boundary,engicam,technexion,toradex"
  rk: "rk"
  sunxi: "sunxi"
  powerpc: "powerpc"
  arm_catch_all: "arm -x aarch64,am33xx,at91,bcm,ls1,kirkwood,mvebu,omap,rk,siemens,mx,sunxi,technexion,toradex"
  aarch64_catch_all: "aarch64 -x amlogic,bcm,engicam,imx,ls1,ls2,lx216,mvebu,rk,siemens,sunxi,toradex"
  everything_but_arm_and_powerpc: "arc m68k microblaze mips nios2 riscv sandbox sh x86 xtensa -x arm,powerpc"

stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
      - powershell: |
          (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
        displayName: 'Install MSYS2'
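      # %CD:~0,2% expands to the drive letter (e.g. "D:") of the current
      # working directory. pacman -Syuu is run twice so the second pass can
      # update packages held back until the MSYS2 core/runtime itself has
      # been updated.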
      - script: |
          sfx.exe -y -o%CD:~0,2%\
          %CD:~0,2%\msys64\usr\bin\bash -lc " "
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        displayName: 'Update MSYS2'
      - script: |
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
        displayName: 'Install Toolchain'
      - script: |
          echo make tools-only_defconfig tools-only > build-tools.sh
          %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME
          CHERE_INVOKING: yes

  - job: tools_only_macOS
    displayName: 'Ensure host tools build for macOS'
    pool:
      vmImage: $(macos_vm)
    steps:
      - script: brew install make ossp-uuid
        displayName: Brew install dependencies
      - script: |
          gmake tools-only_config tools-only \
            HOSTCFLAGS="-I/usr/local/opt/[email protected]/include" \
            HOSTLDFLAGS="-L/usr/local/opt/[email protected]/lib" \
            -j$(sysctl -n hw.logicalcpu)
        displayName: 'Perform tools-only build'

  - job: check_for_new_CONFIG_symbols_outside_Kconfig
    displayName: 'Check for new CONFIG symbols outside Kconfig'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
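      # The ':^<path>' pathspecs tell git grep to exclude those paths from
      # the search.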
      - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
                  :^doc/ :^arch/arm/dts/ :^scripts/kconfig/lkc.h
                  :^include/linux/kconfig.h :^tools/ :^dts/upstream/ &&
                  exit 1 || exit 0

  - job: docs
    displayName: 'Build documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
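          # Build the Sphinx HTML docs in a clean virtualenv; KDOC_WERROR=1
          # makes documentation build warnings fatal so they fail the job.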
          virtualenv -p /usr/bin/python3 /tmp/venvhtml
          . /tmp/venvhtml/bin/activate
          pip install -r doc/sphinx/requirements.txt
          make htmldocs KDOC_WERROR=1
          make infodocs

  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          ./tools/buildman/buildman --maintainer-check

  - job: tools_only
    displayName: 'Ensure host tools and env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)
          make mrproper
          make tools-only_config envtools -j$(nproc)

  - job: utils
    displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - script: |
          cat << "EOF" > build.sh
          cd $(work_dir)
          git config --global user.name "Azure Pipelines"
          git config --global user.email [email protected]
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
          set -ex
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman test
          make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
          EOF
          cat build.sh
          # We cannot use "container" like the other jobs above, as buildman
          # seems to hang forever in the pre-configured "container" environment
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh

  - job: pylint
    displayName: Check for any pylint regressions
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          pip install asteval pylint==2.12.2 pyopenssl
          export PATH=${PATH}:~/.local/bin
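          # Enable pylint's docparams extension so missing or mismatched
          # parameter documentation is reported as well.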
          echo "[MASTER]" >> .pylintrc
          echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
          set -ex
          pylint --version
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          make pylint_err

  - job: check_for_pre_schema_tags
    displayName: 'Check for pre-schema driver model tags'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
      - script: git grep u-boot,dm- -- '*.dts*' && exit 1 || exit 0

  - job: check_packing_of_python_tools
    displayName: 'Check we can package the Python tools'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: make pip

  - job: count_built_machines
    displayName: 'Ensure we build all possible machines'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          BMANARGS="-o /tmp --dry-run -v"
          # First get the total number of boards
          total=$(tools/buildman/buildman ${BMANARGS} | grep "Total boards to build for each commit" | cut -d ' ' -f 8)
          # Now build up the list of what each job built.
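          # (each dry run prints the matching board names indented, which
          # grep '^   ' extracts so the combined list can be fed back to
          # buildman below)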
          built="$(tools/buildman/buildman ${BMANARGS} $(am33xx_kirkwood_ls1_mvebu_omap) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(amlogic_bcm_boundary_engicam_siemens_technexion_oradex) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(arm_nxp_minus_imx_and_at91) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(imx) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(rk) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(sunxi) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(powerpc) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(arm_catch_all) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(aarch64_catch_all) | grep '^   ')"
          built="$built $(tools/buildman/buildman ${BMANARGS} $(everything_but_arm_and_powerpc) | grep '^   ')"
          # Finally see how many machines that is.
          actual=$(tools/buildman/buildman ${BMANARGS} $built | grep "Total boards to build for each commit" | cut -d ' ' -f 8)
          echo We would build a total of $actual out of $total platforms this CI run
          [ $actual -eq $total ] && exit 0 || exit 1

  - job: create_test_py_wrapper_script
    displayName: 'Create and stage a wrapper for test.py runs'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - checkout: none
      - script: |
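          # test.sh consumes variables exported into the container by each
          # test job:
          #   WORK_DIR          - where the source tree is mounted
          #   TEST_PY_BD        - board (defconfig) to build and test
          #   TEST_PY_ID        - extra test.py --id argument, if any
          #   TEST_PY_TEST_SPEC - pytest -k expression selecting tests, if any
          #   OVERRIDE          - extra buildman options, if any
          #   BUILD_ENV         - extra environment exported before the build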
          cat << EOF > test.sh
          #!/bin/bash
          set -ex
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd \${WORK_DIR}
          git config --global --add safe.directory \${WORK_DIR}
          git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/\`hostname\`
          ln -s travis-ci /tmp/uboot-test-hooks/py/\`hostname\`
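          # the hook scripts are looked up per host name, so point this host
          # at the generic travis-ci versions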
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          if [[ "\${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
          fi
          if [[ "\${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
          fi
          # the below corresponds to .gitlab-ci.yml "script"
          cd \${WORK_DIR}
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/\${TEST_PY_BD}
          if [ -n "\${BUILD_ENV}" ]; then
              export \${BUILD_ENV};
          fi
          pip install -r tools/buildman/requirements.txt
          tools/buildman/buildman -o \${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board \${TEST_PY_BD} \${OVERRIDE}
          cp ~/grub_x86.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp ~/grub_x64.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp /opt/grub/grubriscv64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
          cp /opt/grub/grubarm.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
          # create sdcard / spi-nor images for sifive unleashed using genimage
          if [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              mkdir -p root;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
              cp images/sdcard.img \${UBOOT_TRAVIS_BUILD_DIR}/;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
              cp images/spi-nor.img \${UBOOT_TRAVIS_BUILD_DIR}/;
          fi
          if [[ "\${TEST_PY_BD}" == "coreboot" ]]; then
              cp /opt/coreboot/coreboot.rom \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom remove -n fallback/payload;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install pytest-azurepipelines
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:\${PATH}
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci
          # "\${var:+"-k \$var"}" expands to "" if \$var is empty, "-k \$var" if not
          ./test/py/test.py -ra -o cache_dir="\$UBOOT_TRAVIS_BUILD_DIR"/.pytest_cache --bd \${TEST_PY_BD} \${TEST_PY_ID} \${TEST_PY_TEST_SPEC:+"-k \${TEST_PY_TEST_SPEC}"} --build-dir "\$UBOOT_TRAVIS_BUILD_DIR" --report-dir "\$UBOOT_TRAVIS_BUILD_DIR" --junitxml=\$(System.DefaultWorkingDirectory)/results.xml
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
      - task: CopyFiles@2
        displayName: 'Copy test.sh for later usage'
        inputs:
          contents: 'test.sh'
          targetFolder: '$(Build.ArtifactStagingDirectory)'
      - publish: '$(Build.ArtifactStagingDirectory)/test.sh'
        displayName: 'Publish test.sh'
        artifact: testsh

- stage: test_py_sandbox
  jobs:
  - job: test_py_sandbox
    displayName: 'test.py for sandbox'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-17"
        sandbox_clang_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-17 -a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox64:
          TEST_PY_BD: "sandbox64"
        sandbox64_clang:
          TEST_PY_BD: "sandbox64"
          OVERRIDE: "-O clang-17"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "vpl or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_noinst_load_fit_full:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
          OVERRIDE: "-a CONFIG_SPL_LOAD_FIT_FULL=y"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        sandbox_trace:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "FTRACE=1 NO_LTO=1"
          TEST_PY_TEST_SPEC: "trace"
          OVERRIDE: "-a CONFIG_TRACE=y -a CONFIG_TRACE_EARLY=y -a CONFIG_TRACE_EARLY_SIZE=0x01000000 -a CONFIG_TRACE_BUFFER_SIZE=0x02000000"
    steps:
      - download: current
        artifact: testsh
      - script: |
          # make the current directory writeable by the uboot user inside the
          # container, as the sandbox tests need to create files such as SPI
          # flash images (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
          # Filesystem tests need extra docker args to run
          set --
          # mount -o loop needs the loop devices
          if modprobe loop; then
              for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                  set -- "$@" --device $d:$d
              done
          fi
          # Needed for mount syscall (for guestmount as well)
          set -- "$@" --cap-add SYS_ADMIN
          # Default apparmor profile denies mounts
          set -- "$@" --security-opt apparmor=unconfined
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -v $(System.DefaultWorkingDirectory):$(System.DefaultWorkingDirectory) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh
      - task: PublishTestResults@2
        inputs:
          testResultsFormat: 'JUnit'
          testResultsFiles: 'results.xml'

- stage: test_py_qemu
  jobs:
  - job: test_py_qemu
    displayName: 'test.py for QEMU platforms'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_m68k:
          TEST_PY_BD: "M5208EVBE"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
          OVERRIDE: "-a CONFIG_M68K_QEMU=y -a ~CONFIG_MCFTMR"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_xtensa_dc233c:
          TEST_PY_BD: "qemu-xtensa-dc233c"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
    steps:
      - download: current
        artifact: testsh
      - script: |
          # make the current directory writeable by the uboot user inside the
          # container, as the tests need to create files such as SPI flash
          # images (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -v $(System.DefaultWorkingDirectory):$(System.DefaultWorkingDirectory) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh
        retryCountOnTaskFailure: 2 # QEMU may be too slow, etc.
      - task: PublishTestResults@2
        inputs:
          testResultsFormat: 'JUnit'
          testResultsFiles: 'results.xml'

- stage: world_build
  jobs:
  - job: build_the_world
    timeoutInMinutes: 0 # Use the maximum allowed
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # We split the world up into 10 jobs as we can have at most 10
      # parallel jobs going on the free tier of Azure.
      matrix:
        am33xx_kirkwood_ls1_mvebu_omap:
          BUILDMAN: $(am33xx_kirkwood_ls1_mvebu_omap)
        amlogic_bcm_boundary_engicam_siemens_technexion_oradex:
          BUILDMAN: $(amlogic_bcm_boundary_engicam_siemens_technexion_oradex)
        arm_nxp_minus_imx_and_at91:
          BUILDMAN: $(arm_nxp_minus_imx_and_at91)
        imx:
          BUILDMAN: $(imx)
        rk:
          BUILDMAN: $(rk)
        sunxi:
          BUILDMAN: $(sunxi)
        powerpc:
          BUILDMAN: $(powerpc)
        arm_catch_all:
          BUILDMAN: $(arm_catch_all)
        aarch64_catch_all:
          BUILDMAN: $(aarch64_catch_all)
        everything_but_arm_and_powerpc:
          BUILDMAN: $(everything_but_arm_and_powerpc)
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as tests are running inside a container
          export BUILDMAN="${BUILDMAN}"
          git config --global --add safe.directory ${WORK_DIR}
          pip install -r tools/buildman/requirements.txt
          EOF
          cat << "EOF" >> build.sh
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 ]]; then
                  tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh