variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20240227-14Mar2024
  # Add the '-u 0' option for Azure Pipelines; otherwise we get a "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u

stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
      - powershell: |
          (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
        displayName: 'Install MSYS2'
      - script: |
          sfx.exe -y -o%CD:~0,2%\
          %CD:~0,2%\msys64\usr\bin\bash -lc " "
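          REM pacman -Syuu may update only the MSYS2 core packages on the first pass, so run it twice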
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        displayName: 'Update MSYS2'
      - script: |
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
        displayName: 'Install Toolchain'
      - script: |
          echo make tools-only_defconfig tools-only > build-tools.sh
          %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' away from our startup directory to HOME
          CHERE_INVOKING: yes

  - job: tools_only_macOS
    displayName: 'Ensure host tools build for macOS'
    pool:
      vmImage: $(macos_vm)
    steps:
      - script: brew install make ossp-uuid
        displayName: Brew install dependencies
      - script: |
          gmake tools-only_config tools-only \
            HOSTCFLAGS="-I/usr/local/opt/openssl@1.1/include" \
            HOSTLDFLAGS="-L/usr/local/opt/openssl@1.1/lib" \
            -j$(sysctl -n hw.logicalcpu)
        displayName: 'Perform tools-only build'

  - job: check_for_new_CONFIG_symbols_outside_Kconfig
    displayName: 'Check for new CONFIG symbols outside Kconfig'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match, the test fails, as we should
      # have no matches.
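      # The ':^<path>' pathspecs exclude those trees from the search.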
      - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
                  :^doc/ :^arch/arm/dts/ :^scripts/kconfig/lkc.h
                  :^include/linux/kconfig.h :^tools/ :^dts/upstream/ &&
                  exit 1 || exit 0

  - job: docs
    displayName: 'Build documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          virtualenv -p /usr/bin/python3 /tmp/venvhtml
          . /tmp/venvhtml/bin/activate
          pip install -r doc/sphinx/requirements.txt
          make htmldocs KDOC_WERROR=1
          make infodocs

  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          ./tools/buildman/buildman --maintainer-check

  - job: tools_only
    displayName: 'Ensure host tools and env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)
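          # mrproper wipes the previous build and configuration before the envtools build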
          make mrproper
          make tools-only_config envtools -j$(nproc)

  - job: utils
    displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - script: |
          cat << "EOF" > build.sh
          cd $(work_dir)
          git config --global user.name "Azure Pipelines"
          git config --global user.email bmeng.cn@gmail.com
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
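          # pylibfdt and dtc come from the tools-only build that buildman produces below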
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
          set -ex
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman test
          make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
          EOF
          cat build.sh
          # We cannot use a "container" resource like the other jobs above, as
          # buildman seems to hang forever in the pre-configured "container" environment
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh

  - job: pylint
    displayName: Check for any pylint regressions
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          pip install asteval pylint==2.12.2 pyopenssl
          export PATH=${PATH}:~/.local/bin
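          # Enable pylint's docparams extension so docstring parameters and return values are checked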
          echo "[MASTER]" >> .pylintrc
          echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
          set -ex
          pylint --version
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          make pylint_err

  - job: check_for_pre_schema_tags
    displayName: 'Check for pre-schema driver model tags'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match, the test fails, as we should
      # have no matches.
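      # The old 'u-boot,dm-*' tags have been superseded by the 'bootph-*' schema properties.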
      - script: git grep u-boot,dm- -- '*.dts*' && exit 1 || exit 0

  - job: check_packing_of_python_tools
    displayName: 'Check we can package the Python tools'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: make pip

  - job: create_test_py_wrapper_script
    displayName: 'Create and stage a wrapper for test.py runs'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - checkout: none
      - script: |
          cat << EOF > test.sh
          #!/bin/bash
          set -ex
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd \${WORK_DIR}
          git config --global --add safe.directory \${WORK_DIR}
          git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/\`hostname\`
          ln -s travis-ci /tmp/uboot-test-hooks/py/\`hostname\`
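          # u-boot-test-hooks selects its configuration by hostname, so point this host at the travis-ci configs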
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
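          # RISC-V SPL and SiFive Unleashed runs need OpenSBI's fw_dynamic.bin as the next-stage firmware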
          if [[ "\${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
          fi
          if [[ "\${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
          fi
          # the below corresponds to .gitlab-ci.yml "script"
          cd \${WORK_DIR}
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/\${TEST_PY_BD}
          if [ -n "\${BUILD_ENV}" ]; then
              export \${BUILD_ENV};
          fi
          pip install -r tools/buildman/requirements.txt
          tools/buildman/buildman -o \${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board \${TEST_PY_BD} \${OVERRIDE}
          cp ~/grub_x86.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp ~/grub_x64.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp /opt/grub/grubriscv64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
          cp /opt/grub/grubarm.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
          # create sdcard / spi-nor images for sifive unleashed using genimage
          if [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              mkdir -p root;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
              cp images/sdcard.img \${UBOOT_TRAVIS_BUILD_DIR}/;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
              cp images/spi-nor.img \${UBOOT_TRAVIS_BUILD_DIR}/;
          fi
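          # For coreboot, replace the stock payload in coreboot.rom with the U-Boot binary built above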
          if [[ "\${TEST_PY_BD}" == "coreboot" ]]; then
              cp /opt/coreboot/coreboot.rom \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom remove -n fallback/payload;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install pytest-azurepipelines
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:\${PATH}
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci
          # "\${var:+"-k \$var"}" expands to "" if \$var is empty, "-k \$var" if not
          ./test/py/test.py -ra -o cache_dir="\$UBOOT_TRAVIS_BUILD_DIR"/.pytest_cache --bd \${TEST_PY_BD} \${TEST_PY_ID} \${TEST_PY_TEST_SPEC:+"-k \${TEST_PY_TEST_SPEC}"} --build-dir "\$UBOOT_TRAVIS_BUILD_DIR" --report-dir "\$UBOOT_TRAVIS_BUILD_DIR"
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
      - task: CopyFiles@2
        displayName: 'Copy test.sh for later usage'
        inputs:
          contents: 'test.sh'
          targetFolder: '$(Build.ArtifactStagingDirectory)'
      - publish: '$(Build.ArtifactStagingDirectory)/test.sh'
        displayName: 'Publish test.sh'
        artifact: testsh

- stage: test_py_sandbox
  jobs:
  - job: test_py_sandbox
    displayName: 'test.py for sandbox'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
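        # OVERRIDE adds extra buildman arguments (e.g. '-a' Kconfig adjustments or '-O' toolchain
        # overrides); TEST_PY_TEST_SPEC becomes a pytest '-k' filter in test.sh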
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-17"
        sandbox_clang_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-17 -a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox64:
          TEST_PY_BD: "sandbox64"
        sandbox64_clang:
          TEST_PY_BD: "sandbox64"
          OVERRIDE: "-O clang-17"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "vpl or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_noinst_load_fit_full:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
          OVERRIDE: "-a CONFIG_SPL_LOAD_FIT_FULL=y"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        sandbox_trace:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "FTRACE=1 NO_LTO=1"
          TEST_PY_TEST_SPEC: "trace"
          OVERRIDE: "-a CONFIG_TRACE=y -a CONFIG_TRACE_EARLY=y -a CONFIG_TRACE_EARLY_SIZE=0x01000000 -a CONFIG_TRACE_BUFFER_SIZE=0x02000000"
    steps:
      - download: current
        artifact: testsh
      - script: |
          # Make the current directory writable for the uboot user inside the
          # container, as sandbox testing needs to create files such as SPI
          # flash images. (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
          # Filesystem tests need extra docker args to run
          set --
          # mount -o loop needs the loop devices
          if modprobe loop; then
              for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                  set -- "$@" --device $d:$d
              done
          fi
          # Needed for mount syscall (for guestmount as well)
          set -- "$@" --cap-add SYS_ADMIN
          # Default apparmor profile denies mounts
          set -- "$@" --security-opt apparmor=unconfined
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh

- stage: test_py_qemu
  jobs:
  - job: test_py_qemu
    displayName: 'test.py for QEMU platforms'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_m68k:
          TEST_PY_BD: "M5208EVBE"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
          OVERRIDE: "-a CONFIG_M68K_QEMU=y -a ~CONFIG_MCFTMR"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
    steps:
      - download: current
        artifact: testsh
      - script: |
          # Make the current directory writable for the uboot user inside the
          # container, as the tests need to create files such as SPI flash
          # images. (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh
        retryCountOnTaskFailure: 2 # QEMU may be too slow, etc.

- stage: world_build
  jobs:
  - job: build_the_world
    timeoutInMinutes: 0 # Use the maximum allowed
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division as in .travis.yml, but merge
      # three small build jobs (arc/microblaze/xtensa) into one.
      matrix:
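        # Each BUILDMAN value is a buildman board selector; '-x' excludes the listed boards from that job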
        am33xx_at91_kirkwood_mvebu_omap:
          BUILDMAN: "am33xx at91_kirkwood mvebu omap -x siemens"
        amlogic_bcm_boundary_engicam_siemens_technexion_toradex:
          BUILDMAN: "amlogic bcm boundary engicam siemens technexion toradex -x mips"
        arm_nxp_minus_imx:
          BUILDMAN: "freescale -x powerpc,m68k,imx,mx"
        imx:
          BUILDMAN: "mx imx -x boundary,engicam,technexion,toradex"
        rk:
          BUILDMAN: "rk"
        sunxi:
          BUILDMAN: "sunxi"
        powerpc:
          BUILDMAN: "powerpc"
        arm_catch_all:
          BUILDMAN: "arm -x aarch64,am33xx,at91,bcm,ls1,kirkwood,mvebu,omap,rk,siemens,mx,sunxi,technexion,toradex"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x amlogic,bcm,engicam,imx,ls1,ls2,lx216,mvebu,rk,siemens,sunxi,toradex"
        everything_but_arm_and_powerpc:
          BUILDMAN: "-x arm,powerpc"
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as the build runs inside a container
          export BUILDMAN="${BUILDMAN}"
          git config --global --add safe.directory ${WORK_DIR}
          pip install -r tools/buildman/requirements.txt
          EOF
          cat << "EOF" >> build.sh
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 ]]; then
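                  # Re-run buildman in summary mode to report the failing boards and errors before exiting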
                  tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
