Update CMake requirement to 3.16.3
[alexxy/gromacs.git] / admin / containers / scripted_gmx_docker_builds.py
1 #!/usr/bin/env python
2 #
3 # This file is part of the GROMACS molecular simulation package.
4 #
5 # Copyright (c) 2020,2021, by the GROMACS development team, led by
6 # Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
7 # and including many others, as listed in the AUTHORS file in the
8 # top-level source directory and at http://www.gromacs.org.
9 #
10 # GROMACS is free software; you can redistribute it and/or
11 # modify it under the terms of the GNU Lesser General Public License
12 # as published by the Free Software Foundation; either version 2.1
13 # of the License, or (at your option) any later version.
14 #
15 # GROMACS is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18 # Lesser General Public License for more details.
19 #
20 # You should have received a copy of the GNU Lesser General Public
21 # License along with GROMACS; if not, see
22 # http://www.gnu.org/licenses, or write to the Free Software Foundation,
23 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA.
24 #
25 # If you want to redistribute modifications to GROMACS, please
26 # consider that scientific software is very special. Version
27 # control is crucial - bugs must be traceable. We will be happy to
28 # consider code for inclusion in the official distribution, but
29 # derived work must not be called official GROMACS. Details are found
30 # in the README & COPYING files - if they are missing, get the
31 # official version at http://www.gromacs.org.
32 #
33 # To help us fund GROMACS development, we humbly ask that you cite
34 # the research papers on the package. Check out http://www.gromacs.org.
35
36 """Building block based Dockerfile generation for CI testing images.
37
38 Generates a set of docker images used for running GROMACS CI on Gitlab.
39 The images are prepared according to a selection of build configuration targets
40 that hope to cover a broad enough scope of different possible systems,
41 allowing us to check compiler types and versions, as well as libraries used
42 for accelerators and parallel communication systems. Each combination is
43 described as an entry in the build_configs dictionary, with the script
44 analysing the logic and adding build stages as needed.
45
46 Based on the example script provided by the NVidia HPCCM repository.
47
48 Reference:
49     `NVidia HPC Container Maker <https://github.com/NVIDIA/hpc-container-maker>`__
50
51 Authors:
52     * Paul Bauer <paul.bauer.q@gmail.com>
53     * Eric Irrgang <ericirrgang@gmail.com>
54     * Joe Jordan <e.jjordan12@gmail.com>
55     * Mark Abraham <mark.j.abraham@gmail.com>
56
57 Usage::
58
59     $ python3 scripted_gmx_docker_builds.py --help
60     $ python3 scripted_gmx_docker_builds.py --format docker > Dockerfile && docker build .
61     $ python3 scripted_gmx_docker_builds.py | docker build -
62
63 See Also:
64     :file:`buildall.sh`
65
66 """
67
68 import argparse
69 import collections
70 import typing
71 from distutils.version import StrictVersion
72
73 import hpccm
74 import hpccm.config
75 from hpccm.building_blocks.base import bb_base
76
77 try:
78     import utility
79 except ImportError:
80     raise RuntimeError(
81         'This module assumes availability of supporting modules in the same directory. Add the directory to '
82         'PYTHONPATH or invoke Python from within the module directory so module location can be resolved.')
83
# Basic packages for all final images.
# These are installed into every 'main' stage regardless of build configuration.
_common_packages = ['build-essential',
                    'ca-certificates',
                    'ccache',
                    'git',
                    'gnupg',
                    'gpg-agent',
                    'libfftw3-dev',
                    'libhwloc-dev',
                    'liblapack-dev',
                    'libx11-dev',
                    'moreutils',
                    'ninja-build',
                    'rsync',
                    'valgrind',
                    'vim',
                    'wget',
                    'xsltproc']

# Packages only needed when the image will run OpenCL builds/tests.
# Some entries require the extra apt PPA/key/repository configured in build_stages().
_opencl_extra_packages = [
    'nvidia-opencl-dev',
    # The following require apt_ppas=['ppa:intel-opencl/intel-opencl']
    'intel-opencl-icd',
    'ocl-icd-libopencl1',
    'ocl-icd-opencl-dev',
    'opencl-headers',
    # The following require
    #             apt_keys=['http://repo.radeon.com/rocm/apt/debian/rocm.gpg.key'],
    #             apt_repositories=['deb [arch=amd64] http://repo.radeon.com/rocm/apt/debian/ xenial main']
    'libelf1',
    'rocm-opencl',
    'rocm-dev',
    'clinfo'
]

# Extra packages needed to build Python installations from source.
# (pyenv compiles CPython, so the usual interpreter build dependencies are required.)
_python_extra_packages = ['build-essential',
                          'ca-certificates',
                          'ccache',
                          'curl',
                          'git',
                          'libbz2-dev',
                          'libffi-dev',
                          'liblzma-dev',
                          'libncurses5-dev',
                          'libncursesw5-dev',
                          'libreadline-dev',
                          'libsqlite3-dev',
                          'libssl-dev',
                          'llvm',
                          'python-openssl',
                          'vim',
                          'wget',
                          'zlib1g-dev']

# Extra packages needed for images for building documentation.
# Includes the autotools/flex/bison toolchain used to build doxygen from source,
# plus LaTeX and graphics tools for the manual.
_docs_extra_packages = ['autoconf',
                        'automake',
                        'autopoint',
                        'autotools-dev',
                        'bison',
                        'flex',
                        'ghostscript',
                        'graphviz',
                        'help2man',
                        'imagemagick',
                        'libtool',
                        'linkchecker',
                        'mscgen',
                        'm4',
                        'openssh-client',
                        'texinfo',
                        'texlive-latex-base',
                        'texlive-latex-extra',
                        'texlive-fonts-recommended',
                        'texlive-fonts-extra']
160
# Parse command line arguments.
# The shared GROMACS CI options (compiler, MPI, CUDA, distribution, venvs, ...)
# are inherited from utility.parser; only the container output format is added here.
parser = argparse.ArgumentParser(description='GROMACS CI image creation script',
                                 parents=[utility.parser])

parser.add_argument('--format', type=str, default='docker',
                    choices=['docker', 'singularity'],
                    help='Container specification format (default: docker)')
168
169
def base_image_tag(args) -> str:
    """Construct the base container image tag from the CLI options.

    CUDA builds start from the NVidia development images; otherwise a plain
    distribution image is used. Exactly one of ``--centos``/``--ubuntu`` must
    have been selected by the argument parser.
    """
    if args.cuda is not None:
        tag = 'nvidia/cuda:' + args.cuda + '-devel'
        if args.centos is not None:
            return tag + '-centos' + args.centos
        if args.ubuntu is not None:
            return tag + '-ubuntu' + args.ubuntu
        raise RuntimeError('Logic error: no Linux distribution selected.')
    if args.centos is not None:
        return 'centos:centos' + args.centos
    if args.ubuntu is not None:
        return 'ubuntu:' + args.ubuntu
    raise RuntimeError('Logic error: no Linux distribution selected.')
190
191
def get_llvm_packages(args) -> typing.Iterable[str]:
    """List the extra apt packages needed when LLVM comes from distribution packages.

    A TSAN configuration compiles its own LLVM in a dedicated stage, so no
    distribution packages are required in that case.
    """
    if args.llvm is None or args.tsan is not None:
        return []
    llvm = args.llvm
    packages = [f'libomp-{llvm}-dev',
                f'libomp5-{llvm}',
                f'clang-format-{llvm}',
                f'clang-tidy-{llvm}']
    if args.hipsycl is not None:
        # hipSYCL is built from source against the packaged LLVM/Clang toolchain.
        packages.extend([f'llvm-{llvm}-dev',
                         f'libclang-{llvm}-dev',
                         f'lld-{llvm}'])
    return packages
206
def get_opencl_packages(args) -> typing.Iterable[str]:
    """List the extra apt packages for OpenCL-capable images.

    Doxygen (documentation) and oneAPI images do not exercise OpenCL, so they
    skip these packages.
    """
    skip_opencl = args.doxygen is not None or args.oneapi is not None
    return [] if skip_opencl else _opencl_extra_packages
212
def get_compiler(args, compiler_build_stage: hpccm.Stage = None) -> bb_base:
    """Select the compiler building block for the main stage.

    TSAN (custom LLVM) and oneAPI toolchains are installed by a dedicated
    earlier build stage and imported via that stage's ``runtime()``; plain
    LLVM and GCC use the stock hpccm building blocks.

    Raises:
        RuntimeError: if a required compiler build stage is missing, or no
            compiler toolchain was selected at all.
    """
    if args.llvm is not None:
        if args.tsan is not None:
            # TSAN needs our own LLVM build (TSAN + OpenMP support).
            if compiler_build_stage is None:
                raise RuntimeError('No TSAN compiler build stage!')
            return compiler_build_stage.runtime(_from='tsan')
        # No special support needed: use the packaged LLVM.
        return hpccm.building_blocks.llvm(extra_repository=True, version=args.llvm)

    if args.oneapi is not None:
        if compiler_build_stage is None:
            raise RuntimeError('No oneAPI compiler build stage!')
        compiler = compiler_build_stage.runtime(_from='oneapi')
        # Prepare the toolchain (needed only for builds done within the Dockerfile, e.g.
        # OpenMPI builds, which don't currently work for other reasons).
        oneapi_toolchain = hpccm.toolchain(
            CC=f'/opt/intel/oneapi/compiler/{args.oneapi}/linux/bin/intel64/icx',
            CXX=f'/opt/intel/oneapi/compiler/{args.oneapi}/linux/bin/intel64/icpx')
        setattr(compiler, 'toolchain', oneapi_toolchain)
        return compiler

    if args.gcc is not None:
        return hpccm.building_blocks.gnu(extra_repository=True,
                                         version=args.gcc,
                                         fortran=False)

    raise RuntimeError('Logic error: no compiler toolchain selected.')
245
246
def get_mpi(args, compiler):
    """Return an MPI building block matching *compiler*, or None if MPI was not requested.

    Only OpenMPI is currently implemented; it is built with CUDA support when
    a CUDA image is requested.
    """
    if args.mpi is None:
        return None

    if args.mpi == 'openmpi':
        if not hasattr(compiler, 'toolchain'):
            raise RuntimeError('compiler is not an HPCCM compiler building block!')
        if args.oneapi is not None:
            raise RuntimeError('oneAPI building OpenMPI is not supported')
        return hpccm.building_blocks.openmpi(toolchain=compiler.toolchain,
                                             cuda=args.cuda is not None,
                                             infiniband=False)

    if args.mpi == 'impi':
        # TODO Intel MPI from the oneAPI repo is not working reliably,
        # reasons are unclear. When solved, add packages called:
        # 'intel-oneapi-mpi', 'intel-oneapi-mpi-devel'
        # during the compiler stage.
        # TODO also consider hpccm's intel_mpi package if that doesn't need
        # a license to run.
        raise RuntimeError('Intel MPI recipe not implemented yet.')

    raise RuntimeError('Requested unknown MPI implementation.')
274
275
def get_clfft(args):
    """Return a building block that builds clFFT from source, or None if not requested."""
    if args.clfft is None:
        return None
    return hpccm.building_blocks.generic_cmake(
        repository='https://github.com/clMathLibraries/clFFT.git',
        prefix='/usr/local',
        recursive=True,
        branch=args.clfft,
        directory='clFFT/src')
283
def get_hipsycl(args):
    """Return a building block that builds hipSYCL from source, or None if not requested.

    hipSYCL is compiled against the packaged LLVM/Clang toolchain (``--llvm``).
    The ROCm backend is always enabled; the CUDA backend is added when
    ``--cuda`` is also requested.

    Raises:
        RuntimeError: if hipSYCL is requested without an LLVM toolchain.
    """
    if args.hipsycl is None:
        return None
    if args.llvm is None:
        raise RuntimeError('Can not build hipSYCL without llvm')

    cmake_opts = [f'-DLLVM_DIR=/usr/lib/llvm-{args.llvm}/cmake',
                  f'-DCLANG_EXECUTABLE_PATH=/usr/bin/clang++-{args.llvm}',
                  '-DCMAKE_PREFIX_PATH=/opt/rocm/lib/cmake',
                  '-DWITH_ROCM_BACKEND=ON']
    if args.cuda is not None:
        # No placeholder needed here, so plain string literals (not f-strings).
        cmake_opts += ['-DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda',
                       '-DWITH_CUDA_BACKEND=ON']

    postinstall = [
            # https://github.com/illuhad/hipSYCL/issues/361#issuecomment-718943645
            'for f in /opt/rocm/amdgcn/bitcode/*.bc; do ln -s "$f" "/opt/rocm/lib/$(basename $f .bc).amdgcn.bc"; done'
            ]
    if args.cuda is not None:
        postinstall += [
            # https://github.com/illuhad/hipSYCL/issues/410#issuecomment-743301929
            f'sed s/_OPENMP/__OPENMP_NVPTX__/ -i /usr/lib/llvm-{args.llvm}/lib/clang/*/include/__clang_cuda_complex_builtins.h',
            # Not needed unless we're building with CUDA 11.x, but no harm in doing always
            'ln -s /usr/local/cuda/compat/* /usr/local/cuda/lib64/'
            ]

    return hpccm.building_blocks.generic_cmake(
        repository='https://github.com/illuhad/hipSYCL.git',
        directory='/var/tmp/hipSYCL',
        prefix='/usr/local', recursive=True, commit=args.hipsycl,
        cmake_opts=['-DCMAKE_BUILD_TYPE=Release', *cmake_opts],
        postinstall=postinstall)
316
def add_tsan_compiler_build_stage(input_args, output_stages: typing.Mapping[str, hpccm.Stage]):
    """Isolate the expensive TSAN preparation stage.

    This is a very expensive stage, but has few and disjoint dependencies, and
    its output is easily compartmentalized (/usr/local) so we can isolate this
    build stage to maximize build cache hits and reduce rebuild time, bookkeeping,
    and final image size.

    Args:
        input_args: parsed command line arguments; ``llvm`` selects the LLVM release branch.
        output_stages: mutable mapping that receives the new stage under 'compiler_build'.
    """
    if not isinstance(output_stages, collections.abc.MutableMapping):
        raise RuntimeError('Need output_stages container.')
    tsan_stage = hpccm.Stage()
    tsan_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='tsan')

    tsan_stage += hpccm.building_blocks.packages(ospackages=['git', 'ca-certificates', 'build-essential', 'cmake'])
    # CMake will get duplicated later, but this is an expensive image, and it isn't worth optimizing
    # out that duplication...
    tsan_stage += hpccm.building_blocks.python(python3=True, python2=False, devel=False)

    # Build clang/OpenMP from the upstream release branch matching --llvm, with TSAN enabled.
    compiler_branch = 'release/' + str(input_args.llvm) + '.x'
    tsan_stage += hpccm.building_blocks.generic_cmake(
        repository='https://github.com/llvm/llvm-project.git',
        directory='/var/tmp/llvm-project/llvm/',
        prefix='/usr/local', recursive=True, branch=compiler_branch,
        cmake_opts=['-D CMAKE_BUILD_TYPE=Release', '-D LLVM_ENABLE_PROJECTS="clang;openmp;clang-tools-extra;compiler-rt;lld"',
                    '-D LIBOMP_TSAN_SUPPORT=on'],
        # Symlink the tools under the version-suffixed names that the CI scripts expect.
        postinstall=['ln -s /usr/local/bin/clang++ /usr/local/bin/clang++-' + str(input_args.llvm),
                     'ln -s /usr/local/bin/clang-format /usr/local/bin/clang-format-' + str(input_args.llvm),
                     'ln -s /usr/local/bin/clang-tidy /usr/local/bin/clang-tidy-' + str(input_args.llvm),
                     'ln -s /usr/local/share/clang/run-clang-tidy.py /usr/local/bin/run-clang-tidy-' + str(input_args.llvm) + '.py',
                     'ln -s /usr/local/bin/run-clang-tidy-' + str(input_args.llvm) + '.py /usr/local/bin/run-clang-tidy-' + str(input_args.llvm),
                     'ln -s /usr/local/libexec/c++-analyzer /usr/local/bin/c++-analyzer-' + str(input_args.llvm)])
    output_stages['compiler_build'] = tsan_stage
349
def oneapi_runtime(_from='0'):
    """Runtime hook for the oneAPI stage: copy the installation from 'oneapi-build'.

    Attached to the oneAPI build stage as its ``runtime`` attribute so that
    get_compiler() can import the minimal installed components.
    """
    runtime_stage = hpccm.Stage()
    runtime_stage += hpccm.primitives.copy(
        _from='oneapi-build',
        files={'/opt/intel': '/opt/intel',
               '/etc/bash.bashrc': '/etc/bash.bashrc'})
    return runtime_stage
356
def add_oneapi_compiler_build_stage(input_args, output_stages: typing.Mapping[str, hpccm.Stage]):
    """Isolate the oneAPI preparation stage.

    This stage is isolated so that its installed components are minimized in the
    final image (chiefly /opt/intel) and its environment setup script can be
    sourced. This also helps with rebuild time and final image size.

    Args:
        input_args: parsed command line arguments; ``oneapi`` selects the toolkit version.
        output_stages: mutable mapping that receives the new stage under 'compiler_build'.
    """
    if not isinstance(output_stages, collections.abc.MutableMapping):
        raise RuntimeError('Need output_stages container.')
    oneapi_stage = hpccm.Stage()
    oneapi_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='oneapi-build')

    version = str(input_args.oneapi)

    # Add required components for the next stage (both for hpccm and Intel's setvars.sh script)
    oneapi_stage += hpccm.building_blocks.packages(ospackages=['wget', 'gnupg2', 'ca-certificates', 'lsb-release'])
    oneapi_stage += hpccm.building_blocks.packages(
        apt_keys=['https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB'],
        apt_repositories=['deb https://apt.repos.intel.com/oneapi all main'],
        # Add minimal packages (not the whole HPC toolkit!)
        ospackages=[f'intel-oneapi-dpcpp-cpp-{version}',
            f'intel-oneapi-openmp-{version}',
            f'intel-oneapi-mkl-{version}',
            f'intel-oneapi-mkl-devel-{version}']
    )
    # Ensure that all bash shells on the final container will have access to oneAPI
    # and that the 'latest' symlink points at the version we installed.
    oneapi_stage += hpccm.primitives.shell(
            commands=['echo "source /opt/intel/oneapi/setvars.sh" >> /etc/bash.bashrc',
                      'unlink /opt/intel/oneapi/compiler/latest',
                     f'ln -sf /opt/intel/oneapi/compiler/{version} /opt/intel/oneapi/compiler/latest']
            )
    # get_compiler() calls stage.runtime() to import /opt/intel into the main stage.
    setattr(oneapi_stage, 'runtime', oneapi_runtime)

    output_stages['compiler_build'] = oneapi_stage
391
def prepare_venv(version: StrictVersion) -> typing.Sequence[str]:
    """Get shell commands to set up the venv for the requested Python version.

    The interpreter itself is expected to have been installed with pyenv; the
    venv is created from it and seeded with the gmxapi test dependencies.
    """
    major, minor = version.version[0], version.version[1]  # type: int

    pyenv = '$HOME/.pyenv/bin/pyenv'
    py_ver = f'{major}.{minor}'
    venv_path = f'$HOME/venv/py{py_ver}'

    commands = [
        # Create the venv from the pyenv-managed interpreter for this version.
        f'$({pyenv} prefix `{pyenv} whence python{py_ver}`)/bin/python -m venv {venv_path}',
        f'{venv_path}/bin/python -m pip install --upgrade pip setuptools',
    ]

    # Install dependencies for building and testing gmxapi Python package.
    # WARNING: Please keep this list synchronized with python_packaging/requirements-test.txt
    # TODO: Get requirements.txt from an input argument.
    commands.append(f"""{venv_path}/bin/python -m pip install --upgrade \
            'cmake>=3.16.3' \
            'flake8>=3.7.7' \
            'gcovr>=4.2' \
            'mpi4py>=3.0.3' \
            'networkx>=2.0' \
            'numpy>=1' \
            'pip>=10.1' \
            'Pygments>=2.2.0' \
            'pytest>=3.9' \
            'setuptools>=42' \
            'scikit-build>=0.10' \
            'Sphinx>=1.6.3' \
            'sphinxcontrib-plantuml>=0.14'""")

    # TODO: Remove 'importlib_resources' dependency when Python >=3.7 is required.
    if minor == 6:
        commands.append(f"""{venv_path}/bin/python -m pip install --upgrade \
                'importlib_resources'""")

    return commands
428
429
def add_python_stages(building_blocks: typing.Mapping[str, bb_base],
                      input_args,
                      output_stages: typing.MutableMapping[str, hpccm.Stage]):
    """Add the stage(s) necessary for the requested venvs.

    One intermediate build stage is created for each venv (see --venv option).

    Each stage partially populates Python installations and venvs in the home
    directory. The home directory is collected by the 'pyenv' stage for use by
    the main build stage.

    Args:
        building_blocks: shared building blocks; 'compiler' and 'mpi' are reused here.
        input_args: parsed command line arguments; ``venvs`` lists Python versions.
        output_stages: mutable mapping receiving one stage per venv plus 'pyenv'.
    """
    if len(input_args.venvs) < 1:
        raise RuntimeError('No venvs to build...')
    if output_stages is None or not isinstance(output_stages, collections.abc.Mapping):
        raise RuntimeError('Need a container for output stages.')

    # Main Python stage that collects the environments from individual stages.
    # We collect the stages individually, rather than chaining them, because the
    # copy is a bit slow and wastes local Docker image space for each filesystem
    # layer.
    pyenv_stage = hpccm.Stage()
    pyenv_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='pyenv')
    pyenv_stage += building_blocks['compiler']
    pyenv_stage += building_blocks['mpi']
    pyenv_stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)

    # Build each requested Python version in its own stage, then copy its home
    # directory contents into the aggregating 'pyenv' stage.
    for version in [StrictVersion(py_ver) for py_ver in sorted(input_args.venvs)]:
        stage_name = 'py' + str(version)
        stage = hpccm.Stage()
        stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as=stage_name)
        stage += building_blocks['compiler']
        stage += building_blocks['mpi']
        stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)

        # TODO: Use a non-root user for testing and Python virtual environments.
        # Install pyenv and hook it into login shells.
        stage += hpccm.primitives.shell(commands=[
            'curl https://pyenv.run | bash',
            """echo 'export PYENV_ROOT="$HOME/.pyenv"' >> $HOME/.bashrc""",
            """echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> $HOME/.bashrc""",
            """echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc""",
            """echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc"""])
        pyenv = '$HOME/.pyenv/bin/pyenv'
        # --enable-shared is needed so the interpreter can be embedded (e.g. by mpi4py).
        commands = [f'PYTHON_CONFIGURE_OPTS="--enable-shared" {pyenv} install -s {version}']
        stage += hpccm.primitives.shell(commands=commands)

        commands = prepare_venv(version)
        stage += hpccm.primitives.shell(commands=commands)

        # TODO: Update user home directory.
        pyenv_stage += hpccm.primitives.copy(_from=stage_name, _mkdir=True, src=['/root/'],
                                             dest='/root')

        # Add the intermediate build stage to the sequence
        output_stages[stage_name] = stage

    # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
    # # `version` should be a system installation or pyenv environment (or pyenv-virtualenv)
    # # with the dependencies for all of the Python aspects of CMake-driven builds.
    # commands = '{pyenv} global {version}'.format(
    #             pyenv=pyenv,
    #             version=...)
    # pyenv_stage += hpccm.primitives.shell(commands=commands)

    # Add the aggregating build stage to the sequence. This allows the main stage to copy
    # the files in a single stage, potentially reducing the overall output image size.
    output_stages['pyenv'] = pyenv_stage
496
497
def add_documentation_dependencies(input_args,
                                   output_stages: typing.MutableMapping[str, hpccm.Stage]):
    """Add appropriate layers according to doxygen input arguments.

    Doxygen 1.8.5 is built from source (with a pinned flex); any other requested
    version is installed from the upstream binary release tarball.
    """
    if input_args.doxygen is None:
        return
    # Allow ImageMagick to process the PostScript/PDF formats the docs build needs.
    output_stages['main'] += hpccm.primitives.shell(
        commands=['sed -i \'/\"XPS\"/d;/\"PDF\"/d;/\"PS\"/d;/\"EPS\"/d;/disable ghostscript format types/d\' /etc/ImageMagick-6/policy.xml'])
    if input_args.doxygen == '1.8.5':
        doxygen_commit = 'ed4ed873ab0e7f15116e2052119a6729d4589f7a'
        # Doxygen 1.8.5 needs a specific flex version to build, so build that first.
        output_stages['main'] += hpccm.building_blocks.generic_autotools(
            repository='https://github.com/westes/flex.git',
            commit='f7788a9a0ecccdc953ed12043ccb59ca25714018',
            prefix='/tmp/install-of-flex',
            configure_opts=['--disable-shared'],
            preconfigure=['./autogen.sh'])
        output_stages['main'] += hpccm.building_blocks.generic_autotools(
            repository='https://github.com/doxygen/doxygen.git',
            commit=doxygen_commit,
            prefix='',
            configure_opts=[
                '--flex /tmp/install-of-flex/bin/flex',
                '--static'])
    else:
        # Install the upstream binary release; only the doxygen executable is kept.
        version = input_args.doxygen
        archive_name = f'doxygen-{version}.linux.bin.tar.gz'
        archive_url = f'https://sourceforge.net/projects/doxygen/files/rel-{version}/{archive_name}'
        binary_path = f'doxygen-{version}/bin/doxygen'
        commands = [
            'mkdir doxygen && cd doxygen',
            f'wget {archive_url}',
            f'tar xf {archive_name} {binary_path}',
            f'cp {binary_path} /usr/local/bin/',
            'cd .. && rm -rf doxygen'
        ]
        output_stages['main'] += hpccm.primitives.shell(commands=commands)
533
534
def build_stages(args) -> typing.Iterable[hpccm.Stage]:
    """Define and sequence the stages for the recipe corresponding to *args*.

    Yields the stages in dependency order; the caller prints them to form the
    complete Dockerfile / Singularity definition.
    """

    # A Dockerfile or Singularity recipe can have multiple build stages.
    # The main build stage can copy files from previous stages, though only
    # the last stage is included in the tagged output image. This means that
    # large or expensive sets of build instructions can be isolated in
    # local/temporary images, but all of the stages need to be output by this
    # script, and need to occur in the correct order, so we create a sequence
    # object early in this function.
    stages = collections.OrderedDict()

    # If we need TSAN or oneAPI support the early build is more complex,
    # so that our compiler images don't have all the cruft needed to get those things
    # installed.
    if args.llvm is not None and args.tsan is not None:
        add_tsan_compiler_build_stage(input_args=args, output_stages=stages)
    if args.oneapi is not None:
        add_oneapi_compiler_build_stage(input_args=args, output_stages=stages)

    # Building blocks are chunks of container-builder instructions that can be
    # copied to any build stage with the addition operator.
    building_blocks = collections.OrderedDict()
    building_blocks['base_packages'] = hpccm.building_blocks.packages(
        ospackages=_common_packages)

    # These are the most expensive and most reusable layers, so we put them first.
    building_blocks['compiler'] = get_compiler(args, compiler_build_stage=stages.get('compiler_build'))
    building_blocks['mpi'] = get_mpi(args, building_blocks['compiler'])
    # Several CMake versions may be requested; each is installed under its own prefix.
    for i, cmake in enumerate(args.cmake):
        building_blocks['cmake' + str(i)] = hpccm.building_blocks.cmake(
            eula=True,
            prefix=f'/usr/local/cmake-{cmake}',
            version=cmake)

    # Install additional packages early in the build to optimize Docker build layer cache.
    os_packages = list(get_llvm_packages(args)) + get_opencl_packages(args)
    if args.doxygen is not None:
        os_packages += _docs_extra_packages
    if args.oneapi is not None:
        os_packages += ['lsb-release']
    if args.hipsycl is not None:
        os_packages += ['libboost-fiber-dev']
    # The PPA/key/repository entries here are required by some of the OpenCL
    # packages (see _opencl_extra_packages).
    building_blocks['extra_packages'] = hpccm.building_blocks.packages(
        ospackages=os_packages,
        apt_ppas=['ppa:intel-opencl/intel-opencl'],
        apt_keys=['http://repo.radeon.com/rocm/apt/debian/rocm.gpg.key'],
        apt_repositories=['deb [arch=amd64] http://repo.radeon.com/rocm/apt/debian/ xenial main']
    )

    if args.cuda is not None and args.llvm is not None:
        # Hack to tell clang what version of CUDA we're using
        # based on https://github.com/llvm/llvm-project/blob/1fdec59bffc11ae37eb51a1b9869f0696bfd5312/clang/lib/Driver/ToolChains/Cuda.cpp#L43
        building_blocks['cuda-clang-workaround'] = hpccm.primitives.shell(commands=[
            f'echo "CUDA Version {args.cuda}" > /usr/local/cuda/version.txt'
            ])

    building_blocks['clfft'] = get_clfft(args)

    building_blocks['hipSYCL'] = get_hipsycl(args)

    # Add Python environments to MPI images, only, so we don't have to worry
    # about whether to install mpi4py.
    if args.mpi is not None and len(args.venvs) > 0:
        add_python_stages(building_blocks=building_blocks, input_args=args, output_stages=stages)

    # Create the stage from which the targeted image will be tagged.
    stages['main'] = hpccm.Stage()

    stages['main'] += hpccm.primitives.baseimage(image=base_image_tag(args))
    # Building blocks that are None (feature not requested) are simply skipped.
    for bb in building_blocks.values():
        if bb is not None:
            stages['main'] += bb

    # We always add Python3 and Pip
    stages['main'] += hpccm.building_blocks.python(python3=True, python2=False)

    # Add documentation requirements (doxygen and sphinx + misc).
    if args.doxygen is not None:
        add_documentation_dependencies(args, stages)

    # Collect the Python environments built by the intermediate 'pyenv' stage.
    if 'pyenv' in stages and stages['pyenv'] is not None:
        stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/.pyenv/'],
                                                dest='/root/.pyenv')
        stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/venv/'],
                                                dest='/root/venv')
        # TODO: Update user home directory.
        # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
        # stages['main'] += hpccm.primitives.copy(_from='pyenv', src=['/root/.bashrc'],
        #                                         dest='/root/')

    # Make sure that `python` resolves to something.
    stages['main'] += hpccm.primitives.shell(commands=['test -x /usr/bin/python || '
                                                       'update-alternatives --install /usr/bin/python python /usr/bin/python3 1 && '
                                                       '/usr/bin/python --version'])

    # Note that the list of stages should be sorted in dependency order.
    for build_stage in stages.values():
        if build_stage is not None:
            yield build_stage
635
636
if __name__ == '__main__':
    args = parser.parse_args()

    # Tell hpccm whether to emit a Dockerfile or a Singularity definition.
    hpccm.config.set_container_format(args.format)

    # Emit every stage, in dependency order, to standard output.
    for build_stage in build_stages(args):
        print(build_stage)