3 # This file is part of the GROMACS molecular simulation package.
5 # Copyright (c) 2020, by the GROMACS development team, led by
6 # Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
7 # and including many others, as listed in the AUTHORS file in the
8 # top-level source directory and at http://www.gromacs.org.
10 # GROMACS is free software; you can redistribute it and/or
11 # modify it under the terms of the GNU Lesser General Public License
12 # as published by the Free Software Foundation; either version 2.1
13 # of the License, or (at your option) any later version.
15 # GROMACS is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 # Lesser General Public License for more details.
20 # You should have received a copy of the GNU Lesser General Public
21 # License along with GROMACS; if not, see
22 # http://www.gnu.org/licenses, or write to the Free Software Foundation,
23 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 # If you want to redistribute modifications to GROMACS, please
26 # consider that scientific software is very special. Version
27 # control is crucial - bugs must be traceable. We will be happy to
28 # consider code for inclusion in the official distribution, but
29 # derived work must not be called official GROMACS. Details are found
30 # in the README & COPYING files - if they are missing, get the
31 # official version at http://www.gromacs.org.
33 # To help us fund GROMACS development, we humbly ask that you cite
34 # the research papers on the package. Check out http://www.gromacs.org.
37 Generates a set of docker images used for running GROMACS CI on Gitlab.
38 The images are prepared according to a selection of build configuration targets
39 that hope to cover a broad enough scope of different possible systems,
40 allowing us to check compiler types and versions, as well as libraries used
for accelerators and parallel communication systems. Each combination is
42 described as an entry in the build_configs dictionary, with the script
43 analysing the logic and adding build stages as needed.
45 Based on the example script provided by the NVidia HPCCM repository.
48 * Paul Bauer <paul.bauer.q@gmail.com>
49 * Eric Irrgang <ericirrgang@gmail.com>
50 * Joe Jordan <e.jjordan12@gmail.com>
51 * Mark Abraham <mark.j.abraham@gmail.com>
55 $ python3 scripted_gmx_docker_builds.py --help
56 $ python3 scripted_gmx_docker_builds.py --format docker > Dockerfile && docker build .
57 $ python3 scripted_gmx_docker_builds.py | docker build -
64 from distutils.version import StrictVersion
68 from hpccm.building_blocks.base import bb_base
74 'This module assumes availability of supporting modules in the same directory. Add the directory to '
75 'PYTHONPATH or invoke Python from within the module directory so module location can be resolved.')
# Basic packages for all final images.
# NOTE(review): most entries of this list are elided in this copy of the file
# (the literal is left unterminated) -- restore the full list from upstream
# before running this script.
_common_packages = ['build-essential',

# Extra packages needed to build Python installations from source.
# NOTE(review): the remaining entries of this list are elided here as well.
_python_extra_packages = ['build-essential',

# Extra packages needed for images for building documentation.
# NOTE(review): the leading entries are elided; only the TeX Live tail of the
# list survives in this copy.
_docs_extra_packages = ['autoconf',
                        'texlive-latex-base',
                        'texlive-latex-extra',
                        'texlive-fonts-recommended',
                        'texlive-fonts-extra']

# Supported Python versions for maintained branches.
_python_versions = ['3.6.10', '3.7.7', '3.8.2']

# Parse command line arguments
# utility.parser supplies the shared options (cuda/ubuntu/centos/compiler/...).
parser = argparse.ArgumentParser(description='GROMACS CI image creation script', parents=[utility.parser])

parser.add_argument('--format', type=str, default='docker',
                    choices=['docker', 'singularity'],
                    help='Container specification format (default: docker)')
parser.add_argument('--venvs', nargs='*', type=str, default=_python_versions,
                    help='List of Python versions ("major.minor.patch") for which to install venvs. '
                         'Default: {}'.format(' '.join(_python_versions)))
def base_image_tag(args) -> str:
    """Return the container base image tag implied by the CLI arguments.

    When ``--cuda`` is given, an ``nvidia/cuda`` ``-devel`` image for the
    selected distribution is used; otherwise a plain ``centos`` or ``ubuntu``
    image.

    :raises RuntimeError: if neither a CentOS nor an Ubuntu version was selected.
    """
    # Check if we use CUDA images or plain linux images
    if args.cuda is not None:
        cuda_version_tag = 'nvidia/cuda:' + args.cuda + '-devel'
        if args.centos is not None:
            cuda_version_tag += '-centos' + args.centos
        elif args.ubuntu is not None:
            cuda_version_tag += '-ubuntu' + args.ubuntu
        else:
            raise RuntimeError('Logic error: no Linux distribution selected.')
        base_image_tag = cuda_version_tag
    else:
        if args.centos is not None:
            base_image_tag = 'centos:centos' + args.centos
        elif args.ubuntu is not None:
            base_image_tag = 'ubuntu:' + args.ubuntu
        else:
            raise RuntimeError('Logic error: no Linux distribution selected.')
    return base_image_tag
def get_llvm_packages(args) -> typing.Iterable[str]:
    """Return the extra OS packages needed for a distribution-packaged LLVM.

    If we use the package version of LLVM, we need to install extra packages
    for it (OpenMP runtime and versioned clang tools). A self-built toolchain
    (the TSAN case) provides its own tools, so nothing extra is needed.
    """
    if (args.llvm is not None) and (args.tsan is None):
        return ['libomp-dev',
                'clang-format-' + str(args.llvm),
                'clang-tidy-' + str(args.llvm)]
    else:
        # Callers concatenate this result with other package lists, so an
        # empty list (not None) is the correct "nothing to add" value.
        return []
def get_compiler(args, compiler_build_stage: hpccm.Stage = None) -> bb_base:
    """Return the building block for the compiler toolchain selected by *args*.

    For TSAN and oneAPI toolchains the compiler is collected from the runtime
    of a previously prepared *compiler_build_stage*; otherwise a stock hpccm
    LLVM or GNU building block is used.

    :raises RuntimeError: for unimplemented (icc) or unselected toolchains, or
        when a required compiler build stage is missing.
    """
    if args.icc is not None:
        raise RuntimeError('Intel compiler toolchain recipe not implemented yet')

    if args.llvm is not None:
        # Build our own version instead to get TSAN + OMP
        if args.tsan is not None:
            if compiler_build_stage is not None:
                compiler = compiler_build_stage.runtime(_from='tsan')
            else:
                raise RuntimeError('No TSAN compiler build stage!')
        # Build the default compiler if we don't need special support
        else:
            compiler = hpccm.building_blocks.llvm(extra_repository=True, version=args.llvm)

    elif args.oneapi is not None:
        if compiler_build_stage is not None:
            compiler = compiler_build_stage.runtime(_from='oneapi')
            # Prepare the toolchain (needed only for builds done within the Dockerfile, e.g.
            # OpenMPI builds, which don't currently work for other reasons)
            oneapi_toolchain = hpccm.toolchain(CC='/opt/intel/oneapi/compiler/latest/linux/bin/intel64/icc',
                                               CXX='/opt/intel/oneapi/compiler/latest/linux/bin/intel64/icpc')
            setattr(compiler, 'toolchain', oneapi_toolchain)
        else:
            raise RuntimeError('No oneAPI compiler build stage!')

    elif args.gcc is not None:
        # NOTE(review): trailing gnu() arguments were elided in this copy;
        # version/fortran settings reconstructed -- confirm against upstream.
        compiler = hpccm.building_blocks.gnu(extra_repository=True,
                                             version=args.gcc,
                                             fortran=False)
    else:
        raise RuntimeError('Logic error: no compiler toolchain selected.')
    return compiler
def get_mpi(args, compiler):
    """Return an MPI building block matching *args* and *compiler*, or None.

    OpenMPI is built from source with the image's own toolchain (CUDA-aware
    when a CUDA toolkit is present). Returns None when no MPI was requested.

    :raises RuntimeError: for unsupported or unknown MPI selections, or when
        *compiler* does not expose an hpccm toolchain.
    """
    # If needed, add MPI to the image
    if args.mpi is not None:
        if args.mpi == 'openmpi':
            # Enable CUDA-aware OpenMPI only when the image has a CUDA toolkit.
            use_cuda = False
            if args.cuda is not None:
                use_cuda = True

            if hasattr(compiler, 'toolchain'):
                if args.oneapi is not None:
                    raise RuntimeError('oneAPI building OpenMPI is not supported')
                return hpccm.building_blocks.openmpi(toolchain=compiler.toolchain, cuda=use_cuda, infiniband=False)
            else:
                raise RuntimeError('compiler is not an HPCCM compiler building block!')

        elif args.mpi == 'impi':
            # TODO Intel MPI from the oneAPI repo is not working reliably,
            # reasons are unclear. When solved, add packages called:
            # 'intel-oneapi-mpi', 'intel-oneapi-mpi-devel'
            # during the compiler stage.
            # TODO also consider hpccm's intel_mpi package if that doesn't need
            # a license to run.
            raise RuntimeError('Intel MPI recipe not implemented yet.')
        else:
            raise RuntimeError('Requested unknown MPI implementation.')
    else:
        # No MPI requested: contribute nothing to the image.
        return None
def get_opencl(args):
    """Return a building block providing the requested OpenCL runtime, or None.

    Supports the 'nvidia', 'intel', and 'amd' (ROCm) OpenCL stacks.

    :raises RuntimeError: when the Nvidia OpenCL stack is requested without a
        CUDA base image.
    """
    # Add OpenCL environment if needed
    if (args.opencl is not None):
        if args.opencl == 'nvidia':
            if (args.cuda is None):
                raise RuntimeError('Need Nvidia environment for Nvidia OpenCL image')

            return hpccm.building_blocks.packages(ospackages=['nvidia-opencl-dev'])

        elif args.opencl == 'intel':
            # Note, when using oneapi, there is bundled OpenCL support, so this
            # installation is not needed.
            return hpccm.building_blocks.packages(
                apt_ppas=['ppa:intel-opencl/intel-opencl'],
                ospackages=['opencl-headers', 'ocl-icd-libopencl1',
                            'ocl-icd-opencl-dev', 'intel-opencl-icd'])

        elif args.opencl == 'amd':
            # libelf1 is a necessary dependency for something in the ROCm stack,
            # which they should set up, but seem to have omitted.
            return hpccm.building_blocks.packages(
                apt_keys=['http://repo.radeon.com/rocm/apt/debian/rocm.gpg.key'],
                apt_repositories=['deb [arch=amd64] http://repo.radeon.com/rocm/apt/debian/ xenial main'],
                ospackages=['ocl-icd-libopencl1', 'ocl-icd-opencl-dev', 'opencl-headers', 'libelf1', 'rocm-opencl', 'rocm-dev', 'clinfo'])
    else:
        # No OpenCL environment requested.
        return None
def get_clfft(args):
    """Return a source build of clFFT when requested, or None.

    *args.clfft* names the clFFT git branch to build; clFFT provides the FFT
    implementation for OpenCL builds.
    """
    # NOTE(review): the `def` line itself was elided in this copy; the name is
    # confirmed by the `get_clfft(args)` call site in build_stages().
    if (args.clfft is not None):
        return hpccm.building_blocks.generic_cmake(
            repository='https://github.com/clMathLibraries/clFFT.git',
            prefix='/usr/local', recursive=True, branch=args.clfft, directory='clFFT/src')
    else:
        return None
def add_tsan_compiler_build_stage(input_args, output_stages: typing.Mapping[str, hpccm.Stage]):
    """Isolate the expensive TSAN preparation stage.

    This is a very expensive stage, but has few and disjoint dependencies, and
    its output is easily compartmentalized (/usr/local) so we can isolate this
    build stage to maximize build cache hits and reduce rebuild time, bookkeeping,
    and final image size.
    """
    if not isinstance(output_stages, collections.abc.MutableMapping):
        raise RuntimeError('Need output_stages container.')
    tsan_stage = hpccm.Stage()
    tsan_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='tsan')

    tsan_stage += hpccm.building_blocks.packages(ospackages=['git', 'ca-certificates', 'build-essential', 'cmake'])
    # CMake will get duplicated later, but this is an expensive image, and it isn't worth optimizing
    # out that duplication...
    tsan_stage += hpccm.building_blocks.python(python3=True, python2=False, devel=False)

    # Map the requested LLVM major version to an llvm.org release branch name,
    # e.g. llvm=9 -> 'release_90'.
    compiler_branch = 'release_' + str(input_args.llvm) + '0'
    tsan_stage += hpccm.building_blocks.generic_cmake(
        repository='https://git.llvm.org/git/llvm.git',
        prefix='/usr/local', recursive=True, branch=compiler_branch,
        cmake_opts=['-D CMAKE_BUILD_TYPE=Release', '-D LLVM_ENABLE_PROJECTS="clang;openmp;clang-tools-extra"',
                    '-D LIBOMP_TSAN_SUPPORT=on'],
        # Clone the sibling LLVM projects that the monolithic build expects to
        # find in-tree before configuring.
        preconfigure=['export branch=' + compiler_branch,
                      '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/libcxx.git)',
                      '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/libcxxabi.git)',
                      '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/compiler-rt.git)',
                      '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/openmp.git)',
                      '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/clang.git)',
                      '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/clang-tools-extra.git)'],
        # Provide the versioned tool names (clang++-N, clang-tidy-N, ...) that
        # the CI configurations invoke.
        postinstall=['ln -s /usr/local/bin/clang++ /usr/local/bin/clang++-' + str(input_args.llvm),
                     'ln -s /usr/local/bin/clang-format /usr/local/bin/clang-format-' + str(input_args.llvm),
                     'ln -s /usr/local/bin/clang-tidy /usr/local/bin/clang-tidy-' + str(input_args.llvm),
                     'ln -s /usr/local/libexec/c++-analyzer /usr/local/bin/c++-analyzer-' + str(input_args.llvm)])
    output_stages['compiler_build'] = tsan_stage
def oneapi_runtime(_from='0'):
    """Build a runtime stage carrying the oneAPI install and bash setup.

    The files are always copied from the 'oneapi-build' stage; the *_from*
    parameter is unused and exists only to match the hpccm ``runtime()``
    hook signature.
    """
    runtime_stage = hpccm.Stage()
    runtime_stage += hpccm.primitives.copy(_from='oneapi-build',
                                           files={'/opt/intel': '/opt/intel',
                                                  '/etc/bash.bashrc': '/etc/bash.bashrc'})
    return runtime_stage
def add_oneapi_compiler_build_stage(input_args, output_stages: typing.Mapping[str, hpccm.Stage]):
    """Isolate the oneAPI preparation stage.

    This stage is isolated so that its installed components are minimized in the
    final image (chiefly /opt/intel) and its environment setup script can be
    sourced. This also helps with rebuild time and final image size.

    Note that the ICC compiler inside oneAPI on linux also needs
    gcc to build other components and provide libstdc++.
    """
    if not isinstance(output_stages, collections.abc.MutableMapping):
        raise RuntimeError('Need output_stages container.')
    oneapi_stage = hpccm.Stage()
    oneapi_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='oneapi-build')

    version = str(input_args.oneapi)

    # Add required components for the next stage (both for hpccm and Intel's setvars.sh script)
    oneapi_stage += hpccm.building_blocks.packages(ospackages=['wget', 'gnupg2', 'ca-certificates', 'lsb-release'])
    oneapi_stage += hpccm.building_blocks.packages(
        apt_keys=['https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB'],
        apt_repositories=['deb https://apt.repos.intel.com/oneapi all main'],
        # Add minimal packages (not the whole HPC toolkit!)
        ospackages=['intel-oneapi-dpcpp-cpp-compiler-pro-{}'.format(version),
                    'intel-oneapi-openmp-{}'.format(version),
                    'intel-oneapi-mkl-{}'.format(version),
                    'intel-oneapi-mkl-devel-{}'.format(version)]
    )
    # Ensure that all bash shells on the final container will have access to oneAPI
    oneapi_stage += hpccm.primitives.shell(
        commands=['echo "source /opt/intel/oneapi/setvars.sh" >> /etc/bash.bashrc']
    )
    # Install the runtime hook so get_compiler() can collect /opt/intel into
    # later stages via compiler_build_stage.runtime(_from='oneapi').
    setattr(oneapi_stage, 'runtime', oneapi_runtime)

    output_stages['compiler_build'] = oneapi_stage
def prepare_venv(version: StrictVersion) -> typing.Sequence[str]:
    """Get shell commands to set up the venv for the requested Python version."""
    # StrictVersion.version is a tuple of ints: (major, minor, patch).
    major = version.version[0]
    minor = version.version[1]  # type: int

    pyenv = '$HOME/.pyenv/bin/pyenv'

    py_ver = '{}.{}'.format(major, minor)
    venv_path = '$HOME/venv/py{}'.format(py_ver)
    # NOTE(review): the .format() argument lists of the next two commands are
    # elided in this copy of the file (presumably pyenv=pyenv, py_ver=py_ver,
    # path=venv_path), leaving the calls unterminated -- restore from upstream.
    commands = ['$({pyenv} prefix `{pyenv} whence python{py_ver}`)/bin/python -m venv {path}'.format(
    commands.append('{path}/bin/python -m pip install --upgrade pip setuptools'.format(
    # Install dependencies for building and testing gmxapi Python package.
    # WARNING: Please keep this list synchronized with python_packaging/requirements-test.txt
    # TODO: Get requirements.txt from an input argument.
    # NOTE(review): the requirement entries between the pip command and
    # 'scikit-build' are elided in this copy -- synchronize with
    # python_packaging/requirements-test.txt.
    commands.append("""{path}/bin/python -m pip install --upgrade \
'scikit-build>=0.10'""".format(path=venv_path))

    # TODO: Remove 'importlib_resources' dependency when Python >=3.7 is required.
    # NOTE(review): per the TODO above, this append is presumably guarded by an
    # elided `if minor == 6:` in the full source -- confirm against upstream.
    commands.append("""{path}/bin/python -m pip install --upgrade \
'importlib_resources'""".format(path=venv_path))
# NOTE(review): an `input_args,` parameter line appears to have been elided
# from this signature (the body and the call site in build_stages() both use
# input_args) -- restore before use.
def add_python_stages(building_blocks: typing.Mapping[str, bb_base],
                      output_stages: typing.MutableMapping[str, hpccm.Stage]):
    """Add the stage(s) necessary for the requested venvs.

    One intermediate build stage is created for each venv (see --venv option).

    Each stage partially populates Python installations and venvs in the home
    directory. The home directory is collected by the 'pyenv' stage for use by
    the main build stage.
    """
    if len(input_args.venvs) < 1:
        raise RuntimeError('No venvs to build...')
    if output_stages is None or not isinstance(output_stages, collections.abc.Mapping):
        raise RuntimeError('Need a container for output stages.')

    # Main Python stage that collects the environments from individual stages.
    # We collect the stages individually, rather than chaining them, because the
    # copy is a bit slow and wastes local Docker image space for each filesystem
    # layer.
    pyenv_stage = hpccm.Stage()
    pyenv_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='pyenv')
    pyenv_stage += building_blocks['compiler']
    pyenv_stage += building_blocks['mpi']
    pyenv_stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)

    # One intermediate stage per requested Python version, sorted for
    # deterministic stage ordering.
    for version in [StrictVersion(py_ver) for py_ver in sorted(input_args.venvs)]:
        stage_name = 'py' + str(version)
        stage = hpccm.Stage()
        stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as=stage_name)
        stage += building_blocks['compiler']
        stage += building_blocks['mpi']
        stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)

        # TODO: Use a non-root user for testing and Python virtual environments.
        stage += hpccm.primitives.shell(commands=[
            'curl https://pyenv.run | bash',
            """echo 'export PYENV_ROOT="$HOME/.pyenv"' >> $HOME/.bashrc""",
            """echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> $HOME/.bashrc""",
            """echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc""",
            """echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc"""])
        pyenv = '$HOME/.pyenv/bin/pyenv'
        # NOTE(review): a `pyenv=pyenv,` format argument appears to be elided
        # here ({pyenv} placeholder is otherwise unfilled) -- restore.
        commands = ['PYTHON_CONFIGURE_OPTS="--enable-shared" {pyenv} install -s {version}'.format(
            version=str(version))]
        stage += hpccm.primitives.shell(commands=commands)

        commands = prepare_venv(version)
        stage += hpccm.primitives.shell(commands=commands)

        # TODO: Update user home directory.
        # NOTE(review): the destination argument of this copy() call is elided
        # in this copy of the file, leaving the call unterminated -- restore.
        pyenv_stage += hpccm.primitives.copy(_from=stage_name, _mkdir=True, src=['/root/'],

        # Add the intermediate build stage to the sequence
        output_stages[stage_name] = stage

    # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
    # # `version` should be a system installation or pyenv environment (or pyenv-virtualenv)
    # # with the dependencies for all of the Python aspects of CMake-driven builds.
    # commands = '{pyenv} global {version}'.format(
    # pyenv_stage += hpccm.primitives.shell(commands=commands)

    # Add the aggregating build stage to the sequence. This allows the main stage to copy
    # the files in a single stage, potentially reducing the overall output image size.
    output_stages['pyenv'] = pyenv_stage
def add_documentation_dependencies(input_args,
                                   output_stages: typing.MutableMapping[str, hpccm.Stage]):
    """Add appropriate layers according to doxygen input arguments."""
    if input_args.doxygen is None:
        # NOTE(review): the body of this guard (presumably an early `return`)
        # is elided in this copy of the file -- restore it, or the layers
        # below would be added unconditionally.
    # Allow ImageMagick to produce the formats Sphinx needs by removing the
    # ghostscript-format restrictions from its security policy.
    output_stages['main'] += hpccm.primitives.shell(
        commands=['sed -i \'/\"XPS\"/d;/\"PDF\"/d;/\"PS\"/d;/\"EPS\"/d;/disable ghostscript format types/d\' /etc/ImageMagick-6/policy.xml'])
    output_stages['main'] += hpccm.building_blocks.pip(pip='pip3', packages=['sphinx==1.6.1', 'gcovr'])
    if input_args.doxygen == '1.8.5':
        # This old doxygen is built from source at a pinned commit, and needs
        # a matching flex built from source as well.
        doxygen_commit = 'ed4ed873ab0e7f15116e2052119a6729d4589f7a'
        output_stages['main'] += hpccm.building_blocks.generic_autotools(
            repository='https://github.com/westes/flex.git',
            commit='f7788a9a0ecccdc953ed12043ccb59ca25714018',
            prefix='/tmp/install-of-flex',
            configure_opts=['--disable-shared'],
            preconfigure=['./autogen.sh'])
        output_stages['main'] += hpccm.building_blocks.generic_autotools(
            repository='https://github.com/doxygen/doxygen.git',
            commit=doxygen_commit,
            # NOTE(review): surrounding arguments of this call (and the
            # containing configure_opts list) are elided in this copy,
            # leaving the call unterminated -- restore from upstream.
            '--flex /tmp/install-of-flex/bin/flex',
    # NOTE(review): the `else:` introducing this branch (download a prebuilt
    # doxygen release archive) is elided in this copy -- restore.
        version = input_args.doxygen
        archive_name = 'doxygen-{}.linux.bin.tar.gz'.format(version)
        # NOTE(review): the format arguments (presumably version, archive_name)
        # are elided, leaving this call unterminated.
        archive_url = 'https://sourceforge.net/projects/doxygen/files/rel-{}/{}'.format(
        binary_path = 'doxygen-{}/bin/doxygen'.format(version)
        # NOTE(review): the opening `commands = [` line is elided here.
            'mkdir doxygen && cd doxygen',
            'wget {}'.format(archive_url),
            'tar xf {} {}'.format(archive_name, binary_path),
            'cp {} /usr/local/bin/'.format(binary_path),
            'cd .. && rm -rf doxygen'
        output_stages['main'] += hpccm.primitives.shell(commands=commands)
def build_stages(args) -> typing.Iterable[hpccm.Stage]:
    """Define and sequence the stages for the recipe corresponding to *args*."""

    # A Dockerfile or Singularity recipe can have multiple build stages.
    # The main build stage can copy files from previous stages, though only
    # the last stage is included in the tagged output image. This means that
    # large or expensive sets of build instructions can be isolated in
    # local/temporary images, but all of the stages need to be output by this
    # script, and need to occur in the correct order, so we create a sequence
    # object early in this function.
    stages = collections.OrderedDict()

    # If we need TSAN or oneAPI support the early build is more complex,
    # so that our compiler images don't have all the cruft needed to get those things
    # installed.
    if args.llvm is not None and args.tsan is not None:
        add_tsan_compiler_build_stage(input_args=args, output_stages=stages)
    if args.oneapi is not None:
        add_oneapi_compiler_build_stage(input_args=args, output_stages=stages)

    # Building blocks are chunks of container-builder instructions that can be
    # copied to any build stage with the addition operator.
    building_blocks = collections.OrderedDict()

    # These are the most expensive and most reusable layers, so we put them first.
    building_blocks['compiler'] = get_compiler(args, compiler_build_stage=stages.get('compiler_build'))
    building_blocks['mpi'] = get_mpi(args, building_blocks['compiler'])

    # Install additional packages early in the build to optimize Docker build layer cache.
    os_packages = _common_packages + get_llvm_packages(args)
    if args.doxygen is not None:
        os_packages += _docs_extra_packages
    if args.oneapi is not None:
        os_packages += ['lsb-release']
    building_blocks['ospackages'] = hpccm.building_blocks.packages(ospackages=os_packages)

    building_blocks['cmake'] = hpccm.building_blocks.cmake(eula=True, version=args.cmake)
    building_blocks['opencl'] = get_opencl(args)
    building_blocks['clfft'] = get_clfft(args)

    # Add Python environments to MPI images, only, so we don't have to worry
    # about whether to install mpi4py.
    if args.mpi is not None and len(args.venvs) > 0:
        add_python_stages(building_blocks=building_blocks, input_args=args, output_stages=stages)

    # Create the stage from which the targeted image will be tagged.
    stages['main'] = hpccm.Stage()

    stages['main'] += hpccm.primitives.baseimage(image=base_image_tag(args))
    for bb in building_blocks.values():
        # NOTE(review): the loop body is elided in this copy of the file
        # (presumably adding each non-None building block to stages['main'])
        # -- restore from upstream.

    # We always add Python3 and Pip
    stages['main'] += hpccm.building_blocks.python(python3=True, python2=False, devel=True)
    stages['main'] += hpccm.building_blocks.pip(upgrade=True, pip='pip3',
                                                packages=['pytest', 'networkx', 'numpy'])

    # Add documentation requirements (doxygen and sphinx + misc).
    if args.doxygen is not None:
        add_documentation_dependencies(args, stages)

    if 'pyenv' in stages and stages['pyenv'] is not None:
        # NOTE(review): the destination arguments of these two copy() calls
        # are elided in this copy, leaving the calls unterminated -- restore.
        stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/.pyenv/'],
        stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/venv/'],

        # TODO: Update user home directory.
        # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
        # stages['main'] += hpccm.primitives.copy(_from='pyenv', src=['/root/.bashrc'],

    # Make sure that `python` resolves to something.
    stages['main'] += hpccm.primitives.shell(commands=['test -x /usr/bin/python || '
                                                       'update-alternatives --install /usr/bin/python python /usr/bin/python3 1 && '
                                                       '/usr/bin/python --version'])

    # Note that the list of stages should be sorted in dependency order.
    for build_stage in stages.values():
        if build_stage is not None:
            # NOTE(review): the loop body (presumably `yield build_stage`,
            # matching the Iterable[hpccm.Stage] return annotation) is elided
            # in this copy -- restore.
if __name__ == '__main__':
    # Entry point: parse arguments, build the recipe, and print it to stdout
    # (suitable for piping into `docker build -`).
    args = parser.parse_args()

    # Set container specification output format
    hpccm.config.set_container_format(args.format)

    # Build the ordered sequence of stages for the requested configuration.
    container_recipe = build_stages(args)

    # Output container specification
    for stage in container_recipe:
        print(stage)