3 # This file is part of the GROMACS molecular simulation package.
5 # Copyright (c) 2020, by the GROMACS development team, led by
6 # Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
7 # and including many others, as listed in the AUTHORS file in the
8 # top-level source directory and at http://www.gromacs.org.
10 # GROMACS is free software; you can redistribute it and/or
11 # modify it under the terms of the GNU Lesser General Public License
12 # as published by the Free Software Foundation; either version 2.1
13 # of the License, or (at your option) any later version.
15 # GROMACS is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 # Lesser General Public License for more details.
20 # You should have received a copy of the GNU Lesser General Public
21 # License along with GROMACS; if not, see
22 # http://www.gnu.org/licenses, or write to the Free Software Foundation,
23 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 # If you want to redistribute modifications to GROMACS, please
26 # consider that scientific software is very special. Version
27 # control is crucial - bugs must be traceable. We will be happy to
28 # consider code for inclusion in the official distribution, but
29 # derived work must not be called official GROMACS. Details are found
30 # in the README & COPYING files - if they are missing, get the
31 # official version at http://www.gromacs.org.
33 # To help us fund GROMACS development, we humbly ask that you cite
34 # the research papers on the package. Check out http://www.gromacs.org.
37 Generates a set of docker images used for running GROMACS CI on Gitlab.
38 The images are prepared according to a selection of build configuration targets
39 that hope to cover a broad enough scope of different possible systems,
40 allowing us to check compiler types and versions, as well as libraries used
41 for accelerators and parallel communication systems. Each combination is
42 described as an entry in the build_configs dictionary, with the script
43 analysing the logic and adding build stages as needed.
45 Based on the example script provided by the NVidia HPCCM repository.
48 * Paul Bauer <paul.bauer.q@gmail.com>
49 * Eric Irrgang <ericirrgang@gmail.com>
50 * Joe Jordan <e.jjordan12@gmail.com>
54 $ python3 scripted_gmx_docker_builds.py --help
55 $ python3 scripted_gmx_docker_builds.py --format docker > Dockerfile && docker build .
56 $ python3 scripted_gmx_docker_builds.py | docker build -
63 from distutils.version import StrictVersion
67 from hpccm.building_blocks.base import bb_base
73 'This module assumes availability of supporting modules in the same directory. Add the directory to '
74 'PYTHONPATH or invoke Python from within the module directory so module location can be resolved.')
76 # Basic packages for all final images.
# NOTE(review): apt package-name lists consumed by hpccm.building_blocks.packages
# in the stage builders below. Each list is truncated in this listing (interior
# entries elided) — confirm full contents against the original file.
77 _common_packages = ['build-essential',
94 # Extra packages needed to build Python installations from source.
95 _python_extra_packages = ['build-essential',
114 # Extra packages needed for images for building documentation.
115 _docs_extra_packages = ['autoconf',
130 'texlive-latex-base',
131 'texlive-latex-extra',
132 'texlive-fonts-recommended',
133 'texlive-fonts-extra']
135 # Supported Python versions for maintained branches.
136 # TODO: Remove '3.5.9' from defaults in master once script in release-2020 diverges.
# NOTE(review): these strings must be parseable by StrictVersion (used in
# prepare_venv/add_python_stages) — strict "major.minor.patch" form only.
137 _python_versions = ['3.5.9', '3.6.10', '3.7.7', '3.8.2']
139 # Parse command line arguments
# NOTE(review): inherits common image-selection options (--cuda, --ubuntu,
# --centos, --llvm, --gcc, --mpi, --opencl, ...) from utility.parser — that
# module is imported above but its definition is not visible here; verify.
140 parser = argparse.ArgumentParser(description='GROMACS CI image creation script', parents=[utility.parser])
# --format selects the hpccm output container specification syntax.
142 parser.add_argument('--format', type=str, default='docker',
143 choices=['docker', 'singularity'],
144 help='Container specification format (default: docker)')
# --venvs lists Python versions to install as pyenv-backed virtual envs;
# defaults to all maintained versions in _python_versions.
145 parser.add_argument('--venvs', nargs='*', type=str, default=_python_versions,
146 help='List of Python versions ("major.minor.patch") for which to install venvs. '
147 'Default: {}'.format(' '.join(_python_versions)))
# Return the Docker base-image tag for the selected distro/CUDA combination.
# Examples of produced tags: 'nvidia/cuda:<ver>-devel-ubuntu<ver>',
# 'centos:centos<ver>', 'ubuntu:<ver>'.
# NOTE(review): several `else:` lines are elided from this listing (after the
# ubuntu branches and between the CUDA/non-CUDA paths) — the raise statements
# below belong to those elided else branches.
150 def base_image_tag(args) -> str:
151 # Check if we use CUDA images or plain linux images
152 if args.cuda is not None:
153 cuda_version_tag = 'nvidia/cuda:' + args.cuda + '-devel'
154 if args.centos is not None:
155 cuda_version_tag += '-centos' + args.centos
156 elif args.ubuntu is not None:
157 cuda_version_tag += '-ubuntu' + args.ubuntu
# Reached only when neither --centos nor --ubuntu was given (elided else).
159 raise RuntimeError('Logic error: no Linux distribution selected.')
161 base_image_tag = cuda_version_tag
# Non-CUDA path: plain distro base image (elided else above this line).
163 if args.centos is not None:
164 base_image_tag = 'centos:centos' + args.centos
165 elif args.ubuntu is not None:
166 base_image_tag = 'ubuntu:' + args.ubuntu
168 raise RuntimeError('Logic error: no Linux distribution selected.')
169 return base_image_tag
# Return the extra apt packages needed when clang comes from distro packages.
# When TSAN is requested the compiler is built from source instead (see
# add_tsan_stage), so no distro LLVM tooling packages are needed.
# NOTE(review): the fall-through branch (lines ~178-180) is elided here —
# presumably it returns an empty list; confirm against the original file.
172 def get_llvm_packages(args) -> typing.Iterable[str]:
173 # If we use the package version of LLVM, we need to install extra packages for it.
174 if (args.llvm is not None) and (args.tsan is None):
175 return ['libomp-dev',
176 'clang-format-' + str(args.llvm),
177 'clang-tidy-' + str(args.llvm)]
# Select the compiler building block for the image: the TSAN stage's runtime
# (custom-built clang), a distro LLVM, or GNU — raising for unimplemented or
# unselected toolchains.
# NOTE(review): this listing elides the `else:` lines, the gnu(...) keyword
# continuation (line ~200-202), and the final `return compiler` — verify
# against the original file.
182 def get_compiler(args, tsan_stage: hpccm.Stage = None) -> bb_base:
184 if args.icc is not None:
185 raise RuntimeError('Intel compiler toolchain recipe not implemented yet')
187 if args.llvm is not None:
188 # Build our own version instead to get TSAN + OMP
189 if args.tsan is not None:
190 if tsan_stage is not None:
# Reuse the compiler built in the expensive 'tsan' stage.
191 compiler = tsan_stage.runtime(_from='tsan')
193 raise RuntimeError('No TSAN stage!')
194 # Build the default compiler if we don't need special support
196 compiler = hpccm.building_blocks.llvm(extra_repository=True, version=args.llvm)
198 elif (args.gcc is not None):
199 compiler = hpccm.building_blocks.gnu(extra_repository=True,
# Reached when no compiler option was selected (elided else).
203 raise RuntimeError('Logic error: no compiler toolchain selected.')
# Return an OpenMPI building block bound to the compiler's toolchain, or
# raise for unimplemented/unknown MPI selections. Returns None-equivalent
# fall-through when --mpi was not given (elided lines).
# NOTE(review): the assignment of `use_cuda` (lines ~211-214) is elided —
# presumably True when args.cuda is set, False otherwise; confirm.
207 def get_mpi(args, compiler):
208 # If needed, add MPI to the image
209 if args.mpi is not None:
210 if args.mpi == 'openmpi':
212 if args.cuda is not None:
# Toolchain check: only hpccm compiler blocks expose .toolchain.
215 if hasattr(compiler, 'toolchain'):
216 return hpccm.building_blocks.openmpi(toolchain=compiler.toolchain, cuda=use_cuda, infiniband=False)
218 raise RuntimeError('compiler is not an HPCCM compiler building block!')
220 elif args.mpi == 'impi':
221 raise RuntimeError('Intel MPI recipe not implemented yet.')
223 raise RuntimeError('Requested unknown MPI implementation.')
# Return the packages building block that provides an OpenCL runtime/SDK for
# the requested vendor ('nvidia', 'intel', or 'amd'/ROCm). Nvidia OpenCL
# requires a CUDA base image.
# NOTE(review): elided lines likely include an `else` raising for unknown
# vendors and the no-opencl fall-through — confirm against the original file.
228 def get_opencl(args):
229 # Add OpenCL environment if needed
230 if (args.opencl is not None):
231 if args.opencl == 'nvidia':
232 if (args.cuda is None):
233 raise RuntimeError('Need Nvidia environment for Nvidia OpenCL image')
235 return hpccm.building_blocks.packages(ospackages=['nvidia-opencl-dev'])
237 elif args.opencl == 'intel':
# Intel runtime comes from the intel-opencl PPA rather than main archives.
238 return hpccm.building_blocks.packages(
239 apt_ppas=['ppa:intel-opencl/intel-opencl'],
240 ospackages=['opencl-headers', 'ocl-icd-libopencl1',
241 'ocl-icd-opencl-dev', 'intel-opencl-icd'])
243 elif args.opencl == 'amd':
244 # libelf1 is a necessary dependency for something in the ROCm stack,
245 # which they should set up, but seem to have omitted.
246 return hpccm.building_blocks.packages(
247 apt_keys=['http://repo.radeon.com/rocm/apt/debian/rocm.gpg.key'],
248 apt_repositories=['deb [arch=amd64] http://repo.radeon.com/rocm/apt/debian/ xenial main'],
249 ospackages=['ocl-icd-libopencl1', 'ocl-icd-opencl-dev', 'opencl-headers', 'libelf1', 'rocm-opencl'])
# Body fragment of get_clfft(args) — the `def` line (~254) is elided from this
# listing. Builds clFFT from source at the branch named by --clfft, installing
# into /usr/local; presumably falls through (returns None) when --clfft is
# unset — confirm against the original file.
255 if (args.clfft is not None):
256 return hpccm.building_blocks.generic_cmake(
257 repository='https://github.com/clMathLibraries/clFFT.git',
258 prefix='/usr/local', recursive=True, branch=args.clfft, directory='clFFT/src')
# Build LLVM/clang from source with TSAN-enabled OpenMP and store the stage
# under output_stages['tsan'] for later reuse by get_compiler().
263 def add_tsan_stage(input_args, output_stages: typing.Mapping[str, hpccm.Stage]):
264 """Isolate the expensive TSAN preparation stage.
266 This is a very expensive stage, but has few and disjoint dependencies, and
267 its output is easily compartmentalized (/usr/local) so we can isolate this
268 build stage to maximize build cache hits and reduce rebuild time, bookkeeping,
269 and final image size.
# NOTE(review): closing `"""` of this docstring is elided in this listing.
271 if not isinstance(output_stages, collections.abc.MutableMapping):
272 raise RuntimeError('Need output_stages container.')
273 tsan_stage = hpccm.Stage()
274 tsan_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='tsan')
276 tsan_stage += hpccm.building_blocks.packages(ospackages=['git', 'ca-certificates', 'build-essential', 'cmake'])
277 # CMake will get duplicated later, but this is an expensive image, and it isn't worth optimizing
278 # out that duplication...
279 tsan_stage += hpccm.building_blocks.python(python3=True, python2=False, devel=False)
# Old llvm.org git mirror branch naming, e.g. 'release_90' for --llvm 9.
281 compiler_branch = 'release_' + str(input_args.llvm) + '0'
282 tsan_stage += hpccm.building_blocks.generic_cmake(
283 repository='https://git.llvm.org/git/llvm.git',
284 prefix='/usr/local', recursive=True, branch=compiler_branch,
285 cmake_opts=['-D CMAKE_BUILD_TYPE=Release', '-D LLVM_ENABLE_PROJECTS="clang;openmp;clang-tools-extra"',
286 '-D LIBOMP_TSAN_SUPPORT=on'],
# Clone the sibling LLVM subprojects at the matching release branch before
# configuring, so the monolithic build picks them up.
287 preconfigure=['export branch=' + compiler_branch,
288 '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/libcxx.git)',
289 '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/libcxxabi.git)',
290 '(cd projects; git clone --depth=1 --branch $branch https://git.llvm.org/git/compiler-rt.git)',
291 '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/openmp.git)',
292 '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/clang.git)',
293 '(cd ..; git clone --depth=1 --branch $branch https://git.llvm.org/git/clang-tools-extra.git)'],
# Versioned symlinks so CI scripts can invoke clang-<N>-style tool names.
294 postinstall=['ln -s /usr/local/bin/clang++ /usr/local/bin/clang++-' + str(input_args.llvm),
295 'ln -s /usr/local/bin/clang-format /usr/local/bin/clang-format-' + str(input_args.llvm),
296 'ln -s /usr/local/bin/clang-tidy /usr/local/bin/clang-tidy-' + str(input_args.llvm),
297 'ln -s /usr/local/libexec/c++-analyzer /usr/local/bin/c++-analyzer-' + str(input_args.llvm)])
298 output_stages['tsan'] = tsan_stage
# NOTE(review): distutils (StrictVersion) is deprecated in modern Python;
# fine for the Python versions this script targets.
301 def prepare_venv(version: StrictVersion) -> typing.Sequence[str]:
302 """Get shell commands to set up the venv for the requested Python version."""
# StrictVersion.version is a (major, minor, patch) tuple.
303 major = version.version[0]
304 minor = version.version[1]
306 pyenv = '$HOME/.pyenv/bin/pyenv'
# venvs are keyed by major.minor only (e.g. $HOME/venv/py3.7).
308 py_ver = '{}.{}'.format(major, minor)
309 venv_path = '$HOME/venv/py{}'.format(py_ver)
# NOTE(review): the .format(...) keyword-argument lines for this command and
# the final `return commands` are elided from this listing — confirm.
310 commands = ['$({pyenv} prefix `{pyenv} whence python{py_ver}`)/bin/python -m venv {path}'.format(
316 commands.append('{path}/bin/python -m pip install --upgrade pip setuptools'.format(
319 # Install dependencies for building and testing gmxapi Python package.
320 # WARNING: Please keep this list synchronized with python_packaging/requirements-test.txt
321 # TODO: Get requirements.txt from an input argument.
# NOTE(review): the package list between lines 322 and 330 is elided here.
322 commands.append("""{path}/bin/python -m pip install --upgrade \
330 'setuptools>=28.0.0' \
331 'scikit-build>=0.7'""".format(path=venv_path))
# Create one intermediate build stage per requested Python version (pyenv
# install + venv) and an aggregating 'pyenv' stage that collects all of their
# /root contents for the main image to copy in one step.
336 def add_python_stages(building_blocks: typing.Mapping[str, bb_base],
# NOTE(review): the `input_args` parameter line (~337) is elided here.
338 output_stages: typing.MutableMapping[str, hpccm.Stage]):
339 """Add the stage(s) necessary for the requested venvs.
341 One intermediate build stage is created for each venv (see --venv option).
343 Each stage partially populates Python installations and venvs in the home
344 directory. The home directory is collected by the 'pyenv' stage for use by
345 the main build stage.
# NOTE(review): closing `"""` of this docstring is elided in this listing.
347 if len(input_args.venvs) < 1:
348 raise RuntimeError('No venvs to build...')
349 if output_stages is None or not isinstance(output_stages, collections.abc.Mapping):
350 raise RuntimeError('Need a container for output stages.')
352 # Main Python stage that collects the environments from individual stages.
353 # We collect the stages individually, rather than chaining them, because the
354 # copy is a bit slow and wastes local Docker image space for each filesystem
356 pyenv_stage = hpccm.Stage()
357 pyenv_stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as='pyenv')
# Repeat compiler/MPI blocks so cached layers match the main stage.
358 pyenv_stage += building_blocks['compiler']
359 pyenv_stage += building_blocks['mpi']
360 pyenv_stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)
# Sorted so stage ordering (and thus Docker cache behavior) is deterministic.
362 for version in [StrictVersion(py_ver) for py_ver in sorted(input_args.venvs)]:
363 stage_name = 'py' + str(version)
364 stage = hpccm.Stage()
365 stage += hpccm.primitives.baseimage(image=base_image_tag(input_args), _as=stage_name)
366 stage += building_blocks['compiler']
367 stage += building_blocks['mpi']
368 stage += hpccm.building_blocks.packages(ospackages=_python_extra_packages)
370 # TODO: Use a non-root user for testing and Python virtual environments.
# Install pyenv and enable it for interactive shells.
371 stage += hpccm.primitives.shell(commands=[
372 'curl https://pyenv.run | bash',
373 """echo 'export PYENV_ROOT="$HOME/.pyenv"' >> $HOME/.bashrc""",
374 """echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> $HOME/.bashrc""",
375 """echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc""",
376 """echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc"""])
377 pyenv = '$HOME/.pyenv/bin/pyenv'
# --enable-shared so the interpreter can be embedded (needed by gmxapi);
# NOTE(review): the pyenv=... format kwarg line (~379) is elided here.
378 commands = ['PYTHON_CONFIGURE_OPTS="--enable-shared" {pyenv} install -s {version}'.format(
380 version=str(version))]
381 stage += hpccm.primitives.shell(commands=commands)
383 commands = prepare_venv(version)
384 stage += hpccm.primitives.shell(commands=commands)
386 # TODO: Update user home directory.
# Collect this stage's /root into the aggregating pyenv stage
# (dest argument line ~388 elided in this listing).
387 pyenv_stage += hpccm.primitives.copy(_from=stage_name, _mkdir=True, src=['/root/'],
390 # Add the intermediate build stage to the sequence
391 output_stages[stage_name] = stage
393 # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
394 # # `version` should be a system installation or pyenv environment (or pyenv-virtualenv)
395 # # with the dependencies for all of the Python aspects of CMake-driven builds.
396 # commands = '{pyenv} global {version}'.format(
399 # pyenv_stage += hpccm.primitives.shell(commands=commands)
401 # Add the aggregating build stage to the sequence. This allows the main stage to copy
402 # the files in a single stage, potentially reducing the overall output image size.
403 output_stages['pyenv'] = pyenv_stage
# Top-level recipe assembly: returns the ordered sequence of hpccm Stages
# (intermediate stages first, 'main' last) for the requested configuration.
406 def build_stages(args) -> typing.Iterable[hpccm.Stage]:
407 """Define and sequence the stages for the recipe corresponding to *args*."""
409 # A Dockerfile or Singularity recipe can have multiple build stages.
410 # The main build stage can copy files from previous stages, though only
411 # the last stage is included in the tagged output image. This means that
412 # large or expensive sets of build instructions can be isolated in
413 # local/temporary images, but all of the stages need to be output by this
414 # script, and need to occur in the correct order, so we create a sequence
415 # object early in this function.
416 stages = collections.OrderedDict()
418 # If we need the TSAN compilers, the early build is more involved.
419 if args.llvm is not None and args.tsan is not None:
420 add_tsan_stage(input_args=args, output_stages=stages)
422 # Building blocks are chunks of container-builder instructions that can be
423 # copied to any build stage with the addition operator.
424 building_blocks = collections.OrderedDict()
426 # These are the most expensive and most reusable layers, so we put them first.
427 building_blocks['compiler'] = get_compiler(args, tsan_stage=stages.get('tsan'))
428 building_blocks['mpi'] = get_mpi(args, building_blocks['compiler'])
430 # Install additional packages early in the build to optimize Docker build layer cache.
431 os_packages = _common_packages + get_llvm_packages(args)
432 if args.doxygen is not None:
433 os_packages += _docs_extra_packages
434 building_blocks['ospackages'] = hpccm.building_blocks.packages(ospackages=os_packages)
436 building_blocks['cmake'] = hpccm.building_blocks.cmake(eula=True, version=args.cmake)
437 building_blocks['opencl'] = get_opencl(args)
438 building_blocks['clfft'] = get_clfft(args)
440 # Add Python environments to MPI images, only, so we don't have to worry
441 # about whether to install mpi4py.
442 if args.mpi is not None and len(args.venvs) > 0:
443 add_python_stages(building_blocks=building_blocks, input_args=args, output_stages=stages)
445 # Create the stage from which the targeted image will be tagged.
446 stages['main'] = hpccm.Stage()
448 stages['main'] += hpccm.primitives.baseimage(image=base_image_tag(args))
# NOTE(review): the loop body (~450-451, presumably a None check and
# `stages['main'] += bb`) is elided from this listing — confirm.
449 for bb in building_blocks.values():
453 # We always add Python3 and Pip
454 stages['main'] += hpccm.building_blocks.python(python3=True, python2=False, devel=True)
455 stages['main'] += hpccm.building_blocks.pip(upgrade=True, pip='pip3',
456 packages=['pytest', 'networkx', 'numpy'])
458 # Add documentation requirements (doxygen and sphinx + misc).
459 if (args.doxygen is not None):
# Commit pinning per doxygen version (else branch line ~462 elided).
460 if (args.doxygen == '1.8.5'):
461 doxygen_commit = 'ed4ed873ab0e7f15116e2052119a6729d4589f7a'
463 doxygen_commit = 'a6d4f4df45febe588c38de37641513fd576b998f'
# Doxygen needs a specific flex; build it privately under /tmp.
464 stages['main'] += hpccm.building_blocks.generic_autotools(
465 repository='https://github.com/westes/flex.git',
466 commit='f7788a9a0ecccdc953ed12043ccb59ca25714018',
467 prefix='/tmp/install-of-flex',
468 configure_opts=['--disable-shared'],
469 preconfigure=['./autogen.sh'])
# NOTE(review): configure_opts/postinstall wrapper lines (~473-477) are
# elided; the sed below loosens ImageMagick's security policy so docs
# builds can convert PDF/PS/EPS images.
470 stages['main'] += hpccm.building_blocks.generic_autotools(
471 repository='https://github.com/doxygen/doxygen.git',
472 commit=doxygen_commit,
475 '--flex /tmp/install-of-flex/bin/flex',
478 'sed -i \'/\"XPS\"/d;/\"PDF\"/d;/\"PS\"/d;/\"EPS\"/d;/disable ghostscript format types/d\' /etc/ImageMagick-6/policy.xml'])
479 stages['main'] += hpccm.building_blocks.pip(pip='pip3', packages=['sphinx==1.6.1'])
# Copy collected pyenv installations and venvs from the aggregating stage
# (dest argument lines ~483/485 elided in this listing).
481 if 'pyenv' in stages and stages['pyenv'] is not None:
482 stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/.pyenv/'],
484 stages['main'] += hpccm.primitives.copy(_from='pyenv', _mkdir=True, src=['/root/venv/'],
486 # TODO: Update user home directory.
487 # TODO: If we activate pyenv for login shells, the `global` "version" should be full-featured.
488 # stages['main'] += hpccm.primitives.copy(_from='pyenv', src=['/root/.bashrc'],
491 # Note that the list of stages should be sorted in dependency order.
# NOTE(review): the generator `yield`/return lines (~494-495) are elided.
492 for build_stage in stages.values():
493 if build_stage is not None:
# Script entry point: parse CLI options, configure hpccm's output syntax
# (docker or singularity), then emit every stage of the recipe to stdout.
497 if __name__ == '__main__':
498 args = parser.parse_args()
500 # Set container specification output format
501 hpccm.config.set_container_format(args.format)
503 container_recipe = build_stages(args)
505 # Output container specification
# NOTE(review): the loop body (~507, presumably `print(stage)`) is elided
# from this listing — confirm against the original file.
506 for stage in container_recipe: