#
# Copyright (c) 2009,2010,2011,2012,2013 by the GROMACS development team.
# Copyright (c) 2014,2015,2016,2017,2018 by the GROMACS development team.
-# Copyright (c) 2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
option(GMX_USE_TNG "Use the TNG library for trajectory I/O" ON)
-option(GMX_BUILD_MDRUN_ONLY "Build and install only the mdrun binary" OFF)
-
option(GMX_CYCLE_SUBCOUNTERS "Enable cycle subcounters to get a more detailed cycle timings" OFF)
mark_as_advanced(GMX_CYCLE_SUBCOUNTERS)
option(GMX_PYTHON_PACKAGE "Configure gmxapi Python package" OFF)
mark_as_advanced(GMX_PYTHON_PACKAGE)
-if (NOT GMX_BUILD_MDRUN_ONLY)
- find_package(ImageMagick QUIET COMPONENTS convert)
- include(gmxTestImageMagick)
- GMX_TEST_IMAGEMAGICK(IMAGE_CONVERT_POSSIBLE)
- # TODO: Resolve circular dependency between docs, gromacs, and python_packaging
- add_subdirectory(docs)
- add_subdirectory(share)
- add_subdirectory(scripts)
-endif()
+find_package(ImageMagick QUIET COMPONENTS convert)
+include(gmxTestImageMagick)
+GMX_TEST_IMAGEMAGICK(IMAGE_CONVERT_POSSIBLE)
+# TODO: Resolve circular dependency between docs, gromacs, and python_packaging
+add_subdirectory(docs)
+add_subdirectory(share)
+add_subdirectory(scripts)
add_subdirectory(api)
add_subdirectory(src)
add_subdirectory(tests)
endif()
-if(GMX_PYTHON_PACKAGE AND NOT GMX_BUILD_MDRUN_ONLY)
+if(GMX_PYTHON_PACKAGE)
add_subdirectory(python_packaging)
endif()
CMAKE: /usr/local/cmake-3.15.7/bin/cmake
COMPILER_MAJOR_VERSION: 8
RELEASE_BUILD_DIR: release-builds-gcc
- CMAKE_EXTRA_OPTIONS: "-DGMX_BUILD_MDRUN_ONLY=ON"
CMAKE_BUILD_TYPE_OPTIONS : "-DCMAKE_BUILD_TYPE=RelWithAssert"
CMAKE_REGRESSIONTEST_OPTIONS: ""
dependencies:
- job: gromacs:gcc-9:release:build
- job: regressiontests:package
+gromacs:gcc-8-cuda-11.0:release:regressiontest:
+ extends:
+ - .gromacs:base:regressiontest
+ - .rules:nightly-only-for-release
+ stage: release-tests
+ image: ${CI_REGISTRY}/gromacs/gromacs/ci-ubuntu-18.04-gcc-8-cuda-11.0
+ variables:
+ CMAKE: /usr/local/cmake-3.15.7/bin/cmake
+ BUILD_DIR: release-builds-gcc
+ KUBERNETES_EXTENDED_RESOURCE_NAME: "nvidia.com/gpu"
+ KUBERNETES_EXTENDED_RESOURCE_LIMIT: 1
+ REGRESSIONTEST_PME_RANK_NUMBER: 0
+ REGRESSIONTEST_TOTAL_RANK_NUMBER: 2
+ REGRESSIONTEST_OMP_RANK_NUMBER: 1
+ REGRESSIONTEST_PARALLEL: "-np"
+ tags:
+ - k8s-scilifelab
+ needs:
+ - job: gromacs:gcc-8-cuda-11.0:release:build
+ - job: regressiontests:package
+
gromacs:clang-9:release:regressiontest:
extends:
- .gromacs:base:regressiontest
#
# This file is part of the GROMACS molecular simulation package.
#
-# Copyright (c) 2018,2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2018,2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
add_subdirectory(legacy)
# Activate targets for new C++ API components and docs.
-if(GMX_NATIVE_WINDOWS OR GMX_BUILD_MDRUN_ONLY)
+if(GMX_NATIVE_WINDOWS)
# GMXAPI has not been tested in Microsoft environments.
- # GMXAPI relies on libgromacs and is incompatible with an `mdrun`-only build.
# GMXAPI requires position-independent code
set(_GMXAPI_DEFAULT OFF)
else()
endif()
# Activate targets NBLIB
-if(GMX_NATIVE_WINDOWS OR GMX_BUILD_MDRUN_ONLY OR NOT BUILD_SHARED_LIBS OR CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+if(GMX_NATIVE_WINDOWS OR NOT BUILD_SHARED_LIBS OR CMAKE_CXX_COMPILER_ID MATCHES "Intel")
# NBLIB has not been tested in Microsoft environments.
- # NBLIB relies on libgromacs and is incompatible with an `mdrun`-only build.
# NBLIB requires position-independent code
# NBLIB causes an ICE in icc 19.1.2.20200623
set(_NBLIB_DEFAULT OFF)
#
# This file is part of the GROMACS molecular simulation package.
#
-# Copyright (c) 2012,2013,2014,2015,2018,2020, by the GROMACS development team, led by
+# Copyright (c) 2012,2013,2014,2015,2018,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
########################################################################
# Determine the defaults (this block has no effect if the variables have
# already been set)
-if((APPLE OR CYGWIN OR ${CMAKE_SYSTEM_NAME} MATCHES "Linux|.*BSD|GNU") AND NOT GMX_BUILD_MDRUN_ONLY)
+if((APPLE OR CYGWIN OR ${CMAKE_SYSTEM_NAME} MATCHES "Linux|.*BSD|GNU"))
# Maybe Solaris should be here? Patch this if you know!
SET(SHARED_LIBS_DEFAULT ON)
elseif(WIN32)
set(GMX_PREFER_STATIC_LIBS_DEFAULT ON)
endif()
-if(BUILD_SHARED_LIBS AND GMX_BUILD_MDRUN_ONLY)
- message(WARNING "Both BUILD_SHARED_LIBS and GMX_BUILD_MDRUN_ONLY are set. Generally, an mdrun-only build should prefer to use static libraries, which is the default if you make a fresh build tree. You may be re-using an old build tree, and so may wish to set BUILD_SHARED_LIBS=off yourself.")
-endif()
-
if (UNIX)
set(GMX_PREFER_STATIC_LIBS_DESCRIPTION
"When finding libraries prefer static archives (it will only work if static versions of external dependencies are available and found)")
#
# This file is part of the GROMACS molecular simulation package.
#
-# Copyright (c) 2013,2014, by the GROMACS development team, led by
+# Copyright (c) 2013,2014,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
endif()
endif()
unset(SUFFIXES_CHANGED)
-
-if (GMX_BUILD_MDRUN_ONLY)
- set(GMX_LIBS_SUFFIX "_mdrun${GMX_LIBS_SUFFIX}")
-endif()
.. todo:: This could likely be replaced by a (yet another) build type.
-.. cmake:: GMX_BUILD_MDRUN_ONLY
-
- If set ``ON``, the build system is configured to only build and install a
- single :file:`mdrun` executable. To be fully functional, the installed
- :file:`mdrun` requires a standard |Gromacs| installation (with
- ``GMX_BUILD_MDRUN_ONLY=OFF``) in the same installation prefix, as the
- mdrun-only build does not install any data files or scripts, only the
- binary. This is intended for cases where one wants to/needs to compile one
- or more instances of :file:`mdrun` with different build options (e.g., MPI
- or SIMD) than the full installation with the other utilities.
- Defaults to ``OFF``, in which case a single :file:`gmx` executable is built
- and installed, together with all the supporting files. :command:`mdrun` can
- be executed as :command:`gmx mdrun`.
-
.. cmake:: GMX_BUILD_OWN_FFTW
.. cmake:: GMX_BUILD_SHARED_EXE
to build the ``man`` target manually before installing). See
:cmake:`GMX_BUILD_HELP`.
-Some documentation cannot be built if the CMake option
-``GMX_BUILD_MDRUN_ONLY`` is enabled, or when cross-compiling, as it
+Some documentation cannot be built when cross-compiling, as it
requires executing the ``gmx`` binary.
The following make targets are the most useful:
The following figure shows a high-level view of components of what gets built
from the source code under :file:`src/` and how the code is organized.
+Arrows indicate the direction of dependencies.
The build system is described in detail in :doc:`build-system`.
With default options, the green and white components are built as part of the
-default target. If ``GMX_BUILD_MDRUN_ONLY`` is ``ON``, then the blue and white
-components are built instead; :file:`libgromacs_mdrun` is built from a subset
-of the code used for :file:`libgromacs`.
+default target.
The gray parts are for testing, and are by default only built as part of the
``tests`` target, but if ``GMX_DEVELOPER_BUILD`` is ``ON``, then these are
included in the default build target.
label="externals\nsrc/external/", group=common, style=rounded
]
gtest [
- label="Google Test & Mock\nsrc/external/gmock-1.7.0/", group=test
+ label="Google Test & Mock\nsrc/external/googletest/", group=test
style="rounded,filled", fillcolor="0 0 0.9"
]
}
libgromacs [
label="libgromacs\nsrc/gromacs/", group=gmx, fillcolor="0.33 0.3 1"
]
- libgromacs_mdrun [
- label="libgromacs_mdrun\nsrc/gromacs/", group=mdrun, fillcolor="0.66 0.3 1"
- ]
}
testutils [
label="testutils\nsrc/testutils/", group=test
style="rounded,filled", fillcolor="0 0 0.9"
]
- mdrun_objlib [
- label="mdrun object lib.\nsrc/programs/mdrun/", group=common, style=rouded
- ]
subgraph {
rank = same
gmx [
label="gmx\nsrc/programs/", group=gmx, fillcolor="0.33 0.3 1"
]
- mdrun [
- label="mdrun\nsrc/programs/", group=mdrun, fillcolor="0.66 0.3 1"
- ]
tests [
label="test binaries\nsrc/.../tests/", group=test
style="rounded,filled", fillcolor="0 0 0.9"
]
gmx -> template [ style=invis, constraint=no ]
- template -> mdrun [ style=invis, constraint=no ]
}
libgromacs -> externals
- libgromacs_mdrun -> externals
- mdrun_objlib -> libgromacs
gmx -> libgromacs
- gmx -> mdrun_objlib
- mdrun -> libgromacs_mdrun
- mdrun -> mdrun_objlib
testutils -> externals
testutils -> gtest
testutils -> libgromacs
tests -> gtest
tests -> libgromacs
- tests -> mdrun_objlib
tests -> testutils
template -> libgromacs
- template -> mdrun_objlib [ style=invis ]
- mdrun_objlib -> externals [ style=invis ]
-
All the source code (except for the analysis template) is under the
:file:`src/` directory. Only a few files related to the build system are
included at the root level. All actual code is in subdirectories:
This is the main part of the code, and is organized into further subdirectories
as *modules*. See below for details.
:file:`src/programs/`
- |Gromacs| executables are built from code under this directory.
- Although some build options can change this, there is typically only a single
- binary, :file:`gmx`, built.
+ The |Gromacs| executable ``gmx`` is built from code under this directory.
+ Also found here is some of the driver code for the ``mdrun`` module called
+ by ``gmx``, the whole of the ``gmx view`` visualization module, and numerous
+ end-to-end tests of ``gmx mdrun``.
:file:`src/{...}/tests/`
Various subdirectories under :file:`src/` contain a subdirectory named
necessary for that file. You can use the public API header if you
really require everything declared in it.
-intra-module/intra-file.
-
See :doc:`naming` for some common naming patterns for files that can help
locating declarations.
On a cluster where users are expected to be running across multiple
nodes using MPI, make one installation similar to the above, and
-another using ``-DGMX_MPI=on`` and which is `building only
-mdrun`_, because that is the only component of |Gromacs| that uses
-MPI. The latter will install a single simulation engine binary,
-i.e. ``mdrun_mpi`` when the default suffix is used. Hence it is safe
+another using ``-DGMX_MPI=on``.
+The latter will install binaries and libraries named using
+a default suffix of ``_mpi``, i.e. ``gmx_mpi``. Hence it is safe
and common practice to install this into the same location where
the non-MPI build is installed.
* ``-DCMAKE_C_COMPILER=xxx`` equal to the name of the C99 `Compiler`_ you wish to use (or the environment variable ``CC``)
* ``-DCMAKE_CXX_COMPILER=xxx`` equal to the name of the C++98 `compiler`_ you wish to use (or the environment variable ``CXX``)
-* ``-DGMX_MPI=on`` to build using `MPI support`_ (generally good to combine with `building only mdrun`_)
+* ``-DGMX_MPI=on`` to build using `MPI support`_
* ``-DGMX_GPU=CUDA`` to build with NVIDIA CUDA support enabled.
* ``-DGMX_GPU=OpenCL`` to build with OpenCL_ support enabled.
* ``-DGMX_SIMD=xxx`` to specify the level of `SIMD support`_ of the node on which |Gromacs| will run
-* ``-DGMX_BUILD_MDRUN_ONLY=on`` for `building only mdrun`_, e.g. for compute cluster back-end nodes
* ``-DGMX_DOUBLE=on`` to build |Gromacs| in double precision (slower, and not normally useful)
* ``-DCMAKE_PREFIX_PATH=xxx`` to add a non-standard location for CMake to `search for libraries, headers or programs`_
* ``-DCMAKE_INSTALL_PREFIX=xxx`` to install |Gromacs| to a `non-standard location`_ (default ``/usr/local/gromacs``)
mdrun) that run slowly on the new hardware. Building two full
installations and locally managing how to call the correct one
(e.g. using a module system) is the recommended
-approach. Alternatively, as at the moment the |Gromacs| tools do not
-make strong use of SIMD acceleration, it can be convenient to create
-an installation with tools portable across different x86 machines, but
-with separate mdrun binaries for each architecture. To achieve this,
+approach. Alternatively, one can use different suffixes to install
+several versions of |Gromacs| in the same location. To achieve this,
one can first build a full installation with the
least-common-denominator SIMD instruction set, e.g. ``-DGMX_SIMD=SSE2``,
-then build separate mdrun binaries for each architecture present in
+in order for simple commands like ``gmx grompp`` to work on all machines,
+then build specialized ``gmx`` binaries for each architecture present in
the heterogeneous environment. By using custom binary and library
-suffixes for the mdrun-only builds, these can be installed to the
-same location as the "generic" tools installation.
-`Building just the mdrun binary`_ is possible by setting the
-``-DGMX_BUILD_MDRUN_ONLY=ON`` option.
+suffixes (with CMake variables ``-DGMX_BINARY_SUFFIX=xxx`` and
+``-DGMX_LIBS_SUFFIX=xxx``), these can be installed to the same
+location.
Linear algebra libraries
~~~~~~~~~~~~~~~~~~~~~~~~
.. _building just the mdrun binary:
-Building only mdrun
-~~~~~~~~~~~~~~~~~~~
-
-This is now deprecated, but still supported with the ``cmake`` option
-``-DGMX_BUILD_MDRUN_ONLY=ON``, which will build a different version of
-``libgromacs`` and the ``mdrun`` program. Naturally, now ``make
-install`` installs only those products. By default, mdrun-only builds
-will default to static linking against |Gromacs| libraries, because
-this is generally a good idea for the targets for which an mdrun-only
-build is desirable.
-
Installing |Gromacs|
^^^^^^^^^^^^^^^^^^^^
your hardware, and the output of ``gmx mdrun -version`` (which contains
valuable diagnostic information in the header).
-Testing for MDRUN_ONLY executables
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-A build with ``-DGMX_BUILD_MDRUN_ONLY`` cannot be tested with
-``make check`` from the build tree, because most of the tests
-require a full build to run things like ``grompp``. To test such an
-mdrun fully requires installing it to the same location as a normal
-build of |Gromacs|, downloading the regression tests tarball manually
-as described above, sourcing the correct ``GMXRC`` and running the
-perl script manually. For example, from your |Gromacs| source
-directory:
-
-::
-
- mkdir build-normal
- cd build-normal
- # First, build and install normally to allow full testing of the standalone simulator.
- cmake .. -DGMX_MPI=ON -DCMAKE_INSTALL_PREFIX=/your/installation/prefix/here
- make -j 4
- make install
- cd ..
- mkdir build-mdrun-only
- cd build-mdrun-only
- # Next, build and install the GMX_BUILD_MDRUN_ONLY version (optional).
- cmake .. -DGMX_MPI=ON -DGMX_BUILD_MDRUN_ONLY=ON -DCMAKE_INSTALL_PREFIX=/your/installation/prefix/here
- make -j 4
- make install
- cd /to/your/unpacked/regressiontests
- source /your/installation/prefix/here/bin/GMXRC
- ./gmxtest.pl all -np 2
-
Non-standard suffix
~~~~~~~~~~~~~~~~~~~
-If your mdrun program has been suffixed in a non-standard way, then
-the ``./gmxtest.pl -mdrun`` option will let you specify that name to the
+If your ``gmx`` program has been suffixed in a non-standard way, then
+the ``./gmxtest.pl -suffix`` option will let you specify that suffix to the
test machinery. You can use ``./gmxtest.pl -double`` to test the
double-precision version. You can use ``./gmxtest.pl -crosscompiling``
to stop the test harness attempting to check that the programs can
-DCMAKE_PREFIX_PATH=/your/fftw/installation/prefix \
-DCMAKE_INSTALL_PREFIX=/where/gromacs/should/be/installed \
-DGMX_MPI=ON \
- -DGMX_BUILD_MDRUN_ONLY=ON \
-DGMX_RELAXED_DOUBLE_PRECISION=ON
make
make install
Also, please use the syntax :issue:`number` to reference issues on GitLab, without the
a space between the colon and number!
+Removed mdrun-only build configuration
+""""""""""""""""""""""""""""""""""""""
+
+The need for the mdrun-only build of |Gromacs| has expired, as it has
+the same set of dependencies as regular |Gromacs|. It was deprecated
+in |Gromacs| 2021. Removing it will simplify maintenance, testing,
+documentation, installation, and teaching new users.
+
+:issue:`3808`
|Gromacs| includes many tools for preparing, running and analyzing
molecular dynamics simulations. These are all structured as part of a single
:command:`gmx` wrapper binary, and invoked with commands like :command:`gmx grompp`.
-:ref:`mdrun <gmx mdrun>` is the only other binary that
-:ref:`can be built <building just the mdrun binary>`; in the normal
-build it can be run with :command:`gmx mdrun`. Documentation for these can
+or :command:`gmx mdrun`. Documentation for these can
be found at the respective sections below, as well as on man pages (e.g.,
:manpage:`gmx-grompp(1)`) and with :samp:`gmx help {command}` or
:samp:`gmx {command} -h`.
#. Do I need to compile all utilities with MPI?
- With one rarely-used exception (:ref:`pme_error <gmx pme_error>`), only the
- :ref:`mdrun <gmx mdrun>` binary is able to use the :ref:`MPI <mpi-support>`
+ With one rarely-used exception (:ref:`pme_error <gmx pme_error>`), only
+ :ref:`mdrun <gmx mdrun>` is able to use the :ref:`MPI <mpi-support>`
parallelism. So you only need to use the ``-DGMX_MPI=on`` flag
when :ref:`configuring <configure-cmake>` for a build intended to run
- the main simulation engine :ref:`mdrun <gmx mdrun>`.
+ the main simulation engine :ref:`mdrun <gmx mdrun>`. Generally that
+ is desirable when running on a multi-node cluster, and necessary
+ when using multi-simulation algorithms. It is usually also convenient
+ for users to install a build of |Gromacs| configured without MPI.
#. Should my version be compiled using double precision?
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Once the run input file is available, we can start the simulation. The
-program which starts the simulation is called :ref:`gmx mdrun` (or
-sometimes just mdrun, or mdrun_mpi). The only input file of :ref:`gmx mdrun`
+program which starts the simulation is called :ref:`gmx mdrun`.
+The only input file of :ref:`gmx mdrun`
that you usually need in order to start a run is the run input
file (:ref:`tpr` file). The typical output files of :ref:`gmx mdrun` are the
trajectory file (:ref:`trr` file), a logfile (:ref:`log` file), and perhaps a
:ref:`configuring |Gromacs| with an external MPI library <mpi-support>`
so that the set of
simulations can communicate. The ``n`` simulations within the set can
-use internal MPI parallelism also, so that ``mpirun -np x mdrun_mpi``
+use internal MPI parallelism also, so that ``mpirun -np x gmx_mpi mdrun``
for ``x`` a multiple of ``n`` will use ``x/n`` ranks per simulation.
There are two ways of organizing files when running such
This requires configuring |Gromacs| to build with an external MPI
library. By default, this :ref:`mdrun <gmx mdrun>` executable is run with
-:ref:`mdrun_mpi`. All of the considerations for running single-node
+``gmx_mpi mdrun``. All of the considerations for running single-node
:ref:`mdrun <gmx mdrun>` still apply, except that ``-ntmpi`` and ``-nt`` cause a fatal
error, and instead the number of ranks is controlled by the
MPI environment.
mpirun -np 16 gmx_mpi mdrun
-Starts :ref:`mdrun_mpi` with 16 ranks, which are mapped to
+Starts :ref:`gmx mdrun` with 16 ranks, which are mapped to
the hardware by the MPI library, e.g. as specified
in an MPI hostfile. The available cores will be
automatically split among ranks using OpenMP threads,
mpirun -np 16 gmx_mpi mdrun -npme 5
-Starts :ref:`mdrun_mpi` with 16 ranks, as above, and
+Starts :ref:`gmx mdrun` with 16 ranks, as above, and
require that 5 of them are dedicated to the PME
component.
mpirun -np 11 gmx_mpi mdrun -ntomp 2 -npme 6 -ntomp_pme 1
-Starts :ref:`mdrun_mpi` with 11 ranks, as above, and
+Starts :ref:`gmx mdrun` with 11 ranks, as above, and
require that six of them are dedicated to the PME
component with one OpenMP thread each. The remaining
five do the PP component, with two OpenMP threads
mpirun -np 4 gmx_mpi mdrun -ntomp 6 -nb gpu -gputasks 00
-Starts :ref:`mdrun_mpi` on a machine with two nodes, using
+Starts :ref:`gmx mdrun` on a machine with two nodes, using
four total ranks, each rank with six OpenMP threads,
and both ranks on a node sharing GPU with ID 0.
mpirun -np 8 gmx_mpi mdrun -ntomp 3 -gputasks 0000
Using a same/similar hardware as above,
-starts :ref:`mdrun_mpi` on a machine with two nodes, using
+starts :ref:`gmx mdrun` on a machine with two nodes, using
eight total ranks, each rank with three OpenMP threads,
and all four ranks on a node sharing GPU with ID 0.
This may or may not be faster than the previous setup
mpirun -np 20 gmx_mpi mdrun -ntomp 4 -gputasks 00
-Starts :ref:`mdrun_mpi` with 20 ranks, and assigns the CPU cores evenly
+Starts :ref:`gmx mdrun` with 20 ranks, and assigns the CPU cores evenly
across ranks each to one OpenMP thread. This setup is likely to be
suitable when there are ten nodes, each with one GPU, and each node
has two sockets each of four cores.
mpirun -np 10 gmx_mpi mdrun -gpu_id 1
-Starts :ref:`mdrun_mpi` with 20 ranks, and assigns the CPU cores evenly
+Starts :ref:`gmx mdrun` with 20 ranks, and assigns the CPU cores evenly
across ranks each to one OpenMP thread. This setup is likely to be
suitable when there are ten nodes, each with two GPUs, but another
job on each node is using GPU 0. The job scheduler should set the
mpirun -np 20 gmx_mpi mdrun -gpu_id 01
-Starts :ref:`mdrun_mpi` with 20 ranks. This setup is likely
+Starts :ref:`gmx mdrun` with 20 ranks. This setup is likely
to be suitable when there are ten nodes, each with two
GPUs, but there is no need to specify ``-gpu_id`` for the
normal case where all the GPUs on the node are available
#
# This file is part of the GROMACS molecular simulation package.
#
-# Copyright (c) 2018,2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2018,2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
# Activate targets for new C++ API components and docs.
if (GMXAPI)
- if (GMX_BUILD_MDRUN_ONLY)
- message(FATAL_ERROR "GMXAPI relies on libgromacs and is incompatible with GMX_BUILD_MDRUN_ONLY.")
- endif()
if(NOT ${BUILD_SHARED_LIBS})
# Note: this conditional should check for the existence of a libgromacs target supporting PIC
# using the POSITION_INDEPENDENT_CODE property, but for now the only facility we have is the global
# This file is part of the GROMACS molecular simulation package.
#
# Copyright (c) 2010,2011,2012,2013,2014 by the GROMACS development team.
-# Copyright (c) 2015,2016,2017,2018,2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2015,2016,2017,2018,2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
add_subdirectory(compat)
add_subdirectory(mimic)
add_subdirectory(modularsimulator)
-# Note that this subset should match the subset of module
-# BUILD_INTERFACEs added to libgromacs below.
-if (NOT GMX_BUILD_MDRUN_ONLY)
- add_subdirectory(gmxana)
- add_subdirectory(gmxpreprocess)
- add_subdirectory(correlationfunctions)
- add_subdirectory(statistics)
- add_subdirectory(analysisdata)
- add_subdirectory(coordinateio)
- add_subdirectory(trajectoryanalysis)
- add_subdirectory(energyanalysis)
- add_subdirectory(tools)
-endif()
+add_subdirectory(gmxana)
+add_subdirectory(gmxpreprocess)
+add_subdirectory(correlationfunctions)
+add_subdirectory(statistics)
+add_subdirectory(analysisdata)
+add_subdirectory(coordinateio)
+add_subdirectory(trajectoryanalysis)
+add_subdirectory(energyanalysis)
+add_subdirectory(tools)
get_property(PROPERTY_SOURCES GLOBAL PROPERTY GMX_LIBGROMACS_SOURCES)
list(APPEND LIBGROMACS_SOURCES ${GMXLIB_SOURCES} ${MDLIB_SOURCES} ${PROPERTY_SOURCES})
# responsibility for setting this up will move to the respective
# modules.
target_link_libraries(libgromacs PRIVATE
+ $<BUILD_INTERFACE:analysisdata>
$<BUILD_INTERFACE:applied_forces>
$<BUILD_INTERFACE:commandline>
$<BUILD_INTERFACE:compat>
+ $<BUILD_INTERFACE:coordinateio>
+ $<BUILD_INTERFACE:correlationfunctions>
$<BUILD_INTERFACE:domdec>
# $<BUILD_INTERFACE:energyanalysis>
$<BUILD_INTERFACE:essentialdynamics>
$<BUILD_INTERFACE:ewald>
$<BUILD_INTERFACE:fft>
$<BUILD_INTERFACE:fileio>
+ $<BUILD_INTERFACE:gmxana>
$<BUILD_INTERFACE:gmxlib>
+ $<BUILD_INTERFACE:gmxpreprocess>
$<BUILD_INTERFACE:gpu_utils>
$<BUILD_INTERFACE:hardware>
$<BUILD_INTERFACE:imd>
$<BUILD_INTERFACE:restraint>
$<BUILD_INTERFACE:selection>
$<BUILD_INTERFACE:simd>
+ $<BUILD_INTERFACE:statistics>
$<BUILD_INTERFACE:swap>
$<BUILD_INTERFACE:tables>
$<BUILD_INTERFACE:taskassignment>
$<BUILD_INTERFACE:timing>
+ $<BUILD_INTERFACE:tools>
$<BUILD_INTERFACE:topology>
$<BUILD_INTERFACE:trajectory>
+ $<BUILD_INTERFACE:trajectoryanalysis>
$<BUILD_INTERFACE:utility>
- )
-# Note that this subset should match the subset of module
-# subdirectories added above.
-if (NOT GMX_BUILD_MDRUN_ONLY)
- target_link_libraries(libgromacs PRIVATE
- $<BUILD_INTERFACE:analysisdata>
- $<BUILD_INTERFACE:coordinateio>
- $<BUILD_INTERFACE:correlationfunctions>
- $<BUILD_INTERFACE:gmxana>
- $<BUILD_INTERFACE:gmxpreprocess>
- $<BUILD_INTERFACE:statistics>
- $<BUILD_INTERFACE:tools>
- $<BUILD_INTERFACE:trajectoryanalysis>
- )
-endif()
+ )
if (GMX_OPENMP)
target_link_libraries(libgromacs PUBLIC OpenMP::OpenMP_CXX)
endif()
target_compile_options(libgromacs PRIVATE $<$<COMPILE_LANGUAGE:CXX>:-w>)
endif()
-# Only install the library in mdrun-only mode if it is actually necessary
-# for the binary
# TODO: Stop installing libgromacs. Possibly allow installation during deprecation period with GMX_INSTALL_LEGACY_API.
-if (NOT GMX_BUILD_MDRUN_ONLY OR BUILD_SHARED_LIBS)
+if (BUILD_SHARED_LIBS)
install(TARGETS libgromacs
EXPORT libgromacs
LIBRARY
endif()
add_library(Gromacs::libgromacs ALIAS libgromacs)
-if (NOT GMX_BUILD_MDRUN_ONLY)
- include(InstallLibInfo.cmake)
-endif()
+include(InstallLibInfo.cmake)
# Technically, the user could want to do this for an OpenCL build
# using the CUDA runtime, but currently there's no reason to want to
* This file is part of the GROMACS molecular simulation package.
*
* Copyright (c) 2012,2013,2014,2015,2016 by the GROMACS development team.
- * Copyright (c) 2017,2018,2019,2020, by the GROMACS development team, led by
+ * Copyright (c) 2017,2018,2019,2020,2021, by the GROMACS development team, led by
* Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
* and including many others, as listed in the AUTHORS file in the
* top-level source directory and at http://www.gromacs.org.
outputRedirector_->openTextOutputFile("onlinehelp/" + tag + ".rst");
TextWriter writer(file);
writer.writeLine(formatString(".. _%s:", displayName.c_str()));
- if (displayName == binaryName_ + " mdrun")
- {
- // Make an extra link target for the convenience of
- // MPI-specific documentation
- writer.writeLine(".. _mdrun_mpi:");
- }
writer.ensureEmptyLine();
CommandLineHelpContext context(&writer, eHelpOutputFormat_Rst, &links_, binaryName_);
* This file is part of the GROMACS molecular simulation package.
*
* Copyright (c) 2009-2018, The GROMACS development team.
- * Copyright (c) 2019,2020, by the GROMACS development team, led by
+ * Copyright (c) 2019,2020,2021, by the GROMACS development team, led by
* Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
* and including many others, as listed in the AUTHORS file in the
* top-level source directory and at http://www.gromacs.org.
"[THISMODULE] needs to call [gmx-mdrun] and so requires that you",
"specify how to call mdrun with the argument to the [TT]-mdrun[tt]",
"parameter. Depending how you have built GROMACS, values such as",
- "'gmx mdrun', 'gmx_d mdrun', or 'mdrun_mpi' might be needed.[PAR]",
+ "'gmx mdrun', 'gmx_d mdrun', or 'gmx_mpi mdrun' might be needed.[PAR]",
"The program that runs MPI programs can be set in the environment variable",
"MPIRUN (defaults to 'mpirun'). Note that for certain MPI frameworks,",
"you need to provide a machine- or hostfile. This can also be passed",
FALSE,
etSTR,
{ &cmd_mdrun },
- "Command line to run a simulation, e.g. 'gmx mdrun' or 'mdrun_mpi'" },
+ "Command line to run a simulation, e.g. 'gmx mdrun' or 'gmx_mpi mdrun'" },
{ "-np",
FALSE,
etINT,
# This file is part of the GROMACS molecular simulation package.
#
# Copyright (c) 2010,2011,2012,2013,2014 by the GROMACS development team.
-# Copyright (c) 2015,2016,2018,2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2015,2016,2018,2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
# so, we can consider adding some dummy file to make it work.
add_library(fahcore $<TARGET_OBJECTS:mdrun_objlib>)
target_link_libraries(fahcore PRIVATE ${GMX_COMMON_LIBRARIES} legacy_api)
-elseif(GMX_BUILD_MDRUN_ONLY)
- message(STATUS "The mdrun-only build is deprecated")
- add_executable(mdrun-only $<TARGET_OBJECTS:mdrun_objlib> mdrun_main.cpp)
- gmx_target_compile_options(mdrun-only)
- target_include_directories(mdrun-only SYSTEM BEFORE PRIVATE ${PROJECT_SOURCE_DIR}/src/external/thread_mpi/include)
- target_compile_definitions(mdrun-only PRIVATE HAVE_CONFIG_H)
- target_link_libraries(mdrun-only PRIVATE
- common
- legacy_modules
- libgromacs
- ${GMX_COMMON_LIBRARIES}
- ${GMX_EXE_LINKER_FLAGS}
- )
- set(BINARY_NAME "mdrun${GMX_BINARY_SUFFIX}")
- set_target_properties(mdrun-only PROPERTIES
- OUTPUT_NAME "${BINARY_NAME}")
- install(TARGETS mdrun-only DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT mdrun-only)
- file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/gmx-completion-${BINARY_NAME}.bash
- "complete -o nospace -F _gmx_mdrun_compl ${BINARY_NAME}")
- install(FILES ${CMAKE_CURRENT_BINARY_DIR}/gmx-completion-${BINARY_NAME}.bash
- DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT runtime)
else()
file(GLOB GMX_MAIN_SOURCES gmx.cpp legacymodules.cpp)
if(GMX_X11)
# This file is part of the GROMACS molecular simulation package.
#
# Copyright (c) 2012,2013,2014,2015,2016, The GROMACS development team.
-# Copyright (c) 2017,2018,2019,2020, by the GROMACS development team, led by
+# Copyright (c) 2017,2018,2019,2020,2021, by the GROMACS development team, led by
# Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
# and including many others, as listed in the AUTHORS file in the
# top-level source directory and at http://www.gromacs.org.
set(REGRESSIONTEST_DOWNLOAD OFF CACHE BOOL "Tests already downloaded. Set to yes to download again" FORCE)
endif()
-if(REGRESSIONTEST_PATH AND (CMAKE_CROSSCOMPILING OR CMAKE_CONFIGURATION_TYPES OR GMX_BUILD_MDRUN_ONLY))
+if(REGRESSIONTEST_PATH AND (CMAKE_CROSSCOMPILING OR CMAKE_CONFIGURATION_TYPES))
# TODO: It would be nicer to do these checks before potentially downloading the tests.
# Cross-compiling toolchains require us to compile both front-end and
# back-end binaries to run gmxtest.pl.
- # Testing an mdrun-only builds require supporting binaries from a full build
message(WARNING
- "With cross-compiling, multi-configuration generators (e.g. Visual Studio), or with mdrun-only builds, running regressiontests from build system is not supported. Please run gmxtest.pl directly.")
+ "With cross-compiling or multi-configuration generators (e.g. Visual Studio), running regressiontests from build system is not supported. Please run gmxtest.pl directly.")
set(REGRESSIONTEST_PATH OFF CACHE BOOL
"With cross-compiling or multi-configuration generators, running regressiontests from build system is not supported." FORCE)
endif()
list(APPEND ARGS -suffix ${GMX_BINARY_SUFFIX})
endif()
#crosscompile is only used to disable checking whether binaries work
- #given that we know they are there and that mdrun might not be exectuable
+ #given that we know they are there and that mdrun might not be executable
#(e.g. Cray) we enable it.
list(APPEND ARGS -crosscompile)
"GMX_PHYSICAL_VALIDATION set, but physical validation script not found in ${PHYSVALTEST_SOURCE_PATH}.")
endif()
- if(CMAKE_CROSSCOMPILING OR CMAKE_CONFIGURATION_TYPES OR GMX_BUILD_MDRUN_ONLY)
+ if(CMAKE_CROSSCOMPILING OR CMAKE_CONFIGURATION_TYPES)
# The following comment is copied from regression tests:
# Cross-compiling toolchains require us to compile both front-end and
# back-end binaries to run gmxtest.pl.
- # Testing an mdrun-only builds require supporting binaries from a full build
# TODO: Look into the details of this.
# For now, turn it off - our python-gmx interface is probably not that stable for special cases anyway
message(WARNING
- "With cross-compiling, multi-configuration generators (e.g. Visual Studio), or with mdrun-only builds,\
+ "With cross-compiling or multi-configuration generators (e.g. Visual Studio),\
running physical validation tests from build system is not supported.\
Please run physicalvalidation.py directly.")
set(GMX_PHYSICAL_VALIDATION OFF CACHE BOOL