2 * This file is part of the GROMACS molecular simulation package.
4 * Copyright (c) 1991-2000, University of Groningen, The Netherlands.
5 * Copyright (c) 2001-2004, The GROMACS development team.
6 * Copyright (c) 2013,2014,2015,2018, by the GROMACS development team, led by
7 * Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
8 * and including many others, as listed in the AUTHORS file in the
9 * top-level source directory and at http://www.gromacs.org.
11 * GROMACS is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU Lesser General Public License
13 * as published by the Free Software Foundation; either version 2.1
14 * of the License, or (at your option) any later version.
16 * GROMACS is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * Lesser General Public License for more details.
21 * You should have received a copy of the GNU Lesser General Public
22 * License along with GROMACS; if not, see
23 * http://www.gnu.org/licenses, or write to the Free Software Foundation,
24 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26 * If you want to redistribute modifications to GROMACS, please
27 * consider that scientific software is very special. Version
28 * control is crucial - bugs must be traceable. We will be happy to
29 * consider code for inclusion in the official distribution, but
30 * derived work must not be called official GROMACS. Details are found
31 * in the README & COPYING files - if they are missing, get the
32 * official version at http://www.gromacs.org.
34 * To help us fund GROMACS development, we humbly ask that you cite
35 * the research papers on the package. Check out http://www.gromacs.org.
/*! \libinternal \file
 *
 * \brief Declares structures related to domain decomposition.
 *
 * \author Berk Hess <hess@kth.se>
 * \author David van der Spoel <david.vanderspoel@icm.uu.se>
 *
 * \inlibraryapi
 * \ingroup module_domdec
 */
45 #ifndef GMX_DOMDEC_DOMDEC_STRUCT_H
46 #define GMX_DOMDEC_DOMDEC_STRUCT_H
53 #include "gromacs/math/vectypes.h"
54 #include "gromacs/topology/block.h"
55 #include "gromacs/utility/basedefinitions.h"
56 #include "gromacs/utility/gmxmpi.h"
57 #include "gromacs/utility/real.h"
//! Max number of zones in domain decomposition
#define DD_MAXZONE  8
//! Max number of izones in domain decomposition
#define DD_MAXIZONE 4

//! Are we the master node for domain decomposition
#define DDMASTER(dd) ((dd)->rank == (dd)->masterrank)
66 struct AtomDistribution;
67 struct gmx_domdec_comm_t;
68 struct gmx_domdec_constraints_t;
69 struct gmx_domdec_specat_comm_t;
72 struct gmx_pme_comm_n_box_t;
73 struct gmx_reverse_top_t;
77 class LocalAtomSetManager;
81 int j0; /* j-zone start */
82 int j1; /* j-zone end */
83 int cg1; /* i-charge-group end */
84 int jcg0; /* j-charge-group start */
85 int jcg1; /* j-charge-group end */
86 ivec shift0; /* Minimum shifts to consider */
87 ivec shift1; /* Maximum shifts to consider */
88 } gmx_domdec_ns_ranges_t;
91 rvec x0; /* Zone lower corner in triclinic coordinates */
92 rvec x1; /* Zone upper corner in triclinic coordinates */
93 rvec bb_x0; /* Zone bounding box lower corner in Cartesian coords */
94 rvec bb_x1; /* Zone bounding box upper corner in Cartesian coords */
95 } gmx_domdec_zone_size_t;
97 struct gmx_domdec_zones_t {
98 /* The number of zones including the home zone */
100 /* The shift of the zones with respect to the home zone */
101 ivec shift[DD_MAXZONE];
102 /* The charge group boundaries for the zones */
103 int cg_range[DD_MAXZONE+1];
104 /* The number of neighbor search zones with i-particles */
106 /* The neighbor search charge group ranges for each i-zone */
107 gmx_domdec_ns_ranges_t izone[DD_MAXIZONE];
108 /* Boundaries of the zones */
109 gmx_domdec_zone_size_t size[DD_MAXZONE];
110 /* The cg density of the home zone */
119 /* Tells if the box is skewed for each of the three cartesian directions */
122 /* Orthogonal vectors for triclinic cells, Cartesian index */
124 /* Normal vectors for the cells walls */
129 struct gmx_domdec_t {
130 /* The DD particle-particle nodes only */
131 /* The communication setup within the communicator all
132 * defined in dd->comm in domdec.c
135 MPI_Comm mpi_comm_all;
136 /* Use MPI_Sendrecv communication instead of non-blocking calls */
138 /* The local DD cell index and rank */
143 /* Communication with the PME only nodes */
145 gmx_bool pme_receive_vir_ener;
146 gmx_pme_comm_n_box_t *cnb = nullptr;
148 MPI_Request req_pme[8];
151 /* The communication setup, identical for each cell, cartesian index */
154 ivec dim; /* indexed by 0 to ndim */
156 /* PBC from dim 0 to npbcdim */
162 /* Forward and backward neighboring cells, indexed by 0 to ndim */
163 int neighbor[DIM][2];
165 /* Only available on the master node */
166 std::unique_ptr<AtomDistribution> ma;
168 /* Are there inter charge group constraints */
169 gmx_bool bInterCGcons;
170 gmx_bool bInterCGsettles;
172 /* Global atom number to interaction list */
173 gmx_reverse_top_t *reverse_top;
177 /* The number of inter charge-group exclusions */
181 gmx_hash_t *ga2la_vsite = nullptr;
182 gmx_domdec_specat_comm_t *vsite_comm = nullptr;
183 std::vector<int> vsite_requestedGlobalAtomIndices;
185 /* Constraint stuff */
186 gmx_domdec_constraints_t *constraints = nullptr;
187 gmx_domdec_specat_comm_t *constraint_comm = nullptr;
189 /* The number of home atom groups */
191 /* Global atom group indices for the home and all non-home groups */
192 std::vector<int> globalAtomGroupIndices;
193 /* The atom groups for the home and all non-home groups, todo: make private */
194 gmx::RangePartitioning atomGrouping_;
195 const gmx::RangePartitioning &atomGrouping() const
197 return atomGrouping_;
199 /* Local atom to local atom-group index, only used for checking bondeds */
200 std::vector<int> localAtomGroupFromAtom;
202 /* Index from the local atoms to the global atoms, covers home and received zones */
203 std::vector<int> globalAtomIndices;
205 /* Global atom number to local atom number list */
206 gmx_ga2la_t *ga2la = nullptr;
208 /* Communication stuff */
209 gmx_domdec_comm_t *comm;
    /* The partitioning count, to keep track of the state */
214 /* The managed atom sets that are updated in domain decomposition */
215 gmx::LocalAtomSetManager * atomSets;
217 /* gmx_pme_recv_f buffer */
218 int pme_recv_f_alloc = 0;
219 rvec *pme_recv_f_buf = nullptr;