/*
 * This file is part of the GROMACS molecular simulation package.
 *
 * Copyright (c) 2005,2006,2007,2008,2009,2010,2012,2013,2014, by the GROMACS development team, led by
 * Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
 * and including many others, as listed in the AUTHORS file in the
 * top-level source directory and at http://www.gromacs.org.
 *
 * GROMACS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * GROMACS is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with GROMACS; if not, see
 * http://www.gnu.org/licenses, or write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * If you want to redistribute modifications to GROMACS, please
 * consider that scientific software is very special. Version
 * control is crucial - bugs must be traceable. We will be happy to
 * consider code for inclusion in the official distribution, but
 * derived work must not be called official GROMACS. Details are found
 * in the README & COPYING files - if they are missing, get the
 * official version at http://www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the research papers on the package. Check out http://www.gromacs.org.
 */

#ifndef _domdec_h
#define _domdec_h

#include <stdio.h>

#include "typedefs.h"
#include "types/commrec.h"
#include "vsite.h"

int ddglatnr(gmx_domdec_t *dd, int i);
/* Returns the global topology atom number belonging to local atom index i.
 * This function is intended for writing ascii output
 * and returns atom numbers starting at 1.
 * When dd=NULL returns i+1.
 */
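
/* Example (illustrative sketch): writing ascii output with global,
 * 1-based atom numbers; n_home is a hypothetical local home-atom count.
 *
 *   for (i = 0; i < n_home; i++)
 *   {
 *       fprintf(fplog, "%6d\n", ddglatnr(dd, i));
 *   }
 */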

t_block *dd_charge_groups_global(gmx_domdec_t *dd);
/* Return a block struct for the charge groups of the whole system */

gmx_bool dd_filled_nsgrid_home(gmx_domdec_t *dd);
/* Is the ns grid already filled with the home particles? */

void dd_store_state(gmx_domdec_t *dd, t_state *state);
/* Store the global cg indices of the home cgs in state,
 * so it can be reset, even after a new DD partitioning.
 */

gmx_domdec_zones_t *domdec_zones(gmx_domdec_t *dd);

void dd_get_ns_ranges(gmx_domdec_t *dd, int icg,
                      int *jcg0, int *jcg1, ivec shift0, ivec shift1);

int dd_natoms_vsite(gmx_domdec_t *dd);

void dd_get_constraint_range(gmx_domdec_t *dd,
                             int *at_start, int *at_end);

real dd_cutoff_mbody(gmx_domdec_t *dd);

real dd_cutoff_twobody(gmx_domdec_t *dd);

void get_pme_nnodes(const gmx_domdec_t *dd,
                    int *npmenodes_x, int *npmenodes_y);
/* Get the number of PME nodes along x and y, can be called with dd=NULL */

gmx_bool gmx_pmeonlynode(t_commrec *cr, int nodeid);
/* Return whether nodeid in cr->mpi_comm_mysim is a PME-only node */
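
/* Example (illustrative sketch): splitting ranks by role after the
 * communicators have been set up; the two branches stand in for the
 * actual PME-only and PP code paths in the caller.
 *
 *   if (gmx_pmeonlynode(cr, cr->nodeid))
 *   {
 *       ... run the PME-only receive/compute loop ...
 *   }
 *   else
 *   {
 *       ... do PP work: DD partitioning, forces, integration ...
 *   }
 */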

void get_pme_ddnodes(t_commrec *cr, int pmenodeid,
                     int *nmy_ddnodes, int **my_ddnodes, int *node_peer);
/* Returns the set of DD nodes that communicate with pme node cr->nodeid */

int dd_pme_maxshift_x(gmx_domdec_t *dd);
/* Returns the maximum shift for coordinate communication in PME, dim x */

int dd_pme_maxshift_y(gmx_domdec_t *dd);
/* Returns the maximum shift for coordinate communication in PME, dim y */

void make_dd_communicators(FILE *fplog, t_commrec *cr, int dd_node_order);

gmx_domdec_t *
init_domain_decomposition(FILE *fplog,
                          t_commrec *cr,
                          unsigned long Flags,
                          ivec nc,
                          real comm_distance_min, real rconstr,
                          const char *dlb_opt, real dlb_scale,
                          const char *sizex, const char *sizey, const char *sizez,
                          gmx_mtop_t *mtop, t_inputrec *ir,
                          matrix box, rvec *x,
                          gmx_ddbox_t *ddbox,
                          int *npme_x, int *npme_y);

void dd_init_bondeds(FILE *fplog,
                     gmx_domdec_t *dd, gmx_mtop_t *mtop,
                     gmx_vsite_t *vsite,
                     t_inputrec *ir, gmx_bool bBCheck, cginfo_mb_t *cginfo_mb);
/* Initialize data structures for bonded interactions */

gmx_bool dd_bonded_molpbc(gmx_domdec_t *dd, int ePBC);
/* Returns whether we need to do pbc for calculating bonded interactions */

void set_dd_parameters(FILE *fplog, gmx_domdec_t *dd, real dlb_scale,
                       t_inputrec *ir,
                       gmx_ddbox_t *ddbox);
/* Set DD grid dimensions and limits,
 * should be called after calling dd_init_bondeds.
 */

gmx_bool change_dd_cutoff(t_commrec *cr, t_state *state, t_inputrec *ir,
                          real cutoff_req);
/* Change the DD non-bonded communication cut-off.
 * This can fail when trying to increase the cut-off,
 * in which case FALSE is returned and the cut-off is not modified.
 */
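
/* Example (illustrative sketch; rc_new is a hypothetical requested
 * cut-off in nm):
 *
 *   if (!change_dd_cutoff(cr, state, ir, rc_new))
 *   {
 *       gmx_fatal(FARGS, "Cannot increase the cut-off to %g nm", rc_new);
 *   }
 */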

void change_dd_dlb_cutoff_limit(t_commrec *cr);
/* Domain boundary changes due to the DD dynamic load balancing can limit
 * the cut-off distance that can be set in change_dd_cutoff. This function
 * limits the DLB such that using the currently set cut-off should still be
 * possible after subsequently setting a shorter cut-off with change_dd_cutoff.
 */

void dd_setup_dlb_resource_sharing(t_commrec *cr,
                                   const gmx_hw_info_t *hwinfo,
                                   const gmx_hw_opt_t *hw_opt);
/* When domains (PP MPI ranks) share a GPU, the individual GPU wait times
 * are meaningless, as they depend on the order in which tasks on the same
 * GPU finish. Therefore the wait times need to be averaged over the ranks
 * sharing the same GPU. This function sets up the communication for that.
 */

void setup_dd_grid(FILE *fplog, gmx_domdec_t *dd);

void dd_collect_vec(gmx_domdec_t *dd,
                    t_state *state_local, rvec *lv, rvec *v);

void dd_collect_state(gmx_domdec_t *dd,
                      t_state *state_local, t_state *state);

enum {
    ddCyclStep, ddCyclPPduringPME, ddCyclF, ddCyclWaitGPU, ddCyclPME, ddCyclNr
};

void dd_cycles_add(gmx_domdec_t *dd, float cycles, int ddCycl);
/* Add the wallcycle count to the DD counter */
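
/* Example (illustrative sketch): accumulating the cycles spent on the
 * force computation into the load-balancing counters; c0 and c1 are
 * hypothetical cycle-counter readings taken around the force kernels.
 *
 *   c0 = gmx_cycles_read();
 *   ... compute forces ...
 *   c1 = gmx_cycles_read();
 *   dd_cycles_add(dd, (float)(c1 - c0), ddCyclF);
 */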

void dd_force_flop_start(gmx_domdec_t *dd, t_nrnb *nrnb);
/* Start the force flop count */

void dd_force_flop_stop(gmx_domdec_t *dd, t_nrnb *nrnb);
/* Stop the force flop count */

float dd_pme_f_ratio(gmx_domdec_t *dd);
/* Return the PME/PP force load ratio, or -1 if nothing was measured.
 * Should only be called on the DD master node.
 */
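
/* Example (illustrative sketch, assuming the DDMASTER macro from the
 * communication-record headers): reporting the measured PME/PP load
 * ratio on the DD master rank only, skipping the -1 "not measured" case.
 *
 *   if (DDMASTER(dd) && dd_pme_f_ratio(dd) >= 0)
 *   {
 *       fprintf(fplog, "PME/PP load ratio: %.3f\n", dd_pme_f_ratio(dd));
 *   }
 */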

void dd_move_x(gmx_domdec_t *dd, matrix box, rvec x[]);
/* Communicate the coordinates to the neighboring cells and do pbc. */

void dd_move_f(gmx_domdec_t *dd, rvec f[], rvec *fshift);
/* Sum the forces over the neighboring cells.
 * When fshift!=NULL the shift forces are updated to obtain
 * the correct virial from the single sum including f.
 */
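
/* Example (illustrative sketch): the usual halo-exchange pattern around
 * a force evaluation; pass NULL for the shift forces when the virial is
 * not needed on this step.
 *
 *   dd_move_x(dd, box, x);
 *   ... evaluate forces on home + communicated atoms into f ...
 *   dd_move_f(dd, f, fshift);
 */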

void dd_atom_spread_real(gmx_domdec_t *dd, real v[]);
/* Communicate a real for each atom to the neighboring cells. */

void dd_atom_sum_real(gmx_domdec_t *dd, real v[]);
/* Sum the contributions to a real for each atom over the neighboring cells. */

void dd_partition_system(FILE *fplog,
                         gmx_int64_t step,
                         t_commrec *cr,
                         gmx_bool bMasterState,
                         int nstglobalcomm,
                         t_state *state_global,
                         gmx_mtop_t *top_global,
                         t_inputrec *ir,
                         t_state *state_local,
                         rvec **f,
                         t_mdatoms *mdatoms,
                         gmx_localtop_t *top_local,
                         t_forcerec *fr,
                         gmx_vsite_t *vsite,
                         gmx_shellfc_t shellfc,
                         gmx_constr_t constr,
                         t_nrnb *nrnb,
                         gmx_wallcycle_t wcycle,
                         gmx_bool bVerbose);
/* Partition the system over the nodes.
 * step is only used for printing error messages.
 * If bMasterState==TRUE then state_global from the master node is used,
 * else state_local is redistributed between the nodes.
 * When f!=NULL, *f will be reallocated to the size of state_local.
 */
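
/* Example (illustrative sketch, assuming the parameter list above):
 * repartitioning at a neighbor-search step during a run, i.e. with
 * bMasterState==FALSE so state_local is redistributed.
 *
 *   dd_partition_system(fplog, step, cr, FALSE, nstglobalcomm,
 *                       state_global, top_global, ir,
 *                       state_local, &f, mdatoms, top_local, fr,
 *                       vsite, shellfc, constr, nrnb, wcycle, FALSE);
 */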

void reset_dd_statistics_counters(gmx_domdec_t *dd);
/* Reset all the statistics and counters for total run counting */

void print_dd_statistics(t_commrec *cr, t_inputrec *ir, FILE *fplog);


/* In domdec_con.c */

void dd_move_f_vsites(gmx_domdec_t *dd, rvec *f, rvec *fshift);

void dd_clear_f_vsites(gmx_domdec_t *dd, rvec *f);

void dd_move_x_constraints(gmx_domdec_t *dd, matrix box,
                           rvec *x0, rvec *x1, gmx_bool bX1IsCoord);
/* Move x0 and also x1 if x1!=NULL. bX1IsCoord tells whether to do PBC on x1 */

void dd_move_x_vsites(gmx_domdec_t *dd, matrix box, rvec *x);

int *dd_constraints_nlocalatoms(gmx_domdec_t *dd);

void dd_clear_local_constraint_indices(gmx_domdec_t *dd);

void dd_clear_local_vsite_indices(gmx_domdec_t *dd);

int dd_make_local_vsites(gmx_domdec_t *dd, int at_start, t_ilist *lil);

int dd_make_local_constraints(gmx_domdec_t *dd, int at_start,
                              const gmx_mtop_t *mtop,
                              const int *cginfo,
                              gmx_constr_t constr, int nrec,
                              t_ilist *il_local);

void init_domdec_constraints(gmx_domdec_t *dd,
                             gmx_mtop_t *mtop);

void init_domdec_vsites(gmx_domdec_t *dd, int n_intercg_vsite);


/* In domdec_top.c */

void dd_print_missing_interactions(FILE *fplog, t_commrec *cr,
                                   int local_count, gmx_mtop_t *top_global, t_state *state_local);

void dd_make_reverse_top(FILE *fplog,
                         gmx_domdec_t *dd, gmx_mtop_t *mtop,
                         gmx_vsite_t *vsite,
                         t_inputrec *ir, gmx_bool bBCheck);

void dd_make_local_cgs(gmx_domdec_t *dd, t_block *lcgs);

void dd_make_local_top(gmx_domdec_t *dd, gmx_domdec_zones_t *zones,
                       int npbcdim, matrix box,
                       rvec cellsize_min, ivec npulse,
                       t_forcerec *fr,
                       rvec *cgcm_or_x,
                       gmx_vsite_t *vsite,
                       gmx_mtop_t *top, gmx_localtop_t *ltop);

void dd_sort_local_top(gmx_domdec_t *dd, t_mdatoms *mdatoms,
                       gmx_localtop_t *ltop);
/* Sort ltop->ilist when we are doing free energy. */

gmx_localtop_t *dd_init_local_top(gmx_mtop_t *top_global);

void dd_init_local_state(gmx_domdec_t *dd,
                         t_state *state_global, t_state *local_state);

t_blocka *make_charge_group_links(gmx_mtop_t *mtop, gmx_domdec_t *dd,
                                  cginfo_mb_t *cginfo_mb);

void dd_bonded_cg_distance(FILE *fplog, gmx_mtop_t *mtop,
                           t_inputrec *ir, rvec *x, matrix box,
                           gmx_bool bBCheck,
                           real *r_2b, real *r_mb);

void write_dd_pdb(const char *fn, gmx_int64_t step, const char *title,
                  gmx_mtop_t *mtop, t_commrec *cr,
                  int natoms, rvec x[], matrix box);
/* Dump a pdb file with the current DD home + communicated atoms.
 * When natoms=-1, dump all known atoms.
 */
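
/* Example (illustrative sketch, assuming the parameter list above):
 * dumping all atoms known on this rank for debugging.
 *
 *   write_dd_pdb("dd_dump", step, "dump", top_global, cr,
 *                -1, state_local->x, state_local->box);
 */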

/* In domdec_setup.c */

real comm_box_frac(ivec dd_nc, real cutoff, gmx_ddbox_t *ddbox);
/* Returns the volume fraction of the system that is communicated */

real dd_choose_grid(FILE *fplog,
                    t_commrec *cr, gmx_domdec_t *dd, t_inputrec *ir,
                    gmx_mtop_t *mtop, matrix box, gmx_ddbox_t *ddbox,
                    gmx_bool bDynLoadBal, real dlb_scale,
                    real cellsize_limit, real cutoff_dd,
                    gmx_bool bInterCGBondeds);
/* Determines the optimal DD cell setup dd->nc and possibly npmenodes
 * for the system.
 * On the master node returns the actual cellsize limit used.
 */

/* In domdec_box.c */

void set_ddbox(gmx_domdec_t *dd, gmx_bool bMasterState, t_commrec *cr_sum,
               t_inputrec *ir, matrix box,
               gmx_bool bCalcUnboundedSize, t_block *cgs, rvec *x,
               gmx_ddbox_t *ddbox);

void set_ddbox_cr(t_commrec *cr, ivec *dd_nc,
                  t_inputrec *ir, matrix box, t_block *cgs, rvec *x,
                  gmx_ddbox_t *ddbox);

#endif /* _domdec_h */