From: Andrey Alekseenko
Date: Thu, 19 Aug 2021 09:51:13 +0000 (+0300)
Subject: Merge remote-tracking branch 'origin/release-2021' into merge-2021-into-master
X-Git-Url: http://biod.pnpi.spb.ru/gitweb/?a=commitdiff_plain;h=45ba560ef782eb047e43997cc14629ff86cf91ac;p=alexxy%2Fgromacs.git

Merge remote-tracking branch 'origin/release-2021' into merge-2021-into-master

Resolved conflicts:
 - cmake/gmxVersionInfo.cmake
 - docs/CMakeLists.txt
 - python_packaging/src/gmxapi/export_system.cpp
 - python_packaging/src/setup.py
 - src/gromacs/domdec/domdec.cpp
 - src/gromacs/gmxana/gmx_chi.cpp
---

45ba560ef782eb047e43997cc14629ff86cf91ac
diff --cc docs/CMakeLists.txt
index 468c8ed31a,c43d24b279..6eefe6ed5f
--- a/docs/CMakeLists.txt
+++ b/docs/CMakeLists.txt
@@@ -364,16 -364,7 +364,17 @@@ if (SPHINX_FOUND
          how-to/visualize.rst
          install-guide/index.rst
          release-notes/index.rst
 +        release-notes/2022/major/highlights.rst
 +        release-notes/2022/major/features.rst
 +        release-notes/2022/major/performance.rst
 +        release-notes/2022/major/tools.rst
 +        release-notes/2022/major/bugs-fixed.rst
 +        release-notes/2022/major/removed-functionality.rst
 +        release-notes/2022/major/deprecated-functionality.rst
 +        release-notes/2022/major/portability.rst
 +        release-notes/2022/major/miscellaneous.rst
 +        release-notes/2022/major/api.rst
+         release-notes/2021/2021.4.rst
          release-notes/2021/2021.3.rst
          release-notes/2021/2021.2.rst
          release-notes/2021/2021.1.rst
diff --cc python_packaging/src/gmxapi/export_system.cpp
index b504ccb9e7,273ad5087a..39eeb91d78
--- a/python_packaging/src/gmxapi/export_system.cpp
+++ b/python_packaging/src/gmxapi/export_system.cpp
@@@ -81,18 -81,10 +82,11 @@@ void export_system(py::module& m
      // Export system container class
      py::class_<System, std::shared_ptr<System>> system(m, "MDSystem");
 -    system.def(
 -            "launch",
 -            [](System* system, std::shared_ptr<PyContext> context) {
 -                auto work       = gmxapi::getWork(*system->get());
 -                auto newSession = context->launch(*work);
 -                return newSession;
 -            },
 -            "Launch the configured workflow in the provided context.");
 +    system.def("launch", &launch, "Launch the configured workflow in the provided context.");

      // Module-level function
-     m.def("from_tpr", &gmxpy::from_tpr,
+     m.def("from_tpr",
+           &gmxpy::from_tpr,
          "Return a system container initialized from the given input record.");
  }
diff --cc python_packaging/src/setup.py
index 27f3408f4c,b5826a1656..025aea1c68
--- a/python_packaging/src/setup.py
+++ b/python_packaging/src/setup.py
@@@ -87,7 -87,7 +87,7 @@@ exist in the same location (with differ
  used when guessing a toolchain, because setup.py does not know which
  corresponds to the gmxapi support library.

--If specifying GMXTOOLCHAINDIR and gmxapi_DIR, the tool chain directory must be
++If specifying GMXTOOLCHAINDIR and gmxapi_DIR, the tool chain directory must be
  located within a subdirectory of gmxapi_DIR.

  Refer to project web site for complete documentation.
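
The export_system.cpp resolution above keeps the variant that binds a free launch() function instead of an inline lambda. A minimal sketch of that pybind11 pattern follows; Session, Context, and System here are invented stand-ins, not the real gmxapi types:

// sketch.cpp -- illustrative only; simplified stand-ins for the gmxapi types.
#include <memory>

#include <pybind11/pybind11.h>

namespace py = pybind11;

struct Session
{
};
struct Context
{
    std::shared_ptr<Session> launch() { return std::make_shared<Session>(); }
};
struct System
{
};

// A named free function whose first parameter is the bound class; pybind11
// exposes it as a method, and it stays reusable and testable from C++.
static std::shared_ptr<Session> launch(System* /*system*/, std::shared_ptr<Context> context)
{
    return context->launch();
}

PYBIND11_MODULE(sketch, m)
{
    py::class_<Context, std::shared_ptr<Context>>(m, "Context").def(py::init<>());
    py::class_<Session, std::shared_ptr<Session>>(m, "Session");
    py::class_<System, std::shared_ptr<System>>(m, "MDSystem")
            .def(py::init<>())
            .def("launch", &launch, "Launch the configured workflow in the provided context.");
}

From Python this would read session = MDSystem().launch(Context()); the binding expression stays a one-liner, which is what the merged hunk settles on.
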
diff --cc src/gromacs/domdec/domdec.cpp
index 3324ca2fea,d75345d592..3627ea02d6
--- a/src/gromacs/domdec/domdec.cpp
+++ b/src/gromacs/domdec/domdec.cpp
@@@ -706,9 -721,10 +706,10 @@@ static int ddcoord2simnodeid(const t_co
      }
      else
      {
-         if (cr->dd->comm->ddRankSetup.usePmeOnlyRanks)
+         const DDRankSetup& rankSetup = cr->dd->comm->ddRankSetup;
+         if (rankSetup.rankOrder != DdRankOrder::pp_pme && rankSetup.usePmeOnlyRanks)
          {
-             nodeid = ddindex + gmx_ddcoord2pmeindex(cr, x, y, z);
+             nodeid = ddindex + gmx_ddcoord2pmeindex(*cr->dd, x, y, z);
          }
          else
          {
@@@ -2970,11 -3012,11 +2974,12 @@@ DomainDecompositionBuilder::Impl::Impl(

      cr_->npmenodes = ddGridSetup_.numPmeOnlyRanks;

-     ddRankSetup_ = getDDRankSetup(mdlog_, cr_->sizeOfDefaultCommunicator, ddGridSetup_, ir_);
-     ddRankSetup_ = getDDRankSetup(mdlog_, cr_->sizeOfDefaultCommunicator, options_.rankOrder,
-                                   ddGridSetup_, ir_);
++    ddRankSetup_ = getDDRankSetup(
++            mdlog_, cr_->sizeOfDefaultCommunicator, options_.rankOrder, ddGridSetup_, ir_);

      /* Generate the group communicator, also decides the duty of each rank */
-     cartSetup_ = makeGroupCommunicators(mdlog_, ddSettings_, ddRankSetup_, cr_, ddCellIndex_, &pmeRanks_);
+     cartSetup_ = makeGroupCommunicators(
+             mdlog_, ddSettings_, options_.rankOrder, ddRankSetup_, cr_, ddCellIndex_, &pmeRanks_);
  }

  gmx_domdec_t* DomainDecompositionBuilder::Impl::build(LocalAtomSetManager* atomSets)
@@@ -3212,31 -3222,41 +3217,70 @@@ void communicateGpuHaloForces(const t_c
      }
  }

 +const gmx::LocalTopologyChecker& dd_localTopologyChecker(const gmx_domdec_t& dd)
 +{
 +    return *dd.localTopologyChecker;
 +}
 +
 +gmx::LocalTopologyChecker* dd_localTopologyChecker(gmx_domdec_t* dd)
 +{
 +    return dd->localTopologyChecker.get();
 +}
 +
 +void dd_init_local_state(const gmx_domdec_t& dd, const t_state* state_global, t_state* state_local)
 +{
 +    std::array<int, 5> buf;
 +
 +    if (DDMASTER(dd))
 +    {
 +        buf[0] = state_global->flags;
 +        buf[1] = state_global->ngtc;
 +        buf[2] = state_global->nnhpres;
 +        buf[3] = state_global->nhchainlength;
 +        buf[4] = state_global->dfhist ? state_global->dfhist->nlambda : 0;
 +    }
 +    dd_bcast(&dd, buf.size() * sizeof(int), buf.data());
 +
 +    init_gtc_state(state_local, buf[1], buf[2], buf[3]);
 +    init_dfhist_state(state_local, buf[4]);
 +    state_local->flags = buf[0];
 +}
++
+ void putUpdateGroupAtomsInSamePeriodicImage(const gmx_domdec_t&      dd,
+                                             const gmx_mtop_t&        mtop,
+                                             const matrix             box,
+                                             gmx::ArrayRef<gmx::RVec> positions)
+ {
+     int atomOffset = 0;
+     for (const gmx_molblock_t& molblock : mtop.molblock)
+     {
-         const auto& updateGrouping = dd.comm->systemInfo.updateGroupingPerMoleculetype[molblock.type];
++        const auto& updateGrouping = dd.comm->systemInfo.updateGroupingsPerMoleculeType[molblock.type];
+
+         for (int mol = 0; mol < molblock.nmol; mol++)
+         {
+             for (int g = 0; g < updateGrouping.numBlocks(); g++)
+             {
+                 const auto& block     = updateGrouping.block(g);
+                 const int   atomBegin = atomOffset + block.begin();
+                 const int   atomEnd   = atomOffset + block.end();
+                 for (int a = atomBegin + 1; a < atomEnd; a++)
+                 {
+                     // Make sure that atoms in the same update group
+                     // are in the same periodic image after restarts.
+                     for (int d = DIM - 1; d >= 0; d--)
+                     {
+                         while (positions[a][d] - positions[atomBegin][d] > 0.5_real * box[d][d])
+                         {
+                             positions[a] -= box[d];
+                         }
+                         while (positions[a][d] - positions[atomBegin][d] < -0.5_real * box[d][d])
+                         {
+                             positions[a] += box[d];
+                         }
+                     }
+                 }
+             }
+             atomOffset += updateGrouping.fullRange().end();
+         }
+     }
+ }
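
putUpdateGroupAtomsInSamePeriodicImage() in the hunk above shifts each atom of an update group by whole box vectors until every coordinate lies within half a box length of the group's first atom. A minimal, self-contained sketch of that wrapping step, using plain double arrays in place of gmx::RVec and the GROMACS matrix type (the function and variable names are invented for illustration):

// wrap_sketch.cpp -- illustrative only; plain arrays stand in for the
// GROMACS vector and matrix types.
#include <cstdio>

constexpr int DIM = 3;

// Shift x by whole box vectors until, in every dimension d, it lies within
// half a box length of the reference position ref. Scanning d from DIM - 1
// down to 0 handles triclinic boxes, because box vector d can only have
// components in dimensions <= d, so lower dimensions are corrected last.
void wrapNearReference(double x[DIM], const double ref[DIM], const double box[DIM][DIM])
{
    for (int d = DIM - 1; d >= 0; d--)
    {
        while (x[d] - ref[d] > 0.5 * box[d][d])
        {
            for (int e = 0; e < DIM; e++)
            {
                x[e] -= box[d][e]; // subtract the whole box vector d
            }
        }
        while (x[d] - ref[d] < -0.5 * box[d][d])
        {
            for (int e = 0; e < DIM; e++)
            {
                x[e] += box[d][e]; // add the whole box vector d
            }
        }
    }
}

int main()
{
    const double box[DIM][DIM] = { { 4, 0, 0 }, { 0, 4, 0 }, { 0, 0, 4 } };
    const double ref[DIM]      = { 0.5, 0.5, 0.5 };
    double       x[DIM]        = { 3.9, 0.6, 0.4 }; // a periodic image of an atom near ref
    wrapNearReference(x, ref, box);
    std::printf("%g %g %g\n", x[0], x[1], x[2]); // prints: -0.1 0.6 0.4
}

After wrapping, all atoms of a group sit in the same periodic image, which is what lets a restart that enables update groups partition them onto one domain.
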
diff --cc src/gromacs/gmxana/gmx_chi.cpp
index 0abd978d19,d049f74b88..deed54bfdf
--- a/src/gromacs/gmxana/gmx_chi.cpp
+++ b/src/gromacs/gmxana/gmx_chi.cpp
@@@ -1680,20 -1607,8 +1681,20 @@@ int gmx_chi(int argc, char* argv[]
      }
      mk_chi_lookup(chi_lookup, maxchi, nlist, dlist);

 -    get_chi_product_traj(dih, nf, nlist, maxchi, dlist, time, chi_lookup, multiplicity, FALSE,
 -                         bNormHisto, core_frac, bAll, opt2fn("-cp", NFILE, fnm), oenv);
 +    get_chi_product_traj(dih,
 +                         nf,
-                          nactdih,
++                         nlist,
 +                         maxchi,
 +                         dlist,
 +                         time,
 +                         chi_lookup,
 +                         multiplicity,
 +                         FALSE,
 +                         bNormHisto,
 +                         core_frac,
 +                         bAll,
 +                         opt2fn("-cp", NFILE, fnm),
 +                         oenv);

      for (i = 0; i < nlist; i++)
      {
diff --cc src/gromacs/mdrun/runner.cpp
index e3eb7ff8f2,8d19c598c2..2462285bef
--- a/src/gromacs/mdrun/runner.cpp
+++ b/src/gromacs/mdrun/runner.cpp
@@@ -1389,9 -1232,22 +1389,22 @@@ int Mdrunner::mdrunner(
          ddBuilder.reset(nullptr);
          // Note that local state still does not exist yet.
      }
+
+     // Ensure that all atoms within the same update group are in the
+     // same periodic image. Otherwise, a simulation that did not use
+     // update groups (e.g. a single-rank simulation) cannot always be
+     // correctly restarted in a way that does use update groups
+     // (e.g. a multi-rank simulation).
+     if (isSimulationMasterRank)
+     {
+         const bool useUpdateGroups = cr->dd ? ddUsesUpdateGroups(*cr->dd) : false;
+         if (useUpdateGroups)
+         {
+             putUpdateGroupAtomsInSamePeriodicImage(*cr->dd, mtop, globalState->box, globalState->x);
+         }
+     }

      // The GPU update is decided here because we need to know whether the constraints or
-     // SETTLEs can span accross the domain borders (i.e. whether or not update groups are
+     // SETTLEs can span across the domain borders (i.e. whether or not update groups are
      // defined). This is only known after DD is initialized, hence decision on using GPU
      // update is done so late.
      try
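
The dd_init_local_state() function in the domdec.cpp hunk above follows a pack/broadcast/unpack idiom: the DD master packs a few integers describing the global state into a fixed-size buffer, the buffer is broadcast, and every rank initializes its local state from the unpacked values. A stripped-down sketch of the same idiom over raw MPI; GROMACS routes the broadcast through dd_bcast(), and the StateHeader type and function names below are invented for the example:

// bcast_sketch.cpp -- illustrative only; build with an MPI wrapper,
// e.g. mpic++ bcast_sketch.cpp
#include <array>
#include <cstdio>

#include <mpi.h>

// Invented stand-in for the few t_state fields that need to reach every
// rank before the full local state exists.
struct StateHeader
{
    int flags, ngtc, nnhpres, nhchainlength, nlambda;
};

StateHeader broadcastStateHeader(const StateHeader& global, int rank, MPI_Comm comm)
{
    std::array<int, 5> buf{};
    if (rank == 0)
    {
        // Only the master rank has meaningful values to pack.
        buf = { global.flags, global.ngtc, global.nnhpres, global.nhchainlength, global.nlambda };
    }
    // Broadcast the raw bytes; every rank, master included, unpacks the result.
    MPI_Bcast(buf.data(), static_cast<int>(buf.size() * sizeof(int)), MPI_BYTE, 0, comm);
    return { buf[0], buf[1], buf[2], buf[3], buf[4] };
}

int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);
    int rank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    const StateHeader global = (rank == 0) ? StateHeader{ 1, 2, 0, 10, 0 } : StateHeader{};
    const StateHeader local  = broadcastStateHeader(global, rank, MPI_COMM_WORLD);
    std::printf("rank %d: ngtc = %d nhchainlength = %d\n", rank, local.ngtc, local.nhchainlength);

    MPI_Finalize();
}

Broadcasting one small fixed-size buffer instead of five separate values keeps the collective count low, which matters during startup on many ranks.
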