Gromacs  2024.2
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
pme_redistribute.cpp File Reference
#include "gmxpre.h"
#include "pme_redistribute.h"
#include "config.h"
#include <algorithm>
#include "gromacs/math/vec.h"
#include "gromacs/mdtypes/commrec.h"
#include "gromacs/utility/exceptions.h"
#include "gromacs/utility/fatalerror.h"
#include "gromacs/utility/gmxmpi.h"
#include "gromacs/utility/smalloc.h"
#include "pme_internal.h"
+ Include dependency graph for pme_redistribute.cpp:


This file contains function definitions for redistributing atoms over the PME domains.

Berk Hess


static void pme_calc_pidx (int start, int end, const matrix recipbox, gmx::ArrayRef< const gmx::RVec > x, PmeAtomComm *atc, int *count)
 Calculate the slab indices and store in atc, store counts in count.
static void pme_calc_pidx_wrapper (gmx::ArrayRef< const gmx::RVec > x, const matrix recipbox, PmeAtomComm *atc)
 Wrapper function for calculating slab indices, stored in atc.
static void pme_realloc_splinedata (splinedata_t *spline, const PmeAtomComm *atc)
 Reallocates all buffers in spline to fit atoms in atc.
static void pme_dd_sendrecv (PmeAtomComm gmx_unused *atc, gmx_bool gmx_unused bBackward, int gmx_unused shift, void gmx_unused *buf_s, int gmx_unused nbyte_s, void gmx_unused *buf_r, int gmx_unused nbyte_r)
 Communicates buffers between ranks separated by shift slabs.
static void dd_pmeredist_pos_coeffs (gmx_pme_t *pme, const gmx_bool bX, gmx::ArrayRef< const gmx::RVec > x, gmx::ArrayRef< const real > data, PmeAtomComm *atc)
 Redistributes data and optionally coordinates between MPI ranks.
void dd_pmeredist_f (struct gmx_pme_t *pme, PmeAtomComm *atc, gmx::ArrayRef< gmx::RVec > f, gmx_bool bAddF)
 Redistributes forces along the dimension given by atc.
void do_redist_pos_coeffs (struct gmx_pme_t *pme, const t_commrec *cr, gmx_bool bFirst, gmx::ArrayRef< const gmx::RVec > x, gmx::ArrayRef< const real > data)
 Redistributes coefficients and, when bFirst=true, also coordinates over MPI ranks.