Revert "Wrap more device pointers in DeviceBuffer" (!1244)
diff --git a/src/gromacs/ewald/pme_pp_comm_gpu_impl.h b/src/gromacs/ewald/pme_pp_comm_gpu_impl.h
index 58e22c2e46b832fbf1f2e65d0c1b8d5f08d3776d..042891a04d3aaf044914e9cbf666c4c3254712f1 100644
--- a/src/gromacs/ewald/pme_pp_comm_gpu_impl.h
+++ b/src/gromacs/ewald/pme_pp_comm_gpu_impl.h
@@ -44,7 +44,6 @@
 #define GMX_PME_PP_COMM_GPU_IMPL_H
 
 #include "gromacs/ewald/pme_pp_comm_gpu.h"
-#include "gromacs/gpu_utils/devicebuffer_datatype.h"
 #include "gromacs/gpu_utils/gpueventsynchronizer.cuh"
 #include "gromacs/math/vectypes.h"
 #include "gromacs/utility/gmxmpi.h"
@@ -111,7 +110,7 @@ public:
     /*! \brief
      * Return pointer to buffer used for staging PME force on GPU
      */
-    DeviceBuffer<gmx::RVec> getGpuForceStagingPtr();
+    void* getGpuForceStagingPtr();
 
     /*! \brief
      * Return pointer to event recorded when forces are ready
@@ -124,15 +123,15 @@ private:
     //! Handle for CUDA stream used for the communication operations in this class
     const DeviceStream& pmePpCommStream_;
     //! Remote location of PME coordinate data buffer
-    DeviceBuffer<gmx::RVec> remotePmeXBuffer_ = nullptr;
+    void* remotePmeXBuffer_ = nullptr;
     //! Remote location of PME force data buffer
-    DeviceBuffer<gmx::RVec> remotePmeFBuffer_ = nullptr;
+    void* remotePmeFBuffer_ = nullptr;
     //! communicator for simulation
     MPI_Comm comm_;
     //! Rank of PME task
     int pmeRank_ = -1;
     //! Buffer for staging PME force on GPU
-    DeviceBuffer<gmx::RVec> d_pmeForces_ = nullptr;
+    rvec* d_pmeForces_ = nullptr;
     //! number of atoms in PME force staging array
     int d_pmeForcesSize_ = -1;
     //! number of atoms allocated in recvbuf array
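
For context, a minimal caller-side sketch of what this revert implies (not GROMACS code; the helper name copyStagedPmeForces, the nAtoms parameter, and the destination buffer are hypothetical): with getGpuForceStagingPtr() returning void* rather than a typed DeviceBuffer<gmx::RVec>, the consumer must cast the raw device pointer itself before issuing a copy on the communication stream.

// Hypothetical usage sketch, assuming CUDA and the post-revert void* getter.
#include <cuda_runtime.h>

typedef float rvec[3]; // rvec is float[3] in GROMACS; assumed here for sizing

void copyStagedPmeForces(void*        gpuForceStagingPtr, // from getGpuForceStagingPtr()
                         rvec*        d_forcesDest,       // destination force buffer on the GPU
                         int          nAtoms,             // hypothetical atom count
                         cudaStream_t stream)
{
    // After the revert the getter hands back an untyped device pointer,
    // so the caller casts it before the device-to-device copy.
    const rvec* d_pmeForces = static_cast<const rvec*>(gpuForceStagingPtr);
    cudaMemcpyAsync(d_forcesDest, d_pmeForces, nAtoms * sizeof(rvec),
                    cudaMemcpyDeviceToDevice, stream);
}

The typed DeviceBuffer<gmx::RVec> interface being reverted would have carried this element-type information in the signature instead, making such casts unnecessary at the call site.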