Line data Source code
1 : //
2 : // Buffers.hpp
3 : // Interface for globally accessible buffer factory for communication
4 : //
5 : // Data sent between MPI ranks has to be stored in a buffer for sending and receiving.
6 : // To reduce the number of times memory has to be allocated and freed, the buffer
7 : // factory interface allows buffers to be reused. This is especially relevant on
8 : // GPUs, as CUDA allocation calls are expensive. To avoid reallocating the buffers
9 : // in the case that the amount of data to be exchanged increases, when a new buffer
10 : // is created, an amount of memory greater than the requested size is allocated
11 : // for the new buffer. The factor by which memory is overallocated is determined by
12 : // a data member in Communicator, which can be set and queried at runtime. Only new
13 : // buffers are overallocated. If a buffer is requested with the same ID as a buffer
14 : // that has been previously allocated, the same buffer will be used. If the requested
15 : // size exceeds the buffer size, that buffer will be resized to have exactly
16 : // the requested size.
17 : //
18 : // Currently, the buffer factory is used for application of periodic boundary
19 : // conditions; halo cell exchange along faces, edges, and vertices; as well as
20 : // exchanging particle data between ranks.
21 : //
22 :
23 : namespace ippl {
24 : namespace mpi {
25 :
26 : template <typename MemorySpace, typename T>
27 0 : Communicator::buffer_type<MemorySpace> Communicator::getBuffer(size_type size,
28 : double overallocation) {
29 0 : auto& buffer_handler = buffer_handlers_m.get<MemorySpace>();
30 :
31 0 : return buffer_handler.getBuffer(size * sizeof(T),
32 0 : std::max(overallocation, defaultOveralloc_m));
33 : }
34 :
35 : template <typename MemorySpace>
36 : void Communicator::freeBuffer(Communicator::buffer_type<MemorySpace> buffer) {
37 : auto& buffer_handler = buffer_handlers_m.get<MemorySpace>();
38 :
39 : buffer_handler.freeBuffer(buffer);
40 : }
41 :
42 : } // namespace mpi
43 :
44 : } // namespace ippl
|