27 struct IntegerMessage;
52 [[clang::map_mpi_datatype]]
55 [[clang::map_mpi_datatype]]
58 [[clang::map_mpi_datatype]]
61 [[clang::map_mpi_datatype]]
64 [[clang::map_mpi_datatype]]
67 [[clang::map_mpi_datatype]]
70 [[clang::map_mpi_datatype]]
73 [[clang::map_mpi_datatype]]
76 [[clang::map_mpi_datatype]]
79 [[clang::map_mpi_datatype]]
116 static void send(
const tarch::mpi::IntegerMessage& buffer,
int destination,
int tag, std::function<
void()> startCommunicationFunctor, std::function<
void()> waitFunctor, MPI_Comm communicator );
117 static void receive(
tarch::mpi::IntegerMessage& buffer,
int source,
int tag, std::function<
void()> startCommunicationFunctor, std::function<
void()> waitFunctor, MPI_Comm communicator );
145 #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
static Rank & getInstance()
This operation returns the singleton instance.
Have to include this header, as I need access to the SYCL_EXTERNAL keyword.
static void freeForkDatatype()
static void freeBoundaryExchangeDatatype()
static MPI_Datatype getForkDatatype()
Hands out MPI datatype if we work without the LLVM MPI extension.
int getSenderRank() const
static void shutdownDatatype()
Free the underlying MPI datatype.
static void sendAndPollDanglingMessages(const tarch::mpi::IntegerMessage &message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
static MPI_Datatype getMultiscaleDataExchangeDatatype()
static MPI_Datatype getGlobalCommunciationDatatype()
static void receive(tarch::mpi::IntegerMessage &buffer, int source, int tag, std::function< void()> startCommunicationFunctor, std::function< void()> waitFunctor, MPI_Comm communicator)
std::string toString() const
static void send(const tarch::mpi::IntegerMessage &buffer, int destination, int tag, std::function< void()> startCommunicationFunctor, std::function< void()> waitFunctor, MPI_Comm communicator)
Alternative to the other send() where I trigger a non-blocking send and then invoke the functor until ...
static void send(const tarch::mpi::IntegerMessage &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had a non-static version of the send as well as the receive.
static void freeMultiscaleDataExchangeDatatype()
static void receiveAndPollDanglingMessages(tarch::mpi::IntegerMessage &message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
IntegerMessage(int __value)
static MPI_Datatype Datatype
Whenever we use LLVM's MPI extension (DaStGen), we rely on lazy initialisation of the datatype.
IntegerMessage(ObjectConstruction)
static void initDatatype()
Wrapper around getDatatype() to trigger lazy evaluation if we use the lazy initialisation.
static void receive(tarch::mpi::IntegerMessage &buffer, int source, int tag, MPI_Comm communicator)
static MPI_Datatype getJoinDatatype()
static void freeGlobalCommunciationDatatype()
static void freeJoinDatatype()
static MPI_Datatype getBoundaryExchangeDatatype()
int _senderDestinationRank
IntegerMessage(const IntegerMessage &copy)=default