struct GridControlEvent;

void setWidth(int index, double value);

double getH(int index) const;

void setH(int index, double value);
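Besides the vector-valued accessors listed further down, the record exposes these per-component overloads. A minimal usage sketch; the function name and the numeric values are made up for illustration:

void shrinkAlongX(peano4::grid::GridControlEvent& event) {
  event.setWidth( 0, 0.5 );     // overwrite only the x-extent of the region
  event.setH( 0, 0.01 );        // request a mesh width of 0.01 along x
  double hx = event.getH( 0 );  // read a single component back
  (void)hx;
}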
[[clang::map_mpi_datatype]]
static MPI_Datatype getForkDatatype();

[[clang::map_mpi_datatype]]
static MPI_Datatype getJoinDatatype();

[[clang::map_mpi_datatype]]
static MPI_Datatype getBoundaryExchangeDatatype();

[[clang::map_mpi_datatype]]
static MPI_Datatype getMultiscaleDataExchangeDatatype();

[[clang::map_mpi_datatype]]
static MPI_Datatype getGlobalCommunciationDatatype();

[[clang::map_mpi_datatype]]
static void freeForkDatatype();

[[clang::map_mpi_datatype]]
static void freeJoinDatatype();

[[clang::map_mpi_datatype]]
static void freeBoundaryExchangeDatatype();

[[clang::map_mpi_datatype]]
static void freeMultiscaleDataExchangeDatatype();

[[clang::map_mpi_datatype]]
static void freeGlobalCommunciationDatatype();
static void send(
  const peano4::grid::GridControlEvent& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
);

static void receive(
  peano4::grid::GridControlEvent& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
);
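A usage sketch for the functor-based variant. Only the signature above is given by the class; what the two functors do (and the source, tag and communicator values) are illustrative assumptions:

peano4::grid::GridControlEvent event;
peano4::grid::GridControlEvent::receive(
  event, /*source*/ 0, /*tag*/ 0,
  [&]() { /* assumed hook: runs when the communication is kicked off */ },
  [&]() { /* assumed hook: runs while we wait, e.g. to poll other messages */ },
  tarch::mpi::Rank::getInstance().getCommunicator()
);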
#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
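This guard switches between the two datatype-handling strategies documented below. A sketch of the intended pattern, assumed from those descriptions rather than copied from the header:

#if defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  // LLVM's MPI extension derives the datatype lazily from the attribute.
  [[clang::map_mpi_datatype]]
  static MPI_Datatype getForkDatatype();
#else
  // Without the extension, the datatype is built by initDatatype() and
  // handed out through the same getter.
  static MPI_Datatype getForkDatatype();
#endif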
static Rank & getInstance()
This operation returns the singleton instance.
tarch::la::Vector< Dimensions, double > _offset
int _senderDestinationRank
static MPI_Datatype getBoundaryExchangeDatatype()
GridControlEvent(ObjectConstruction)
static void send(const peano4::grid::GridControlEvent &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had non-static versions of both the send and the receive.
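A sketch of the blocking variant in use; the ranks and tag are made-up values, and I assume the usual rank query on tarch::mpi::Rank:

peano4::grid::GridControlEvent event;
if (tarch::mpi::Rank::getInstance().getRank() == 0) {
  peano4::grid::GridControlEvent::send(
    event, /*destination*/ 1, /*tag*/ 0,
    tarch::mpi::Rank::getInstance().getCommunicator()
  );
} else {
  peano4::grid::GridControlEvent::receive(
    event, /*source*/ 0, /*tag*/ 0,
    tarch::mpi::Rank::getInstance().getCommunicator()
  );
}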
static void sendAndPollDanglingMessages(const peano4::grid::GridControlEvent &message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
static MPI_Datatype getJoinDatatype()
static void freeGlobalCommunciationDatatype()
void setOffset(const tarch::la::Vector< Dimensions, double > &value)
static void initDatatype()
Wrapper around getDatatype() to trigger lazy evaluation if we use the lazy initialisation.
static void freeJoinDatatype()
static void freeForkDatatype()
static void shutdownDatatype()
Free the underlying MPI datatype.
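initDatatype() and shutdownDatatype() frame the typical lifecycle. A sketch, assuming the application drives MPI start-up and tear-down itself:

MPI_Init(&argc, &argv);
peano4::grid::GridControlEvent::initDatatype();     // register the MPI datatype(s)

// ... exchange GridControlEvents between ranks ...

peano4::grid::GridControlEvent::shutdownDatatype(); // free the underlying MPI datatype
MPI_Finalize();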
void setH(const tarch::la::Vector< Dimensions, double > &value)
RefinementControl _refinementControl
tarch::la::Vector< Dimensions, double > _width
tarch::la::Vector< Dimensions, double > getWidth() const
peano4::grid::GridControlEvent::RefinementControl getRefinementControl() const
tarch::la::Vector< Dimensions, double > _h
static void receive(peano4::grid::GridControlEvent &buffer, int source, int tag, MPI_Comm communicator)
static MPI_Datatype Datatype
Whenever we use LLVM's MPI extension (DaStGen), we rely on lazy initialisation of the datatype.
static MPI_Datatype getForkDatatype()
Hands out MPI datatype if we work without the LLVM MPI extension.
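Such a getter makes the record usable with plain MPI calls as well. A sketch; the peer rank and tag are made up:

peano4::grid::GridControlEvent event;
MPI_Send(
  &event, 1,
  peano4::grid::GridControlEvent::getForkDatatype(),
  /*destination*/ 1, /*tag*/ 0,
  tarch::mpi::Rank::getInstance().getCommunicator()
);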
static void freeMultiscaleDataExchangeDatatype()
static void freeBoundaryExchangeDatatype()
static MPI_Datatype getGlobalCommunciationDatatype()
void setRefinementControl(RefinementControl value)
static MPI_Datatype getMultiscaleDataExchangeDatatype()
tarch::la::Vector< Dimensions, double > getOffset() const
int getSenderRank() const
GridControlEvent(const GridControlEvent& copy)=default
std::string toString() const
tarch::la::Vector< Dimensions, double > getH() const
void setWidth(const tarch::la::Vector< Dimensions, double > &value)
static void receiveAndPollDanglingMessages(peano4::grid::GridControlEvent &message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
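The polling variants keep the rank responsive while a message is in transit. A sketch relying on the default communicator from the signatures above; ranks and tag are made-up values:

peano4::grid::GridControlEvent event;

// sender side
peano4::grid::GridControlEvent::sendAndPollDanglingMessages(event, /*destination*/ 1, /*tag*/ 0);

// receiver side
peano4::grid::GridControlEvent::receiveAndPollDanglingMessages(event, /*source*/ 0, /*tag*/ 0);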