44 bool __hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep,
45 bool __isAntecessorOfRefinedVertexInCurrentTreeSweep,
46 bool __hasBeenParentOfSubtreeVertexInPreviousTreeSweep,
47 bool __isParentOfSubtreeVertexInCurrentTreeSweep,
48 int __numberOfAdjacentRefinedLocalCells,
76 double getX(
int index)
const;
77 void setX(
int index,
double value);
90 [[clang::map_mpi_datatype]]
93 [[clang::map_mpi_datatype]]
96 [[clang::map_mpi_datatype]]
99 [[clang::map_mpi_datatype]]
102 [[clang::map_mpi_datatype]]
105 [[clang::map_mpi_datatype]]
108 [[clang::map_mpi_datatype]]
111 [[clang::map_mpi_datatype]]
114 [[clang::map_mpi_datatype]]
117 [[clang::map_mpi_datatype]]
159 std::function<
void()> startCommunicationFunctor,
160 std::function<
void()> waitFunctor,
161 MPI_Comm communicator
167 std::function<
void()> startCommunicationFunctor,
168 std::function<
void()> waitFunctor,
169 MPI_Comm communicator
197#if defined(__PACKED_ATTRIBUTES_LANGUAGE_EXTENSION__)
200#if !defined(__PACKED_ATTRIBUTES_LANGUAGE_EXTENSION__)
210#if defined(__PACKED_ATTRIBUTES_LANGUAGE_EXTENSION__)
211 [[clang::truncate_mantissa(48)]]
double _x[Dimensions];
213#if !defined(__PACKED_ATTRIBUTES_LANGUAGE_EXTENSION__)
224#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
static Rank & getInstance()
This operation returns the singleton instance.
bool getIsParentOfSubtreeVertexInCurrentTreeSweep() const
static void freeBoundaryExchangeDatatype()
int getAdjacentRanks(int index) const
tarch::la::Vector< TwoPowerD, int > getAdjacentRanks() const
static void receiveAndPollDanglingMessages(peano4::grid::GridVertex &message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
tarch::la::Vector< TwoPowerD, int > _backupOfAdjacentRanks
void setBackupOfAdjacentRanks(int index, int value)
int getSenderRank() const
int getNumberOfAdjacentRefinedLocalCells() const
static MPI_Datatype Datatype
Whenever we use LLVM's MPI extension (DaStGe), we rely on lazy initialisation of the datatype.
static void send(const peano4::grid::GridVertex &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had a non-static version of the send as well as the receive.
void setBackupOfAdjacentRanks(const tarch::la::Vector< TwoPowerD, int > &value)
void setIsAntecessorOfRefinedVertexInCurrentTreeSweep(bool value)
bool _hasBeenParentOfSubtreeVertexInPreviousTreeSweep
static MPI_Datatype getGlobalCommunciationDatatype()
bool getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep() const
GridVertex(const GridVertex &copy)
static MPI_Datatype getJoinDatatype()
GridVertex & operator=(const GridVertex &other)
GridVertex(ObjectConstruction)
tarch::la::Vector< TwoPowerD, int > getBackupOfAdjacentRanks() const
static void freeMultiscaleDataExchangeDatatype()
static MPI_Datatype getBoundaryExchangeDatatype()
GridVertex(State __state, tarch::la::Vector< TwoPowerD, int > __adjacentRanks, tarch::la::Vector< TwoPowerD, int > __backupOfAdjacentRanks, bool __hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep, bool __isAntecessorOfRefinedVertexInCurrentTreeSweep, bool __hasBeenParentOfSubtreeVertexInPreviousTreeSweep, bool __isParentOfSubtreeVertexInCurrentTreeSweep, int __numberOfAdjacentRefinedLocalCells, tarch::la::Vector< Dimensions, double > __x, int __level)
static void shutdownDatatype()
Free the underlying MPI datatype.
static void freeGlobalCommunciationDatatype()
static void receive(peano4::grid::GridVertex &buffer, int source, int tag, std::function< void()> startCommunicationFunctor, std::function< void()> waitFunctor, MPI_Comm communicator)
bool _isParentOfSubtreeVertexInCurrentTreeSweep
int getBackupOfAdjacentRanks(int index) const
void setAdjacentRanks(int index, int value)
bool _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep
bool _isAntecessorOfRefinedVertexInCurrentTreeSweep
void setNumberOfAdjacentRefinedLocalCells(int value)
static void sendAndPollDanglingMessages(const peano4::grid::GridVertex &message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
void setIsParentOfSubtreeVertexInCurrentTreeSweep(bool value)
std::string toString() const
tarch::la::Vector< TwoPowerD, int > _adjacentRanks
bool getIsAntecessorOfRefinedVertexInCurrentTreeSweep() const
int _senderDestinationRank
void setHasBeenParentOfSubtreeVertexInPreviousTreeSweep(bool value)
static void send(const peano4::grid::GridVertex &buffer, int destination, int tag, std::function< void()> startCommunicationFunctor, std::function< void()> waitFunctor, MPI_Comm communicator)
Alternative to the other send() where I trigger a non-blocking send and then invoke the functor until ...
void setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep(bool value)
void setState(State value)
static void freeJoinDatatype()
static void initDatatype()
Wrapper around getDatatype() to trigger lazy evaluation if we use the lazy initialisation.
void setAdjacentRanks(const tarch::la::Vector< TwoPowerD, int > &value)
static void freeForkDatatype()
bool getHasBeenParentOfSubtreeVertexInPreviousTreeSweep() const
peano4::grid::GridVertex::State getState() const
static void receive(peano4::grid::GridVertex &buffer, int source, int tag, MPI_Comm communicator)
static MPI_Datatype getForkDatatype()
Hands out MPI datatype if we work without the LLVM MPI extension.
int _numberOfAdjacentRefinedLocalCells
static MPI_Datatype getMultiscaleDataExchangeDatatype()