7 [[maybe_unused]]
State __state,
10 [[maybe_unused]]
bool __hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep,
11 [[maybe_unused]]
bool __isAntecessorOfRefinedVertexInCurrentTreeSweep,
12 [[maybe_unused]]
bool __hasBeenParentOfSubtreeVertexInPreviousTreeSweep,
13 [[maybe_unused]]
bool __isParentOfSubtreeVertexInCurrentTreeSweep,
14 [[maybe_unused]]
int __numberOfAdjacentRefinedLocalCells,
16 [[maybe_unused]]
int __level
33 setState( copy.getState() );
34 setAdjacentRanks( copy.getAdjacentRanks() );
35 setBackupOfAdjacentRanks( copy.getBackupOfAdjacentRanks() );
36 setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep( copy.getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep() );
37 setIsAntecessorOfRefinedVertexInCurrentTreeSweep( copy.getIsAntecessorOfRefinedVertexInCurrentTreeSweep() );
38 setHasBeenParentOfSubtreeVertexInPreviousTreeSweep( copy.getHasBeenParentOfSubtreeVertexInPreviousTreeSweep() );
39 setIsParentOfSubtreeVertexInCurrentTreeSweep( copy.getIsParentOfSubtreeVertexInCurrentTreeSweep() );
40 setNumberOfAdjacentRefinedLocalCells( copy.getNumberOfAdjacentRefinedLocalCells() );
69 std::ostringstream out;
71 out <<
"state=" << (
_state==State::HangingVertex?
"HangingVertex" :
"") << (
_state==State::New?
"New" :
"") << (
_state==State::Unrefined?
"Unrefined" :
"") << (
_state==State::Refined?
"Refined" :
"") << (
_state==State::RefinementTriggered?
"RefinementTriggered" :
"") << (
_state==State::Refining?
"Refining" :
"") << (
_state==State::EraseTriggered?
"EraseTriggered" :
"") << (
_state==State::Erasing?
"Erasing" :
"") << (
_state==State::Delete?
"Delete" :
"") ;
73 out <<
"adjacentRanks=" << getAdjacentRanks();
75 out <<
"backupOfAdjacentRanks=" << _backupOfAdjacentRanks;
77 out <<
"hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep=" << _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep;
79 out <<
"isAntecessorOfRefinedVertexInCurrentTreeSweep=" << _isAntecessorOfRefinedVertexInCurrentTreeSweep;
81 out <<
"hasBeenParentOfSubtreeVertexInPreviousTreeSweep=" << _hasBeenParentOfSubtreeVertexInPreviousTreeSweep;
83 out <<
"isParentOfSubtreeVertexInCurrentTreeSweep=" << _isParentOfSubtreeVertexInCurrentTreeSweep;
85 out <<
"numberOfAdjacentRefinedLocalCells=" << _numberOfAdjacentRefinedLocalCells;
88 out <<
"x=" << getX();
91 out <<
"level=" << _level;
114 result(i) = _adjacentRanks[i];
123 _adjacentRanks[i] = value(i);
129 return _adjacentRanks[index];
134 _adjacentRanks[index] = value;
139 return _backupOfAdjacentRanks;
144 _backupOfAdjacentRanks = value;
149 return _backupOfAdjacentRanks(index);
154 _backupOfAdjacentRanks(index) = value;
159 return _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep;
164 _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep = value;
169 return _isAntecessorOfRefinedVertexInCurrentTreeSweep;
174 _isAntecessorOfRefinedVertexInCurrentTreeSweep = value;
179 return _hasBeenParentOfSubtreeVertexInPreviousTreeSweep;
184 _hasBeenParentOfSubtreeVertexInPreviousTreeSweep = value;
189 return _isParentOfSubtreeVertexInCurrentTreeSweep;
194 _isParentOfSubtreeVertexInCurrentTreeSweep = value;
199 return _numberOfAdjacentRefinedLocalCells;
204 _numberOfAdjacentRefinedLocalCells = value;
212 for(
int i=0; i<Dimensions; i++) {
221 for(
int i=0; i<Dimensions; i++) {
227double peano4::grid::GridVertex::getX(
int index)
const {
232void peano4::grid::GridVertex::setX(
int index,
double value) {
256#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
261[[clang::map_mpi_datatype]]
267[[clang::map_mpi_datatype]]
273[[clang::map_mpi_datatype]]
279[[clang::map_mpi_datatype]]
285[[clang::map_mpi_datatype]]
291[[clang::map_mpi_datatype]]
293 if (Datatype != MPI_DATATYPE_NULL){
294 MPI_Type_free(&Datatype);
295 Datatype = MPI_DATATYPE_NULL;
300[[clang::map_mpi_datatype]]
302 if (Datatype != MPI_DATATYPE_NULL){
303 MPI_Type_free(&Datatype);
304 Datatype = MPI_DATATYPE_NULL;
309[[clang::map_mpi_datatype]]
311 if (Datatype != MPI_DATATYPE_NULL){
312 MPI_Type_free(&Datatype);
313 Datatype = MPI_DATATYPE_NULL;
318[[clang::map_mpi_datatype]]
320 if (Datatype != MPI_DATATYPE_NULL){
321 MPI_Type_free(&Datatype);
322 Datatype = MPI_DATATYPE_NULL;
327[[clang::map_mpi_datatype]]
329 if (Datatype != MPI_DATATYPE_NULL){
330 MPI_Type_free(&Datatype);
331 Datatype = MPI_DATATYPE_NULL;
337 return _senderDestinationRank;
342 #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
345 int NumberOfAttributes = 0;
346 NumberOfAttributes++;
347 NumberOfAttributes++;
348 NumberOfAttributes++;
349 NumberOfAttributes++;
350 NumberOfAttributes++;
351 NumberOfAttributes++;
352 NumberOfAttributes++;
353 NumberOfAttributes++;
355 NumberOfAttributes++;
357 NumberOfAttributes++;
359 MPI_Datatype* subtypes =
new MPI_Datatype[NumberOfAttributes];
360 int* blocklen =
new int[NumberOfAttributes];
361 MPI_Aint* disp =
new MPI_Aint[NumberOfAttributes];
364 subtypes[counter] = MPI_INT;
365 blocklen[counter] = 1;
367 subtypes[counter] = MPI_INT;
370 subtypes[counter] = MPI_INT;
373 subtypes[counter] = MPI_BYTE;
374 blocklen[counter] = 1;
376 subtypes[counter] = MPI_BYTE;
377 blocklen[counter] = 1;
379 subtypes[counter] = MPI_BYTE;
380 blocklen[counter] = 1;
382 subtypes[counter] = MPI_BYTE;
383 blocklen[counter] = 1;
385 subtypes[counter] = MPI_INT;
386 blocklen[counter] = 1;
389 subtypes[counter] = MPI_DOUBLE;
390 blocklen[counter] = Dimensions;
393 subtypes[counter] = MPI_INT;
394 blocklen[counter] = 1;
397 MPI_Aint baseFirstInstance;
398 MPI_Aint baseSecondInstance;
399 MPI_Get_address( &instances[0], &baseFirstInstance );
400 MPI_Get_address( &instances[1], &baseSecondInstance );
403 MPI_Get_address( &(instances[0].
_state), &disp[counter] );
405 MPI_Get_address( &(instances[0]._adjacentRanks.data()[0]), &disp[counter] );
407 MPI_Get_address( &(instances[0]._backupOfAdjacentRanks.data()[0]), &disp[counter] );
409 MPI_Get_address( &(instances[0]._hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep), &disp[counter] );
411 MPI_Get_address( &(instances[0]._isAntecessorOfRefinedVertexInCurrentTreeSweep), &disp[counter] );
413 MPI_Get_address( &(instances[0]._hasBeenParentOfSubtreeVertexInPreviousTreeSweep), &disp[counter] );
415 MPI_Get_address( &(instances[0]._isParentOfSubtreeVertexInCurrentTreeSweep), &disp[counter] );
417 MPI_Get_address( &(instances[0]._numberOfAdjacentRefinedLocalCells), &disp[counter] );
421 MPI_Get_address( &(instances[0]._x.data()[0]), &disp[counter] );
424 MPI_Get_address( &(instances[0]._level), &disp[counter] );
427 MPI_Aint offset = disp[0] - baseFirstInstance;
428 MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
429 for (
int i=NumberOfAttributes-1; i>=0; i--) {
430 disp[i] = disp[i] - disp[0];
434 MPI_Datatype tmpType;
435 errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
436 errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
437 errorCode += MPI_Type_commit( &Datatype );
438 errorCode += MPI_Type_free( &tmpType );
439 if (errorCode) std::cerr <<
"error constructing MPI datatype in " << __FILE__ <<
":" << __LINE__ << std::endl;
449 getBoundaryExchangeDatatype();
450 getMultiscaleDataExchangeDatatype();
451 getGlobalCommunciationDatatype();
457 #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
460 freeBoundaryExchangeDatatype();
461 freeMultiscaleDataExchangeDatatype();
462 freeGlobalCommunciationDatatype();
464 MPI_Datatype type = Datatype;
465 MPI_Type_free( &type );
471 MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
477 MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
486 std::function<
void()> startCommunicationFunctor,
487 std::function<
void()> waitFunctor,
488 MPI_Comm communicator
490 MPI_Request sendRequestHandle;
492 MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
493 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
494 startCommunicationFunctor();
497 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
506 std::function<
void()> startCommunicationFunctor,
507 std::function<
void()> waitFunctor,
508 MPI_Comm communicator
511 MPI_Request receiveRequestHandle;
513 MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
514 MPI_Test( &receiveRequestHandle, &flag, &status );
515 startCommunicationFunctor();
518 MPI_Test( &receiveRequestHandle, &flag, &status );
527 message, destination, tag,
544 message, source, tag,
void triggerDeadlockTimeOut(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1, const std::string &comment="")
Triggers a timeout and shuts down the cluster if a timeout is violated.
void setDeadlockWarningTimeStamp()
Memorise global timeout.
void writeTimeOutWarning(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1)
Writes a warning if relevant.
void setDeadlockTimeOutTimeStamp()
static Rank & getInstance()
This operation returns the singleton instance.
virtual void receiveDanglingMessages() override
Answers to pending MPI messages.
static ServiceRepository & getInstance()
bool getIsParentOfSubtreeVertexInCurrentTreeSweep() const
tarch::la::Vector< TwoPowerD, int > getAdjacentRanks() const
static MPI_Datatype getJoinDatatype()
int getSenderRank() const
int getNumberOfAdjacentRefinedLocalCells() const
static MPI_Datatype Datatype
Whenever we use LLVM's MPI extension (DaStGe), we rely on lazy initialisation of the datatype.
static void freeMultiscaleDataExchangeDatatype()
static void freeBoundaryExchangeDatatype()
void setBackupOfAdjacentRanks(const tarch::la::Vector< TwoPowerD, int > &value)
void setIsAntecessorOfRefinedVertexInCurrentTreeSweep(bool value)
static MPI_Datatype getForkDatatype()
Hands out MPI datatype if we work without the LLVM MPI extension.
static MPI_Datatype getBoundaryExchangeDatatype()
bool getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep() const
static void freeJoinDatatype()
static void freeForkDatatype()
static MPI_Datatype getMultiscaleDataExchangeDatatype()
static MPI_Datatype getGlobalCommunciationDatatype()
tarch::la::Vector< TwoPowerD, int > getBackupOfAdjacentRanks() const
static void receiveAndPollDanglingMessages(peano4::grid::GridVertex &message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
static void shutdownDatatype()
Free the underlying MPI datatype.
static void receive(peano4::grid::GridVertex &buffer, int source, int tag, MPI_Comm communicator)
static void sendAndPollDanglingMessages(const peano4::grid::GridVertex &message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
void setNumberOfAdjacentRefinedLocalCells(int value)
void setIsParentOfSubtreeVertexInCurrentTreeSweep(bool value)
std::string toString() const
static void send(const peano4::grid::GridVertex &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had a non-static version of the send as well as the receive.
bool getIsAntecessorOfRefinedVertexInCurrentTreeSweep() const
int _senderDestinationRank
void setHasBeenParentOfSubtreeVertexInPreviousTreeSweep(bool value)
GridVertex & operator=(const GridVertex &other)
void setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep(bool value)
void setState(State value)
void setAdjacentRanks(const tarch::la::Vector< TwoPowerD, int > &value)
bool getHasBeenParentOfSubtreeVertexInPreviousTreeSweep() const
peano4::grid::GridVertex::State getState() const
static void freeGlobalCommunciationDatatype()
static void initDatatype()
Wrapper around getDatatype() to trigger lazy evaluation if we use the lazy initialisation.