18 std::ostringstream out;
20 out <<
"numberOfLocalUnrefinedCells=" << _numberOfLocalUnrefinedCells;
22 out <<
"numberOfRemoteUnrefinedCells=" << _numberOfRemoteUnrefinedCells;
24 out <<
"numberOfLocalRefinedCells=" << _numberOfLocalRefinedCells;
26 out <<
"numberOfRemoteRefinedCells=" << _numberOfRemoteRefinedCells;
28 out <<
"stationarySweeps=" << _stationarySweeps;
30 out <<
"coarseningHasBeenVetoed=" << _coarseningHasBeenVetoed;
32 out <<
"removedEmptySubtree=" << _removedEmptySubtree;
34 out <<
"minH=" << _minH;
40 return _numberOfLocalUnrefinedCells;
44 _numberOfLocalUnrefinedCells = value;
48 return _numberOfRemoteUnrefinedCells;
52 _numberOfRemoteUnrefinedCells = value;
56 return _numberOfLocalRefinedCells;
60 _numberOfLocalRefinedCells = value;
64 return _numberOfRemoteRefinedCells;
68 _numberOfRemoteRefinedCells = value;
72 return _stationarySweeps;
76 _stationarySweeps = value;
80 return _coarseningHasBeenVetoed;
84 _coarseningHasBeenVetoed = value;
88 return _removedEmptySubtree;
92 _removedEmptySubtree = value;
108 _minH(index) = value;
118#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
123[[clang::map_mpi_datatype]]
128[[clang::map_mpi_datatype]]
133[[clang::map_mpi_datatype]]
138[[clang::map_mpi_datatype]]
143[[clang::map_mpi_datatype]]
149[[clang::map_mpi_datatype]]
151 if (Datatype != MPI_DATATYPE_NULL){
152 MPI_Type_free(&Datatype);
153 Datatype = MPI_DATATYPE_NULL;
158[[clang::map_mpi_datatype]]
160 if (Datatype != MPI_DATATYPE_NULL){
161 MPI_Type_free(&Datatype);
162 Datatype = MPI_DATATYPE_NULL;
167[[clang::map_mpi_datatype]]
169 if (Datatype != MPI_DATATYPE_NULL){
170 MPI_Type_free(&Datatype);
171 Datatype = MPI_DATATYPE_NULL;
176[[clang::map_mpi_datatype]]
178 if (Datatype != MPI_DATATYPE_NULL){
179 MPI_Type_free(&Datatype);
180 Datatype = MPI_DATATYPE_NULL;
185[[clang::map_mpi_datatype]]
187 if (Datatype != MPI_DATATYPE_NULL){
188 MPI_Type_free(&Datatype);
189 Datatype = MPI_DATATYPE_NULL;
195 return _senderDestinationRank;
199 #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
202 int NumberOfAttributes = 0;
203 NumberOfAttributes++;
204 NumberOfAttributes++;
205 NumberOfAttributes++;
206 NumberOfAttributes++;
207 NumberOfAttributes++;
208 NumberOfAttributes++;
209 NumberOfAttributes++;
210 NumberOfAttributes++;
212 MPI_Datatype* subtypes =
new MPI_Datatype[NumberOfAttributes];
213 int* blocklen =
new int[NumberOfAttributes];
214 MPI_Aint* disp =
new MPI_Aint[NumberOfAttributes];
217 subtypes[counter] = MPI_INT;
218 blocklen[counter] = 1;
220 subtypes[counter] = MPI_INT;
221 blocklen[counter] = 1;
223 subtypes[counter] = MPI_INT;
224 blocklen[counter] = 1;
226 subtypes[counter] = MPI_INT;
227 blocklen[counter] = 1;
229 subtypes[counter] = MPI_INT;
230 blocklen[counter] = 1;
232 subtypes[counter] = MPI_BYTE;
233 blocklen[counter] = 1;
235 subtypes[counter] = MPI_BYTE;
236 blocklen[counter] = 1;
238 subtypes[counter] = MPI_DOUBLE;
239 blocklen[counter] = Dimensions;
242 MPI_Aint baseFirstInstance;
243 MPI_Aint baseSecondInstance;
244 MPI_Get_address( &instances[0], &baseFirstInstance );
245 MPI_Get_address( &instances[1], &baseSecondInstance );
248 MPI_Get_address( &(instances[0]._numberOfLocalUnrefinedCells), &disp[counter] );
250 MPI_Get_address( &(instances[0]._numberOfRemoteUnrefinedCells), &disp[counter] );
252 MPI_Get_address( &(instances[0]._numberOfLocalRefinedCells), &disp[counter] );
254 MPI_Get_address( &(instances[0]._numberOfRemoteRefinedCells), &disp[counter] );
256 MPI_Get_address( &(instances[0]._stationarySweeps), &disp[counter] );
258 MPI_Get_address( &(instances[0]._coarseningHasBeenVetoed), &disp[counter] );
260 MPI_Get_address( &(instances[0]._removedEmptySubtree), &disp[counter] );
262 MPI_Get_address( &(instances[0]._minH.data()[0]), &disp[counter] );
265 MPI_Aint offset = disp[0] - baseFirstInstance;
266 MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
267 for (
int i=NumberOfAttributes-1; i>=0; i--) {
268 disp[i] = disp[i] - disp[0];
272 MPI_Datatype tmpType;
273 errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
274 errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
275 errorCode += MPI_Type_commit( &Datatype );
276 errorCode += MPI_Type_free( &tmpType );
277 if (errorCode) std::cerr <<
"error constructing MPI datatype in " << __FILE__ <<
":" << __LINE__ << std::endl;
287 getBoundaryExchangeDatatype();
288 getMultiscaleDataExchangeDatatype();
289 getGlobalCommunciationDatatype();
295 #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
298 freeBoundaryExchangeDatatype();
299 freeMultiscaleDataExchangeDatatype();
300 freeGlobalCommunciationDatatype();
302 MPI_Datatype type = Datatype;
303 MPI_Type_free( &type );
308 MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
313 MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
321 std::function<
void()> startCommunicationFunctor,
322 std::function<
void()> waitFunctor,
323 MPI_Comm communicator
325 MPI_Request sendRequestHandle;
327 MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
328 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
329 startCommunicationFunctor();
332 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
340 std::function<
void()> startCommunicationFunctor,
341 std::function<
void()> waitFunctor,
342 MPI_Comm communicator
345 MPI_Request receiveRequestHandle;
347 MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
348 MPI_Test( &receiveRequestHandle, &flag, &status );
349 startCommunicationFunctor();
352 MPI_Test( &receiveRequestHandle, &flag, &status );
361 message, destination, tag,
378 message, source, tag,
void triggerDeadlockTimeOut(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1, const std::string &comment="")
Triggers a time-out and shuts down the cluster if the time-out threshold is violated.
void setDeadlockWarningTimeStamp()
Memorise global timeout.
void writeTimeOutWarning(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1)
Writes a warning if relevant.
void setDeadlockTimeOutTimeStamp()
static Rank & getInstance()
This operation returns the singleton instance.
virtual void receiveDanglingMessages() override
Answer to MPI Messages.
static ServiceRepository & getInstance()
void setNumberOfLocalRefinedCells(int value)
static void freeJoinDatatype()
static MPI_Datatype getBoundaryExchangeDatatype()
static void shutdownDatatype()
Free the underlying MPI datatype.
int getNumberOfLocalRefinedCells() const
void setNumberOfRemoteUnrefinedCells(int value)
void setMinH(const tarch::la::Vector< Dimensions, double > &value)
int getNumberOfLocalUnrefinedCells() const
static void send(const peano4::grid::GridStatistics &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had a non-static version of the send as well as the receive.
static void freeForkDatatype()
std::string toString() const
void setCoarseningHasBeenVetoed(bool value)
void setStationarySweeps(int value)
int getStationarySweeps() const
static void freeBoundaryExchangeDatatype()
static void sendAndPollDanglingMessages(const peano4::grid::GridStatistics &message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
static MPI_Datatype getJoinDatatype()
static MPI_Datatype Datatype
Whenever we use LLVM's MPI extension (DaStGen), we rely on lazy initialisation of the datatype.
static MPI_Datatype getMultiscaleDataExchangeDatatype()
bool getRemovedEmptySubtree() const
void setNumberOfLocalUnrefinedCells(int value)
static void receiveAndPollDanglingMessages(peano4::grid::GridStatistics &message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator())
int _senderDestinationRank
static MPI_Datatype getForkDatatype()
Hands out the MPI datatype if we work without the LLVM MPI extension.
void setNumberOfRemoteRefinedCells(int value)
static void receive(peano4::grid::GridStatistics &buffer, int source, int tag, MPI_Comm communicator)
int getNumberOfRemoteRefinedCells() const
int getNumberOfRemoteUnrefinedCells() const
bool getCoarseningHasBeenVetoed() const
static void freeGlobalCommunciationDatatype()
tarch::la::Vector< Dimensions, double > getMinH() const
static void initDatatype()
Wrapper around getDatatype() to trigger lazy evaluation if we use the lazy initialisation.
int getSenderRank() const
static MPI_Datatype getGlobalCommunciationDatatype()
static void freeMultiscaleDataExchangeDatatype()
void setRemovedEmptySubtree(bool value)