GridStatistics.cpp
#include "GridStatistics.h"

#include <sstream>
#include <algorithm>

peano4::grid::GridStatistics::GridStatistics(int __numberOfLocalUnrefinedCells, int __numberOfRemoteUnrefinedCells, int __numberOfLocalRefinedCells, int __numberOfRemoteRefinedCells, int __stationarySweeps, bool __coarseningHasBeenVetoed, bool __removedEmptySubtree, tarch::la::Vector<Dimensions,double> __minH) {
  setNumberOfLocalUnrefinedCells( __numberOfLocalUnrefinedCells);
  setNumberOfRemoteUnrefinedCells( __numberOfRemoteUnrefinedCells);
  setNumberOfLocalRefinedCells( __numberOfLocalRefinedCells);
  setNumberOfRemoteRefinedCells( __numberOfRemoteRefinedCells);
  setStationarySweeps( __stationarySweeps);
  setCoarseningHasBeenVetoed( __coarseningHasBeenVetoed);
  setRemovedEmptySubtree( __removedEmptySubtree);
  setMinH( __minH);
}

std::string peano4::grid::GridStatistics::toString() const {
  std::ostringstream out;
  out << "(";
  out << "numberOfLocalUnrefinedCells=" << _numberOfLocalUnrefinedCells;
  out << ",";
  out << "numberOfRemoteUnrefinedCells=" << _numberOfRemoteUnrefinedCells;
  out << ",";
  out << "numberOfLocalRefinedCells=" << _numberOfLocalRefinedCells;
  out << ",";
  out << "numberOfRemoteRefinedCells=" << _numberOfRemoteRefinedCells;
  out << ",";
  out << "stationarySweeps=" << _stationarySweeps;
  out << ",";
  out << "coarseningHasBeenVetoed=" << _coarseningHasBeenVetoed;
  out << ",";
  out << "removedEmptySubtree=" << _removedEmptySubtree;
  out << ",";
  out << "minH=" << _minH;
  out << ")";
  return out.str();
}

int peano4::grid::GridStatistics::getNumberOfLocalUnrefinedCells() const {
  return _numberOfLocalUnrefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfLocalUnrefinedCells(int value) {
  _numberOfLocalUnrefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfRemoteUnrefinedCells() const {
  return _numberOfRemoteUnrefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfRemoteUnrefinedCells(int value) {
  _numberOfRemoteUnrefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfLocalRefinedCells() const {
  return _numberOfLocalRefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfLocalRefinedCells(int value) {
  _numberOfLocalRefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfRemoteRefinedCells() const {
  return _numberOfRemoteRefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfRemoteRefinedCells(int value) {
  _numberOfRemoteRefinedCells = value;
}

int peano4::grid::GridStatistics::getStationarySweeps() const {
  return _stationarySweeps;
}

void peano4::grid::GridStatistics::setStationarySweeps(int value) {
  _stationarySweeps = value;
}

bool peano4::grid::GridStatistics::getCoarseningHasBeenVetoed() const {
  return _coarseningHasBeenVetoed;
}

void peano4::grid::GridStatistics::setCoarseningHasBeenVetoed(bool value) {
  _coarseningHasBeenVetoed = value;
}

bool peano4::grid::GridStatistics::getRemovedEmptySubtree() const {
  return _removedEmptySubtree;
}

void peano4::grid::GridStatistics::setRemovedEmptySubtree(bool value) {
  _removedEmptySubtree = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::GridStatistics::getMinH() const {
  return _minH;
}

void peano4::grid::GridStatistics::setMinH(const tarch::la::Vector<Dimensions,double>& value) {
  _minH = value;
}

double peano4::grid::GridStatistics::getMinH(int index) const {
  return _minH(index);
}

void peano4::grid::GridStatistics::setMinH(int index, double value) {
  _minH(index) = value;
}

#ifdef Parallel

#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::GridStatistics::Datatype = MPI_DATATYPE_NULL;
#endif

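// All five logical communication channels (fork, join, boundary data
// exchange, multiscale data exchange, global communication) map onto the
// same static Datatype instance, so the getters below all hand out the
// same MPI datatype.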
[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getForkDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getJoinDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getBoundaryExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getGlobalCommunciationDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridStatistics::freeForkDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridStatistics::freeJoinDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridStatistics::freeBoundaryExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridStatistics::freeMultiscaleDataExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridStatistics::freeGlobalCommunciationDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

int peano4::grid::GridStatistics::getSenderRank() const {
  return _senderDestinationRank;
}

void peano4::grid::GridStatistics::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::GridStatistics instances[2];

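  // Two consecutive dummy instances: the first provides the attribute
  // addresses, and the distance between the two yields the true extent of
  // one object in memory, including any compiler padding.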
  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;

  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int* blocklen = new int[NumberOfAttributes];
  MPI_Aint* disp = new MPI_Aint[NumberOfAttributes];

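  // Attribute layout: five MPI_INT counters, the two bool flags shipped as
  // MPI_BYTE, and Dimensions MPI_DOUBLE entries for _minH.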
  int counter = 0;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._numberOfLocalUnrefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfRemoteUnrefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfLocalRefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfRemoteRefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._stationarySweeps), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._coarseningHasBeenVetoed), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._removedEmptySubtree), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._minH.data()[0]), &disp[counter] );
  counter++;

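  // Convert the absolute addresses into displacements relative to the first
  // attribute; offset records where that attribute sits within the object.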
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

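  // Resize the struct type to the real object extent, so that communicating
  // arrays of GridStatistics strides correctly over padding and any members
  // that are not transferred.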
  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;

  #else
  // invoke routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}

void peano4::grid::GridStatistics::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  freeForkDatatype();
  freeJoinDatatype();
  freeBoundaryExchangeDatatype();
  freeMultiscaleDataExchangeDatatype();
  freeGlobalCommunciationDatatype();
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

void peano4::grid::GridStatistics::send(const peano4::grid::GridStatistics& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}

void peano4::grid::GridStatistics::receive(peano4::grid::GridStatistics& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}

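// The overloads below are non-blocking: they issue the MPI call, then spin
// on MPI_Test while executing the two caller-supplied functors, so the
// caller can raise timeout warnings and service other messages while the
// communication is still in flight.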
void peano4::grid::GridStatistics::send(
  const peano4::grid::GridStatistics& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
  MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  }
}

void peano4::grid::GridStatistics::receive(
  peano4::grid::GridStatistics& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

#ifdef Parallel
void peano4::grid::GridStatistics::sendAndPollDanglingMessages(const peano4::grid::GridStatistics& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::GridStatistics::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridStatistics", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridStatistics", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}

void peano4::grid::GridStatistics::receiveAndPollDanglingMessages(peano4::grid::GridStatistics& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::GridStatistics::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridStatistics", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridStatistics", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif
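
For orientation, here is a minimal sketch of how the MPI lifecycle implemented in this file might be driven from user code. The helper name exchangeStatistics and the concrete argument values are hypothetical; the sketch also assumes the generated default constructor and tarch::la::Vector's fill-with-one-scalar constructor:

#include "GridStatistics.h"

// Hypothetical helper, not part of GridStatistics.cpp.
void exchangeStatistics(int partnerRank) {
  #ifdef Parallel
  // Register the derived datatype once before any communication.
  peano4::grid::GridStatistics::initDatatype();

  peano4::grid::GridStatistics localStats(
    10,     // numberOfLocalUnrefinedCells
    0,      // numberOfRemoteUnrefinedCells
    4,      // numberOfLocalRefinedCells
    0,      // numberOfRemoteRefinedCells
    1,      // stationarySweeps
    false,  // coarseningHasBeenVetoed
    false,  // removedEmptySubtree
    tarch::la::Vector<Dimensions,double>(0.1)
  );

  // The deadlock-aware variants poll, warn, and eventually abort if the
  // partner rank never answers; the communicator argument defaults to
  // tarch::mpi::Rank::getInstance().getCommunicator().
  peano4::grid::GridStatistics::sendAndPollDanglingMessages(localStats, partnerRank, 0);

  peano4::grid::GridStatistics remoteStats;
  peano4::grid::GridStatistics::receiveAndPollDanglingMessages(remoteStats, partnerRank, 0);

  peano4::grid::GridStatistics::shutdownDatatype();
  #endif
}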