GridStatistics.cpp
#include "GridStatistics.h"

#include <sstream>
#include <algorithm>
#include <iostream>

peano4::grid::GridStatistics::GridStatistics(
  int                                  __numberOfLocalUnrefinedCells,
  int                                  __numberOfRemoteUnrefinedCells,
  int                                  __numberOfLocalRefinedCells,
  int                                  __numberOfRemoteRefinedCells,
  int                                  __stationarySweeps,
  bool                                 __coarseningHasBeenVetoed,
  bool                                 __removedEmptySubtree,
  tarch::la::Vector<Dimensions,double> __minH
) {
  setNumberOfLocalUnrefinedCells( __numberOfLocalUnrefinedCells);
  setNumberOfRemoteUnrefinedCells( __numberOfRemoteUnrefinedCells);
  setNumberOfLocalRefinedCells( __numberOfLocalRefinedCells);
  setNumberOfRemoteRefinedCells( __numberOfRemoteRefinedCells);
  setStationarySweeps( __stationarySweeps);
  setCoarseningHasBeenVetoed( __coarseningHasBeenVetoed);
  setRemovedEmptySubtree( __removedEmptySubtree);
  setMinH( __minH);
}

std::string peano4::grid::GridStatistics::toString() const {
  std::ostringstream out;
  out << "(";
  out << "numberOfLocalUnrefinedCells=" << _numberOfLocalUnrefinedCells;
  out << ",";
  out << "numberOfRemoteUnrefinedCells=" << _numberOfRemoteUnrefinedCells;
  out << ",";
  out << "numberOfLocalRefinedCells=" << _numberOfLocalRefinedCells;
  out << ",";
  out << "numberOfRemoteRefinedCells=" << _numberOfRemoteRefinedCells;
  out << ",";
  out << "stationarySweeps=" << _stationarySweeps;
  out << ",";
  out << "coarseningHasBeenVetoed=" << _coarseningHasBeenVetoed;
  out << ",";
  out << "removedEmptySubtree=" << _removedEmptySubtree;
  out << ",";
  out << "minH=" << _minH;
  out << ")";
  return out.str();
}

int peano4::grid::GridStatistics::getNumberOfLocalUnrefinedCells() const {
  return _numberOfLocalUnrefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfLocalUnrefinedCells(int value) {
  _numberOfLocalUnrefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfRemoteUnrefinedCells() const {
  return _numberOfRemoteUnrefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfRemoteUnrefinedCells(int value) {
  _numberOfRemoteUnrefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfLocalRefinedCells() const {
  return _numberOfLocalRefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfLocalRefinedCells(int value) {
  _numberOfLocalRefinedCells = value;
}

int peano4::grid::GridStatistics::getNumberOfRemoteRefinedCells() const {
  return _numberOfRemoteRefinedCells;
}

void peano4::grid::GridStatistics::setNumberOfRemoteRefinedCells(int value) {
  _numberOfRemoteRefinedCells = value;
}

int peano4::grid::GridStatistics::getStationarySweeps() const {
  return _stationarySweeps;
}

void peano4::grid::GridStatistics::setStationarySweeps(int value) {
  _stationarySweeps = value;
}

bool peano4::grid::GridStatistics::getCoarseningHasBeenVetoed() const {
  return _coarseningHasBeenVetoed;
}

void peano4::grid::GridStatistics::setCoarseningHasBeenVetoed(bool value) {
  _coarseningHasBeenVetoed = value;
}

bool peano4::grid::GridStatistics::getRemovedEmptySubtree() const {
  return _removedEmptySubtree;
}

void peano4::grid::GridStatistics::setRemovedEmptySubtree(bool value) {
  _removedEmptySubtree = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::GridStatistics::getMinH() const {
  return _minH;
}

void peano4::grid::GridStatistics::setMinH(const tarch::la::Vector<Dimensions,double>& value) {
  _minH = value;
}

double peano4::grid::GridStatistics::getMinH(int index) const {
  return _minH(index);
}

void peano4::grid::GridStatistics::setMinH(int index, double value) {
  _minH(index) = value;
}
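
// A minimal usage sketch (illustration only, not part of the generated file).
// It assumes Dimensions is set by the build and that tarch::la::Vector offers
// a constructor filling every entry with one scalar:
//
//   peano4::grid::GridStatistics stats(
//     10, 0,                                      // local/remote unrefined cells
//     3, 0,                                       // local/remote refined cells
//     5,                                          // stationary sweeps
//     false, false,                               // coarsening vetoed, removed empty subtree
//     tarch::la::Vector<Dimensions,double>(0.1)   // minimal mesh width per axis
//   );
//   std::cout << stats.toString() << std::endl;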

#ifdef Parallel

#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::GridStatistics::Datatype = MPI_DATATYPE_NULL;
#endif

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getForkDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getJoinDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getBoundaryExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridStatistics::getGlobalCommunciationDatatype() {
  return Datatype;
}
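
// All five getters above hand out the same static Datatype: GridStatistics
// uses a single MPI layout regardless of whether it travels during a fork,
// join, boundary, multiscale, or global data exchange.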

int peano4::grid::GridStatistics::getSenderRank() const {
  return _senderDestinationRank;
}
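
// _senderDestinationRank is only meaningful after one of the receive()
// variants below has run; they copy it from status.MPI_SOURCE.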

void peano4::grid::GridStatistics::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::GridStatistics instances[2];

  // eight attributes: five ints, two bools, one vector of Dimensions doubles
  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;

  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int*          blocklen = new int[NumberOfAttributes];
  MPI_Aint*     disp     = new MPI_Aint[NumberOfAttributes];

  int counter = 0;
  subtypes[counter] = MPI_INT;     // _numberOfLocalUnrefinedCells
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;     // _numberOfRemoteUnrefinedCells
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;     // _numberOfLocalRefinedCells
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;     // _numberOfRemoteRefinedCells
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;     // _stationarySweeps
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;    // _coarseningHasBeenVetoed
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;    // _removedEmptySubtree
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_DOUBLE;  // _minH
  blocklen[counter] = Dimensions;
  counter++;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._numberOfLocalUnrefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfRemoteUnrefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfLocalRefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfRemoteRefinedCells), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._stationarySweeps), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._coarseningHasBeenVetoed), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._removedEmptySubtree), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._minH.data()[0]), &disp[counter] );
  counter++;

  // turn absolute addresses into displacements relative to the first member;
  // the gap between the two array elements yields the datatype's true extent
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;

  #else
  // invoke each routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}
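
// Illustration (not part of the generated file): after initDatatype(), whole
// objects, or contiguous arrays of them, can be shipped with count > 1,
// because MPI_Type_create_resized stretched the extent to cover one full
// instance, padding included:
//
//   peano4::grid::GridStatistics statistics[8];
//   MPI_Send( statistics, 8, peano4::grid::GridStatistics::getGlobalCommunciationDatatype(),
//             destination, tag, communicator );  // destination/tag/communicator are placeholders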

void peano4::grid::GridStatistics::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  MPI_Datatype type;
  type = getForkDatatype();
  MPI_Type_free( &type );
  type = getJoinDatatype();
  MPI_Type_free( &type );
  type = getBoundaryExchangeDatatype();
  MPI_Type_free( &type );
  type = getMultiscaleDataExchangeDatatype();
  MPI_Type_free( &type );
  type = getGlobalCommunciationDatatype();
  MPI_Type_free( &type );
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

void peano4::grid::GridStatistics::send(const peano4::grid::GridStatistics& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}

void peano4::grid::GridStatistics::receive(peano4::grid::GridStatistics& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
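
// Point-to-point sketch (illustration; assumes the header also declares a
// default constructor, as DaStGen-generated classes usually do; rank, tag
// and communicator are placeholders):
//
//   peano4::grid::GridStatistics stats;
//   if (rank == 0) peano4::grid::GridStatistics::send   ( stats, 1, 42, communicator );
//   if (rank == 1) peano4::grid::GridStatistics::receive( stats, 0, 42, communicator );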

void peano4::grid::GridStatistics::send(
  const peano4::grid::GridStatistics& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
  MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  }
}
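
// The functor-based send() above, and receive() below, realise a poll loop:
// MPI_Isend/MPI_Irecv start the transfer, startCommunicationFunctor() runs
// once up front, and waitFunctor() is invoked repeatedly until MPI_Test
// reports completion. The *AndPollDanglingMessages wrappers further down use
// exactly these hooks for deadlock detection and message draining.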

void peano4::grid::GridStatistics::receive(
  peano4::grid::GridStatistics& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

#ifdef Parallel
void peano4::grid::GridStatistics::sendAndPollDanglingMessages(const peano4::grid::GridStatistics& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::GridStatistics::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridStatistics", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridStatistics", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}

void peano4::grid::GridStatistics::receiveAndPollDanglingMessages(peano4::grid::GridStatistics& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::GridStatistics::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridStatistics", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridStatistics", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
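
// Typical call-site sketch (illustration): per the header, the communicator
// argument defaults to tarch::mpi::Rank::getInstance().getCommunicator(), so
// a caller typically writes
//
//   peano4::grid::GridStatistics::sendAndPollDanglingMessages( stats, destinationRank, tag );
//
// where stats, destinationRank and tag are placeholders.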
#endif