Peano
GridControlEvent.cpp
#include "GridControlEvent.h"

#include <sstream>
#include <algorithm>
#include <iostream>

std::string peano4::grid::GridControlEvent::toString() const {
  std::ostringstream out;
  out << "(";
  out << "refinementControl=" << (_refinementControl==RefinementControl::Refine ? "Refine" : "") << (_refinementControl==RefinementControl::Erase ? "Erase" : "");
  out << ",";
  out << "offset=" << _offset;
  out << ",";
  out << "width=" << _width;
  out << ",";
  out << "h=" << _h;
  out << ")";
  return out.str();
}

peano4::grid::GridControlEvent::RefinementControl peano4::grid::GridControlEvent::getRefinementControl() const {
  return _refinementControl;
}

void peano4::grid::GridControlEvent::setRefinementControl(RefinementControl value) {
  _refinementControl = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::GridControlEvent::getOffset() const {
  return _offset;
}

void peano4::grid::GridControlEvent::setOffset(const tarch::la::Vector<Dimensions,double>& value) {
  _offset = value;
}

double peano4::grid::GridControlEvent::getOffset(int index) const {
  return _offset(index);
}

void peano4::grid::GridControlEvent::setOffset(int index, double value) {
  _offset(index) = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::GridControlEvent::getWidth() const {
  return _width;
}

void peano4::grid::GridControlEvent::setWidth(const tarch::la::Vector<Dimensions,double>& value) {
  _width = value;
}

double peano4::grid::GridControlEvent::getWidth(int index) const {
  return _width(index);
}

void peano4::grid::GridControlEvent::setWidth(int index, double value) {
  _width(index) = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::GridControlEvent::getH() const {
  return _h;
}

void peano4::grid::GridControlEvent::setH(const tarch::la::Vector<Dimensions,double>& value) {
  _h = value;
}

double peano4::grid::GridControlEvent::getH(int index) const {
  return _h(index);
}

void peano4::grid::GridControlEvent::setH(int index, double value) {
  _h(index) = value;
}
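
// Illustrative sketch, not part of the generated file: filling an event via
// the setters above and printing it. Assumes the header provides a default
// constructor and that Dimensions is the usual Peano compile-time constant;
// the helper name exampleUsage and all concrete values are hypothetical.
namespace {
  [[maybe_unused]] void exampleUsage() {
    peano4::grid::GridControlEvent event;
    event.setRefinementControl( peano4::grid::GridControlEvent::RefinementControl::Refine );
    for (int d=0; d<Dimensions; d++) {
      event.setOffset(d, 0.0);  // lower-left corner of the region to refine
      event.setWidth (d, 1.0);  // extent of that region along axis d
      event.setH     (d, 0.1);  // target mesh width within the region
    }
    std::cout << event.toString() << std::endl;
  }
}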

#ifdef Parallel

// Whenever we use LLVM's MPI extension (DaStGen), we rely on lazy
// initialisation of the datatype; otherwise we define it here explicitly.
#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::GridControlEvent::Datatype = MPI_DATATYPE_NULL;
#endif

// The getters below hand out the MPI datatype if we work without the LLVM
// MPI extension.
[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridControlEvent::getForkDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridControlEvent::getGlobalCommunciationDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridControlEvent::getJoinDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridControlEvent::getBoundaryExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridControlEvent::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridControlEvent::freeForkDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridControlEvent::freeGlobalCommunciationDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridControlEvent::freeJoinDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridControlEvent::freeBoundaryExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

[[clang::map_mpi_datatype]]
void peano4::grid::GridControlEvent::freeMultiscaleDataExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}

int peano4::grid::GridControlEvent::getSenderRank() const {
  return _senderDestinationRank;
}

// Wrapper around the datatype getters to trigger lazy evaluation if we use
// lazy initialisation; otherwise builds the derived MPI datatype explicitly.
void peano4::grid::GridControlEvent::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::GridControlEvent instances[2];

  // one attribute each for _refinementControl, _offset, _width and _h
  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;

  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int* blocklen = new int[NumberOfAttributes];
  MPI_Aint* disp = new MPI_Aint[NumberOfAttributes];

  int counter = 0;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._refinementControl), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._offset.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._width.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._h.data()[0]), &disp[counter] );
  counter++;

  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;
  #else
  // invoke each routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}
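
// Sketch, not part of the generated file: initDatatype() above follows the
// standard MPI recipe of measuring attribute displacements on a dummy pair
// of instances and then resizing the struct type to the true array stride.
// The same recipe for a hypothetical POD; the names ExamplePayload and
// buildExampleDatatype are illustrative only.
namespace {
  struct ExamplePayload {
    int    id;
    double position[3];
  };

  [[maybe_unused]] MPI_Datatype buildExampleDatatype() {
    ExamplePayload sample[2];

    int          blocklen[2] = { 1, 3 };
    MPI_Datatype subtypes[2] = { MPI_INT, MPI_DOUBLE };
    MPI_Aint     disp[2];
    MPI_Aint     baseFirst, baseSecond;

    MPI_Get_address( &sample[0], &baseFirst );
    MPI_Get_address( &sample[1], &baseSecond );
    MPI_Get_address( &sample[0].id,          &disp[0] );
    MPI_Get_address( &sample[0].position[0], &disp[1] );

    // displacements relative to the first attribute, exactly as above
    MPI_Aint offset = disp[0] - baseFirst;
    MPI_Aint extent = baseSecond - baseFirst - offset;
    disp[1] -= disp[0];
    disp[0]  = 0;

    MPI_Datatype tmpType, result;
    MPI_Type_create_struct( 2, blocklen, disp, subtypes, &tmpType );
    // resize so consecutive array elements map onto consecutive structs
    MPI_Type_create_resized( tmpType, offset, extent, &result );
    MPI_Type_commit( &result );
    MPI_Type_free( &tmpType );
    return result;
  }
}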

// Free the underlying MPI datatype.
void peano4::grid::GridControlEvent::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  freeForkDatatype();
  freeJoinDatatype();
  freeBoundaryExchangeDatatype();
  freeMultiscaleDataExchangeDatatype();
  freeGlobalCommunciationDatatype();
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

void peano4::grid::GridControlEvent::send(const peano4::grid::GridControlEvent& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}

void peano4::grid::GridControlEvent::receive(peano4::grid::GridControlEvent& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
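
// Sketch, not part of the generated file: a blocking exchange between two
// ranks through the wrappers above. Assumes initDatatype() has been called
// and a default constructor exists; the helper name blockingExchangeExample
// and the tag value 14 are hypothetical.
namespace {
  [[maybe_unused]] void blockingExchangeExample() {
    int rank;
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );
    peano4::grid::GridControlEvent event;
    if (rank==0) {
      peano4::grid::GridControlEvent::send( event, 1, 14, MPI_COMM_WORLD );
    }
    else if (rank==1) {
      peano4::grid::GridControlEvent::receive( event, 0, 14, MPI_COMM_WORLD );
    }
  }
}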

void peano4::grid::GridControlEvent::send(
  const peano4::grid::GridControlEvent& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
  MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  }
}

void peano4::grid::GridControlEvent::receive(
  peano4::grid::GridControlEvent& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

#ifdef Parallel
void peano4::grid::GridControlEvent::sendAndPollDanglingMessages(const peano4::grid::GridControlEvent& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::GridControlEvent::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridControlEvent", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridControlEvent", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}

void peano4::grid::GridControlEvent::receiveAndPollDanglingMessages(peano4::grid::GridControlEvent& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::GridControlEvent::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridControlEvent", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridControlEvent", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif
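
// Illustrative usage sketch, not part of the generated file: the polling
// variants above block until the message went through, but keep Peano's
// services progressing meanwhile and emit deadlock warnings on time-outs.
// The tag value 14 is an arbitrary placeholder.
//
//   peano4::grid::GridControlEvent event;
//   peano4::grid::GridControlEvent::sendAndPollDanglingMessages( event, 1, 14 );
//   peano4::grid::GridControlEvent::receiveAndPollDanglingMessages( event, 0, 14 );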