AutomatonState.cpp
#include "AutomatonState.h"

#include <sstream>
#include <iostream>
#include <algorithm>


peano4::grid::AutomatonState::AutomatonState(
  int                                               __level,
  const tarch::la::Vector<Dimensions,double>&       __x,
  const tarch::la::Vector<Dimensions,double>&       __h,
  bool                                              __inverted,
  const std::bitset<Dimensions>&                    __evenFlags,
  const tarch::la::Vector<DimensionsTimesTwo,int>&  __accessNumber
) {
  setLevel( __level);
  setX( __x);
  setH( __h);
  setInverted( __inverted);
  setEvenFlags( __evenFlags);
  setAccessNumber( __accessNumber);
}

peano4::grid::AutomatonState::AutomatonState(const AutomatonState& copy) {
  setLevel( copy.getLevel() );
  setX( copy.getX() );
  setH( copy.getH() );
  setInverted( copy.getInverted() );
  setEvenFlags( copy.getEvenFlags() );
  setAccessNumber( copy.getAccessNumber() );
}

peano4::grid::AutomatonState& peano4::grid::AutomatonState::operator=(const AutomatonState& other) {
  if (this == &other) {
    return *this; // self-assignment check
  }

  setLevel(other.getLevel());
  setX(other.getX());
  setH(other.getH());
  setInverted(other.getInverted());
  setEvenFlags(other.getEvenFlags());
  setAccessNumber(other.getAccessNumber());

  return *this;
}

std::string peano4::grid::AutomatonState::toString() const {
  std::ostringstream out;
  out << "(";
  out << "level=" << _level;
  out << ",";
  out << "x=" << getX();
  out << ",";
  out << "h=" << getH();
  out << ",";
  out << "inverted=" << _inverted;
  out << ",";
  out << "evenFlags=" << getEvenFlags();
  out << ",";
  out << "accessNumber=" << getAccessNumber();
  out << ")";
  return out.str();
}


int peano4::grid::AutomatonState::getLevel() const {
  return _level;
}


void peano4::grid::AutomatonState::setLevel(int value) {
  _level = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::AutomatonState::getX() const {

  tarch::la::Vector<Dimensions,double> result;
  for( int i=0; i<Dimensions; i++) {
    result(i) = _x[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setX(const tarch::la::Vector<Dimensions,double>& value) {

  for( int i=0; i<Dimensions; i++) {
    _x[i] = value(i);
  }
}

double peano4::grid::AutomatonState::getX(int index) const {
  return _x[index];
}


void peano4::grid::AutomatonState::setX(int index, double value) {
  _x[index] = value;
}

tarch::la::Vector<Dimensions,double> peano4::grid::AutomatonState::getH() const {

  tarch::la::Vector<Dimensions,double> result;
  for( int i=0; i<Dimensions; i++) {
    result(i) = _h[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setH(const tarch::la::Vector<Dimensions,double>& value) {

  for( int i=0; i<Dimensions; i++) {
    _h[i] = value(i);
  }
}

double peano4::grid::AutomatonState::getH(int index) const {
  return _h[index];
}


void peano4::grid::AutomatonState::setH(int index, double value) {
  _h[index] = value;
}

bool peano4::grid::AutomatonState::getInverted() const {
  return _inverted;
}


void peano4::grid::AutomatonState::setInverted(bool value) {
  _inverted = value;
}

std::bitset<Dimensions> peano4::grid::AutomatonState::getEvenFlags() const {

  std::bitset<Dimensions> result;
  for (int i=0; i<Dimensions; i++) result[i] = _evenFlags[i];
  return result;
}


void peano4::grid::AutomatonState::setEvenFlags(const std::bitset<Dimensions>& value) {

  for (int i=0; i<Dimensions; i++) _evenFlags[i]=value[i];
}

bool peano4::grid::AutomatonState::getEvenFlags(int index) const {
  return _evenFlags[index];
}


void peano4::grid::AutomatonState::setEvenFlags(int index, bool value) {
  _evenFlags[index] = value;
}


void peano4::grid::AutomatonState::flipEvenFlags(int index) {
  _evenFlags[index] = not _evenFlags[index];
}

tarch::la::Vector<DimensionsTimesTwo,int> peano4::grid::AutomatonState::getAccessNumber() const {

  tarch::la::Vector<DimensionsTimesTwo,int> result;
  for( int i=0; i<DimensionsTimesTwo; i++) {
    result(i) = _accessNumber[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setAccessNumber(const tarch::la::Vector<DimensionsTimesTwo,int>& value) {

  for( int i=0; i<DimensionsTimesTwo; i++) {
    _accessNumber[i] = value(i);
  }
}


int peano4::grid::AutomatonState::getAccessNumber(int index) const {
  return _accessNumber[index];
}


void peano4::grid::AutomatonState::setAccessNumber(int index, int value) {
  _accessNumber[index] = value;
}


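For illustration, a minimal usage sketch of the accessors above. It assumes the default constructor declared in AutomatonState.h as well as the scalar convenience conversions of tarch::la::Vector and std::bitset; the helper function and the concrete values are made up:

#include "AutomatonState.h"

#include <iostream>

void configureUnitCubeState() {
  peano4::grid::AutomatonState state;
  state.setLevel(0);
  state.setX(0.0);          // offset of the unit cube (scalar initialises every component)
  state.setH(1.0);          // mesh width per coordinate direction
  state.setInverted(false);
  state.setEvenFlags(0);
  std::cout << state.toString() << std::endl;
}
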
#ifdef Parallel

#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::AutomatonState::Datatype = MPI_DATATYPE_NULL;
#endif


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getForkDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getGlobalCommunciationDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getJoinDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getBoundaryExchangeDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}

[[clang::map_mpi_datatype]]
void peano4::grid::AutomatonState::freeForkDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}


[[clang::map_mpi_datatype]]
void peano4::grid::AutomatonState::freeGlobalCommunciationDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}


[[clang::map_mpi_datatype]]
void peano4::grid::AutomatonState::freeJoinDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}


[[clang::map_mpi_datatype]]
void peano4::grid::AutomatonState::freeBoundaryExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}


[[clang::map_mpi_datatype]]
void peano4::grid::AutomatonState::freeMultiscaleDataExchangeDatatype() {
  if (Datatype != MPI_DATATYPE_NULL){
    MPI_Type_free(&Datatype);
    Datatype = MPI_DATATYPE_NULL;
  }
}


int peano4::grid::AutomatonState::getSenderRank() const {
  return _senderDestinationRank;
}

void peano4::grid::AutomatonState::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::AutomatonState  instances[2];

  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;

  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int*          blocklen = new int[NumberOfAttributes];
  MPI_Aint*     disp     = new MPI_Aint[NumberOfAttributes];

  int counter = 0;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_BYTE;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_UNSIGNED_LONG;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = DimensionsTimesTwo;
  counter++;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._level), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._x.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._h.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._inverted), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._evenFlags), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._accessNumber.data()[0]), &disp[counter] );
  counter++;

  // make displacements relative to the first attribute; the resized type then
  // carries the true extent of one instance
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;

  #else
  // invoke routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}


void peano4::grid::AutomatonState::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  freeForkDatatype();
  freeJoinDatatype();
  freeBoundaryExchangeDatatype();
  freeMultiscaleDataExchangeDatatype();
  freeGlobalCommunciationDatatype();
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

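A sketch of the intended datatype lifecycle, assuming a build with Parallel defined and a plain MPI program; within Peano these calls are normally issued by the core's startup and shutdown code, so the main() below is purely illustrative:

#include <mpi.h>

#include "AutomatonState.h"

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  peano4::grid::AutomatonState::initDatatype();      // build and commit the derived MPI datatype once

  // ... exchange AutomatonState instances via the send()/receive() routines below ...

  peano4::grid::AutomatonState::shutdownDatatype();  // free the datatype before MPI shuts down
  MPI_Finalize();
  return 0;
}
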
void peano4::grid::AutomatonState::send(const peano4::grid::AutomatonState& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}


void peano4::grid::AutomatonState::receive(peano4::grid::AutomatonState& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}

394
396 const peano4::grid::AutomatonState& buffer,
397 int destination,
398 int tag,
399 std::function<void()> startCommunicationFunctor,
400 std::function<void()> waitFunctor,
401 MPI_Comm communicator
402) {
403 MPI_Request sendRequestHandle;
404 int flag = 0;
405 MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
406 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
407 startCommunicationFunctor();
408 while (!flag) {
409 waitFunctor();
410 MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
411 }
412}
413
414
void peano4::grid::AutomatonState::receive(
  peano4::grid::AutomatonState& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status  status;
  MPI_Request receiveRequestHandle;
  int         flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

#ifdef Parallel
void peano4::grid::AutomatonState::sendAndPollDanglingMessages(const peano4::grid::AutomatonState& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::AutomatonState::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::AutomatonState", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::AutomatonState", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}


void peano4::grid::AutomatonState::receiveAndPollDanglingMessages(peano4::grid::AutomatonState& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::AutomatonState::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::AutomatonState", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::AutomatonState", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif

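Finally, a hedged sketch of how two ranks might exchange a state through the polling wrappers above; the rank layout, tag value and helper function are invented for illustration:

#include "AutomatonState.h"

#include "tarch/mpi/Rank.h"

void exchangeRootState(peano4::grid::AutomatonState& state, int tag) {
  const int rank = tarch::mpi::Rank::getInstance().getRank();
  if (rank == 0) {
    // blocks until the message is out, but keeps polling for dangling messages
    // and reports a deadlock if the partner never posts the matching receive
    peano4::grid::AutomatonState::sendAndPollDanglingMessages(state, 1, tag);
  }
  else if (rank == 1) {
    peano4::grid::AutomatonState::receiveAndPollDanglingMessages(state, 0, tag);
  }
}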