Peano 4
AutomatonState.cpp
#include "AutomatonState.h"

#include <sstream>
#include <algorithm>


peano4::grid::AutomatonState::AutomatonState(int __level, const tarch::la::Vector<Dimensions,double>& __x, const tarch::la::Vector<Dimensions,double>& __h, bool __inverted, const std::bitset<Dimensions>& __evenFlags, const tarch::la::Vector<DimensionsTimesTwo,int>& __accessNumber) {
  setLevel( __level);
  setX( __x);
  setH( __h);
  setInverted( __inverted);
  setEvenFlags( __evenFlags);
  setAccessNumber( __accessNumber);
}


peano4::grid::AutomatonState::AutomatonState( const AutomatonState& copy ) {
  setLevel( copy.getLevel() );
  setX( copy.getX() );
  setH( copy.getH() );
  setInverted( copy.getInverted() );
  setEvenFlags( copy.getEvenFlags() );
  setAccessNumber( copy.getAccessNumber() );
}


peano4::grid::AutomatonState& peano4::grid::AutomatonState::operator=(const AutomatonState& other) {
  if (this == &other) {
    return *this; // Self-assignment check
  }

  setLevel(other.getLevel());
  setX(other.getX());
  setH(other.getH());
  setInverted(other.getInverted());
  setEvenFlags(other.getEvenFlags());
  setAccessNumber(other.getAccessNumber());

  return *this;
}


std::string peano4::grid::AutomatonState::toString() const {
  std::ostringstream out;
  out << "(";
  out << "level=" << _level;
  out << ",";
  out << "x=" << getX();
  out << ",";
  out << "h=" << getH();
  out << ",";
  out << "inverted=" << _inverted;
  out << ",";
  out << "evenFlags=" << getEvenFlags();
  out << ",";
  out << "accessNumber=" << getAccessNumber();
  out << ")";
  return out.str();
}


int peano4::grid::AutomatonState::getLevel() const {
  return _level;
}


void peano4::grid::AutomatonState::setLevel(int value) {
  _level = value;
}


tarch::la::Vector<Dimensions,double> peano4::grid::AutomatonState::getX() const {

  tarch::la::Vector<Dimensions,double> result;
  for( int i=0; i<Dimensions; i++) {
    result(i) = _x[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setX(const tarch::la::Vector<Dimensions,double>& value) {

  for( int i=0; i<Dimensions; i++) {
    _x[i] = value(i);
  }
}


double peano4::grid::AutomatonState::getX(int index) const {
  return _x[index];
}


void peano4::grid::AutomatonState::setX(int index, double value) {
  _x[index] = value;
}


tarch::la::Vector<Dimensions,double> peano4::grid::AutomatonState::getH() const {

  tarch::la::Vector<Dimensions,double> result;
  for( int i=0; i<Dimensions; i++) {
    result(i) = _h[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setH(const tarch::la::Vector<Dimensions,double>& value) {

  for( int i=0; i<Dimensions; i++) {
    _h[i] = value(i);
  }
}


double peano4::grid::AutomatonState::getH(int index) const {
  return _h[index];
}


void peano4::grid::AutomatonState::setH(int index, double value) {
  _h[index] = value;
}


bool peano4::grid::AutomatonState::getInverted() const {
  return _inverted;
}


void peano4::grid::AutomatonState::setInverted(bool value) {
  _inverted = value;
}


std::bitset<Dimensions> peano4::grid::AutomatonState::getEvenFlags() const {

  std::bitset<Dimensions> result;
  for (int i=0; i<Dimensions; i++) result[i] = _evenFlags[i];
  return result;
}


void peano4::grid::AutomatonState::setEvenFlags(const std::bitset<Dimensions>& value) {

  for (int i=0; i<Dimensions; i++) _evenFlags[i]=value[i];
}


bool peano4::grid::AutomatonState::getEvenFlags(int index) const {
  return _evenFlags[index];
}


void peano4::grid::AutomatonState::setEvenFlags(int index, bool value) {
  _evenFlags[index] = value;
}


void peano4::grid::AutomatonState::flipEvenFlags(int index) {
  _evenFlags[index] = not _evenFlags[index];
}


tarch::la::Vector<DimensionsTimesTwo,int> peano4::grid::AutomatonState::getAccessNumber() const {

  tarch::la::Vector<DimensionsTimesTwo,int> result;
  for( int i=0; i<DimensionsTimesTwo; i++) {
    result(i) = _accessNumber[i];
  }
  return result;
}


void peano4::grid::AutomatonState::setAccessNumber(const tarch::la::Vector<DimensionsTimesTwo,int>& value) {

  for( int i=0; i<DimensionsTimesTwo; i++) {
    _accessNumber[i] = value(i);
  }
}


int peano4::grid::AutomatonState::getAccessNumber(int index) const {
  return _accessNumber[index];
}


void peano4::grid::AutomatonState::setAccessNumber(int index, int value) {
  _accessNumber[index] = value;
}

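// The remainder of this file is only compiled for MPI-parallel builds
// (Parallel): it provides the derived MPI datatype for AutomatonState plus
// blocking and non-blocking send/receive helpers.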
#ifdef Parallel

#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::AutomatonState::Datatype = MPI_DATATYPE_NULL;
#endif


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getForkDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getJoinDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getBoundaryExchangeDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::AutomatonState::getGlobalCommunciationDatatype() {
  return Datatype;
}


int peano4::grid::AutomatonState::getSenderRank() const {
  return _senderDestinationRank;
}

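// Builds the derived MPI datatype that mirrors the in-memory layout of
// AutomatonState. Two dummy instances are allocated below so that the true
// extent (distance between consecutive objects in an array) can be measured.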
void peano4::grid::AutomatonState::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::AutomatonState instances[2];

  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;

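  // One struct entry per attribute: _level, _x, _h, _inverted, _evenFlags,
  // _accessNumber (see the subtype/blocklength assignments below).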
  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int* blocklen = new int[NumberOfAttributes];
  MPI_Aint* disp = new MPI_Aint[NumberOfAttributes];

  int counter = 0;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_DOUBLE;
  blocklen[counter] = Dimensions;
  counter++;
  subtypes[counter] = MPI_BYTE;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_UNSIGNED_LONG;
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;
  blocklen[counter] = DimensionsTimesTwo;
  counter++;

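  // Determine the byte address of each attribute within the first instance;
  // the displacements are rebased relative to the first attribute further down.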
  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._level), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._x.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._h.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._inverted), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._evenFlags), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._accessNumber.data()[0]), &disp[counter] );
  counter++;

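  // offset: gap between the start of the object and its first attribute.
  // extent: distance between two consecutive instances, so the resized type
  // also works for contiguous arrays of AutomatonState.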
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;

  #else
  // invoke routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}


void peano4::grid::AutomatonState::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  MPI_Datatype type;
  type = getForkDatatype();
  MPI_Type_free( &type );
  type = getJoinDatatype();
  MPI_Type_free( &type );
  type = getBoundaryExchangeDatatype();
  MPI_Type_free( &type );
  type = getMultiscaleDataExchangeDatatype();
  MPI_Type_free( &type );
  type = getGlobalCommunciationDatatype();
  MPI_Type_free( &type );
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

void peano4::grid::AutomatonState::send(const peano4::grid::AutomatonState& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}


void peano4::grid::AutomatonState::receive(peano4::grid::AutomatonState& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}


void peano4::grid::AutomatonState::send(
  const peano4::grid::AutomatonState& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
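  // Non-blocking send: issue the MPI_Isend, then poll its request with
  // MPI_Test. startCommunicationFunctor() runs once after the first test,
  // waitFunctor() runs between subsequent tests; callers use them for
  // time-out bookkeeping and for processing dangling messages.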
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
  MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  }
}


void peano4::grid::AutomatonState::receive(
  peano4::grid::AutomatonState& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
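  // Same polling pattern as the non-blocking send above; additionally the
  // sender's rank is recorded from the MPI status once the message arrived.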
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

#ifdef Parallel
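// Convenience wrappers around the functor-based send()/receive(): they plug
// in tarch::mpi::Rank's deadlock/time-out bookkeeping and let the service
// repository process dangling messages while waiting.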
void peano4::grid::AutomatonState::sendAndPollDanglingMessages(const peano4::grid::AutomatonState& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::AutomatonState::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::AutomatonState", "sendAndPollDanglingMessages()",destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::AutomatonState", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}


void peano4::grid::AutomatonState::receiveAndPollDanglingMessages(peano4::grid::AutomatonState& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::AutomatonState::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::AutomatonState", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::AutomatonState", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif
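For orientation, a minimal usage sketch follows (not part of this file; the include path, the helper name createRootState and the scalar fill-constructors of tarch::la::Vector are assumptions):

  #include "peano4/grid/AutomatonState.h"

  // Build a state spanning the unit cube on level 0, with all even flags
  // cleared and zeroed access numbers.
  peano4::grid::AutomatonState createRootState() {
    peano4::grid::AutomatonState state(
      0,                                            // level
      tarch::la::Vector<Dimensions,double>(0.0),    // x: offset of the cell
      tarch::la::Vector<Dimensions,double>(1.0),    // h: mesh width per axis
      false,                                        // inverted
      std::bitset<Dimensions>(0),                   // evenFlags
      tarch::la::Vector<DimensionsTimesTwo,int>(0)  // accessNumber
    );
    return state;
  }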