Peano 4
GridVertex.cpp
#include "GridVertex.h"

#include <sstream>
#include <algorithm>


peano4::grid::GridVertex::GridVertex(
  [[maybe_unused]] State __state,
  [[maybe_unused]] tarch::la::Vector<TwoPowerD, int> __adjacentRanks,
  [[maybe_unused]] tarch::la::Vector<TwoPowerD, int> __backupOfAdjacentRanks,
  [[maybe_unused]] bool __hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep,
  [[maybe_unused]] bool __isAntecessorOfRefinedVertexInCurrentTreeSweep,
  [[maybe_unused]] bool __hasBeenParentOfSubtreeVertexInPreviousTreeSweep,
  [[maybe_unused]] bool __isParentOfSubtreeVertexInCurrentTreeSweep,
  [[maybe_unused]] int __numberOfAdjacentRefinedLocalCells,
  [[maybe_unused]] tarch::la::Vector<Dimensions, double> __x,
  [[maybe_unused]] int __level
) {
  setState( __state);
  setAdjacentRanks( __adjacentRanks);
  setBackupOfAdjacentRanks( __backupOfAdjacentRanks);
  setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep( __hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep);
  setIsAntecessorOfRefinedVertexInCurrentTreeSweep( __isAntecessorOfRefinedVertexInCurrentTreeSweep);
  setHasBeenParentOfSubtreeVertexInPreviousTreeSweep( __hasBeenParentOfSubtreeVertexInPreviousTreeSweep);
  setIsParentOfSubtreeVertexInCurrentTreeSweep( __isParentOfSubtreeVertexInCurrentTreeSweep);
  setNumberOfAdjacentRefinedLocalCells( __numberOfAdjacentRefinedLocalCells);
  #if PeanoDebug>0
  setX( __x);
  #endif
  setLevel( __level);
}

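// The copy constructor and assignment operator below replicate the record
// attribute by attribute through the setters; the vertex position _x is only
// copied in PeanoDebug builds, where it is actually stored.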
peano4::grid::GridVertex::GridVertex(const GridVertex& copy) {
  setState( copy.getState() );
  setAdjacentRanks( copy.getAdjacentRanks() );
  setBackupOfAdjacentRanks( copy.getBackupOfAdjacentRanks() );
  setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep( copy.getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep() );
  setIsAntecessorOfRefinedVertexInCurrentTreeSweep( copy.getIsAntecessorOfRefinedVertexInCurrentTreeSweep() );
  setHasBeenParentOfSubtreeVertexInPreviousTreeSweep( copy.getHasBeenParentOfSubtreeVertexInPreviousTreeSweep() );
  setIsParentOfSubtreeVertexInCurrentTreeSweep( copy.getIsParentOfSubtreeVertexInCurrentTreeSweep() );
  setNumberOfAdjacentRefinedLocalCells( copy.getNumberOfAdjacentRefinedLocalCells() );
  #if PeanoDebug>0
  setX( copy.getX() );
  #endif
  setLevel( copy.getLevel() );
}


peano4::grid::GridVertex& peano4::grid::GridVertex::operator=(const GridVertex& other) {
  if (this == &other) {
    return *this; // Self-assignment check
  }

  setState(other.getState());
  setAdjacentRanks(other.getAdjacentRanks());
  setBackupOfAdjacentRanks(other.getBackupOfAdjacentRanks());
  setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep(other.getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep());
  setIsAntecessorOfRefinedVertexInCurrentTreeSweep(other.getIsAntecessorOfRefinedVertexInCurrentTreeSweep());
  setHasBeenParentOfSubtreeVertexInPreviousTreeSweep(other.getHasBeenParentOfSubtreeVertexInPreviousTreeSweep());
  setIsParentOfSubtreeVertexInCurrentTreeSweep(other.getIsParentOfSubtreeVertexInCurrentTreeSweep());
  setNumberOfAdjacentRefinedLocalCells(other.getNumberOfAdjacentRefinedLocalCells());
  #if PeanoDebug>0
  setX(other.getX());
  #endif
  setLevel(other.getLevel());

  return *this;
}

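// toString() serialises all attributes into one human-readable tuple. The
// position x is only part of the output in PeanoDebug builds, mirroring the
// conditional storage of _x.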
std::string peano4::grid::GridVertex::toString() const {
  std::ostringstream out;
  out << "(";
  out << "state=" << (_state==State::HangingVertex? "HangingVertex" : "")
      << (_state==State::New? "New" : "")
      << (_state==State::Unrefined? "Unrefined" : "")
      << (_state==State::Refined? "Refined" : "")
      << (_state==State::RefinementTriggered? "RefinementTriggered" : "")
      << (_state==State::Refining? "Refining" : "")
      << (_state==State::EraseTriggered? "EraseTriggered" : "")
      << (_state==State::Erasing? "Erasing" : "")
      << (_state==State::Delete? "Delete" : "");
  out << ",";
  out << "adjacentRanks=" << getAdjacentRanks();
  out << ",";
  out << "backupOfAdjacentRanks=" << _backupOfAdjacentRanks;
  out << ",";
  out << "hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep=" << _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep;
  out << ",";
  out << "isAntecessorOfRefinedVertexInCurrentTreeSweep=" << _isAntecessorOfRefinedVertexInCurrentTreeSweep;
  out << ",";
  out << "hasBeenParentOfSubtreeVertexInPreviousTreeSweep=" << _hasBeenParentOfSubtreeVertexInPreviousTreeSweep;
  out << ",";
  out << "isParentOfSubtreeVertexInCurrentTreeSweep=" << _isParentOfSubtreeVertexInCurrentTreeSweep;
  out << ",";
  out << "numberOfAdjacentRefinedLocalCells=" << _numberOfAdjacentRefinedLocalCells;
  #if PeanoDebug>0
  out << ",";
  out << "x=" << getX();
  #endif
  out << ",";
  out << "level=" << _level;
  out << ")";
  return out.str();
}

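// Plain getters and setters generated for each attribute. getAdjacentRanks()
// and getX() copy their attribute element-wise into a tarch::la::Vector,
// whereas _backupOfAdjacentRanks is stored as a vector and handed out directly.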

peano4::grid::GridVertex::State peano4::grid::GridVertex::getState() const {
  return _state;
}


void peano4::grid::GridVertex::setState(State value) {
  _state = value;
}


tarch::la::Vector<TwoPowerD, int> peano4::grid::GridVertex::getAdjacentRanks() const {
  tarch::la::Vector<TwoPowerD, int> result;
  for( int i=0; i<TwoPowerD; i++) {
    result(i) = _adjacentRanks[i];
  }
  return result;
}


void peano4::grid::GridVertex::setAdjacentRanks(const tarch::la::Vector<TwoPowerD, int>& value) {
  for( int i=0; i<TwoPowerD; i++) {
    _adjacentRanks[i] = value(i);
  }
}


int peano4::grid::GridVertex::getAdjacentRanks(int index) const {
  return _adjacentRanks[index];
}


void peano4::grid::GridVertex::setAdjacentRanks(int index, int value) {
  _adjacentRanks[index] = value;
}


tarch::la::Vector<TwoPowerD, int> peano4::grid::GridVertex::getBackupOfAdjacentRanks() const {
  return _backupOfAdjacentRanks;
}


void peano4::grid::GridVertex::setBackupOfAdjacentRanks(const tarch::la::Vector<TwoPowerD, int>& value) {
  _backupOfAdjacentRanks = value;
}


int peano4::grid::GridVertex::getBackupOfAdjacentRanks(int index) const {
  return _backupOfAdjacentRanks(index);
}


void peano4::grid::GridVertex::setBackupOfAdjacentRanks(int index, int value) {
  _backupOfAdjacentRanks(index) = value;
}


bool peano4::grid::GridVertex::getHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep() const {
  return _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep;
}


void peano4::grid::GridVertex::setHasBeenAntecessorOfRefinedVertexInPreviousTreeSweep(bool value) {
  _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep = value;
}


bool peano4::grid::GridVertex::getIsAntecessorOfRefinedVertexInCurrentTreeSweep() const {
  return _isAntecessorOfRefinedVertexInCurrentTreeSweep;
}


void peano4::grid::GridVertex::setIsAntecessorOfRefinedVertexInCurrentTreeSweep(bool value) {
  _isAntecessorOfRefinedVertexInCurrentTreeSweep = value;
}


bool peano4::grid::GridVertex::getHasBeenParentOfSubtreeVertexInPreviousTreeSweep() const {
  return _hasBeenParentOfSubtreeVertexInPreviousTreeSweep;
}


void peano4::grid::GridVertex::setHasBeenParentOfSubtreeVertexInPreviousTreeSweep(bool value) {
  _hasBeenParentOfSubtreeVertexInPreviousTreeSweep = value;
}


bool peano4::grid::GridVertex::getIsParentOfSubtreeVertexInCurrentTreeSweep() const {
  return _isParentOfSubtreeVertexInCurrentTreeSweep;
}


void peano4::grid::GridVertex::setIsParentOfSubtreeVertexInCurrentTreeSweep(bool value) {
  _isParentOfSubtreeVertexInCurrentTreeSweep = value;
}


int peano4::grid::GridVertex::getNumberOfAdjacentRefinedLocalCells() const {
  return _numberOfAdjacentRefinedLocalCells;
}


void peano4::grid::GridVertex::setNumberOfAdjacentRefinedLocalCells(int value) {
  _numberOfAdjacentRefinedLocalCells = value;
}


#if PeanoDebug>0
tarch::la::Vector<Dimensions,double> peano4::grid::GridVertex::getX() const {
  tarch::la::Vector<Dimensions,double> result;
  for( int i=0; i<Dimensions; i++) {
    result(i) = _x[i];
  }
  return result;
}


void peano4::grid::GridVertex::setX(const tarch::la::Vector<Dimensions,double>& value) {
  for( int i=0; i<Dimensions; i++) {
    _x[i] = value(i);
  }
}


double peano4::grid::GridVertex::getX(int index) const {
  return _x[index];
}


void peano4::grid::GridVertex::setX(int index, double value) {
  _x[index] = value;
}
#endif


int peano4::grid::GridVertex::getLevel() const {
  return _level;
}


void peano4::grid::GridVertex::setLevel(int value) {
  _level = value;
}


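// Everything below is only compiled for MPI builds. The record exposes one MPI
// datatype handle per communication context (fork, join, boundary exchange,
// multiscale data exchange, global communication); in this generated code all
// of them map onto the same underlying Datatype.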
#ifdef Parallel

#if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
MPI_Datatype peano4::grid::GridVertex::Datatype = MPI_DATATYPE_NULL;
#endif


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridVertex::getForkDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridVertex::getGlobalCommunciationDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridVertex::getJoinDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridVertex::getBoundaryExchangeDatatype() {
  return Datatype;
}


[[clang::map_mpi_datatype]]
MPI_Datatype peano4::grid::GridVertex::getMultiscaleDataExchangeDatatype() {
  return Datatype;
}


int peano4::grid::GridVertex::getSenderRank() const {
  return _senderDestinationRank;
}

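// initDatatype() constructs the MPI datatype that mirrors the memory layout of
// a GridVertex. It gathers the address of every attribute within one instance
// and uses a second instance to determine the extent of the type, so that
// whole arrays of vertices can be communicated in one go.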
void peano4::grid::GridVertex::initDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  peano4::grid::GridVertex instances[2];

  int NumberOfAttributes = 0;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  NumberOfAttributes++;
  #if PeanoDebug>0
  NumberOfAttributes++;
  #endif
  NumberOfAttributes++;

  MPI_Datatype* subtypes = new MPI_Datatype[NumberOfAttributes];
  int* blocklen = new int[NumberOfAttributes];
  MPI_Aint* disp = new MPI_Aint[NumberOfAttributes];

  int counter = 0;
  subtypes[counter] = MPI_INT;      // _state
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;      // _adjacentRanks
  blocklen[counter] = TwoPowerD;
  counter++;
  subtypes[counter] = MPI_INT;      // _backupOfAdjacentRanks
  blocklen[counter] = TwoPowerD;
  counter++;
  subtypes[counter] = MPI_BYTE;     // _hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;     // _isAntecessorOfRefinedVertexInCurrentTreeSweep
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;     // _hasBeenParentOfSubtreeVertexInPreviousTreeSweep
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_BYTE;     // _isParentOfSubtreeVertexInCurrentTreeSweep
  blocklen[counter] = 1;
  counter++;
  subtypes[counter] = MPI_INT;      // _numberOfAdjacentRefinedLocalCells
  blocklen[counter] = 1;
  counter++;
  #if PeanoDebug>0
  subtypes[counter] = MPI_DOUBLE;   // _x
  blocklen[counter] = Dimensions;
  counter++;
  #endif
  subtypes[counter] = MPI_INT;      // _level
  blocklen[counter] = 1;
  counter++;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address( &instances[0], &baseFirstInstance );
  MPI_Get_address( &instances[1], &baseSecondInstance );

  counter = 0;
  MPI_Get_address( &(instances[0]._state), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._adjacentRanks.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._backupOfAdjacentRanks.data()[0]), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._hasBeenAntecessorOfRefinedVertexInPreviousTreeSweep), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._isAntecessorOfRefinedVertexInCurrentTreeSweep), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._hasBeenParentOfSubtreeVertexInPreviousTreeSweep), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._isParentOfSubtreeVertexInCurrentTreeSweep), &disp[counter] );
  counter++;
  MPI_Get_address( &(instances[0]._numberOfAdjacentRefinedLocalCells), &disp[counter] );
  counter++;
  #if PeanoDebug>0
  MPI_Get_address( &(instances[0]._x.data()[0]), &disp[counter] );
  counter++;
  #endif
  MPI_Get_address( &(instances[0]._level), &disp[counter] );
  counter++;

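  // All displacements are made relative to the first attribute, and the
  // datatype is resized to the distance between two array entries, so that
  // contiguous arrays of GridVertex objects can be sent directly.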
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i=NumberOfAttributes-1; i>=0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct( NumberOfAttributes, blocklen, disp, subtypes, &tmpType );
  errorCode += MPI_Type_create_resized( tmpType, offset, extent, &Datatype );
  errorCode += MPI_Type_commit( &Datatype );
  errorCode += MPI_Type_free( &tmpType );
  if (errorCode) std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;

  delete[] subtypes;
  delete[] blocklen;
  delete[] disp;

  #else
  // invoke routine once to trigger lazy initialisation
  getForkDatatype();
  getJoinDatatype();
  getBoundaryExchangeDatatype();
  getMultiscaleDataExchangeDatatype();
  getGlobalCommunciationDatatype();
  #endif
}


void peano4::grid::GridVertex::shutdownDatatype() {
  #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
  MPI_Datatype type;
  type = getForkDatatype();
  MPI_Type_free( &type );
  type = getJoinDatatype();
  MPI_Type_free( &type );
  type = getBoundaryExchangeDatatype();
  MPI_Type_free( &type );
  type = getMultiscaleDataExchangeDatatype();
  MPI_Type_free( &type );
  type = getGlobalCommunciationDatatype();
  MPI_Type_free( &type );
  #else
  MPI_Datatype type = Datatype;
  MPI_Type_free( &type );
  #endif
}

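// Blocking point-to-point exchange. receive() records the actual sender in
// _senderDestinationRank so that getSenderRank() can be queried afterwards.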
void peano4::grid::GridVertex::send(const peano4::grid::GridVertex& buffer, int destination, int tag, MPI_Comm communicator ) {
  MPI_Send( &buffer, 1, Datatype, destination, tag, communicator);
}


void peano4::grid::GridVertex::receive(peano4::grid::GridVertex& buffer, int source, int tag, MPI_Comm communicator ) {
  MPI_Status status;
  MPI_Recv( &buffer, 1, Datatype, source, tag, communicator, &status);
  buffer._senderDestinationRank = status.MPI_SOURCE;
}

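// Non-blocking variants: they issue an MPI_Isend/MPI_Irecv, invoke
// startCommunicationFunctor() once, and then keep calling waitFunctor() until
// MPI_Test signals completion of the request.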
void peano4::grid::GridVertex::send(
  const peano4::grid::GridVertex& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend( &buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle );
  MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &sendRequestHandle, &flag, MPI_STATUS_IGNORE );
  }
}


void peano4::grid::GridVertex::receive(
  peano4::grid::GridVertex& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv( &buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle );
  MPI_Test( &receiveRequestHandle, &flag, &status );
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test( &receiveRequestHandle, &flag, &status );
  }
  buffer._senderDestinationRank = status.MPI_SOURCE;
}
#endif

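// The *AndPollDanglingMessages wrappers delegate to the functor-based
// send/receive: the start functor arms the deadlock time-out bookkeeping of
// tarch::mpi::Rank, while the wait functor reports time-outs and polls the
// service repository for dangling messages.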
#ifdef Parallel
void peano4::grid::GridVertex::sendAndPollDanglingMessages(const peano4::grid::GridVertex& message, int destination, int tag, MPI_Comm communicator ) {
  peano4::grid::GridVertex::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridVertex", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridVertex", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}


void peano4::grid::GridVertex::receiveAndPollDanglingMessages(peano4::grid::GridVertex& message, int source, int tag, MPI_Comm communicator ) {
  peano4::grid::GridVertex::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "peano4::grid::GridVertex", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "peano4::grid::GridVertex", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif