Peano
Loading...
Searching...
No Matches
CellMarker.cpp
Go to the documentation of this file.
1#include "CellMarker.h"
2
6
7std::ostream& operator<<(std::ostream& out, const peano4::datamanagement::CellMarker& marker) {
8 out << marker.toString();
9 return out;
10}
11
// Constructor: initialise the marker from a grid traversal event.
// NOTE(review): this extract is missing the constructor's signature line
// (CellMarker(const peano4::grid::GridTraversalEvent& event) per the header)
// and several interior lines of the vertex loop and the couldBeEnclave
// definition — confirm against the repository version.
  _centre(event.getX()),                               // cell centre from the event
  _h(event.getH()),                                    // cell size from the event
  _hasBeenRefined(event.getHasBeenRefined() != 0),
  _willBeRefined(event.getWillBeRefined() != 0),
  _isLocal(event.getIsCellLocal()),
  _isParentLocal(event.getIsParentCellLocal()),
  _isOneVertexHanging(false),                          // accumulated in loop below
  _isOneVertexCreatedOrDestroyed(false),               // accumulated in loop below
  // All vertices are inside the domain iff no vertex touches a parallel
  // domain boundary (bitset::none()).
  _areAllVerticesInsideDomain(event.getIsVertexAdjacentToParallelDomainBoundary().none()),
  _invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing(
    event.getInvokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing()
  ) {
  // Scan the 2^d vertices adjacent to this cell.
  for (int i = 0; i < TwoPowerD; i++) {
    // NOTE(review): the comparisons against the vertex-state constants
    // (presumably CreateOrDestroyHangingGridEntity /
    // CreateOrDestroyPersistentGridEntity) were lost in extraction.
    _isOneVertexHanging |= event.getVertexDataFrom(i)

    _isOneVertexCreatedOrDestroyed |= event.getVertexDataTo(i)
    _isOneVertexCreatedOrDestroyed |= event.getVertexDataFrom(i)
  }
  _relativePositionOfCellWithinFatherCell = event.getRelativePositionToFather();

  // NOTE(review): the first line(s) of the couldBeEnclave definition are
  // missing from this extract; only the trailing conjunct is visible.
    and not _isOneVertexHanging;

  // An enclave cell: could be enclave, not refined, and no vertex is being
  // created or destroyed in the respective traversal.
  _willBeEnclave = couldBeEnclave and not _willBeRefined and not _isOneVertexCreatedOrDestroyed;
  _hasBeenEnclave = couldBeEnclave and not _hasBeenRefined and not _isOneVertexCreatedOrDestroyed;
}
43
47
#if PeanoDebug > 0
/**
 * Debug-only setter: overwrite one component of the cell's relative
 * position within its father cell.
 *
 * @param axis  Spatial axis, in [0,Dimensions).
 * @param value New relative position entry along that axis (0, 1 or 2 per
 *              the invariants asserted in getInvokingParentCellsCentre()).
 */
void peano4::datamanagement::CellMarker::setRelativePositionWithinFatherCell(int axis, int value) {
  _relativePositionOfCellWithinFatherCell(axis) = value;
}
#endif
53
// Member isContained(): is x inside this cell, up to tolerance?
// NOTE(review): the signature line(s) — per the header,
// isContained(const tarch::la::Vector<Dimensions,double>& x, double tolerance)
// — are missing from this extract; only the trailing `const {` survived.
  const {
  // Delegate to the static overload with this cell's centre and size.
  return isContained(
    x,
    _centre,
    _h,
    tolerance
  );
}
63
64
// Static isContained(): is x inside the axis-aligned box
// [cellCentre - h/2, cellCentre + h/2] along every dimension, up to
// tolerance?
// NOTE(review): the first signature lines (the x, cellCentre and h
// parameters) are missing from this extract.
  double tolerance
) {
  bool result = true;
  for (int d = 0; d < Dimensions; d++) {
    // x(d) must lie within [centre - h/2, centre + h/2] (tolerant compare).
    result &= tarch::la::greaterEquals(x(d), cellCentre(d) - h(d) / 2.0, tolerance);
    result &= tarch::la::smallerEquals(x(d), cellCentre(d) + h(d) / 2.0, tolerance);
  }
  return result;
}
78
79
// overlaps(): does this cell intersect the axis-aligned box
// [offset, offset + size]?
// NOTE(review): the first signature lines — per the header,
// overlaps(const tarch::la::Vector<Dimensions,double>& offset,
//          const tarch::la::Vector<Dimensions,double>& size)
// — are missing from this extract.
) const {
  bool overlaps = true;

  // Standard per-axis interval overlap test between the cell's extent
  // [centre - h/2, centre + h/2] and the query box.
  for (int d = 0; d < Dimensions; d++) {
    overlaps &= tarch::la::smallerEquals(_centre(d) - _h(d) / 2.0, offset(d) + size(d));
    overlaps &= tarch::la::greaterEquals(_centre(d) + _h(d) / 2.0, offset(d));
  }

  return overlaps;
}
92
93bool peano4::datamanagement::CellMarker::willBeRefined() const { return _willBeRefined; }
94
95bool peano4::datamanagement::CellMarker::hasBeenRefined() const { return _hasBeenRefined; }
96
97bool peano4::datamanagement::CellMarker::isLocal() const { return _isLocal; }
98
99bool peano4::datamanagement::CellMarker::isParentLocal() const { return _isParentLocal; }
100
102
// Fragment of getInvokingParentCellsCentre(): the signature and the
// declaration/initialisation of `result` (presumably this cell's centre —
// TODO confirm against the repository version) are missing from this
// extract.
  for (int d=0; d<Dimensions; d++) {
    // Relative position entries are in {0,1,2} per axis (see header docs).
    assertion1( _relativePositionOfCellWithinFatherCell(d)>=0, toString() );
    assertion1( _relativePositionOfCellWithinFatherCell(d)<=2, toString() );
    // Shift towards the father's centre by one cell width: the leftmost
    // child (0) moves right, the rightmost child (2) moves left, the middle
    // child (1) is already aligned.
    if (_relativePositionOfCellWithinFatherCell(d)==0) {
      result(d) += _h(d);
    }
    if (_relativePositionOfCellWithinFatherCell(d)==2) {
      result(d) -= _h(d);
    }
  }
  return result;
}
117
119
123
// Body of toString(): the `std::string ...::toString() const {` signature
// line is missing from this extract.
  std::ostringstream msg;
  // Render all marker attributes as one bracketed key=value list; this is
  // also what operator<< emits.
  msg
    << "(centre=" << _centre << ",h=" << _h << ",has-been-refined=" << _hasBeenRefined << ",will-be-refined="
    << _willBeRefined << ",is-local=" << _isLocal << ",one-vertex-hanging=" << _isOneVertexHanging
    << ",one-vertex-destroyed/created=" << _isOneVertexCreatedOrDestroyed << ",all-vertices-inside-domain="
    << _areAllVerticesInsideDomain << ",no-lb=" << _invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing
    << ",rel-pos=" << _relativePositionOfCellWithinFatherCell << ",has-been-enclave=" << _hasBeenEnclave
    << ",will-be-enclave=" << _willBeEnclave << ")";
  return msg.str();
}
135
136bool peano4::datamanagement::CellMarker::willBeEnclaveCell() const { return _willBeEnclave; }
137
138bool peano4::datamanagement::CellMarker::willBeSkeletonCell() const { return not _willBeEnclave; }
139
140bool peano4::datamanagement::CellMarker::hasBeenEnclaveCell() const { return _hasBeenEnclave; }
141
142bool peano4::datamanagement::CellMarker::hasBeenSkeletonCell() const { return not _hasBeenEnclave; }
143
144#ifdef Parallel
146
// Fragment of sendAndPollDanglingMessages(): the opening signature line, the
// name of the functor-based send it delegates to, and the bodies of the two
// lambdas (deadlock time-stamp bookkeeping and the time-out warning /
// deadlock-trigger calls into tarch::mpi::Rank — see the calls' argument
// lists below) were lost in extraction. Compare with the repository version.
  const peano4::datamanagement::CellMarker& message, int destination, int tag, MPI_Comm communicator
) {
  message,
  destination,
  tag,
  [&]() {
  // first functor: presumably sets the deadlock warning/time-out stamps —
  // TODO confirm
  },
  [&]() {
  // second functor: issues a time-out warning and a deadlock time-out with
  // the class/method/rank/tag context below
    "peano4::datamanagement::CellMarker", "sendAndPollDanglingMessages()", destination, tag
    );
    "peano4::datamanagement::CellMarker", "sendAndPollDanglingMessages()", destination, tag
    );
  },
  communicator
  );
}
170
// Fragment of receiveAndPollDanglingMessages(): mirrors
// sendAndPollDanglingMessages() for the receive direction. The opening
// signature line, the delegated functor-based receive, and the lambda bodies
// were lost in extraction. Compare with the repository version.
  peano4::datamanagement::CellMarker& message, int source, int tag, MPI_Comm communicator
) {
  message,
  source,
  tag,
  [&]() {
  // first functor: presumably sets the deadlock warning/time-out stamps —
  // TODO confirm
  },
  [&]() {
  // second functor: issues a time-out warning and a deadlock time-out with
  // the class/method/rank/tag context below
    "peano4::datamanagement::CellMarker", "receiveAndPollDanglingMessages()", source, tag
    );
    "peano4::datamanagement::CellMarker", "receiveAndPollDanglingMessages()", source, tag
    );
  },
  communicator
  );
}
194
197
200
  // Fragment of initDatatype(): builds the class-wide MPI datatype from two
  // dummy instances via address probing. The signature line and the
  // declaration of the `instances[2]` array are missing from this extract.
  MPI_Datatype subtypes[] = {
    MPI_DOUBLE, MPI_DOUBLE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_INT};

  int blocklen[] = {Dimensions, Dimensions, 1, 1, 1, 1, 1, 1, Dimensions};

  // NOTE(review): the counts here are inconsistent as shown — `subtypes`
  // has 10 entries, `blocklen` has 9, NumberOfAttributes is 9, yet 10
  // MPI_Get_address calls below write disp[0]..disp[9] into an array of
  // size NumberOfAttributes (= 9), i.e. one past the end. Either a
  // blocklen entry / the count was lost in extraction or this is a genuine
  // off-by-one — verify against the repository version.
  const int NumberOfAttributes = 9;

  // Base addresses of two consecutive instances give the true struct extent.
  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address(&instances[0], &baseFirstInstance);
  MPI_Get_address(&instances[1], &baseSecondInstance);
  MPI_Aint disp[NumberOfAttributes];
  int currentAddress = 0;
  MPI_Get_address(&(instances[0]._centre), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._h), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._hasBeenRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._willBeRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._isLocal), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._areAllVerticesRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._isOneVertexHanging), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._areAllVerticesInsideDomain), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._relativePositionOfCellWithinFatherCell), &disp[currentAddress]);
  currentAddress++;

  // Convert absolute addresses into displacements relative to the first
  // attribute, then resize the type so consecutive array elements line up.
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i = NumberOfAttributes - 1; i >= 0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct(NumberOfAttributes, blocklen, disp, subtypes, &tmpType);
  errorCode += MPI_Type_create_resized(tmpType, offset, extent, &Datatype);
  errorCode += MPI_Type_commit(&Datatype);
  errorCode += MPI_Type_free(&tmpType);
  if (errorCode)
    std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;
}
250
252
// CellMarker::send(): blocking send of one marker with the class-wide
// committed Datatype.
// NOTE(review): the first line of the signature is missing from this extract.
  const peano4::datamanagement::CellMarker& buffer, int destination, int tag, MPI_Comm communicator
) {
  MPI_Send(&buffer, 1, Datatype, destination, tag, communicator);
}
258
// CellMarker::receive(): blocking receive of one marker with the class-wide
// committed Datatype.
// NOTE(review): the first line of the signature is missing from this extract.
  peano4::datamanagement::CellMarker& buffer, int source, int tag, MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Recv(&buffer, 1, Datatype, source, tag, communicator, &status);
  // buffer._senderDestinationRank = status.MPI_SOURCE;
}
266
// Functor-based non-blocking send: issues MPI_Isend, then spins on MPI_Test,
// invoking waitFunctor() between polls so the caller can service dangling
// messages / detect deadlocks.
// NOTE(review): the opening signature lines (function name and the buffer
// parameter) are missing from this extract.
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend(&buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle);
  // First test immediately — the send may already have completed.
  MPI_Test(&sendRequestHandle, &flag, MPI_STATUS_IGNORE);
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test(&sendRequestHandle, &flag, MPI_STATUS_IGNORE);
  }
}
285
// Functor-based non-blocking receive: issues MPI_Irecv, then spins on
// MPI_Test, invoking waitFunctor() between polls so the caller can service
// dangling messages / detect deadlocks.
// NOTE(review): the opening signature lines (function name and the buffer
// parameter) are missing from this extract.
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv(&buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle);
  // First test immediately — the receive may already have completed.
  MPI_Test(&receiveRequestHandle, &flag, &status);
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test(&receiveRequestHandle, &flag, &status);
  }
  // buffer._senderDestinationRank = status.MPI_SOURCE;
}
306
307#endif
#define assertion1(expr, param)
std::ostream & operator<<(std::ostream &out, const peano4::datamanagement::CellMarker &marker)
Definition CellMarker.cpp:7
#define TwoPowerD
Definition Globals.h:19
static constexpr int CreateOrDestroyHangingGridEntity
Implies that the data will then be local or had been local.
static constexpr int CreateOrDestroyPersistentGridEntity
Implies that the data will then be local or had been local.
void triggerDeadlockTimeOut(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1, const std::string &comment="")
Triggers a time out and shuts down the cluster if a timeout is violated.
Definition Rank.cpp:124
void setDeadlockWarningTimeStamp()
Memorise global timeout.
Definition Rank.cpp:193
void writeTimeOutWarning(const std::string &className, const std::string &methodName, int communicationPartnerRank, int tag, int numberOfExpectedMessages=1)
Writes a warning if relevant.
Definition Rank.cpp:148
void setDeadlockTimeOutTimeStamp()
Definition Rank.cpp:198
static Rank & getInstance()
This operation returns the singleton instance.
Definition Rank.cpp:539
virtual void receiveDanglingMessages() override
Answer to MPI Messages.
static ServiceRepository & getInstance()
std::string toString(Filter filter)
Definition convert.cpp:170
bool greaterEquals(double lhs, double rhs, double tolerance=NUMERICAL_ZERO_DIFFERENCE)
bool smallerEquals(double lhs, double rhs, double tolerance=NUMERICAL_ZERO_DIFFERENCE)
tarch::la::Vector< Dimensions, double > x() const
Centre of a cell.
CellMarker(const peano4::grid::GridTraversalEvent &event)
bool isLocal() const
Usually if you have an event of a cell, then the cell is alo local.
tarch::la::Vector< Dimensions, double > getOffset() const
bool overlaps(const tarch::la::Vector< Dimensions, double > &offset, const tarch::la::Vector< Dimensions, double > &size) const
bool _invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing
Definition CellMarker.h:54
static void receive(CellMarker &buffer, int source, int tag, MPI_Comm communicator)
tarch::la::Vector< Dimensions, double > getInvokingParentCellsCentre() const
See getInvokingCellCentre().
bool hasBeenRefined() const
Has the cell been refined when we kicked off this mesh traversal.
tarch::la::Vector< Dimensions, int > getRelativePositionWithinFatherCell() const
tarch::la::Vector< Dimensions, int > _relativePositionOfCellWithinFatherCell
Entries from (0,1,2).
Definition CellMarker.h:62
bool hasBeenSkeletonCell() const
A skeleton cell is a not-enclave cell.
bool isParentLocal() const
A cell can be local and its parent still might not be local.
static void initDatatype()
To be called prior to any MPI usage of this class.
bool isContained(const tarch::la::Vector< Dimensions, double > &x, double tolerance=tarch::la::NUMERICAL_ZERO_DIFFERENCE) const
Is x contained within cell identified by marker object.
bool hasBeenEnclaveCell() const
Define enclave cell.
static void receiveAndPollDanglingMessages(CellMarker &message, int source, int tag, MPI_Comm communicator)
tarch::la::Vector< Dimensions, double > h() const
static void sendAndPollDanglingMessages(const CellMarker &message, int destination, int tag, MPI_Comm communicator)
bool willBeRefined() const
Will the cell be refined in the subsequent iteration.
static void send(const CellMarker &buffer, int destination, int tag, MPI_Comm communicator)
In DaStGen (the first version), I had a non-static version of the send as well as the receive.
Simple vector class.
Definition Vector.h:150