std::ostream& operator<<(std::ostream& out, const peano4::datamanagement::CellMarker& marker) {
  out << marker.toString();
  return out;
}

peano4::datamanagement::CellMarker::CellMarker(const peano4::grid::GridTraversalEvent& event):
  _centre(event.getX()),
  _h(event.getH()),
  _hasBeenRefined(event.getHasBeenRefined() != 0),
  _willBeRefined(event.getWillBeRefined() != 0),
  _isLocal(event.getIsCellLocal()),
  _isParentLocal(event.getIsParentCellLocal()),
  _isOneVertexHanging(false),
  _isOneVertexCreatedOrDestroyed(false),
  _areAllVerticesInsideDomain(event.getIsVertexAdjacentToParallelDomainBoundary().none()),
  _invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing(
    event.getInvokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing()
  ) {
  // ... (constructor body, which refines the per-vertex flags above, elided)
}
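// Usage sketch (observer type and callback name are illustrative, not part of
// this file): markers are typically created from the traversal event handed
// to a mesh observer and then queried for locality and refinement state.
//
//   void MyObserver::enterCell(const peano4::grid::GridTraversalEvent& event) {
//     peano4::datamanagement::CellMarker marker(event);
//     if (marker.isLocal() and not marker.willBeRefined()) {
//       // work on an unrefined cell owned by this spacetree
//     }
//   }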
tarch::la::Vector<Dimensions, int> peano4::datamanagement::CellMarker::getRelativePositionWithinFatherCell() const {
  return _relativePositionOfCellWithinFatherCell;
}

void peano4::datamanagement::CellMarker::setRelativePositionWithinFatherCell(int axis, int value) {
  _relativePositionOfCellWithinFatherCell(axis) = value;
}
for (int d = 0; d < Dimensions; d++) {

for (int d = 0; d < Dimensions; d++) {

for (int d = 0; d < Dimensions; d++) {
  // Entries of the relative position are from {0,1,2}: 0 and 2 identify the
  // children touching the parent's lower resp. upper face along axis d.
  if (_relativePositionOfCellWithinFatherCell(d) == 0) {
  if (_relativePositionOfCellWithinFatherCell(d) == 2) {
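// Hedged sketch (helper name hypothetical): a cell is its parent's middle
// child exactly if its relative position is 1 along every axis.
namespace {
  bool isMiddleChild(const peano4::datamanagement::CellMarker& marker) {
    bool result = true;
    for (int d = 0; d < Dimensions; d++) {
      result &= marker.getRelativePositionWithinFatherCell()(d) == 1;
    }
    return result;
  }
}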
tarch::la::Vector<Dimensions, double> peano4::datamanagement::CellMarker::getOffset() const {
  return _centre - 0.5 * _h;
}
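// Hedged sketch (helper name hypothetical): getOffset() is the bottom-left
// vertex, so the cell occupies [offset(d), offset(d)+h(d)] along each axis d.
// This is roughly isContained() with zero tolerance.
namespace {
  bool insideCell(
    const peano4::datamanagement::CellMarker& marker,
    const tarch::la::Vector<Dimensions, double>& x
  ) {
    bool result = true;
    for (int d = 0; d < Dimensions; d++) {
      result &= x(d) >= marker.getOffset()(d) and x(d) <= marker.getOffset()(d) + marker.h()(d);
    }
    return result;
  }
}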
std::string peano4::datamanagement::CellMarker::toString() const {
  std::ostringstream msg;
  msg
    << "(centre=" << _centre
    << ",h=" << _h
    << ",has-been-refined=" << _hasBeenRefined
    << ",will-be-refined=" << _willBeRefined
    << ",is-local=" << _isLocal
    << ",one-vertex-hanging=" << _isOneVertexHanging
    << ",one-vertex-destroyed/created=" << _isOneVertexCreatedOrDestroyed
    << ",all-vertices-inside-domain=" << _areAllVerticesInsideDomain
    << ",no-lb=" << _invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing
    << ",rel-pos=" << _relativePositionOfCellWithinFatherCell
    << ",has-been-enclave=" << _hasBeenEnclave
    << ",will-be-enclave=" << _willBeEnclave
    << ")";
  return msg.str();
}
160 "peano4::datamanagement::CellMarker",
"sendAndPollDanglingMessages()", destination, tag
163 "peano4::datamanagement::CellMarker",
"sendAndPollDanglingMessages()", destination, tag
184 "peano4::datamanagement::CellMarker",
"receiveAndPollDanglingMessages()", source, tag
187 "peano4::datamanagement::CellMarker",
"receiveAndPollDanglingMessages()", source, tag
void peano4::datamanagement::CellMarker::initDatatype() {
  // ... (set-up of the two probe instances used below elided)

  // One MPI subtype per attribute: two double vectors, seven bool flags
  // (shipped as bytes), one int vector.
  MPI_Datatype subtypes[] = {
    MPI_DOUBLE, MPI_DOUBLE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_BYTE, MPI_INT};

  int blocklen[] = {Dimensions, Dimensions, 1, 1, 1, 1, 1, 1, 1, Dimensions};

  const int NumberOfAttributes = 10;

  MPI_Aint baseFirstInstance;
  MPI_Aint baseSecondInstance;
  MPI_Get_address(&instances[0], &baseFirstInstance);
  MPI_Get_address(&instances[1], &baseSecondInstance);

  MPI_Aint disp[NumberOfAttributes];
  int currentAddress = 0;
  MPI_Get_address(&(instances[0]._centre), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._h), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._hasBeenRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._willBeRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._isLocal), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._areAllVerticesRefined), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._isOneVertexHanging), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._areAllVerticesInsideDomain), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._invokingSpacetreeIsNotInvolvedInAnyDynamicLoadBalancing), &disp[currentAddress]);
  currentAddress++;
  MPI_Get_address(&(instances[0]._relativePositionOfCellWithinFatherCell), &disp[currentAddress]);

  // Normalise displacements relative to the first attribute and derive the
  // struct's true extent from the distance between the two probe instances.
  MPI_Aint offset = disp[0] - baseFirstInstance;
  MPI_Aint extent = baseSecondInstance - baseFirstInstance - offset;
  for (int i = NumberOfAttributes - 1; i >= 0; i--) {
    disp[i] = disp[i] - disp[0];
  }

  int errorCode = 0;
  MPI_Datatype tmpType;
  errorCode += MPI_Type_create_struct(NumberOfAttributes, blocklen, disp, subtypes, &tmpType);
  errorCode += MPI_Type_create_resized(tmpType, offset, extent, &Datatype);
  errorCode += MPI_Type_commit(&Datatype);
  errorCode += MPI_Type_free(&tmpType);
  if (errorCode) {
    std::cerr << "error constructing MPI datatype in " << __FILE__ << ":" << __LINE__ << std::endl;
  }
}
void peano4::datamanagement::CellMarker::send(const CellMarker& buffer, int destination, int tag, MPI_Comm communicator) {
  MPI_Send(&buffer, 1, Datatype, destination, tag, communicator);
}

void peano4::datamanagement::CellMarker::receive(CellMarker& buffer, int source, int tag, MPI_Comm communicator) {
  MPI_Status status;
  MPI_Recv(&buffer, 1, Datatype, source, tag, communicator, &status);
}
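// Usage sketch (rank numbers, tag and marker variable are illustrative): the
// two blocking wrappers above form a matched pair.
//
//   if (myRank == 0) {
//     peano4::datamanagement::CellMarker::send(marker, 1, tag, MPI_COMM_WORLD);
//   } else if (myRank == 1) {
//     peano4::datamanagement::CellMarker::receive(marker, 0, tag, MPI_COMM_WORLD);
//   }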
void peano4::datamanagement::CellMarker::send(
  const CellMarker& buffer,
  int destination,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Request sendRequestHandle;
  int flag = 0;
  MPI_Isend(&buffer, 1, Datatype, destination, tag, communicator, &sendRequestHandle);
  MPI_Test(&sendRequestHandle, &flag, MPI_STATUS_IGNORE);
  startCommunicationFunctor();
  // Poll until the message has left the system; the wait functor gives the
  // caller a chance to do useful work in between the tests.
  while (!flag) {
    waitFunctor();
    MPI_Test(&sendRequestHandle, &flag, MPI_STATUS_IGNORE);
  }
}
void peano4::datamanagement::CellMarker::receive(
  CellMarker& buffer,
  int source,
  int tag,
  std::function<void()> startCommunicationFunctor,
  std::function<void()> waitFunctor,
  MPI_Comm communicator
) {
  MPI_Status  status;
  MPI_Request receiveRequestHandle;
  int flag = 0;
  MPI_Irecv(&buffer, 1, Datatype, source, tag, communicator, &receiveRequestHandle);
  MPI_Test(&receiveRequestHandle, &flag, &status);
  startCommunicationFunctor();
  while (!flag) {
    waitFunctor();
    MPI_Test(&receiveRequestHandle, &flag, &status);
  }
}
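// Hedged sketch of how the two functors are typically wired up: the start
// functor arms the deadlock bookkeeping, the wait functor keeps draining
// dangling messages while polling. The lambda bodies are assumptions built
// from the tarch calls used elsewhere in this file.
//
//   receive(
//     marker, source, tag,
//     []() {
//       tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
//       tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
//     },
//     []() {
//       tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
//     },
//     communicator
//   );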