12 Represents Peano's MPI and storage aspect injected into a DaStGen model.
15 This is an aspect to a DaStGen object, i.e. something that's
16 added to a data model to augment it with some behaviour. The
17 realisation of this aspect is manifold yet all serves the
18 purpose to make data fit for MPI:
20 - The aspect ensures that we include the right headers.
21 - The aspect ensures that the generated code has the right signature
22 which in turn depends on the fact to which grid entity the type
24 - The aspect lets you embed a data merge operation into the
27 The aspect also determines how and
28 whether we store data in Peano. Therefore, it covers more than
29 solely MPI. Use the store and load attribute to control these
30 predicates. Please study @ref page_peano_localisation for further
36 The instance is to be added to a DaStGen2 model through add_aspect().
38 If you want to inject a particular merge code, just set the internal
39 string self.merge_implementation.
44 dof_association: DoFAssociation
45 Clarifies which grid entity the underlying datatype is associated with
48@todo A nicer implementation would introduce an Aspect interface. See MPI aspect in Peano 4. There's a similar comment in there.
56 @param includes: C++ expression
57 Typically a full-blown set of include statements, but it can also
58 hold preprocessor macros or similar.
60 @param implementation_file_includes: C++ expression
61 If you add includes to the includes attribute, they are embedded into
62 the header. This can lead to cyclic dependencies. If you want the
63 includes only in the implementation file, use this attribute instead.
66 super(MPIAndStorageAspect, self).
__init__()
71 self.
load_store_compute_flag =
"::peano4::grid::LoadStoreComputeFlag::LoadFromInputStream_ProvideToCalculations_StoreToOutputStream"
85 result +=
"not-associated"
88 result +=
",merge-impl="
91 result +=
",empty-merge-impl"
94 result +=
",compute=" + self.compute
107#include "tarch/la/Vector.h"
108#include "tarch/la/SmartPointerVector.h"
109#include "tarch/mpi/Rank.h"
110#include "tarch/services/ServiceRepository.h"
111#include "peano4/grid/LoadStoreComputeFlag.h"
112#include "peano4/utils/Globals.h"
113#include "peano4/grid/TraversalObserver.h"
118#include "peano4/datamanagement/CellMarker.h"
119#include "peano4/datamanagement/FaceMarker.h"
120#include "peano4/datamanagement/VertexMarker.h"
128 "full_qualified_name": full_qualified_name,
129 "name": full_qualified_name.split(
"::")[-1],
134 enum ObjectConstruction {
138 {{name}}( ObjectConstruction ):
142 static void sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator());
143 static void receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator() );
150 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId);
152 bool receiveAndMerge(const peano4::datamanagement::VertexMarker& marker) const;
153 bool send(const peano4::datamanagement::VertexMarker& marker) const;
154 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::VertexMarker& marker);
158 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId);
160 bool receiveAndMerge(const peano4::datamanagement::FaceMarker& marker) const;
161 bool send(const peano4::datamanagement::FaceMarker& marker) const;
162 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::FaceMarker& marker);
166 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId);
168 bool receiveAndMerge(const peano4::datamanagement::CellMarker& marker) const;
169 bool send(const peano4::datamanagement::CellMarker& marker) const;
170 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::CellMarker& marker);
181 return jinja2.Template(result).render(**d)
185 "full_qualified_name": full_qualified_name,
195void {{full_qualified_name}}::sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator ) {
196 {{full_qualified_name}}::send(
197 message, destination, tag,
199 tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
200 tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
203 tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "sendAndPollDanglingMessages()",destination, tag );
204 tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "sendAndPollDanglingMessages()", destination, tag );
205 tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
212void {{full_qualified_name}}::receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator ) {
213 {{full_qualified_name}}::receive(
214 message, source, tag,
216 tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
217 tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
220 tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
221 tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
222 tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
233void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId) {
234 {{merge_implementation}}
238bool {{full_qualified_name}}::receiveAndMerge(
239 const peano4::datamanagement::VertexMarker& marker
240 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
242 return {{receive_predicate}};
246bool {{full_qualified_name}}::send(
247 const peano4::datamanagement::VertexMarker& marker
248 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
250 return {{send_predicate}};
254::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
255 const peano4::datamanagement::VertexMarker& marker
256 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
258 return {{load_store_compute_flag}};
263void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId) {
264 {{merge_implementation}}
268bool {{full_qualified_name}}::receiveAndMerge(
269 const peano4::datamanagement::FaceMarker& marker
270 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
272 return {{receive_predicate}};
276bool {{full_qualified_name}}::send(
277 const peano4::datamanagement::FaceMarker& marker
278 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
280 return {{send_predicate}};
284::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
285 const peano4::datamanagement::FaceMarker& marker
286 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
288 return {{load_store_compute_flag}};
293void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId) {
294 {{merge_implementation}}
298bool {{full_qualified_name}}::receiveAndMerge(
299 const peano4::datamanagement::CellMarker& marker
300 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
302 return {{receive_predicate}};
306bool {{full_qualified_name}}::send(
307 const peano4::datamanagement::CellMarker& marker
308 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
310 return {{send_predicate}};
314::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
315 const peano4::datamanagement::CellMarker& marker
316 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
318 return {{load_store_compute_flag}};
329 return jinja2.Template(result).render(**d)
Superclass of each and every MPI aspect in Peano 4.
Represents Peano's MPI and storage aspect injected into a DaStGen model.
get_method_declarations(self, full_qualified_name)
__init__(self, dof_association_)
Create the aspect.
get_implementation(self, full_qualified_name)
implementation_file_includes