Peano
Loading...
Searching...
No Matches
MPIAndStorageAspect.py
Go to the documentation of this file.
1# This file is part of the Peano project. For conditions of distribution and
2# use, please see the copyright notice at www.peano-framework.org
3from peano4.datamodel.DoF import DoFAssociation
4
5import jinja2
6import dastgen2
7
8
10 """!
11
12 Represents Peano's MPI and storage aspect injected into a DaStGen model.
13
14
15 This is an aspect to a DaStGen object, i.e. something that's
16 added to a data model to augment it with some behaviour. The
17 realisation of this aspect is manifold yet all serves the
18 purpose to make data fit for MPI:
19
20 - The aspect ensures that we include the right headers.
 21 - The aspect ensures that the generated code has the right signature,
 22 which in turn depends on which grid entity the type
 23 is associated with.
24 - The aspect lets you embed a data merge operation into the
25 generated data.
26
 27 The aspect also determines how and
 28 if we store data in Peano. Therefore, it covers more than
 29 solely MPI. Use the store and load attribute to control these
30 predicates. Please study @ref page_peano_localisation for further
31 documentation.
32
33
34 ## Usage
35
36 The instance is to be added to a DaStGen2 model through add_aspect().
37
38 If you want to inject a particular merge code, just set the internal
39 string self.merge_implementation.
40
41
42 ## Attributes
43
44 dof_association: DoFAssociation
45 Clarifies which grid entity the underlying datatype is associated
46 to.
47
48@todo A nicer implementation would introduce an Aspect interface. See MPI aspect in Peano 4. There's a similar comment in there.
49 """
50
51 def __init__(self, dof_association_):
52 """!
53
54 Create the aspect
55
56 @param includes: C++ expression
57 Typically a full-blown set of include statements, but it can also
58 hold preprocessor macros or similar.
59
60 @param implementation_file_includes: C++ expression
61 If you add includes to the includes attribute, they are embedded into
62 the header. This can lead to cyclic dependencies. If you want the
63 includes only in the implementation file, use this attribute instead.
64
65 """
66 super(MPIAndStorageAspect, self).__init__()
67 self.dof_association = dof_association_
69 self.receive_predicate = "true"
70 self.send_predicate = "true"
71 self.load_store_compute_flag = "::peano4::grid::LoadStoreComputeFlag::LoadFromInputStream_ProvideToCalculations_StoreToOutputStream"
72 self.includes = ""
74 pass
75
76 def __str__(self):
77 result = "("
78 if self.dof_association == DoFAssociation.Vertex:
79 result += "vertex"
80 elif self.dof_association == DoFAssociation.Face:
81 result += "face"
82 elif self.dof_association == DoFAssociation.Cell:
83 result += "cell"
84 else:
85 result += "not-associated"
86
87 if self.merge_implementation != "":
88 result += ",merge-impl="
89 result += self.merge_implementation
90 else:
91 result += ",empty-merge-impl"
92
93 result += ",load/store/compute flag=" + self.load_store_compute_flag
94 result += ",compute=" + self.compute
95 result += ",send=" + self.send_predicate
96 result += ",receive=" + self.receive_predicate
97
98 result += ")"
99 return result
100
101 def get_attributes(self):
102 return ""
103
104 def get_include(self):
105 result = (
106 """
107#include "tarch/la/Vector.h"
108#include "tarch/la/SmartPointerVector.h"
109#include "tarch/mpi/Rank.h"
110#include "tarch/services/ServiceRepository.h"
111#include "peano4/grid/LoadStoreComputeFlag.h"
112#include "peano4/utils/Globals.h"
113#include "peano4/grid/TraversalObserver.h"
114"""
115 )
116 if self.dof_association != DoFAssociation.Generic:
117 result += """
118#include "peano4/datamanagement/CellMarker.h"
119#include "peano4/datamanagement/FaceMarker.h"
120#include "peano4/datamanagement/VertexMarker.h"
121"""
122 for include in self.includes:
123 result += include
124 return result
125
126 def get_method_declarations(self, full_qualified_name):
127 d = {
128 "full_qualified_name": full_qualified_name,
129 "name": full_qualified_name.split("::")[-1],
130 }
131 result = (
132"""
133
134 enum ObjectConstruction {
135 NoData
136 };
137
138 {{name}}( ObjectConstruction ):
139 {{name}}() {}
140
141#ifdef Parallel
142 static void sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator());
143 static void receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator() );
144#endif
145 """
146 )
147
148 if self.dof_association == DoFAssociation.Vertex:
149 result += """
150 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId);
151
152 bool receiveAndMerge(const peano4::datamanagement::VertexMarker& marker) const;
153 bool send(const peano4::datamanagement::VertexMarker& marker) const;
154 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::VertexMarker& marker);
155"""
156 elif self.dof_association == DoFAssociation.Face:
157 result += """
158 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId);
159
160 bool receiveAndMerge(const peano4::datamanagement::FaceMarker& marker) const;
161 bool send(const peano4::datamanagement::FaceMarker& marker) const;
162 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::FaceMarker& marker);
163"""
164 elif self.dof_association == DoFAssociation.Cell:
165 result += """
166 void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId);
167
168 bool receiveAndMerge(const peano4::datamanagement::CellMarker& marker) const;
169 bool send(const peano4::datamanagement::CellMarker& marker) const;
170 static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::CellMarker& marker);
171"""
172 pass
173 elif (
174 self.dof_association == DoFAssociation.Generic
175 or self.dof_association == DoFAssociation.Global
176 ):
177 pass
178 else:
179 assert False
180
181 return jinja2.Template(result).render(**d)
182
183 def get_implementation(self, full_qualified_name):
184 d = {
185 "full_qualified_name": full_qualified_name,
186 "merge_implementation": self.merge_implementation,
187 "receive_predicate": self.receive_predicate,
188 "send_predicate": self.send_predicate,
189 "load_store_compute_flag": self.load_store_compute_flag,
190 }
191
192 result = (
193 """
194#ifdef Parallel
195void {{full_qualified_name}}::sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator ) {
196 {{full_qualified_name}}::send(
197 message, destination, tag,
198 [&]() {
199 tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
200 tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
201 },
202 [&]() {
203 tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "sendAndPollDanglingMessages()",destination, tag );
204 tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "sendAndPollDanglingMessages()", destination, tag );
205 tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
206 },
207 communicator
208 );
209}
210
211
212void {{full_qualified_name}}::receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator ) {
213 {{full_qualified_name}}::receive(
214 message, source, tag,
215 [&]() {
216 tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
217 tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
218 },
219 [&]() {
220 tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
221 tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
222 tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
223 },
224 communicator
225 );
226}
227#endif
228 """
229 )
230
231 if self.dof_association == DoFAssociation.Vertex:
232 result += """
233void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId) {
234 {{merge_implementation}}
235}
236
237
238bool {{full_qualified_name}}::receiveAndMerge(
239 const peano4::datamanagement::VertexMarker& marker
240 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
241) const {
242 return {{receive_predicate}};
243}
244
245
246bool {{full_qualified_name}}::send(
247 const peano4::datamanagement::VertexMarker& marker
248 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
249) const {
250 return {{send_predicate}};
251}
252
253
254::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
255 const peano4::datamanagement::VertexMarker& marker
256 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
257) {
258 return {{load_store_compute_flag}};
259}
260"""
261 elif self.dof_association == DoFAssociation.Face:
262 result += """
263void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId) {
264 {{merge_implementation}}
265}
266
267
268bool {{full_qualified_name}}::receiveAndMerge(
269 const peano4::datamanagement::FaceMarker& marker
270 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
271) const {
272 return {{receive_predicate}};
273}
274
275
276bool {{full_qualified_name}}::send(
277 const peano4::datamanagement::FaceMarker& marker
278 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
279) const {
280 return {{send_predicate}};
281}
282
283
284::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
285 const peano4::datamanagement::FaceMarker& marker
286 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
287) {
288 return {{load_store_compute_flag}};
289}
290"""
291 elif self.dof_association == DoFAssociation.Cell:
292 result += """
293void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId) {
294 {{merge_implementation}}
295}
296
297
298bool {{full_qualified_name}}::receiveAndMerge(
299 const peano4::datamanagement::CellMarker& marker
300 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
301) const {
302 return {{receive_predicate}};
303}
304
305
306bool {{full_qualified_name}}::send(
307 const peano4::datamanagement::CellMarker& marker
308 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
309) const {
310 return {{send_predicate}};
311}
312
313
314::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
315 const peano4::datamanagement::CellMarker& marker
316 {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
317) {
318 return {{load_store_compute_flag}};
319}
320"""
321 elif (
322 self.dof_association == DoFAssociation.Generic
323 or self.dof_association == DoFAssociation.Global
324 ):
325 pass
326 else:
327 assert False
328
329 return jinja2.Template(result).render(**d)
Superclass of each and every MPI aspect in Peano 4.
Definition Aspect.py:6
Represents Peano's MPI and storage aspect injected into a DaStGen model.
__init__(self, dof_association_)
Create the aspect.