MPIAndStorageAspect.py
# This file is part of the Peano project. For conditions of distribution and
# use, please see the copyright notice at www.peano-framework.org
from peano4.datamodel.DoF import DoFAssociation

import jinja2
import dastgen2


class MPIAndStorageAspect(dastgen2.aspects.MPI):
    """!

    Represents Peano's MPI and storage aspect injected into a DaStGen model.

    This is an aspect of a DaStGen object, i.e. something that is added to a
    data model to augment it with additional behaviour. The realisation of
    this aspect is manifold, yet it all serves the purpose of making the data
    fit for MPI:

    - The aspect ensures that we include the right headers.
    - The aspect ensures that the generated code has the right signature,
      which in turn depends on which grid entity the type is associated
      with.
    - The aspect lets you embed a data merge operation into the generated
      data.

    The aspect also determines how and whether we store data in Peano at
    all, so it covers more than solely MPI. Use the store and load
    attributes to control these predicates. Please study
    @ref page_peano_localisation for further documentation.


    ## Usage

    The instance is to be added to a DaStGen2 model through add_aspect().

    If you want to inject a particular merge code, just set the internal
    string self.merge_implementation.

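    A minimal sketch of how the aspect is typically configured and attached.
    The names my_data_model, setU()/getU() and MyHelpers.h below are purely
    illustrative and not part of this file:

    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.py}
    from peano4.datamodel.DoF import DoFAssociation
    from peano4.datamodel.MPIAndStorageAspect import MPIAndStorageAspect

    aspect = MPIAndStorageAspect(DoFAssociation.Vertex)

    # C++ body injected into the generated merge() routine (illustrative)
    aspect.merge_implementation = "setU( 0.5*(getU() + neighbour.getU()) );"

    # extra header needed by the merge snippet above (illustrative)
    aspect.includes.append('#include "MyHelpers.h"')

    my_data_model.add_aspect(aspect)
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~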

    ## Attributes

    dof_association: DoFAssociation
      Clarifies which grid entity the underlying datatype is associated
      with.

    """

    def __init__(self, dof_association_):
        super(MPIAndStorageAspect, self).__init__()
        self.dof_association = dof_association_
        # C++ snippet injected into the body of the generated merge() routine
        self.merge_implementation = ""
        # C++ boolean expressions returned by the generated receiveAndMerge()
        # and send() predicates
        self.receive_predicate = "true"
        self.send_predicate = "true"
        # C++ expression returned by the generated loadStoreComputeFlag()
        self.load_store_compute_flag = "::peano4::grid::LoadStoreComputeFlag::LoadFromInputStream_ProvideToCalculations_StoreToOutputStream"
        # additional include lines appended verbatim to get_include()'s output
        self.includes = []
        pass

    def __str__(self):
        result = "("
        if self.dof_association == DoFAssociation.Vertex:
            result += "vertex"
        elif self.dof_association == DoFAssociation.Face:
            result += "face"
        elif self.dof_association == DoFAssociation.Cell:
            result += "cell"
        else:
            result += "not-associated"

        if self.merge_implementation != "":
            result += ",merge-impl="
            result += self.merge_implementation
        else:
            result += ",empty-merge-impl"

        result += ",load/store/compute flag=" + self.load_store_compute_flag
        result += ",send=" + self.send_predicate
        result += ",receive=" + self.receive_predicate

        result += ")"
        return result

    def get_include(self):
        result = (
            super(MPIAndStorageAspect, self).get_include()
            + """
#include "tarch/la/Vector.h"
#include "tarch/mpi/Rank.h"
#include "tarch/services/ServiceRepository.h"
#include "peano4/grid/LoadStoreComputeFlag.h"
#include "peano4/utils/Globals.h"
#include "peano4/grid/TraversalObserver.h"
"""
        )
        if self.dof_association != DoFAssociation.Generic:
            result += """
#include "peano4/datamanagement/CellMarker.h"
#include "peano4/datamanagement/FaceMarker.h"
#include "peano4/datamanagement/VertexMarker.h"
"""
        for include in self.includes:
            result += include
        return result

    def get_method_declarations(self, full_qualified_name):
        d = {
            "full_qualified_name": full_qualified_name,
            "name": full_qualified_name.split("::")[-1],
        }
        result = (
            super(MPIAndStorageAspect, self).get_method_declarations(
                full_qualified_name
            )
            + """

    enum ObjectConstruction {
      NoData
    };

    {{name}}( ObjectConstruction ):
      {{name}}() {}

#ifdef Parallel
    static void sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator());
    static void receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator() );
#endif
    """
        )

        if self.dof_association == DoFAssociation.Vertex:
            result += """
    void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId);

    bool receiveAndMerge(const peano4::datamanagement::VertexMarker& marker) const;
    bool send(const peano4::datamanagement::VertexMarker& marker) const;
    static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::VertexMarker& marker);
"""
        elif self.dof_association == DoFAssociation.Face:
            result += """
    void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId);

    bool receiveAndMerge(const peano4::datamanagement::FaceMarker& marker) const;
    bool send(const peano4::datamanagement::FaceMarker& marker) const;
    static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::FaceMarker& marker);
"""
        elif self.dof_association == DoFAssociation.Cell:
            result += """
    void merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId);

    bool receiveAndMerge(const peano4::datamanagement::CellMarker& marker) const;
    bool send(const peano4::datamanagement::CellMarker& marker) const;
    static ::peano4::grid::LoadStoreComputeFlag loadStoreComputeFlag(const peano4::datamanagement::CellMarker& marker);
"""
            pass
        elif (
            self.dof_association == DoFAssociation.Generic
            or self.dof_association == DoFAssociation.Global
        ):
            pass
        else:
            assert False

        return jinja2.Template(result).render(**d)

    def get_implementation(self, full_qualified_name):
        d = {
            "full_qualified_name": full_qualified_name,
            "merge_implementation": self.merge_implementation,
            "receive_predicate": self.receive_predicate,
            "send_predicate": self.send_predicate,
            "load_store_compute_flag": self.load_store_compute_flag,
            # The templates below iterate over this entry. Default to no
            # additional load/store arguments (assumption: the embedding
            # generator overrides it where extra arguments are required).
            "ADDITIONAL_LOAD_STORE_ARGUMENTS": [],
        }

        result = (
            super(MPIAndStorageAspect, self).get_implementation(full_qualified_name)
            + """
#ifdef Parallel
void {{full_qualified_name}}::sendAndPollDanglingMessages(const {{full_qualified_name}}& message, int destination, int tag, MPI_Comm communicator ) {
  {{full_qualified_name}}::send(
    message, destination, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "sendAndPollDanglingMessages()", destination, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "sendAndPollDanglingMessages()", destination, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}


void {{full_qualified_name}}::receiveAndPollDanglingMessages({{full_qualified_name}}& message, int source, int tag, MPI_Comm communicator ) {
  {{full_qualified_name}}::receive(
    message, source, tag,
    [&]() {
      tarch::mpi::Rank::getInstance().setDeadlockWarningTimeStamp();
      tarch::mpi::Rank::getInstance().setDeadlockTimeOutTimeStamp();
    },
    [&]() {
      tarch::mpi::Rank::getInstance().writeTimeOutWarning( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
      tarch::mpi::Rank::getInstance().triggerDeadlockTimeOut( "{{full_qualified_name}}", "receiveAndPollDanglingMessages()", source, tag );
      tarch::services::ServiceRepository::getInstance().receiveDanglingMessages();
    },
    communicator
  );
}
#endif
    """
        )

        if self.dof_association == DoFAssociation.Vertex:
            result += """
void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::VertexMarker& marker, int spacetreeId) {
  {{merge_implementation}}
}


bool {{full_qualified_name}}::receiveAndMerge(
  const peano4::datamanagement::VertexMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{receive_predicate}};
}


bool {{full_qualified_name}}::send(
  const peano4::datamanagement::VertexMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{send_predicate}};
}


::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
  const peano4::datamanagement::VertexMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) {
  return {{load_store_compute_flag}};
}
"""
        elif self.dof_association == DoFAssociation.Face:
            result += """
void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::FaceMarker& marker, int spacetreeId) {
  {{merge_implementation}}
}


bool {{full_qualified_name}}::receiveAndMerge(
  const peano4::datamanagement::FaceMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{receive_predicate}};
}


bool {{full_qualified_name}}::send(
  const peano4::datamanagement::FaceMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{send_predicate}};
}


::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
  const peano4::datamanagement::FaceMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) {
  return {{load_store_compute_flag}};
}
"""
        elif self.dof_association == DoFAssociation.Cell:
            result += """
void {{full_qualified_name}}::merge(peano4::grid::TraversalObserver::SendReceiveContext context, const {{full_qualified_name}}& neighbour, const peano4::datamanagement::CellMarker& marker, int spacetreeId) {
  {{merge_implementation}}
}


bool {{full_qualified_name}}::receiveAndMerge(
  const peano4::datamanagement::CellMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{receive_predicate}};
}


bool {{full_qualified_name}}::send(
  const peano4::datamanagement::CellMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) const {
  return {{send_predicate}};
}


::peano4::grid::LoadStoreComputeFlag {{full_qualified_name}}::loadStoreComputeFlag(
  const peano4::datamanagement::CellMarker& marker
  {% for arg in ADDITIONAL_LOAD_STORE_ARGUMENTS %}, const {{arg[0]}}& {{arg[1]}} {% endfor %}
) {
  return {{load_store_compute_flag}};
}
"""
        elif (
            self.dof_association == DoFAssociation.Generic
            or self.dof_association == DoFAssociation.Global
        ):
            pass
        else:
            assert False

        return jinja2.Template(result).render(**d)