DoubleMessage.h
//
// Generated by DaStGen2 (C) 2020 Tobias Weinzierl
//
// For DaStGen's copyright, visit www.peano-framework.org. These generated
// files, however, are not subject to copyright, i.e. feel free to add your
// own copyright here.
//
#pragma once

#include <string>

#ifdef Parallel
  #include <mpi.h>
  #include <functional>
#endif

#include "tarch/la/Vector.h"
#include "tarch/mpi/Rank.h"


namespace tarch{
namespace mpi{

  struct DoubleMessage;
}
}

struct tarch::mpi::DoubleMessage {
  public:

    DoubleMessage() {}
    DoubleMessage(double __value);

    double getValue() const;
    void setValue(double value);
    DoubleMessage(const DoubleMessage& copy) = default;

    #ifdef Parallel
    /**
     * Hands out the MPI datatype if we work without the LLVM MPI extension.
     */
    [[clang::map_mpi_datatype]]
    static MPI_Datatype getForkDatatype();

    [[clang::map_mpi_datatype]]
    static MPI_Datatype getJoinDatatype();

    [[clang::map_mpi_datatype]]
    static MPI_Datatype getBoundaryExchangeDatatype();

    [[clang::map_mpi_datatype]]
    static MPI_Datatype getMultiscaleDataExchangeDatatype();

    [[clang::map_mpi_datatype]]
    static MPI_Datatype getGlobalCommunciationDatatype();

    [[clang::map_mpi_datatype]]
    static void freeForkDatatype();

    [[clang::map_mpi_datatype]]
    static void freeJoinDatatype();

    [[clang::map_mpi_datatype]]
    static void freeBoundaryExchangeDatatype();

    [[clang::map_mpi_datatype]]
    static void freeMultiscaleDataExchangeDatatype();

    [[clang::map_mpi_datatype]]
    static void freeGlobalCommunciationDatatype();

    int getSenderRank() const;

    /**
     * Wrapper around getDatatype() to trigger lazy evaluation if we use the
     * lazy initialisation.
     */
    static void initDatatype();

    /**
     * Free the underlying MPI datatype.
     */
    static void shutdownDatatype();

    /**
     * In DaStGen (the first version), I had a non-static version of the send
     * as well as the receive. In DaStGen2, both are static routines that take
     * the message as an explicit argument.
     */
    static void send(const tarch::mpi::DoubleMessage& buffer, int destination, int tag, MPI_Comm communicator );
    static void receive(tarch::mpi::DoubleMessage& buffer, int source, int tag, MPI_Comm communicator );

    static void send(const tarch::mpi::DoubleMessage& buffer, int destination, int tag, std::function<void()> startCommunicationFunctor, std::function<void()> waitFunctor, MPI_Comm communicator );
    static void receive( tarch::mpi::DoubleMessage& buffer, int source, int tag, std::function<void()> startCommunicationFunctor, std::function<void()> waitFunctor, MPI_Comm communicator );
    #endif

    enum ObjectConstruction {
      NoData
    };

    DoubleMessage( ObjectConstruction ):
      DoubleMessage() {}

#ifdef Parallel
    static void sendAndPollDanglingMessages(const tarch::mpi::DoubleMessage& message, int destination, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator());
    static void receiveAndPollDanglingMessages(tarch::mpi::DoubleMessage& message, int source, int tag, MPI_Comm communicator=tarch::mpi::Rank::getInstance().getCommunicator() );
#endif

    std::string toString() const;

  private:
    double _value;

    #ifdef Parallel
    private:
      #if !defined(__MPI_ATTRIBUTES_LANGUAGE_EXTENSION__)
      /**
       * Whenever we use LLVM's MPI extension (DaStGen), we rely on lazy
       * initialisation of the datatype. Without the extension, this field
       * stores the type explicitly.
       */
      static MPI_Datatype Datatype;
      #endif
    #endif
};
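A minimal usage sketch of the blocking exchange, not part of the generated header: it assumes a build with Parallel defined and that plain MPI_Init/MPI_Finalize are acceptable in place of Peano's own startup wrappers. Ranks, tag and payload values are illustrative only.

    #include <mpi.h>
    #include "tarch/mpi/DoubleMessage.h"

    int main(int argc, char** argv) {
      MPI_Init(&argc, &argv);

      // Register the MPI datatype once before any message is exchanged.
      tarch::mpi::DoubleMessage::initDatatype();

      int rank;
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);

      const int tag = 0;
      if (rank == 0) {
        tarch::mpi::DoubleMessage out(23.5);
        tarch::mpi::DoubleMessage::send(out, /*destination*/ 1, tag, MPI_COMM_WORLD);
      }
      else if (rank == 1) {
        tarch::mpi::DoubleMessage in(0.0);
        tarch::mpi::DoubleMessage::receive(in, /*source*/ 0, tag, MPI_COMM_WORLD);
        // in.getValue() now returns 23.5; in.toString() yields a debug string.
      }

      // Release the datatype before MPI shuts down.
      tarch::mpi::DoubleMessage::shutdownDatatype();
      MPI_Finalize();
      return 0;
    }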
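The functor-based send()/receive() overloads only fix the callbacks' signatures in this header. The sketch below assumes the two callbacks are hooks invoked around the non-blocking exchange, e.g. to detect deadlocks or to keep other message queues moving; the helper sendWithProgress and the callback bodies are hypothetical.

    #include <functional>
    #include <mpi.h>
    #include "tarch/mpi/DoubleMessage.h"

    // Hypothetical helper: forwards to the functor-based send() overload.
    void sendWithProgress(const tarch::mpi::DoubleMessage& out, int destination, int tag) {
      tarch::mpi::DoubleMessage::send(
        out, destination, tag,
        []() { /* assumption: invoked once the non-blocking send is triggered */ },
        []() { /* assumption: invoked while waiting for completion, e.g. to poll */ },
        MPI_COMM_WORLD
      );
    }

    // The *AndPollDanglingMessages variants default their communicator to
    // tarch::mpi::Rank::getInstance().getCommunicator(), so the call shrinks to:
    //   tarch::mpi::DoubleMessage::sendAndPollDanglingMessages(out, destination, tag);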