SUNphi  1.0
Mpi.hpp
Go to the documentation of this file.
1 #ifndef _MPI_HPP
2 #define _MPI_HPP
3 
4 /// \file Mpi.hpp
5 ///
6 /// \brief Incapsulate all functionalities of MPI into a more
7 /// convenient form
8 
9 #ifdef HAVE_CONFIG_H
10  #include "config.hpp"
11 #endif
12 
13 #ifdef USE_MPI
14  #include <mpi.h>
15 #endif
16 
17 #include <ios/MinimalLogger.hpp>
18 #include <metaprogramming/TypeTraits.hpp>
19 #include <system/Timer.hpp>
20 #include <serialize/Binarize.hpp>
21 #include <utility/SingleInstance.hpp>
22 #include <Threads.hpp>
23 
24 namespace SUNphi
25 {
26  /// Makes all thread print for current scope
27 #define ALLOWS_ALL_RANKS_TO_PRINT_FOR_THIS_SCOPE(LOGGER)
28  SET_FOR_CURRENT_SCOPE(LOGGER_ALL_RANKS_PRINT,LOGGER.onlyMasterRankPrint,false)
29 
#ifdef USE_MPI
  /// Provides the \c MPI_Datatype of an any unknown type
  ///
  /// Base (unspecialized) case: returns nullptr as a sentinel meaning
  /// "no MPI datatype known for T"; the specializations generated
  /// below provide the actual datatypes.
  template <typename T>
  inline MPI_Datatype mpiType()
  {
    return
      nullptr;
  }
  
  /// Provides the \c MPI_Datatype of a given type
  ///
  /// NOTE(review): the continuation backslashes and the trailing
  /// SWALLOW_SEMICOLON_AT_GLOBAL_SCOPE line (original line 47) were
  /// lost in extraction; restored here — the trailing macro is needed
  /// because each invocation below ends with ';' at namespace scope.
#define PROVIDE_MPI_DATATYPE(MPI_TYPE,TYPE)	\
  template <>					\
  inline MPI_Datatype mpiType<TYPE>()		\
  {						\
    return					\
      MPI_TYPE;					\
  }						\
  SWALLOW_SEMICOLON_AT_GLOBAL_SCOPE
  
  PROVIDE_MPI_DATATYPE(MPI_CHAR,char);
  
  PROVIDE_MPI_DATATYPE(MPI_INT,int);
  
  PROVIDE_MPI_DATATYPE(MPI_DOUBLE,double);
#endif
55 
56  /// Class wrapping all MPI functionalities
57  class Mpi : public SingleInstance<Mpi>
58  {
59 #ifdef USE_MPI
60  /// Crash on MPI error, providing a meaningful error
61 #define MPI_CRASH_ON_ERROR(...)
62  Mpi::crashOnError(__LINE__,__FILE__,__PRETTY_FUNCTION__,__VA_ARGS__)
63 #endif
64 
65  /// Decrypt the returned value of an MPI call
66  ///
67  /// Returns the value of \c rc
68  template <typename...Args>
69  int crashOnError(const int line, ///< Line of file where the error needs to be checked
70  const char *file, ///< File where the error must be checked
71  const char *function, ///< Function where the error was possibly raised
72  const int rc, ///< Exit condition of the called routine
73  Args&&... args) ///< Other arguments
74  const
75  {
76 #ifdef USE_MPI
77 
78  if(rc!=MPI_SUCCESS and rank()==0)
79  {
80  /// Length of the error message
81  int len;
82 
83  /// Error message
84  char err[MPI_MAX_ERROR_STRING];
85  MPI_Error_string(rc,err,&len);
86 
87  minimalCrash(file,line,__PRETTY_FUNCTION__,"(args ignored!), raised error %d, err: %s",rc,err);
88  }
89 
90 #endif
91 
92  return
93  rc;
94  }
95 
96  public:
97 
98  /// Id of master rank
99  static constexpr int MASTER_RANK=
100  0;
101 
102  /// Placeholder for all ranks
103  [[ maybe_unused ]]
104  static constexpr int ALL_RANKS=
105  -1;
106 
107  /// Initialize MPI
108  Mpi()
109  {
110 #ifdef USE_MPI
111 
112  /// Takes the time
113  Duration initDur;
114 
115  MPI_CRASH_ON_ERROR(durationOf(initDur,MPI_Init,nullptr,nullptr),"Error initializing MPI");
116 
117  minimalLogger(runLog,"MPI initialized in %lg s",durationInSec(initDur));
118 #endif
119  }
120 
121  /// Check initialization flag
123  const
124  {
125 
126 #ifdef USE_MPI
127 
128  /// Initialization flag
129  int res;
130  MPI_CRASH_ON_ERROR(MPI_Initialized(&res),"Checking MPI initialization");
131 
132  return
133  res;
134 
135 #else
136 
137  return
138  true;
139 
140 #endif
141  }
142 
143  /// Finalize MPI
144  ~Mpi()
145  {
146 
147 #ifdef USE_MPI
148 
149  MPI_CRASH_ON_ERROR(MPI_Finalize(),"Finalizing MPI");
150 
151 #endif
152  }
153 
154  /// Get current rank calling explicitly MPI
155  int getRank()
156  const
157  {
158 
159 #ifdef USE_MPI
160 
161  /// Returned value
162  int res;
163  MPI_CRASH_ON_ERROR(MPI_Comm_rank(MPI_COMM_WORLD,&res),"Getting current rank");
164 
165  return
166  res;
167 
168 #else
169 
170  return
171  0;
172 
173 #endif
174 
175  }
176 
177  /// Cached value of current rank
178  int rank()
179  const
180  {
181  /// Stored value
182  static int _rank=
183  getRank();
184 
185  return
186  _rank;
187  }
188 
189  /// Get the total number of ranks, calling explicitly MPI
190  int getNRanks()
191  const
192  {
193 
194 #ifdef USE_MPI
195 
196  /// Returned value
197  int res;
198  MPI_CRASH_ON_ERROR(MPI_Comm_size(MPI_COMM_WORLD,&res),"Getting total number of ranks");
199 
200  return
201  res;
202 
203 #else
204 
205  return
206  1;
207 
208 #endif
209  }
210 
211  /// Check if this is the master rank
213  const
214  {
215  /// Store the result
216  static bool is=
217  (rank()==MASTER_RANK);
218 
219  return
220  is;
221  }
222 
223  /// Cached value of total number of ranks
224  int nRanks()
225  const
226  {
227  /// Stored value
228  static int _nRanks=
229  getNRanks();
230 
231  return
232  _nRanks;
233  }
234 
235  /// Reduces among all MPI process
236  template <typename T>
237  T allReduce(const T& in)
238  const
239  {
240 
241 #ifdef USE_MPI
242 
243  /// Result
244  T out;
245 
246  minimalLogger(runLog,"%p %d",&out,rank());
247 
248  MPI_CRASH_ON_ERROR(MPI_Allreduce(&in,&out,1,mpiType<T>(),MPI_SUM,MPI_COMM_WORLD),"Reducing among all processes");
249 
250  return
251  out;
252 
253 #else
254 
255  return
256  in;
257 
258 #endif
259 
260  }
261 
262  /// Broadcast among all MPI process
263  ///
264  /// This is a simple wrapper around the MPI_Bcast function
265  template <typename T,
267  void broadcast(T* x, ///< Quantity to broadcast
268  const size_t& size, ///< Size of the quantity to broadcast
269  int root=MASTER_RANK) ///< Rank from which to broadcast
270  const
271  {
272 #ifdef USE_MPI
273  minimalLogger(runLog,"%p %d",x,rank());
274  MPI_CRASH_ON_ERROR(MPI_Bcast(x,size,MPI_CHAR,root,MPI_COMM_WORLD),"Broadcasting");
275 #endif
276  }
277 
278  /// Broadcast among all MPI process
279  ///
280  /// Accepts trivially copyable structures
281  template <typename T,
283  void broadcast(T& x, ///< Quantity to broadcast
284  int root=MASTER_RANK) ///< Rank from which to broadcast
285  const
286  {
287  broadcast(&x,sizeof(T),root);
288  }
289 
290  /// Broadcast among all MPI process
291  ///
292  /// Accepts all binarizable classes
293  template <typename T,
295  void broadcast(T&& val, ///< Quantity to broadcast
296  int root=MASTER_RANK) ///< Rank from which to broadcast
297  const
298  {
299 
300 #ifdef USE_MPI
301  Binarizer bin=
302  val.binarize();
303 
304  broadcast(&*bin.begin(),bin.size(),root);
305 
306  val.deBinarize(bin);
307 #endif
308  }
309  };
310 
  /// Global MPI
312  extern Mpi mpi;
313 }
314 
315 #endif
Mpi mpi
Global MPI.
Definition: SUNphi.cpp:212
int rank() const
Cached value of current rank.
Definition: Mpi.hpp:178
T allReduce(const T &in) const
Reduces among all MPI process.
Definition: Mpi.hpp:237
int getRank() const
Get current rank calling explicitly MPI.
Definition: Mpi.hpp:155
bool isInitialized() const
Check initialization flag.
Definition: Mpi.hpp:122
int crashOnError(const int line, const char *file, const char *function, const int rc, Args &&...args) const
Definition: Mpi.hpp:69
#define USE_MPI
Definition: config.hpp:101
Mpi()
Initialize MPI.
Definition: Mpi.hpp:108
Logger runLog("/dev/stdout")
Global logger.
int nRanks() const
Cached value of total number of ranks.
Definition: Mpi.hpp:224
#define SWALLOW_SEMICOLON_AT_GLOBAL_SCOPE
#define SFINAE_ON_TEMPLATE_ARG(...)
Definition: SFINAE.hpp:24
int getNRanks() const
Get the total number of ranks, calling explicitly MPI.
Definition: Mpi.hpp:190
bool isMasterRank() const
Check if this is the master rank.
Definition: Mpi.hpp:212
#define SET_FOR_CURRENT_SCOPE(NAME, VAR,...)
Set for current scope.
Definition: File.hpp:21
void broadcast(T &&val, int root=MASTER_RANK) const
Definition: Mpi.hpp:295
~Mpi()
Finalize MPI.
Definition: Mpi.hpp:144