14#include "arcane/utils/ArcanePrecomp.h"
16#include "arcane/utils/Array.h"
17#include "arcane/utils/PlatformUtils.h"
18#include "arcane/utils/String.h"
19#include "arcane/utils/ITraceMng.h"
20#include "arcane/utils/Real2.h"
21#include "arcane/utils/Real3.h"
22#include "arcane/utils/Real2x2.h"
23#include "arcane/utils/Real3x3.h"
24#include "arcane/utils/HPReal.h"
25#include "arcane/utils/APReal.h"
27#include "arcane/IParallelMng.h"
29#include "arcane/parallel/mpi/MpiDatatype.h"
30#include "arcane/parallel/mpi/MpiAdapter.h"
31#include "arcane/parallel/mpi/MpiParallelDispatch.h"
32#include "arcane/parallel/mpi/MpiLock.h"
36#include "arccore/message_passing_mpi/MpiTypeDispatcherImpl.h"
// Shorthand alias: MP refers to the Arccore message-passing layer used below.
46namespace MP = ::Arccore::MessagePassing;
// Constructor: wraps a heap-allocated Arccore MpiTypeDispatcher for Type and
// leaves the custom min/max/sum MPI datatype and operator unset (MPI_*_NULL)
// until they are created by the setup routine further below.
// NOTE(review): the first initializer (original line 53) and the constructor
// body braces are missing from this extract.
51template<
class Type> MpiParallelDispatchT<Type>::
52MpiParallelDispatchT(ITraceMng* tm,IMessagePassingMng* parallel_mng,MpiAdapter* adapter,MpiDatatype* datatype)
// Owned raw pointer; released in the destructor via delete.
54, m_mp_dispatcher(new
MP::Mpi::MpiTypeDispatcher<
Type>(parallel_mng,adapter,datatype))
55, m_min_max_sum_datatype(MPI_DATATYPE_NULL)
56, m_min_max_sum_operator(MPI_OP_NULL)
// Destructor: frees the Arccore dispatcher allocated in the constructor.
// The MPI datatype/operator members are released separately (MPI_Type_free /
// MPI_Op_free in the cleanup routine below) — presumably so they can be freed
// before MPI is finalized; verify against callers.
64template<
class Type> MpiParallelDispatchT<Type>::
65~MpiParallelDispatchT()
68 delete m_mp_dispatcher;
// Releases the MPI resources built for the combined min/max/sum reduction.
// Idempotent: each handle is freed only when set, then reset to its MPI null
// value, so a second call is a no-op.
// NOTE(review): the line carrying this function's name is missing from this
// extract (original lines 75-76 were dropped).
74template<
class Type>
void MpiParallelDispatchT<Type>::
77 if (m_min_max_sum_datatype!=MPI_DATATYPE_NULL){
78 MPI_Type_free(&m_min_max_sum_datatype);
79 m_min_max_sum_datatype = MPI_DATATYPE_NULL;
81 if (m_min_max_sum_operator!=MPI_OP_NULL){
82 MPI_Op_free(&m_min_max_sum_operator);
83 m_min_max_sum_operator = MPI_OP_NULL;
// Builds the MPI machinery for combined min/max/sum reductions: a 2-member
// MPI struct datatype mirroring MinMaxSumInfo (Integer-typed rank fields
// first, then the Type-valued min/max/sum fields) plus the user-defined
// reduction operator _MinMaxSumOperator.
// NOTE(review): this extract is missing the function-name line and the
// declarations/initialisation of blen, indices and mmsi (original lines
// ~91-103 were dropped).
90template<
class Type>
void MpiParallelDispatchT<Type>::
97 MPI_Datatype oldtypes[2];
// First member block: the Integer rank fields of MinMaxSumInfo.
101 oldtypes[0] = MpiBuiltIn::datatype(
Integer());
// Byte offset of m_min_value inside the struct, computed by pointer
// arithmetic so it matches the real layout, padding included.
104 indices[1] = (
char*)&mmsi.m_min_value - (
char*)&mmsi;
105 oldtypes[1] = _mpiDatatype();
107 MPI_Type_create_struct(2,blen,indices,oldtypes,&m_min_max_sum_datatype);
108 MPI_Type_commit(&m_min_max_sum_datatype);
// The '1' flags the operator as commutative (see MPI_Op_create).
110 MPI_Op_create(_MinMaxSumOperator,1,&m_min_max_sum_operator);
// User-defined MPI reduction callback combining per-rank MinMaxSumInfo
// records element-wise. Per MPI convention the result accumulates into the
// second buffer ('b'): it keeps the smallest min and largest max together
// with the contributing rank, and sums the values. Ties on min or max are
// broken toward the lower rank (math::min of the two ranks).
// NOTE(review): the definition of 'n' is missing from this extract —
// presumably Integer n = *len; (original line ~121 was dropped).
116template<
class Type>
void ARCANE_MPIOP_CALL MpiParallelDispatchT<Type>::
117_MinMaxSumOperator(
void* a,
void* b,
int* len,MPI_Datatype* type)
122 MinMaxSumInfo * va =
static_cast<MinMaxSumInfo*
>(a);
123 MinMaxSumInfo * vb =
static_cast<MinMaxSumInfo*
>(b);
124 for(Integer i=0;i<n;++i) {
125 MinMaxSumInfo& ma = va[i];
126 MinMaxSumInfo& mb = vb[i];
// Minimum: on equal values keep the lowest contributing rank.
129 if (ma.m_min_value==mb.m_min_value){
130 mb.m_min_rank = math::min(mb.m_min_rank,ma.m_min_rank);
132 else if (ma.m_min_value<mb.m_min_value){
133 mb.m_min_value = ma.m_min_value;
134 mb.m_min_rank = ma.m_min_rank;
// Maximum: same tie-breaking toward the lowest rank.
136 if (mb.m_max_value==ma.m_max_value){
137 mb.m_max_rank = math::min(mb.m_max_rank,ma.m_max_rank);
139 else if (mb.m_max_value<ma.m_max_value){
140 mb.m_max_value = ma.m_max_value;
141 mb.m_max_rank = ma.m_max_rank;
// Sum: the cast keeps the result in Type (relevant for narrow types).
143 mb.m_sum_value = (
Type)(ma.m_sum_value + mb.m_sum_value);
// Combined min/max/sum all-reduce for a single value using the custom
// datatype/operator, without reinitialising the rank arguments — callers
// supply the ranks seeding the reduction. On return the in-out arguments
// hold the global min, max, sum and the ranks owning the min/max.
// NOTE(review): the trailing reference parameters (min_rank/max_rank), the
// declaration of the local 'mmsi' and the braces are missing from this
// extract.
150template<
class Type>
void MpiParallelDispatchT<Type>::
151computeMinMaxSumNoInit(
Type& min_val,
Type& max_val,
Type& sum_val,
155 mmsi.m_min_rank = min_rank;
156 mmsi.m_max_rank = max_rank;
157 mmsi.m_min_value = min_val;
158 mmsi.m_max_value = max_val;
159 mmsi.m_sum_value = sum_val;
160 MinMaxSumInfo mmsi_ret;
161 _adapter()->allReduce(&mmsi,&mmsi_ret,1,m_min_max_sum_datatype,
162 m_min_max_sum_operator);
163 min_val = mmsi_ret.m_min_value;
164 max_val = mmsi_ret.m_max_value;
165 sum_val = mmsi_ret.m_sum_value;
166 min_rank = mmsi_ret.m_min_rank;
167 max_rank = mmsi_ret.m_max_rank;
// Scalar min/max/sum reduction: seeds both rank arguments with this
// process's MPI rank, then delegates to computeMinMaxSumNoInit().
// NOTE(review): the function-name and parameter lines are missing from this
// extract (original lines ~174-176 were dropped).
173template<
class Type>
void MpiParallelDispatchT<Type>::
177 min_rank = _adapter()->commRank();
178 max_rank = _adapter()->commRank();
182 computeMinMaxSumNoInit(min_val,max_val,sum_val,min_rank,max_rank);
// Array variant: reduces n independent values in a single all-reduce by
// packing each value into a MinMaxSumInfo record (the value is copied into
// the min, max and sum slots; both ranks are seeded with this process's
// rank), then unpacking the globally reduced records into the per-element
// output views. Output views are assumed to have at least values.size()
// elements — TODO confirm with callers.
188template<
class Type>
void MpiParallelDispatchT<Type>::
189computeMinMaxSum(ConstArrayView<Type> values,
190 ArrayView<Type> min_values,
191 ArrayView<Type> max_values,
192 ArrayView<Type> sum_values,
193 ArrayView<Int32> min_ranks,
194 ArrayView<Int32> max_ranks)
196 const Integer n = values.size();
197 UniqueArray<MinMaxSumInfo> mmsi(n);
198 const Integer comm_rank = m_mp_dispatcher->adapter()->commRank();
// Seed every record with the local value and this rank.
199 for(Integer i=0;i<n;++i) {
200 mmsi[i].m_min_rank = comm_rank;
201 mmsi[i].m_max_rank = comm_rank;
202 mmsi[i].m_min_value = values[i];
203 mmsi[i].m_max_value = values[i];
204 mmsi[i].m_sum_value = values[i];
// One collective call reduces all n records with the custom operator.
206 UniqueArray<MinMaxSumInfo> mmsi_ret(n);
207 _adapter()->allReduce(mmsi.data(),mmsi_ret.data(),n,m_min_max_sum_datatype,
208 m_min_max_sum_operator);
// Scatter the reduced records back into the five output views.
209 for(Integer i=0;i<n;++i) {
210 min_values[i] = mmsi_ret[i].m_min_value;
211 max_values[i] = mmsi_ret[i].m_max_value;
212 sum_values[i] = mmsi_ret[i].m_sum_value;
213 min_ranks[i] = mmsi_ret[i].m_min_rank;
214 max_ranks[i] = mmsi_ret[i].m_max_rank;
// Combined send/receive with peer 'rank': forwards both buffers, their
// element counts, the element size and the MPI datatype for Type to the
// adapter's directSendRecv.
221template<
class Type>
void MpiParallelDispatchT<Type>::
222sendRecv(ConstArrayView<Type> send_buffer,ArrayView<Type> recv_buffer,
Int32 rank)
224 MPI_Datatype type = _mpiDatatype();
225 _adapter()->directSendRecv(send_buffer.data(),send_buffer.size(),
226 recv_buffer.data(),recv_buffer.size(),
227 rank,
sizeof(
Type),type);
// Scalar prefix reduction (scan) of send_buf with the MPI operator mapped
// from 'op', returning this rank's partial result.
// NOTE(review): the 'return recv_buf;' line and the braces appear to be
// missing from this extract.
233template<
class Type>
Type MpiParallelDispatchT<Type>::
234scan(eReduceType op,
Type send_buf)
236 MPI_Datatype type = _mpiDatatype();
// Pre-seed the output with the input; the adapter overwrites it.
237 Type recv_buf = send_buf;
238 _adapter()->scan(&send_buf,&recv_buf,1,type,_mpiReduceOperator(op));
// In-place array prefix reduction: scans into a temporary buffer, then
// copies the per-rank partial results back into send_buf.
// NOTE(review): the declaration of 's' — presumably
// Integer s = send_buf.size(); — is missing from this extract
// (original line ~249 was dropped).
245template<
class Type>
void MpiParallelDispatchT<Type>::
246scan(eReduceType op,ArrayView<Type> send_buf)
248 MPI_Datatype type = _mpiDatatype();
250 UniqueArray<Type> recv_buf(s);
251 _adapter()->scan(send_buf.data(),recv_buf.data(),s,type,_mpiReduceOperator(op));
252 send_buf.copy(recv_buf);
// Returns the raw MPI_Datatype registered for Type in the Arccore datatype
// wrapper. NOTE(review): the function-name line — presumably
// _mpiDatatype(), the name used by callers above — is missing from this
// extract.
258template<
class Type> MPI_Datatype MpiParallelDispatchT<Type>::
261 return m_mp_dispatcher->datatype()->datatype();
// Maps the Arcane reduction kind 'rt' to the corresponding MPI_Op held by
// the Arccore datatype wrapper.
267template<
class Type> MPI_Op MpiParallelDispatchT<Type>::
268_mpiReduceOperator(eReduceType rt)
270 return m_mp_dispatcher->datatype()->reduceOperator(rt);
// Accessor for the underlying MpiAdapter. NOTE(review): the function-name
// line is missing from this extract — presumably _adapter(), the name used
// throughout the file.
276template<
class Type> MpiAdapter* MpiParallelDispatchT<Type>::
279 return m_mp_dispatcher->adapter();
// Accessor for the MpiDatatype wrapper of the dispatcher.
// NOTE(review): the function-name line is missing from this extract.
285template<
class Type> MpiDatatype* MpiParallelDispatchT<Type>::
288 return m_mp_dispatcher->datatype();
// Exposes the owned Arccore dispatcher through the generic
// ITypeDispatcher<Type> interface (non-owning pointer for the caller).
// NOTE(review): the function-name line is missing from this extract.
291template<
class Type> ITypeDispatcher<Type>* MpiParallelDispatchT<Type>::
294 return m_mp_dispatcher;
// Explicit template instantiations for every type the dispatcher supports:
// all builtin integral and floating-point types plus Arcane's numeric value
// types (APReal, Real2/Real3, Real2x2/Real3x3, HPReal) — see the includes
// at the top of the file.
300template class MpiParallelDispatchT<char>;
301template class MpiParallelDispatchT<signed char>;
302template class MpiParallelDispatchT<unsigned char>;
303template class MpiParallelDispatchT<short>;
304template class MpiParallelDispatchT<unsigned short>;
305template class MpiParallelDispatchT<int>;
306template class MpiParallelDispatchT<unsigned int>;
307template class MpiParallelDispatchT<long>;
308template class MpiParallelDispatchT<unsigned long>;
309template class MpiParallelDispatchT<long long>;
310template class MpiParallelDispatchT<unsigned long long>;
311template class MpiParallelDispatchT<float>;
312template class MpiParallelDispatchT<double>;
313template class MpiParallelDispatchT<long double>;
314template class MpiParallelDispatchT<APReal>;
315template class MpiParallelDispatchT<Real2>;
316template class MpiParallelDispatchT<Real3>;
317template class MpiParallelDispatchT<Real2x2>;
318template class MpiParallelDispatchT<Real3x3>;
319template class MpiParallelDispatchT<HPReal>;
// NOTE(review): residue of the closing of namespace Arcane::MessagePassing::Mpi.
329namespace Arcane::MessagePassing::Mpi
Liste des fonctions d'échange de message.
Lecteur des fichiers de maillage via la bibliothèque LIMA.
Integer len(const char *s)
Retourne la longueur de la chaîne s.
-*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
Espace de nommage contenant les types et déclarations qui gèrent le mécanisme de parallélisme par échange de message.
Int32 Integer
Type représentant un entier.