14#include "arcane/utils/ArcanePrecomp.h"
16#include "arcane/utils/Array.h"
17#include "arcane/utils/PlatformUtils.h"
18#include "arcane/utils/String.h"
19#include "arcane/utils/ITraceMng.h"
20#include "arcane/utils/Real2.h"
21#include "arcane/utils/Real3.h"
22#include "arcane/utils/Real2x2.h"
23#include "arcane/utils/Real3x3.h"
24#include "arcane/utils/HPReal.h"
25#include "arcane/utils/APReal.h"
27#include "arcane/IParallelMng.h"
29#include "arcane/parallel/mpi/MpiDatatype.h"
30#include "arcane/parallel/mpi/MpiParallelDispatch.h"
34#include "arccore/message_passing_mpi/internal/MpiTypeDispatcherImpl.h"
35#include "arccore/message_passing_mpi/internal/MpiAdapter.h"
36#include "arccore/message_passing_mpi/internal/MpiLock.h"
44namespace MP = ::Arccore::MessagePassing;
// NOTE(review): constructor initializer-list fragment — the constructor
// header line was lost in this extract; confirm against the full file.
// Both MPI handles start as null sentinels: they are created later by the
// MPI_Type_create_struct / MPI_Op_create code in this file and released by
// the block that calls MPI_Type_free / MPI_Op_free.
53, m_min_max_sum_datatype(MPI_DATATYPE_NULL)
54, m_min_max_sum_operator(MPI_OP_NULL)
62template<
class Type> MpiParallelDispatchT<Type>::
63~MpiParallelDispatchT()
66 delete m_mp_dispatcher;
72template<
class Type>
void MpiParallelDispatchT<Type>::
// NOTE(review): the method-name line is missing from this extract
// (presumably the cleanup/finalize method — confirm against the full file).
// Releases the MPI datatype and the user-defined reduction operator used
// for min/max/sum reductions. Each handle is reset to its null sentinel
// after being freed, so a second call is a no-op. The closing-brace lines
// of the two `if` blocks were lost in extraction.
75 if (m_min_max_sum_datatype!=MPI_DATATYPE_NULL){
76 MPI_Type_free(&m_min_max_sum_datatype);
77 m_min_max_sum_datatype = MPI_DATATYPE_NULL;
79 if (m_min_max_sum_operator!=MPI_OP_NULL){
80 MPI_Op_free(&m_min_max_sum_operator);
81 m_min_max_sum_operator = MPI_OP_NULL;
88template<
class Type>
void MpiParallelDispatchT<Type>::
// NOTE(review): the method-name line is missing from this extract
// (presumably the initialization method — confirm against the full file).
// Builds an MPI struct datatype describing MinMaxSumInfo as two segments
// and registers _MinMaxSumOperator as the matching reduction. The
// declarations of `mmsi`, `blen` and `indices`, and the assignments of
// blen[0]/blen[1]/indices[0], were lost in extraction.
95 MPI_Datatype oldtypes[2];
// First segment: the built-in datatype for Integer — presumably covering
// the rank fields, which precede m_min_value in the struct (see the
// offset computation below); confirm against MinMaxSumInfo's declaration.
99 oldtypes[0] = MpiBuiltIn::datatype(
Integer());
// Second segment starts at the byte offset of m_min_value inside the
// struct, computed by pointer difference from the struct base address.
102 indices[1] = (
char*)&mmsi.m_min_value - (
char*)&mmsi;
103 oldtypes[1] = _mpiDatatype();
105 MPI_Type_create_struct(2,blen,indices,oldtypes,&m_min_max_sum_datatype);
106 MPI_Type_commit(&m_min_max_sum_datatype);
// Second argument = 1: the operator is declared commutative to MPI.
108 MPI_Op_create(_MinMaxSumOperator,1,&m_min_max_sum_operator);
114template<
class Type>
void ARCANE_MPIOP_CALL MpiParallelDispatchT<Type>::
115_MinMaxSumOperator(
void* a,
void* b,
int* len,MPI_Datatype* type)
120 MinMaxSumInfo * va =
static_cast<MinMaxSumInfo*
>(a);
121 MinMaxSumInfo * vb =
static_cast<MinMaxSumInfo*
>(b);
122 for(Integer i=0;i<n;++i) {
123 MinMaxSumInfo& ma = va[i];
124 MinMaxSumInfo& mb = vb[i];
127 if (ma.m_min_value==mb.m_min_value){
128 mb.m_min_rank = math::min(mb.m_min_rank,ma.m_min_rank);
130 else if (ma.m_min_value<mb.m_min_value){
131 mb.m_min_value = ma.m_min_value;
132 mb.m_min_rank = ma.m_min_rank;
134 if (mb.m_max_value==ma.m_max_value){
135 mb.m_max_rank = math::min(mb.m_max_rank,ma.m_max_rank);
137 else if (mb.m_max_value<ma.m_max_value){
138 mb.m_max_value = ma.m_max_value;
139 mb.m_max_rank = ma.m_max_rank;
141 mb.m_sum_value = (
Type)(ma.m_sum_value + mb.m_sum_value);
148template<
class Type>
void MpiParallelDispatchT<Type>::
149computeMinMaxSumNoInit(
Type& min_val,
Type& max_val,
Type& sum_val,
150 Int32& min_rank,Int32& max_rank)
153 mmsi.m_min_rank = min_rank;
154 mmsi.m_max_rank = max_rank;
155 mmsi.m_min_value = min_val;
156 mmsi.m_max_value = max_val;
157 mmsi.m_sum_value = sum_val;
158 MinMaxSumInfo mmsi_ret;
159 _adapter()->allReduce(&mmsi,&mmsi_ret,1,m_min_max_sum_datatype,
160 m_min_max_sum_operator);
161 min_val = mmsi_ret.m_min_value;
162 max_val = mmsi_ret.m_max_value;
163 sum_val = mmsi_ret.m_sum_value;
164 min_rank = mmsi_ret.m_min_rank;
165 max_rank = mmsi_ret.m_max_rank;
171template<
class Type>
void MpiParallelDispatchT<Type>::
// NOTE(review): the method-name line and the leading parameters of the
// signature are missing from this extract (only the trailing rank
// parameters survived); the lines seeding the value outputs were also
// lost. Confirm against the full file.
173 Int32& min_rank,Int32& max_rank)
// Seed both rank outputs with this process's own rank, then delegate the
// global reduction to computeMinMaxSumNoInit().
175 min_rank = _adapter()->commRank();
176 max_rank = _adapter()->commRank();
180 computeMinMaxSumNoInit(min_val,max_val,sum_val,min_rank,max_rank);
186template<
class Type>
void MpiParallelDispatchT<Type>::
187computeMinMaxSum(ConstArrayView<Type> values,
188 ArrayView<Type> min_values,
189 ArrayView<Type> max_values,
190 ArrayView<Type> sum_values,
191 ArrayView<Int32> min_ranks,
192 ArrayView<Int32> max_ranks)
194 const Integer n = values.size();
195 UniqueArray<MinMaxSumInfo> mmsi(n);
196 const Integer comm_rank = m_mp_dispatcher->adapter()->commRank();
197 for(Integer i=0;i<n;++i) {
198 mmsi[i].m_min_rank = comm_rank;
199 mmsi[i].m_max_rank = comm_rank;
200 mmsi[i].m_min_value = values[i];
201 mmsi[i].m_max_value = values[i];
202 mmsi[i].m_sum_value = values[i];
204 UniqueArray<MinMaxSumInfo> mmsi_ret(n);
205 _adapter()->allReduce(mmsi.data(),mmsi_ret.data(),n,m_min_max_sum_datatype,
206 m_min_max_sum_operator);
207 for(Integer i=0;i<n;++i) {
208 min_values[i] = mmsi_ret[i].m_min_value;
209 max_values[i] = mmsi_ret[i].m_max_value;
210 sum_values[i] = mmsi_ret[i].m_sum_value;
211 min_ranks[i] = mmsi_ret[i].m_min_rank;
212 max_ranks[i] = mmsi_ret[i].m_max_rank;
219template<
class Type>
void MpiParallelDispatchT<Type>::
220sendRecv(ConstArrayView<Type> send_buffer,ArrayView<Type> recv_buffer,Int32 rank)
222 MPI_Datatype type = _mpiDatatype();
223 _adapter()->directSendRecv(send_buffer.data(),send_buffer.size(),
224 recv_buffer.data(),recv_buffer.size(),
225 rank,
sizeof(
Type),type);
231template<
class Type>
Type MpiParallelDispatchT<Type>::
232scan(eReduceType op,
Type send_buf)
234 MPI_Datatype type = _mpiDatatype();
235 Type recv_buf = send_buf;
236 _adapter()->scan(&send_buf,&recv_buf,1,type,_mpiReduceOperator(op));
243template<
class Type>
void MpiParallelDispatchT<Type>::
244scan(eReduceType op,ArrayView<Type> send_buf)
246 MPI_Datatype type = _mpiDatatype();
248 UniqueArray<Type> recv_buf(s);
249 _adapter()->scan(send_buf.data(),recv_buf.data(),s,type,_mpiReduceOperator(op));
250 send_buf.copy(recv_buf);
256template<
class Type> MPI_Datatype MpiParallelDispatchT<Type>::
259 return m_mp_dispatcher->datatype()->datatype();
265template<
class Type> MPI_Op MpiParallelDispatchT<Type>::
266_mpiReduceOperator(eReduceType rt)
268 return m_mp_dispatcher->datatype()->reduceOperator(rt);
274template<
class Type> MpiAdapter* MpiParallelDispatchT<Type>::
277 return m_mp_dispatcher->adapter();
283template<
class Type> MpiDatatype* MpiParallelDispatchT<Type>::
// NOTE(review): the method-name line is missing from this extract —
// confirm against the full file. Accessor returning the MpiDatatype
// wrapper held by the dispatcher (non-owning pointer).
286 return m_mp_dispatcher->datatype();
289template<
class Type> ITypeDispatcher<Type>* MpiParallelDispatchT<Type>::
// NOTE(review): the method-name line is missing from this extract —
// confirm against the full file. Accessor exposing the underlying Arccore
// type dispatcher owned by this object (callers must not delete it).
292 return m_mp_dispatcher;
298template class MpiParallelDispatchT<char>;
299template class MpiParallelDispatchT<signed char>;
300template class MpiParallelDispatchT<unsigned char>;
301template class MpiParallelDispatchT<short>;
302template class MpiParallelDispatchT<unsigned short>;
303template class MpiParallelDispatchT<int>;
304template class MpiParallelDispatchT<unsigned int>;
305template class MpiParallelDispatchT<long>;
306template class MpiParallelDispatchT<unsigned long>;
307template class MpiParallelDispatchT<long long>;
308template class MpiParallelDispatchT<unsigned long long>;
309template class MpiParallelDispatchT<float>;
310template class MpiParallelDispatchT<double>;
311template class MpiParallelDispatchT<long double>;
312template class MpiParallelDispatchT<APReal>;
313template class MpiParallelDispatchT<Real2>;
314template class MpiParallelDispatchT<Real3>;
315template class MpiParallelDispatchT<Real2x2>;
316template class MpiParallelDispatchT<Real3x3>;
317template class MpiParallelDispatchT<HPReal>;
327namespace Arcane::MessagePassing::Mpi
Liste des fonctions d'échange de messages.
Interface du gestionnaire de traces.
Interface du gestionnaire des échanges de messages.
Encapsulation d'un MPI_Datatype.
Interface des messages pour le type Type.
Classe d'accès aux traces.
Integer len(const char *s)
Retourne la longueur de la chaîne s.
-*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
Int32 Integer
Type représentant un entier.
Espace de nommage contenant les types et déclarations qui gèrent le mécanisme de parallélisme par échange de messages.