14#include "arcane/utils/ArcanePrecomp.h"
16#include "arcane/utils/Array.h"
17#include "arcane/utils/PlatformUtils.h"
18#include "arcane/utils/String.h"
19#include "arcane/utils/ITraceMng.h"
20#include "arcane/utils/Real2.h"
21#include "arcane/utils/Real3.h"
22#include "arcane/utils/Real2x2.h"
23#include "arcane/utils/Real3x3.h"
24#include "arcane/utils/HPReal.h"
25#include "arcane/utils/APReal.h"
27#include "arcane/IParallelMng.h"
29#include "arcane/parallel/mpi/MpiDatatype.h"
30#include "arcane/parallel/mpi/MpiAdapter.h"
31#include "arcane/parallel/mpi/MpiParallelDispatch.h"
32#include "arcane/parallel/mpi/MpiLock.h"
36#include "arccore/message_passing_mpi/MpiTypeDispatcherImpl.h"
45namespace MP = ::Arccore::MessagePassing;
// Constructor: wires this dispatcher to a freshly allocated Arccore MPI type
// dispatcher and leaves the custom min/max/sum MPI datatype/operator as the
// MPI "null" handles until they are created on demand.
// NOTE(review): extraction garbled this view — the stray leading numerals are
// original-file line numbers fused into the text, and the base-class
// member-initializer line (between the signature and ', m_mp_dispatcher')
// appears to be missing here; confirm against the full source.
50template<
class Type> MpiParallelDispatchT<Type>::
51MpiParallelDispatchT(ITraceMng* tm,IMessagePassingMng* parallel_mng,MpiAdapter* adapter,MpiDatatype* datatype)
// Owns the new MpiTypeDispatcher; released in the destructor below.
53, m_mp_dispatcher(new
MP::Mpi::MpiTypeDispatcher<
Type>(parallel_mng,adapter,datatype))
// MPI handles start "null" so cleanup can safely test before freeing.
54, m_min_max_sum_datatype(MPI_DATATYPE_NULL)
55, m_min_max_sum_operator(MPI_OP_NULL)
// Destructor: releases the owned Arccore dispatcher.
// NOTE(review): the surrounding braces (and possibly additional cleanup
// statements) are missing from this extracted view; confirm against the
// full source.
63template<
class Type> MpiParallelDispatchT<Type>::
64~MpiParallelDispatchT()
67 delete m_mp_dispatcher;
// Frees the MPI datatype and operator created for the min/max/sum
// reductions, resetting each handle to its MPI "null" value so a double
// free is impossible if called twice.
// NOTE(review): the method-name line and braces are missing from this
// extracted view (presumably a finalize-style cleanup method); confirm
// against the full source.
73template<
class Type>
void MpiParallelDispatchT<Type>::
76 if (m_min_max_sum_datatype!=MPI_DATATYPE_NULL){
77 MPI_Type_free(&m_min_max_sum_datatype);
78 m_min_max_sum_datatype = MPI_DATATYPE_NULL;
80 if (m_min_max_sum_operator!=MPI_OP_NULL){
81 MPI_Op_free(&m_min_max_sum_operator);
82 m_min_max_sum_operator = MPI_OP_NULL;
// Builds the MPI struct datatype that mirrors MinMaxSumInfo (an Int32 rank
// part followed by a 'Type' value part) and registers the custom reduction
// operator used by the computeMinMaxSum* methods.
// NOTE(review): several lines are missing from this extracted view — the
// method name, the 'blen'/'indices' array declarations and the sample
// 'mmsi' instance used for the offset computation; confirm against the
// full source before editing.
89template<
class Type>
void MpiParallelDispatchT<Type>::
96 MPI_Datatype oldtypes[2];
// Field 0: the integer (rank) members, described via the Integer datatype.
100 oldtypes[0] = MpiBuiltIn::datatype(
Integer());
// Byte offset of the first 'Type' member, measured on a sample instance.
103 indices[1] = (
char*)&mmsi.m_min_value - (
char*)&mmsi;
104 oldtypes[1] = _mpiDatatype();
106 MPI_Type_create_struct(2,blen,indices,oldtypes,&m_min_max_sum_datatype);
107 MPI_Type_commit(&m_min_max_sum_datatype);
// Second argument 1 declares the operator commutative to MPI.
109 MPI_Op_create(_MinMaxSumOperator,1,&m_min_max_sum_operator);
// Custom MPI reduction callback (MPI_User_function shape): merges per-rank
// MinMaxSumInfo entries from 'a' into 'b', keeping the smallest min and the
// largest max together with a rank that holds them, and accumulating the
// sum in 'Type'.
// NOTE(review): the loop bound 'n' is used below but its declaration
// (presumably derived from '*len') is missing from this extracted view,
// as are the braces; confirm against the full source.
115template<
class Type>
void ARCANE_MPIOP_CALL MpiParallelDispatchT<Type>::
116_MinMaxSumOperator(
void* a,
void* b,
int* len,MPI_Datatype* type)
121 MinMaxSumInfo * va =
static_cast<MinMaxSumInfo*
>(a);
122 MinMaxSumInfo * vb =
static_cast<MinMaxSumInfo*
>(b);
123 for(Integer i=0;i<n;++i) {
124 MinMaxSumInfo& ma = va[i];
125 MinMaxSumInfo& mb = vb[i];
// On equal extremal values the lowest rank wins, making the reported
// rank deterministic regardless of reduction order.
128 if (ma.m_min_value==mb.m_min_value){
129 mb.m_min_rank = math::min(mb.m_min_rank,ma.m_min_rank);
131 else if (ma.m_min_value<mb.m_min_value){
132 mb.m_min_value = ma.m_min_value;
133 mb.m_min_rank = ma.m_min_rank;
135 if (mb.m_max_value==ma.m_max_value){
136 mb.m_max_rank = math::min(mb.m_max_rank,ma.m_max_rank);
138 else if (mb.m_max_value<ma.m_max_value){
139 mb.m_max_value = ma.m_max_value;
140 mb.m_max_rank = ma.m_max_rank;
// Cast keeps the accumulation expressed in 'Type'.
142 mb.m_sum_value = (
Type)(ma.m_sum_value + mb.m_sum_value);
// All-reduce of a single (min,max,sum,min_rank,max_rank) tuple using the
// custom datatype and operator; "NoInit" means the caller supplies
// already-seeded local values and ranks, which are taken as-is.
// NOTE(review): the trailing rank-parameter part of the signature and the
// local 'mmsi' declaration are missing from this extracted view; confirm
// against the full source.
149template<
class Type>
void MpiParallelDispatchT<Type>::
150computeMinMaxSumNoInit(
Type& min_val,
Type& max_val,
Type& sum_val,
154 mmsi.m_min_rank = min_rank;
155 mmsi.m_max_rank = max_rank;
156 mmsi.m_min_value = min_val;
157 mmsi.m_max_value = max_val;
158 mmsi.m_sum_value = sum_val;
159 MinMaxSumInfo mmsi_ret;
160 _adapter()->allReduce(&mmsi,&mmsi_ret,1,m_min_max_sum_datatype,
161 m_min_max_sum_operator);
// Write the globally reduced results back through the reference parameters.
162 min_val = mmsi_ret.m_min_value;
163 max_val = mmsi_ret.m_max_value;
164 sum_val = mmsi_ret.m_sum_value;
165 min_rank = mmsi_ret.m_min_rank;
166 max_rank = mmsi_ret.m_max_rank;
// Scalar computeMinMaxSum: seeds both candidate ranks with this process's
// communicator rank, then delegates the collective reduction to
// computeMinMaxSumNoInit.
// NOTE(review): the method signature line and the initialization of
// min_val/max_val/sum_val are missing from this extracted view; confirm
// against the full source.
172template<
class Type>
void MpiParallelDispatchT<Type>::
176 min_rank = _adapter()->commRank();
177 max_rank = _adapter()->commRank();
181 computeMinMaxSumNoInit(min_val,max_val,sum_val,min_rank,max_rank);
187template<
class Type>
void MpiParallelDispatchT<Type>::
188computeMinMaxSum(ConstArrayView<Type> values,
189 ArrayView<Type> min_values,
190 ArrayView<Type> max_values,
191 ArrayView<Type> sum_values,
192 ArrayView<Int32> min_ranks,
193 ArrayView<Int32> max_ranks)
195 const Integer n = values.size();
196 UniqueArray<MinMaxSumInfo> mmsi(n);
197 const Integer comm_rank = m_mp_dispatcher->adapter()->commRank();
198 for(Integer i=0;i<n;++i) {
199 mmsi[i].m_min_rank = comm_rank;
200 mmsi[i].m_max_rank = comm_rank;
201 mmsi[i].m_min_value = values[i];
202 mmsi[i].m_max_value = values[i];
203 mmsi[i].m_sum_value = values[i];
205 UniqueArray<MinMaxSumInfo> mmsi_ret(n);
206 _adapter()->allReduce(mmsi.data(),mmsi_ret.data(),n,m_min_max_sum_datatype,
207 m_min_max_sum_operator);
208 for(Integer i=0;i<n;++i) {
209 min_values[i] = mmsi_ret[i].m_min_value;
210 max_values[i] = mmsi_ret[i].m_max_value;
211 sum_values[i] = mmsi_ret[i].m_sum_value;
212 min_ranks[i] = mmsi_ret[i].m_min_rank;
213 max_ranks[i] = mmsi_ret[i].m_max_rank;
220template<
class Type>
void MpiParallelDispatchT<Type>::
221sendRecv(ConstArrayView<Type> send_buffer,ArrayView<Type> recv_buffer,
Int32 rank)
223 MPI_Datatype type = _mpiDatatype();
224 _adapter()->directSendRecv(send_buffer.data(),send_buffer.size(),
225 recv_buffer.data(),recv_buffer.size(),
226 rank,
sizeof(
Type),type);
232template<
class Type>
Type MpiParallelDispatchT<Type>::
233scan(eReduceType op,
Type send_buf)
235 MPI_Datatype type = _mpiDatatype();
236 Type recv_buf = send_buf;
237 _adapter()->scan(&send_buf,&recv_buf,1,type,_mpiReduceOperator(op));
244template<
class Type>
void MpiParallelDispatchT<Type>::
245scan(eReduceType op,ArrayView<Type> send_buf)
247 MPI_Datatype type = _mpiDatatype();
249 UniqueArray<Type> recv_buf(s);
250 _adapter()->scan(send_buf.data(),recv_buf.data(),s,type,_mpiReduceOperator(op));
251 send_buf.copy(recv_buf);
257template<
class Type> MPI_Datatype MpiParallelDispatchT<Type>::
260 return m_mp_dispatcher->datatype()->datatype();
266template<
class Type> MPI_Op MpiParallelDispatchT<Type>::
267_mpiReduceOperator(eReduceType rt)
269 return m_mp_dispatcher->datatype()->reduceOperator(rt);
275template<
class Type> MpiAdapter* MpiParallelDispatchT<Type>::
278 return m_mp_dispatcher->adapter();
// Accessor returning the MpiDatatype wrapper used for 'Type'.
// NOTE(review): the method-name line is missing from this extracted view
// and is not referenced elsewhere in this chunk; confirm the name against
// the class declaration before editing.
284template<
class Type> MpiDatatype* MpiParallelDispatchT<Type>::
287 return m_mp_dispatcher->datatype();
// Accessor exposing the underlying Arccore ITypeDispatcher for 'Type'
// (the owned m_mp_dispatcher, returned without transferring ownership).
// NOTE(review): the method-name line is missing from this extracted view;
// confirm the name against the class declaration before editing.
290template<
class Type> ITypeDispatcher<Type>* MpiParallelDispatchT<Type>::
293 return m_mp_dispatcher;
299template class MpiParallelDispatchT<char>;
300template class MpiParallelDispatchT<signed char>;
301template class MpiParallelDispatchT<unsigned char>;
302template class MpiParallelDispatchT<short>;
303template class MpiParallelDispatchT<unsigned short>;
304template class MpiParallelDispatchT<int>;
305template class MpiParallelDispatchT<unsigned int>;
306template class MpiParallelDispatchT<long>;
307template class MpiParallelDispatchT<unsigned long>;
308template class MpiParallelDispatchT<long long>;
309template class MpiParallelDispatchT<unsigned long long>;
310template class MpiParallelDispatchT<float>;
311template class MpiParallelDispatchT<double>;
312template class MpiParallelDispatchT<long double>;
313template class MpiParallelDispatchT<APReal>;
314template class MpiParallelDispatchT<Real2>;
315template class MpiParallelDispatchT<Real3>;
316template class MpiParallelDispatchT<Real2x2>;
317template class MpiParallelDispatchT<Real3x3>;
318template class MpiParallelDispatchT<HPReal>;
330namespace MessagePassing
Liste des fonctions d'échange de message.
Lecteur des fichiers de maillage via la bibliothèque LIMA.
Integer len(const char *s)
Retourne la longueur de la chaîne s.
-*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
Espace de nommage contenant les types et déclarations qui gèrent le mécanisme de parallélisme par éch...
Espace de nom de Arccore.
Int32 Integer
Type représentant un entier.