Arcane v3.14.10.0
Developer documentation
MpiParallelNonBlockingCollectiveDispatch.cc
// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
//-----------------------------------------------------------------------------
// Copyright 2000-2022 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
// See the top-level COPYRIGHT file for details.
// SPDX-License-Identifier: Apache-2.0
//-----------------------------------------------------------------------------
/*---------------------------------------------------------------------------*/
/* MpiParallelNonBlockingCollectiveDispatch.cc                 (C) 2000-2018 */
/*                                                                           */
/* MPI implementation of the non-blocking collectives for a given type.     */
/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

#include "arcane/utils/ArcanePrecomp.h"

#include "arcane/utils/Array.h"
#include "arcane/utils/NotImplementedException.h"
#include "arcane/utils/Real2.h"
#include "arcane/utils/Real3.h"
#include "arcane/utils/Real2x2.h"
#include "arcane/utils/Real3x3.h"
#include "arcane/utils/HPReal.h"
#include "arcane/utils/FatalErrorException.h"

#include "arcane/IParallelNonBlockingCollective.h"
#include "arcane/ParallelMngDispatcher.h"

#include "arcane/parallel/mpi/MpiParallelNonBlockingCollectiveDispatch.h"
#include "arcane/parallel/mpi/MpiAdapter.h"
#include "arcane/parallel/mpi/MpiLock.h"
#include "arcane/parallel/mpi/MpiDatatype.h"
#include "arcane/parallel/mpi/MpiParallelDispatch.h"

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

ARCANE_BEGIN_NAMESPACE

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
template<class Type> MpiParallelNonBlockingCollectiveDispatchT<Type>::
MpiParallelNonBlockingCollectiveDispatchT(ITraceMng* tm,IParallelNonBlockingCollective* collective_mng,
                                          MpiAdapter* adapter)
: TraceAccessor(tm)
, m_parallel_mng(collective_mng->parallelMng())
, m_adapter(adapter)
, m_datatype(nullptr)
{
  // Retrieve the datatype via the MpiParallelDispatch dispatcher.
  // TODO: create a type to hold all the MpiDatatype instances.
  auto pmd = dynamic_cast<ParallelMngDispatcher*>(m_parallel_mng);
  if (!pmd)
    ARCANE_FATAL("Bad parallelMng()");
  Type* xtype = nullptr;
  auto dispatcher = pmd->dispatcher(xtype);
  auto true_dispatcher = dynamic_cast< MpiParallelDispatchT<Type>* >(dispatcher);
  if (!true_dispatcher)
    ARCANE_FATAL("Bad dispatcher: should have type MpiParallelDispatchT");
  m_datatype = true_dispatcher->datatype();
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> MpiParallelNonBlockingCollectiveDispatchT<Type>::
~MpiParallelNonBlockingCollectiveDispatchT()
{
  // NOTE: m_datatype is managed by MpiParallelDispatch and must not be
  // destroyed here.
  finalize();
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> void MpiParallelNonBlockingCollectiveDispatchT<Type>::
finalize()
{
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

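// The non-blocking collectives below simply forward to the MpiAdapter,
// passing the MPI datatype associated with Type and returning the
// corresponding request.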
template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
broadcast(ArrayView<Type> send_buf,Integer sub_domain)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingBroadcast(send_buf.data(),send_buf.size(),sub_domain,type);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allGather(ConstArrayView<Type> send_buf,ArrayView<Type> recv_buf)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingAllGather(send_buf.data(),recv_buf.data(),send_buf.size(),type);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
gather(ConstArrayView<Type> send_buf,ArrayView<Type> recv_buf,Integer rank)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingGather(send_buf.data(),recv_buf.data(),send_buf.size(),rank,type);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

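// NOTE: the variable-size gather variants are not implemented yet and throw
// NotImplementedException. The code disabled with '#if 0' sketches the
// intended implementation.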
template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allGatherVariable(ConstArrayView<Type> send_buf,Array<Type>& recv_buf)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  throw NotImplementedException(A_FUNCINFO);
#if 0
  _gatherVariable2(send_buf,recv_buf,-1);
#endif
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
gatherVariable(ConstArrayView<Type> send_buf,Array<Type>& recv_buf,Integer rank)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  ARCANE_UNUSED(rank);
  throw NotImplementedException(A_FUNCINFO);
#if 0
  _gatherVariable2(send_buf,recv_buf,rank);
#endif
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

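// Not implemented either. The disabled code below shows the intended scheme:
// gather each rank's element count, build the displacement array, then call
// the adapter's scatterVariable().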
template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
scatterVariable(ConstArrayView<Type> send_buf,ArrayView<Type> recv_buf,Integer root)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  ARCANE_UNUSED(root);
  throw NotImplementedException(A_FUNCINFO);
#if 0
  MPI_Datatype type = m_adapter->datatype(Type());

  Integer comm_size = static_cast<Integer>(m_adapter->commSize());
  UniqueArray<int> recv_counts(comm_size);
  UniqueArray<int> recv_indexes(comm_size);

  Integer nb_elem = recv_buf.size();
  int my_buf_count = static_cast<int>(nb_elem);
  ConstArrayView<int> count_r(1,&my_buf_count);

  // Get the number of elements of each processor
  m_parallel_mng->allGather(count_r,recv_counts);

  // Fill the index array
  int index = 0;
  for( Integer i=0, is=comm_size; i<is; ++i ){
    recv_indexes[i] = index;
    index += recv_counts[i];
  }

  m_adapter->scatterVariable(send_buf.begin(),recv_counts.begin(),recv_indexes.begin(),
                             recv_buf.begin(),nb_elem,root,type);
#endif
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

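// Fixed-size and variable-size all-to-all exchanges are forwarded directly to
// the adapter, using the count and displacement arrays provided by the caller.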
template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allToAll(ConstArrayView<Type> send_buf,ArrayView<Type> recv_buf,Integer count)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingAllToAll(send_buf.data(),recv_buf.data(),count,type);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allToAllVariable(ConstArrayView<Type> send_buf,
                 Int32ConstArrayView send_count,
                 Int32ConstArrayView send_index,
                 ArrayView<Type> recv_buf,
                 Int32ConstArrayView recv_count,
                 Int32ConstArrayView recv_index
                 )
{
  MPI_Datatype type = m_datatype->datatype();

  return m_adapter->nonBlockingAllToAllVariable(send_buf.data(),send_count.data(),
                                                send_index.data(),recv_buf.data(),
                                                recv_count.data(),
                                                recv_index.data(),type);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

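// The MPI_Op matching 'op' is obtained from the MpiDatatype. The adapter call
// is issued under an MpiLock::Section, presumably to serialize MPI access when
// several threads share the MPI layer.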
template<class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allReduce(eReduceType op,ConstArrayView<Type> send_buf,ArrayView<Type> recv_buf)
{
  MPI_Datatype type = m_datatype->datatype();
  Integer s = send_buf.size();
  MPI_Op operation = m_datatype->reduceOperator(op);

  Request request;
  {
    MpiLock::Section mls(m_adapter->mpiLock());
    request = m_adapter->nonBlockingAllReduce(send_buf.data(),recv_buf.data(),
                                              s,type,operation);
  }
  return request;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

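// Explicit instantiations of the dispatcher for the basic types supported by Arcane.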
template class MpiParallelNonBlockingCollectiveDispatchT<char>;
template class MpiParallelNonBlockingCollectiveDispatchT<signed char>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned char>;
template class MpiParallelNonBlockingCollectiveDispatchT<short>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned short>;
template class MpiParallelNonBlockingCollectiveDispatchT<int>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned int>;
template class MpiParallelNonBlockingCollectiveDispatchT<long>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned long>;
template class MpiParallelNonBlockingCollectiveDispatchT<long long>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned long long>;
template class MpiParallelNonBlockingCollectiveDispatchT<float>;
template class MpiParallelNonBlockingCollectiveDispatchT<double>;
template class MpiParallelNonBlockingCollectiveDispatchT<long double>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real2>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real3>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real2x2>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real3x3>;
template class MpiParallelNonBlockingCollectiveDispatchT<HPReal>;

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

ARCANE_END_NAMESPACE

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/