#include "arcane/utils/Array.h"
#include "arcane/utils/NotImplementedException.h"
#include "arcane/utils/Real2.h"
#include "arcane/utils/Real3.h"
#include "arcane/utils/Real2x2.h"
#include "arcane/utils/Real3x3.h"
#include "arcane/utils/HPReal.h"
#include "arcane/utils/FatalErrorException.h"

#include "arcane/core/IParallelNonBlockingCollective.h"
#include "arcane/core/ParallelMngDispatcher.h"

#include "arcane/parallel/mpi/MpiParallelNonBlockingCollectiveDispatch.h"
#include "arcane/parallel/mpi/MpiDatatype.h"
#include "arcane/parallel/mpi/MpiParallelDispatch.h"

#include "arccore/message_passing_mpi/internal/MpiAdapter.h"
#include "arccore/message_passing_mpi/internal/MpiLock.h"
// Constructor. NOTE: the signature was elided in this listing; the parameter
// names and types below are inferred from the initializer list and from how
// the members are used in the rest of the file.
template <class Type> MpiParallelNonBlockingCollectiveDispatchT<Type>::
MpiParallelNonBlockingCollectiveDispatchT(ITraceMng* tm, IParallelNonBlockingCollective* collective_mng,
                                          MpiAdapter* adapter)
: TraceAccessor(tm)
, m_parallel_mng(collective_mng->parallelMng())
, m_adapter(adapter)
{
  // Fetch the MPI datatype for Type from the blocking dispatcher registered
  // in the parallel manager.
  auto pmd = dynamic_cast<ParallelMngDispatcher*>(m_parallel_mng);
  Type* xtype = nullptr;
  auto dispatcher = pmd->dispatcher(xtype);
  auto true_dispatcher = dynamic_cast<MpiParallelDispatchT<Type>*>(dispatcher);
  if (!true_dispatcher)
    ARCANE_FATAL("Bad dispatcher. should have type MpiParallelDispatcher");
  m_datatype = true_dispatcher->datatype();
}
 
template <class Type> MpiParallelNonBlockingCollectiveDispatchT<Type>::
~MpiParallelNonBlockingCollectiveDispatchT()
{
}
 
// Elided in this listing: a void member function of
// MpiParallelNonBlockingCollectiveDispatchT<Type> declared here in the
// original source (its name and body do not survive in the extract).
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
broadcast(ArrayView<Type> send_buf, Integer sub_domain)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingBroadcast(send_buf.data(), send_buf.size(), sub_domain, type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allGather(ConstArrayView<Type> send_buf, ArrayView<Type> recv_buf)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingAllGather(send_buf.data(), recv_buf.data(), send_buf.size(), type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
gather(ConstArrayView<Type> send_buf, ArrayView<Type> recv_buf, Integer rank)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingGather(send_buf.data(), recv_buf.data(), send_buf.size(), rank, type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allGatherVariable(ConstArrayView<Type> send_buf, Array<Type>& recv_buf)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  throw NotImplementedException(A_FUNCINFO);
  // Unreachable: kept from the original source as a hint at the intended
  // implementation.
  // _gatherVariable2(send_buf, recv_buf, -1);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
gatherVariable(ConstArrayView<Type> send_buf, Array<Type>& recv_buf, Integer rank)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  throw NotImplementedException(A_FUNCINFO);
  // Unreachable: kept from the original source as a hint at the intended
  // implementation.
  // _gatherVariable2(send_buf, recv_buf, rank);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
scatterVariable(ConstArrayView<Type> send_buf, ArrayView<Type> recv_buf, Integer root)
{
  ARCANE_UNUSED(send_buf);
  ARCANE_UNUSED(recv_buf);
  throw NotImplementedException(A_FUNCINFO);

  // Unreachable: legacy blocking implementation kept from the original
  // source. It gathers every rank's receive count, builds the displacement
  // table, then calls the adapter's blocking scatterVariable().
  MPI_Datatype type = m_adapter->datatype(Type());

  Integer comm_size = m_parallel_mng->commSize();
  UniqueArray<int> recv_counts(comm_size);
  UniqueArray<int> recv_indexes(comm_size);

  // Exchange the number of elements each rank expects to receive.
  Integer nb_elem = recv_buf.size();
  int my_buf_count = static_cast<int>(nb_elem);
  ConstArrayView<int> count_r(1, &my_buf_count);
  m_parallel_mng->allGather(count_r, recv_counts);

  // Derive each rank's displacement from the gathered counts.
  Integer index = 0;
  for (Integer i = 0, is = comm_size; i < is; ++i) {
    recv_indexes[i] = index;
    index += recv_counts[i];
  }
  m_adapter->scatterVariable(send_buf.begin(), recv_counts.begin(), recv_indexes.begin(),
                             recv_buf.begin(), nb_elem, root, type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allToAll(ConstArrayView<Type> send_buf, ArrayView<Type> recv_buf, Integer count)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingAllToAll(send_buf.data(), recv_buf.data(), count, type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allToAllVariable(ConstArrayView<Type> send_buf,
                 Int32ConstArrayView send_count,
                 Int32ConstArrayView send_index,
                 ArrayView<Type> recv_buf,
                 Int32ConstArrayView recv_count,
                 Int32ConstArrayView recv_index)
{
  MPI_Datatype type = m_datatype->datatype();
  return m_adapter->nonBlockingAllToAllVariable(send_buf.data(), send_count.data(),
                                                send_index.data(), recv_buf.data(),
                                                recv_count.data(), recv_index.data(), type);
}
 
template <class Type> Parallel::Request MpiParallelNonBlockingCollectiveDispatchT<Type>::
allReduce(eReduceType op, ConstArrayView<Type> send_buf, ArrayView<Type> recv_buf)
{
  MPI_Datatype type = m_datatype->datatype();
  MPI_Op operation = m_datatype->reduceOperator(op);
  Parallel::Request request;
  {
    // Serialize access to MPI when the adapter requires a lock
    // (multi-threaded MPI usage).
    MpiLock::Section mls(m_adapter->mpiLock());
    request = m_adapter->nonBlockingAllReduce(send_buf.data(), recv_buf.data(),
                                              send_buf.size(), type, operation);
  }
  return request;
}
 
template class MpiParallelNonBlockingCollectiveDispatchT<char>;
template class MpiParallelNonBlockingCollectiveDispatchT<signed char>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned char>;
template class MpiParallelNonBlockingCollectiveDispatchT<short>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned short>;
template class MpiParallelNonBlockingCollectiveDispatchT<int>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned int>;
template class MpiParallelNonBlockingCollectiveDispatchT<long>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned long>;
template class MpiParallelNonBlockingCollectiveDispatchT<long long>;
template class MpiParallelNonBlockingCollectiveDispatchT<unsigned long long>;
template class MpiParallelNonBlockingCollectiveDispatchT<float>;
template class MpiParallelNonBlockingCollectiveDispatchT<double>;
template class MpiParallelNonBlockingCollectiveDispatchT<long double>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real2>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real3>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real2x2>;
template class MpiParallelNonBlockingCollectiveDispatchT<Real3x3>;
template class MpiParallelNonBlockingCollectiveDispatchT<HPReal>;
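// Illustrative usage (a minimal sketch, not part of the original file):
// starting a non-blocking all-reduce through the public
// IParallelNonBlockingCollective interface and waiting on the returned
// request. `exampleAllReduce` is a hypothetical helper, and the sketch
// assumes the parallel manager exposes its non-blocking collectives via
// nonBlockingCollective().
namespace
{
[[maybe_unused]] void exampleAllReduce(IParallelMng* pm)
{
  IParallelNonBlockingCollective* pnbc = pm->nonBlockingCollective();
  UniqueArray<Real> local_values(8);
  local_values.fill(1.0);
  UniqueArray<Real> reduced_values(8);
  // Start the collective; independent computation may overlap with it here.
  Parallel::Request rq = pnbc->allReduce(Parallel::ReduceSum,
                                         local_values.constView(),
                                         reduced_values.view());
  // Block until the request completes.
  pm->waitAllRequests(ArrayView<Parallel::Request>(1, &rq));
}
}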
 