Arcane v4.1.1.0
Developer documentation
CommonUtils.h
// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
//-----------------------------------------------------------------------------
// Copyright 2000-2025 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
// See the top-level COPYRIGHT file for details.
// SPDX-License-Identifier: Apache-2.0
//-----------------------------------------------------------------------------
/*---------------------------------------------------------------------------*/
/* CommonUtils.h                                               (C) 2000-2025 */
/*                                                                           */
/* Utility functions and classes common to all runtimes.                     */
/*---------------------------------------------------------------------------*/
#ifndef ARCANE_ACCELERATOR_COMMONUTILS_H
#define ARCANE_ACCELERATOR_COMMONUTILS_H
/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

#include "arcane/utils/Array.h"

#include "arcane/accelerator/AcceleratorGlobal.h"
#include "arcane/accelerator/core/RunQueue.h"
#include "arcane/accelerator/AcceleratorUtils.h"

#if defined(ARCANE_COMPILING_HIP)
#include "arccore/accelerator_native/HipAccelerator.h"
#include <rocprim/rocprim.hpp>
#endif
#if defined(ARCANE_COMPILING_CUDA)
#include "arccore/accelerator_native/CudaAccelerator.h"
#include <cub/cub.cuh>
#endif
#if defined(ARCANE_COMPILING_SYCL)
#include "arccore/accelerator_native/SyclAccelerator.h"
#if defined(ARCANE_HAS_ONEDPL)
#include <oneapi/dpl/execution>
#include <oneapi/dpl/algorithm>
#endif
#if defined(__ADAPTIVECPP__)
#include <AdaptiveCpp/algorithms/algorithm.hpp>
#endif
#endif

// Define this if you want to use LambdaStorage
// #ifdef ARCANE_USE_LAMBDA_STORAGE

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace Arcane::Accelerator::impl
{
//! Class that handles keeping a copy of a lambda inside an iterator.
template <typename LambdaType>
class alignas(LambdaType) LambdaStorage
{
  static constexpr size_t SizeofLambda = sizeof(LambdaType);

 public:

  LambdaStorage() = default;
  ARCCORE_HOST_DEVICE LambdaStorage(const LambdaType& v)
  {
    std::memcpy(bytes, &v, SizeofLambda);
  }
  //! Rebuilds a reference to the stored lambda.
  ARCCORE_HOST_DEVICE operator const LambdaType&() const { return *reinterpret_cast<const LambdaType*>(&bytes); }

 private:

  char bytes[SizeofLambda];
};
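
// Usage sketch (illustrative, not part of the original header): LambdaStorage keeps
// a byte-wise copy of a lambda so that an iterator can hold it by value. The
// hypothetical host-side function below stores a capture-less lambda and calls it
// again through the conversion operator.
inline int _exampleLambdaStorageUsage()
{
  auto add_one = [](int x) { return x + 1; };
  LambdaStorage<decltype(add_one)> stored(add_one);
  // The conversion operator rebuilds a reference to the stored lambda.
  const auto& recovered = static_cast<const decltype(add_one)&>(stored);
  return recovered(41); // returns 42
}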

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Manages the internal allocation on the device.
class ARCANE_ACCELERATOR_EXPORT GenericDeviceStorage
{
 public:

  GenericDeviceStorage();
  ~GenericDeviceStorage()
  {
    deallocate();
  }

 public:

  void* address() { return m_storage.data(); }
  size_t size() const { return m_storage.largeSize(); }
  void* allocate(size_t new_size)
  {
    m_storage.resize(new_size);
    return m_storage.data();
  }

  void deallocate()
  {
    m_storage.clear();
  }

  Span<const std::byte> bytes() const
  {
    return m_storage.span();
  }

 private:

  UniqueArray<std::byte> m_storage;
};
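
// Usage sketch (illustrative, not part of the original header): GenericDeviceStorage
// is a resizable byte buffer typically used as temporary/scratch storage by the
// accelerator algorithms. Which allocator backs it (device or host) is decided by
// the default constructor, presumably defined in the corresponding source file.
// The function name below is hypothetical.
inline void _exampleGenericDeviceStorageUsage()
{
  GenericDeviceStorage storage;
  void* ptr = storage.allocate(256); // reserve 256 bytes of scratch space
  (void)ptr;                         // a device algorithm would use this pointer
  // storage.size() is now 256 and storage.bytes() views the buffer as bytes.
  storage.deallocate();
}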

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Manages the internal allocation on the device.
class ARCANE_ACCELERATOR_EXPORT DeviceStorageBase
{
 protected:

  GenericDeviceStorage m_storage;

 protected:

  //! Asynchronously copies \a source into \a destination using \a queue.
  void _copyToAsync(Span<std::byte> destination, Span<const std::byte> source, const RunQueue& queue);
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Manages the internal allocation on the device for a given type.
template <typename DataType, Int32 N = 1>
class DeviceStorage
: public DeviceStorageBase
{
 public:

  DataType* address() { return reinterpret_cast<DataType*>(m_storage.address()); }
  size_t size() const { return m_storage.size(); }
  DataType* allocate()
  {
    m_storage.allocate(sizeof(DataType) * N);
    return address();
  }
  void deallocate() { m_storage.deallocate(); }

  //! Copies the instance into \a dest_ptr.
  void copyToAsync(SmallSpan<DataType> dest_ptr, const RunQueue& queue)
  {
    _copyToAsync(asWritableBytes(dest_ptr), m_storage.bytes(), queue);
  }
};
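
// Usage sketch (illustrative, not part of the original header): a typical pattern
// for DeviceStorage is to hold the result of a device-side computation (for example
// a reduction) and copy it back asynchronously to host memory. The RunQueue passed
// in would normally come from a Runner; the function name is hypothetical.
inline void _exampleDeviceStorageUsage(const RunQueue& queue)
{
  DeviceStorage<Int64> device_result;
  Int64* device_ptr = device_result.allocate(); // room for one Int64
  (void)device_ptr; // a kernel or library call would write the result here

  Int64 host_result = 0;
  SmallSpan<Int64> host_view(&host_result, 1);
  device_result.copyToAsync(host_view, queue); // asynchronous device-to-host copy
  queue.barrier();                             // wait for the copy to complete
}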

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Iterator over consecutive Int32 indexes.
class IndexIterator
{
 public:

  using value_type = Int32;
  using iterator_category = std::random_access_iterator_tag;
  using reference = value_type&;
  using difference_type = ptrdiff_t;
  using pointer = void;

  using ThatClass = IndexIterator;

 public:

  IndexIterator() = default;
  ARCCORE_HOST_DEVICE explicit IndexIterator(Int32 v)
  : m_value(v)
  {}

 public:

  ARCCORE_HOST_DEVICE IndexIterator& operator++()
  {
    ++m_value;
    return (*this);
  }
  ARCCORE_HOST_DEVICE IndexIterator operator+(Int32 x) const
  {
    return IndexIterator(m_value + x);
  }
  ARCCORE_HOST_DEVICE friend ThatClass operator+(Int32 x, const ThatClass& iter)
  {
    return ThatClass(iter.m_value + x);
  }
  ARCCORE_HOST_DEVICE IndexIterator operator-(Int32 x) const
  {
    return IndexIterator(m_value - x);
  }
  ARCCORE_HOST_DEVICE Int32 operator-(const ThatClass& x) const
  {
    return m_value - x.m_value;
  }
  ARCCORE_HOST_DEVICE Int32 operator*() const { return m_value; }
  ARCCORE_HOST_DEVICE Int32 operator[](Int32 x) const { return m_value + x; }
  ARCCORE_HOST_DEVICE friend bool operator==(const ThatClass& a, const ThatClass& b)
  {
    return a.m_value == b.m_value;
  }
  ARCCORE_HOST_DEVICE friend bool operator<(const ThatClass& iter1, const ThatClass& iter2)
  {
    return iter1.m_value < iter2.m_value;
  }

 private:

  Int32 m_value = 0;
};
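
// Usage sketch (illustrative, not part of the original header): IndexIterator acts
// as a counting iterator over Int32 values, so it can feed algorithms that expect a
// random-access range of indexes. The host-side loop below only uses the operators
// declared above; the function name is hypothetical.
inline Int32 _exampleIndexIteratorUsage()
{
  IndexIterator it(0);
  IndexIterator end(5);
  Int32 total = 0;
  for (; it < end; ++it)
    total += *it; // visits 0,1,2,3,4 -> total == 10
  return total;
}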

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

//! Scan/Reduce operator for sums.
template <typename DataType>
class SumOperator
{
 public:

  constexpr ARCCORE_HOST_DEVICE DataType operator()(const DataType& a, const DataType& b) const
  {
    return a + b;
  }
  static DataType defaultValue() { return {}; }
#if defined(ARCANE_COMPILING_SYCL)
  static sycl::plus<DataType> syclFunctor() { return {}; }
#endif
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

//! Scan/Reduce operator for the minimum.
template <typename DataType>
class MinOperator
{
 public:

  constexpr ARCCORE_HOST_DEVICE DataType operator()(const DataType& a, const DataType& b) const
  {
    return (a < b) ? a : b;
  }
  static DataType defaultValue() { return std::numeric_limits<DataType>::max(); }
#if defined(ARCANE_COMPILING_SYCL)
  static sycl::minimum<DataType> syclFunctor() { return {}; }
#endif
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

//! Scan/Reduce operator for the maximum.
template <typename DataType>
class MaxOperator
{
 public:

  constexpr ARCCORE_HOST_DEVICE DataType operator()(const DataType& a, const DataType& b) const
  {
    return (a < b) ? b : a;
  }
  static DataType defaultValue() { return std::numeric_limits<DataType>::lowest(); }
#if defined(ARCANE_COMPILING_SYCL)
  static sycl::maximum<DataType> syclFunctor() { return {}; }
#endif
};
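
// Usage sketch (illustrative, not part of the original header): each operator class
// bundles the binary operation with its identity element (defaultValue()), which is
// what a scan or reduction needs to start from. The host-side loop below shows the
// two pieces working together for MinOperator; the function name is hypothetical.
inline double _exampleMinOperatorUsage()
{
  const double values[] = { 4.0, -1.5, 7.0 };
  MinOperator<double> op;
  double result = MinOperator<double>::defaultValue(); // identity: +max for a minimum
  for (double v : values)
    result = op(result, v);
  return result; // -1.5
}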

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Iterator that applies a getter lambda to an index to produce a value.
template <typename DataType, typename GetterLambda>
class GetterLambdaIterator
{
 public:

  using value_type = DataType;
  using iterator_category = std::random_access_iterator_tag;
  using reference = DataType&;
  using difference_type = ptrdiff_t;
  using pointer = void;
  using ThatClass = GetterLambdaIterator<DataType, GetterLambda>;

 public:

  ARCCORE_HOST_DEVICE GetterLambdaIterator(const GetterLambda& s)
  : m_lambda(s)
  {}
  ARCCORE_HOST_DEVICE explicit GetterLambdaIterator(const GetterLambda& s, Int32 v)
  : m_index(v)
  , m_lambda(s)
  {}

 public:

  ARCCORE_HOST_DEVICE ThatClass& operator++()
  {
    ++m_index;
    return (*this);
  }
  ARCCORE_HOST_DEVICE ThatClass& operator+=(Int32 x)
  {
    m_index += x;
    return (*this);
  }
  ARCCORE_HOST_DEVICE friend ThatClass operator+(const ThatClass& iter, Int32 x)
  {
    return ThatClass(iter.m_lambda, iter.m_index + x);
  }
  ARCCORE_HOST_DEVICE friend ThatClass operator+(Int32 x, const ThatClass& iter)
  {
    return ThatClass(iter.m_lambda, iter.m_index + x);
  }
  ARCCORE_HOST_DEVICE friend bool operator<(const ThatClass& iter1, const ThatClass& iter2)
  {
    return iter1.m_index < iter2.m_index;
  }

  ARCCORE_HOST_DEVICE ThatClass operator-(Int32 x) const
  {
    return ThatClass(m_lambda, m_index - x);
  }
  ARCCORE_HOST_DEVICE Int32 operator-(const ThatClass& x) const
  {
    return m_index - x.m_index;
  }
  ARCCORE_HOST_DEVICE value_type operator*() const
  {
    return m_lambda(m_index);
  }
  ARCCORE_HOST_DEVICE value_type operator[](Int32 x) const { return m_lambda(m_index + x); }
  ARCCORE_HOST_DEVICE friend bool operator!=(const ThatClass& a, const ThatClass& b)
  {
    return a.m_index != b.m_index;
  }
  ARCCORE_HOST_DEVICE friend bool operator==(const ThatClass& a, const ThatClass& b)
  {
    return a.m_index == b.m_index;
  }

 private:

  Int32 m_index = 0;
  GetterLambda m_lambda;
};
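
// Usage sketch (illustrative, not part of the original header): GetterLambdaIterator
// turns a lambda 'Int32 -> DataType' into a random-access input iterator, which lets
// algorithms read transformed values without materializing them in memory. The
// host-side loop below only uses the operators declared above; names are hypothetical.
inline Int64 _exampleGetterLambdaIteratorUsage()
{
  auto getter = [](Int32 index) -> Int64 { return 2 * index; };
  GetterLambdaIterator<Int64, decltype(getter)> it(getter, 0);
  GetterLambdaIterator<Int64, decltype(getter)> end(getter, 4);
  Int64 total = 0;
  for (; it != end; ++it)
    total += *it; // visits 0,2,4,6 -> total == 12
  return total;
}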

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
//! Iterator over a lambda used to set a value through an index.
template <typename SetterLambda>
class SetterLambdaIterator
{
 public:

  //! Allows setting an element of the output iterator.
  class Setter
  {
   public:

    ARCCORE_HOST_DEVICE explicit Setter(const SetterLambda& s, Int32 output_index)
    : m_output_index(output_index)
    , m_lambda(s)
    {}
    ARCCORE_HOST_DEVICE void operator=(Int32 input_index)
    {
      m_lambda(input_index, m_output_index);
    }
    Int32 m_output_index = 0;
    SetterLambda m_lambda;
  };

  using value_type = Int32;
  using iterator_category = std::random_access_iterator_tag;
  using reference = Setter;
  using difference_type = ptrdiff_t;
  using pointer = void;

  using ThatClass = SetterLambdaIterator<SetterLambda>;

 private:

#ifdef ARCANE_USE_LAMBDA_STORAGE
  using StorageType = LambdaStorage<SetterLambda>;
#else
  using StorageType = SetterLambda;
#endif

 public:

  SetterLambdaIterator() = default;
  ARCCORE_HOST_DEVICE SetterLambdaIterator(const SetterLambda& s)
  : m_lambda(s)
  {}
  ARCCORE_HOST_DEVICE SetterLambdaIterator(const SetterLambda& s, Int32 v)
  : m_index(v)
  , m_lambda(s)
  {}

 private:

#ifdef ARCANE_USE_LAMBDA_STORAGE
  ARCCORE_HOST_DEVICE SetterLambdaIterator(const LambdaStorage<SetterLambda>& s, Int32 v)
  : m_index(v)
  , m_lambda(s)
  {}
#endif

 public:

  ARCCORE_HOST_DEVICE SetterLambdaIterator<SetterLambda>& operator++()
  {
    ++m_index;
    return (*this);
  }
  ARCCORE_HOST_DEVICE SetterLambdaIterator<SetterLambda>& operator--()
  {
    --m_index;
    return (*this);
  }
  ARCCORE_HOST_DEVICE reference operator*() const
  {
    return Setter(m_lambda, m_index);
  }
  ARCCORE_HOST_DEVICE reference operator[](Int32 x) const { return Setter(m_lambda, m_index + x); }
  ARCCORE_HOST_DEVICE friend ThatClass operator+(Int32 x, const ThatClass& iter)
  {
    return ThatClass(iter.m_lambda, iter.m_index + x);
  }
  ARCCORE_HOST_DEVICE friend ThatClass operator+(const ThatClass& iter, Int32 x)
  {
    return ThatClass(iter.m_lambda, iter.m_index + x);
  }
  ARCCORE_HOST_DEVICE friend ThatClass operator-(const ThatClass& iter, Int32 x)
  {
    return ThatClass(iter.m_lambda, iter.m_index - x);
  }
  ARCCORE_HOST_DEVICE friend Int32 operator-(const ThatClass& iter1, const ThatClass& iter2)
  {
    return iter1.m_index - iter2.m_index;
  }
  ARCCORE_HOST_DEVICE friend bool operator<(const ThatClass& iter1, const ThatClass& iter2)
  {
    return iter1.m_index < iter2.m_index;
  }

 private:

  Int32 m_index = 0;
  StorageType m_lambda;
};
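
// Usage sketch (illustrative, not part of the original header): SetterLambdaIterator
// behaves as an output iterator. Assigning an Int32 'input_index' to the dereferenced
// iterator calls the lambda with (input_index, output_index), so an algorithm can
// "write" its results through a user lambda instead of into a plain array.
// The function name below is hypothetical.
inline void _exampleSetterLambdaIteratorUsage()
{
  Int32 output[4] = { 0, 0, 0, 0 };
  auto setter = [&](Int32 input_index, Int32 output_index) {
    output[output_index] = 10 * input_index;
  };
  SetterLambdaIterator<decltype(setter)> out(setter);
  for (Int32 i = 0; i < 4; ++i)
    out[i] = i + 1; // calls setter(i + 1, i): output becomes { 10, 20, 30, 40 }
}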

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

} // namespace Arcane::Accelerator::impl

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace Arcane::Accelerator
{

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

template <typename DataType> using ScannerSumOperator = impl::SumOperator<DataType>;
template <typename DataType> using ScannerMaxOperator = impl::MaxOperator<DataType>;
template <typename DataType> using ScannerMinOperator = impl::MinOperator<DataType>;
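
// Usage sketch (illustrative, not part of the original header): these aliases expose
// the impl:: operators under the public namespace for use with the accelerator scan
// algorithms. The host-side loop below only illustrates the semantics of an exclusive
// sum scan built from ScannerSumOperator: each output element is the sum of all
// previous inputs, starting from the operator's defaultValue(). The function name is
// hypothetical.
inline void _exampleExclusiveSumScanSemantics()
{
  const Int32 input[4] = { 1, 2, 3, 4 };
  Int32 output[4] = {};
  ScannerSumOperator<Int32> op;
  Int32 accumulator = ScannerSumOperator<Int32>::defaultValue(); // 0 for sums
  for (Int32 i = 0; i < 4; ++i) {
    output[i] = accumulator; // exclusive scan: value before adding input[i]
    accumulator = op(accumulator, input[i]);
  }
  // output == { 0, 1, 3, 6 }
}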

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

} // namespace Arcane::Accelerator

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

#ifdef ARCANE_USE_LAMBDA_STORAGE
#undef ARCANE_USE_LAMBDA_STORAGE
#endif

#endif

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/