// NOTE(review): extraction artifact — original file line numbers are embedded in the
// text and statements are split across lines; code below kept byte-identical.
169:
// Fragment of the class declaration: AbstractArray<T> derives from AbstractArrayBase.
public AbstractArrayBase
// Type aliases re-exported from ArrayTraits<T>. IsPODType is the tag used for
// POD vs non-POD dispatch (raw memory copy vs per-element construct/destroy).
173 typedef typename ArrayTraits<T>::ConstReferenceType ConstReferenceType;
174 typedef typename ArrayTraits<T>::IsPODType IsPODType;
// Standard reverse-iterator aliases built on the container's own iterators.
199 typedef std::reverse_iterator<iterator> reverse_iterator;
200 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
229 static constexpr Int64 typeSize() {
return static_cast<Int64
>(
sizeof(T)); }
230 AllocatedMemoryInfo _currentMemoryInfo()
const
232 return AllocatedMemoryInfo(m_ptr, m_md->size * typeSize(), m_md->capacity * typeSize());
// Fragment: constructor from a view — copy-constructs `asize` elements from view.data().
247 _createRange(0, asize, view.
data());
// Fragment: constructor with an optional caller-supplied buffer; the first allocation
// is delegated to the allocator carried by the MemoryAllocationOptions `o`.
262 void* pre_allocated_buffer =
nullptr)
264 _directFirstAllocateWithAllocator(acapacity, o, pre_allocated_buffer);
// Fragment: destruction path — releases the array storage.
274 _internalDeallocate();
// Fragment: when the requested allocator differs from the current one, redo the first
// allocation (capacity 0) with the new options.
278 if (options.allocator() != m_md->_allocator())
279 _directFirstAllocateWithAllocator(0, options);
289 operator Span<const T>()
const
291 return Span<const T>(m_ptr, m_md->
size);
293 operator SmallSpan<const T>()
const
295 return SmallSpan<const T>(m_ptr, ARCCORE_CAST_SMALL_SIZE(
size()));
301 Integer size()
const {
return ARCCORE_CAST_SMALL_SIZE(m_md->size); }
313 bool empty()
const {
return m_md->size == 0; }
// Fragment: linear scan over the elements (looks like a contains()/search helper —
// TODO confirm; the loop body is not visible here).
317 const T* ptr = m_ptr;
318 for (
Int64 i = 0, n = m_md->size; i < n; ++i) {
// Fragment: bounds checks guarding element accessors.
330 ARCCORE_CHECK_AT(i, m_md->size);
336 ARCCORE_CHECK_AT(i, m_md->size);
// Fragment: forwards a memory-locality hint to the meta-data/allocator layer.
345 m_md->_setMemoryLocationHint(new_hint, m_ptr,
sizeof(T));
// Fragment: host/device memory-location setter and getter.
356 m_md->_setHostDeviceMemoryLocation(location);
362 return m_md->allocation_options.hostDeviceMemoryLocation();
// Fragment: tail of an operator!= implemented as the negation of operator==.
374 return !(rhs == lhs);
377 friend bool operator==(
const AbstractArray<T>& rhs,
const Span<const T>& lhs)
379 return operator==(Span<const T>(rhs), lhs);
382 friend bool operator!=(
const AbstractArray<T>& rhs,
const Span<const T>& lhs)
384 return !(rhs == lhs);
387 friend bool operator==(
const Span<const T>& rhs,
const AbstractArray<T>& lhs)
389 return operator==(rhs, Span<const T>(lhs));
392 friend bool operator!=(
const Span<const T>& rhs,
const AbstractArray<T>& lhs)
394 return !(rhs == lhs);
// Stream output: delegates to Span's operator<< (fragment — the trailing
// `return o;` of this function is not visible here).
397 friend std::ostream& operator<<(std::ostream& o,
const AbstractArray<T>& val)
399 o << Span<const T>(val);
// Re-export of the meta-data member inherited from AbstractArrayBase.
405 using AbstractArrayBase::m_meta_data;
// Fragment: capacity-growth helper. A shared-null array gets its first allocation via
// _internalAllocate; otherwise the capacity is grown geometrically (factor ~1.5,
// starting at 4) until it covers new_capacity.
428 template <
typename PodType>
431 if (_isSharedNull()) {
432 if (new_capacity != 0)
433 _internalAllocate(new_capacity, queue);
437 Int64 acapacity = new_capacity;
438 if (compute_capacity) {
439 acapacity = m_md->capacity;
441 while (new_capacity > acapacity)
442 acapacity = (acapacity == 0) ? 4 : (acapacity + 1 + acapacity / 2);
// Fragment: POD reallocation — after _directReAllocate, references must be refreshed
// when the buffer shrank or moved.
460 Int64 old_capacity = m_md->capacity;
461 _directReAllocate(new_capacity, queue);
462 bool update = (new_capacity < old_capacity) || (m_ptr != old_ptr);
// Fragment: non-POD reallocation — copy-construct each element into the new buffer,
// carry the reference count over, then free the old allocation.
474 Int64 old_size = m_md->size;
475 _directAllocate(new_capacity, queue);
476 if (m_ptr != old_ptr) {
477 for (
Int64 i = 0; i < old_size; ++i) {
478 new (m_ptr + i) T(old_ptr[i]);
481 m_md->nb_ref = old_md->
nb_ref;
482 m_md->_deallocate(old_mem_info, queue);
// Fragment: full deallocation — frees element storage (unless this is the shared-null
// sentinel) and then the meta-data block.
487 void _internalDeallocate(
RunQueue* queue =
nullptr)
489 if (!_isSharedNull())
490 m_md->_deallocate(_currentMemoryInfo(), queue);
492 _deallocateMetaData(m_md);
// Fragment: first allocation — delegates to _directAllocate (any ref-count/flag setup
// that follows is not visible here).
494 void _internalAllocate(Int64 new_capacity, RunQueue* queue)
496 _directAllocate(new_capacity, queue);
// Fragment: raw byte copy from `source` into this array's buffer on the default queue.
501 void _copyFromMemory(
const T* source)
503 m_md->_copyFromMemory(m_ptr, source,
sizeof(T),
_nullRunQueue());
// Fragment: first allocation honoring explicit MemoryAllocationOptions. Falls back to
// the default allocator when none is set; a caller-provided buffer bypasses allocation
// (the else-branches between the visible lines are not shown here).
516 void _directFirstAllocateWithAllocator(
Int64 new_capacity, MemoryAllocationOptions options,
517 void* pre_allocated_buffer =
nullptr)
519 IMemoryAllocator* wanted_allocator = options.allocator();
520 if (!wanted_allocator) {
521 wanted_allocator = ArrayMetaData::_defaultAllocator();
522 options.setAllocator(wanted_allocator);
525 m_md->allocation_options = options;
526 if (new_capacity > 0) {
527 if (!pre_allocated_buffer)
528 _allocateMP(new_capacity, options.runQueue());
530 _setMPCast(pre_allocated_buffer);
// Fragment: allocation entry point — the branch taken when the meta-data is still the
// null sentinel is not visible here.
537 void _directAllocate(
Int64 new_capacity, RunQueue* queue)
539 if (!m_md->is_not_null)
541 _allocateMP(new_capacity, queue);
544 void _allocateMP(
Int64 new_capacity, RunQueue* queue)
546 _setMPCast(m_md->_allocate(new_capacity, typeSize(), queue));
549 void _directReAllocate(
Int64 new_capacity, RunQueue* queue)
551 _setMPCast(m_md->_reallocate(_currentMemoryInfo(), new_capacity, typeSize(), queue));
556 void printInfos(std::ostream& o)
558 o <<
" Infos: size=" << m_md->size <<
" capacity=" << m_md->capacity <<
'\n';
// Fragment: append `n` copies of `val` — grows capacity when needed (growth call not
// visible), then placement-new constructs each new element.
575 Int64 s = m_md->size;
576 if ((s + n) > m_md->capacity)
578 for (
Int64 i = 0; i < n; ++i)
579 new (m_ptr + s + i) T(val);
// Fragment: append a range of `n` values — grows capacity when needed, then
// copy-constructs from the source pointer.
587 const T* ptr = val.
data();
588 Int64 s = m_md->size;
589 if ((s + n) > m_md->capacity)
591 _createRange(s, s + n, ptr);
// Fragment: last-reference teardown — destroys all elements (POD dispatch via IsPODType).
598 if (m_md->nb_ref == 0) {
605 _destroyRange(0, m_md->
size, IsPODType());
// POD overload: trivially-destructible elements need no per-element destructor calls.
607 void _destroyRange(Int64, Int64, TrueType)
// Non-POD overload: destroy elements in [abegin, aend) (the destructor call in the
// loop body is not visible here).
611 void _destroyRange(
Int64 abegin,
Int64 aend, FalseType)
615 for (
Int64 i = abegin; i < aend; ++i)
// POD overload: default construction is a no-op for trivially-constructible elements.
618 void _createRangeDefault(
Int64,
Int64, TrueType)
// Non-POD overload: default-construct elements in [abegin, aend) (loop body not visible).
621 void _createRangeDefault(
Int64 abegin,
Int64 aend, FalseType)
625 for (
Int64 i = abegin; i < aend; ++i)
// POD overload of fill-with-value (the assignment in the loop body is not visible).
628 void _createRange(
Int64 abegin,
Int64 aend, ConstReferenceType value, TrueType)
632 for (
Int64 i = abegin; i < aend; ++i)
// Non-POD overload: copy-construct each element from `value` with placement new.
635 void _createRange(
Int64 abegin,
Int64 aend, ConstReferenceType value, FalseType)
639 for (
Int64 i = abegin; i < aend; ++i)
640 new (m_ptr + i) T(value);
// Copy-construct elements in [abegin, aend) from consecutive source values
// (the advance of `values` inside the loop is not visible here).
642 void _createRange(
Int64 abegin,
Int64 aend,
const T* values)
646 for (
Int64 i = abegin; i < aend; ++i) {
647 new (m_ptr + i) T(*values);
// Fragment: assign `value` to every existing element (assignment line not visible).
651 void _fill(ConstReferenceType value)
653 for (
Int64 i = 0, n =
size(); i < n; ++i)
// Fragment: deep copy from another array — copies size and dimensions, then
// copy-constructs every element from the source buffer (the allocation step between
// the visible lines is not shown).
656 void _clone(
const ThatClassType& orig_array)
658 Int64 that_size = orig_array.size();
660 m_md->size = that_size;
661 m_md->dim1_size = orig_array.m_md->dim1_size;
662 m_md->dim2_size = orig_array.m_md->dim2_size;
663 _createRange(0, that_size, orig_array.m_ptr);
// Fragment: resize helper — growing default-constructs the new tail, shrinking
// destroys it; POD/non-POD behavior selected by the `pod_type` tag.
665 template <
typename PodType>
666 void _resizeHelper(
Int64 s, PodType pod_type, RunQueue* queue)
670 if (s > m_md->size) {
672 this->_createRangeDefault(m_md->size, s, pod_type);
675 this->_destroyRange(s, m_md->size, pod_type);
// Fragment: resize treating elements as POD (TrueType), a full-teardown fragment,
// and a resize-with-initial-value fragment dispatched on IsPODType.
679 void _resize(
Int64 s)
686 _resizeHelper(s,
TrueType{}, queue);
690 this->_destroyRange(0, m_md->
size, IsPODType());
698 if (s > m_md->size) {
700 this->_createRange(m_md->size, s, value, IsPODType());
703 this->_destroyRange(s, m_md->size, IsPODType());
// POD copy: a raw memory copy is sufficient.
707 void _copy(
const T* rhs_begin,
TrueType)
709 _copyFromMemory(rhs_begin);
711 void _copy(
const T* rhs_begin, FalseType)
713 for (Int64 i = 0, n = m_md->
size; i < n; ++i)
714 m_ptr[i] = rhs_begin[i];
// Dispatches the copy on the POD-ness of T.
716 void _copy(
const T* rhs_begin)
718 _copy(rhs_begin, IsPODType());
// Fragment: copy from a view — rejects an overlapping source, grows and
// copy-constructs the tail when the source is larger, destroys the surplus tail when
// it is smaller, and records the new size in both cases.
731 const T* rhs_begin = rhs.
data();
733 const Int64 current_size = m_md->size;
736 if (abegin >= rhs_begin && abegin < (rhs_begin + rhs_size))
737 ArrayMetaData::overlapError(abegin, m_md->size, rhs_begin, rhs_size);
739 if (rhs_size > current_size) {
742 this->_createRange(m_md->size, rhs_size, rhs_begin + current_size);
745 m_md->size = rhs_size;
748 this->_destroyRange(rhs_size, current_size, IsPODType{});
749 m_md->size = rhs_size;
// Fragment: move/swap primitives (bodies mostly not visible); _swap exchanges the
// data pointers of the two arrays.
761 void _move(ThatClassType& rhs) ARCCORE_NOEXCEPT
785 void _swap(ThatClassType& rhs) ARCCORE_NOEXCEPT
787 std::swap(m_ptr, rhs.m_ptr);
// Fragment: shrink capacity — no-op when the request exceeds the current capacity;
// a lower bound of 4 appears to be enforced (the clamp line is not visible).
797 void _shrink(Int64 new_capacity)
802 if (new_capacity > this->
capacity())
804 if (new_capacity < 4)
// Fragment: clamp a requested size to `max_size` before narrowing to Integer.
822 if (asize > max_size)
824 asize =
static_cast<Integer>(max_size);
// Fragment: copy/assign path that can take a fast route when both arrays share the
// same allocator.
833 Span<const T> rhs_span(rhs);
834 if (rhs.allocator() == this->allocator()) {
// Fragment: internal pointer/meta-data setters and meta-data teardown guard.
848 void _setMP(TrueImpl* new_mp)
853 void _setMP2(TrueImpl* new_mp, ArrayMetaData* new_md)
859 if (!m_md->is_allocated_by_new)
// Fragment: shared-null test — the array is the shared-null sentinel when m_ptr is null.
865 return m_ptr ==
nullptr;
// Fragment: reset to the shared-null state (meta-data reset to a default instance).
870 void _setToSharedNull()
873 m_meta_data = ArrayMetaData();
876 void _setMPCast(
void* p)
878 _setMP(
reinterpret_cast<TrueImpl*
>(p));