Arcane  v4.1.7.0
Documentation développeur
Chargement...
Recherche...
Aucune correspondance
Hdf5ReaderWriter.cc
1// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
2//-----------------------------------------------------------------------------
3// Copyright 2000-2026 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
4// See the top-level COPYRIGHT file for details.
5// SPDX-License-Identifier: Apache-2.0
6//-----------------------------------------------------------------------------
7/*---------------------------------------------------------------------------*/
8/* Hdf5ReaderWriter.cc (C) 2000-2026 */
9/* */
10/* Lecture/Ecriture au format HDF5. */
11/*---------------------------------------------------------------------------*/
12/*---------------------------------------------------------------------------*/
13
14#include "arcane/utils/String.h"
15#include "arcane/utils/StringBuilder.h"
16#include "arcane/utils/OStringStream.h"
17#include "arcane/utils/ScopedPtr.h"
18#include "arcane/utils/List.h"
19#include "arcane/utils/ITraceMng.h"
20#include "arcane/utils/CheckedConvert.h"
21#include "arcane/utils/ArrayShape.h"
22
23#include "arcane/core/ISubDomain.h"
24#include "arcane/core/StdNum.h"
25#include "arcane/core/IVariable.h"
26#include "arcane/core/CheckpointService.h"
27#include "arcane/core/Directory.h"
28#include "arcane/core/IParallelMng.h"
29#include "arcane/core/IParallelReplication.h"
30#include "arcane/core/ArcaneException.h"
31#include "arcane/core/VerifierService.h"
32#include "arcane/core/IVariableMng.h"
33#include "arcane/core/FactoryService.h"
34#include "arcane/core/IData.h"
35#include "arcane/core/Timer.h"
36#include "arcane/core/ISerializedData.h"
37#include "arcane/core/IIOMng.h"
38#include "arcane/core/IXmlDocumentHolder.h"
39
40#include "arcane/datatype/DataTypeTraits.h"
41
42#include "arcane/core/SerializeBuffer.h"
43#include "arcane/core/ISerializeMessageList.h"
44#include "arcane/core/internal/SerializeMessage.h"
45
46#include "arcane/hdf5/Hdf5ReaderWriter.h"
47
48#include "arcane/hdf5/Hdf5ReaderWriter_axl.h"
49
50#include <array>
51//#define ARCANE_TEST_HDF5MPI
52
53/*---------------------------------------------------------------------------*/
54/*---------------------------------------------------------------------------*/
55
56namespace Arcane
57{
58
59/*---------------------------------------------------------------------------*/
60/*---------------------------------------------------------------------------*/
61
62using namespace Hdf5Utils;
63
64static herr_t _Hdf5ReaderWriterIterateMe(hid_t,const char*,void*);
65
66/*---------------------------------------------------------------------------*/
67/*---------------------------------------------------------------------------*/
68
namespace
{
// Number of Int64 slots in the "Dims" attribute attached to each saved
// variable: 10 fixed fields (dimensions, sizes, data type, ...) followed by
// up to MAX_NB_DIMENSION shape extents (see _writeVal()/_readDim2()).
constexpr Int32 VARIABLE_INFO_SIZE = 10 + ArrayShape::MAX_NB_DIMENSION;

#if (defined(H5_HAVE_THREADSAFE) || defined(H5_HAVE_CONCURRENCY))
// HDF5 was built thread-safe: no external locking is needed and the macro
// expands to nothing.
#define ARCANE_HDF5_MUTEX
#else
// HDF5 is not thread-safe: serialize HDF5 calls through a process-wide
// mutex. RAII guard: locks on construction, unlocks on destruction.
struct ScopedMutex
{
  ScopedMutex()
  {
    _ArcaneHdf5UtilsMutex().lock();
  }
  ~ScopedMutex()
  {
    _ArcaneHdf5UtilsMutex().unlock();
  }
};
#define ARCANE_HDF5_MUTEX ScopedMutex scoped_mutex
#endif
} // namespace
90
91/*---------------------------------------------------------------------------*/
92/*---------------------------------------------------------------------------*/
93
94Hdf5ReaderWriter::
95Hdf5ReaderWriter(ISubDomain* sd,const String& filename,
96 const String& sub_group_name,
97 Integer fileset_size, Integer currentIndex, Integer index_modulo,
98 eOpenMode open_mode,[[maybe_unused]] bool do_verif)
99: TraceAccessor(sd->traceMng())
100, m_parallel_mng(sd->parallelMng())
101, m_open_mode(open_mode)
102, m_filename(filename)
103, m_sub_group_name(sub_group_name)
104, m_is_initialized(false)
105, m_io_timer(sd,"Hdf5Timer",Timer::TimerReal)
106, m_is_parallel(false)
107, m_my_rank(m_parallel_mng->commRank())
108, m_send_rank(m_my_rank)
109, m_last_recv_rank(m_my_rank)
110, m_fileset_size(fileset_size)
111, m_index_write(currentIndex)
112, m_index_modulo(index_modulo)
113{
114
115 if (m_fileset_size!=1 && m_parallel_mng->isParallel()){
116 m_is_parallel = true;
117 Integer nb_rank = m_parallel_mng->commSize();
118 if (m_fileset_size==0){
119 m_send_rank = 0;
120 m_last_recv_rank = nb_rank;
121 }
122 else{
123 m_send_rank = (m_my_rank / m_fileset_size) * m_fileset_size;
124 m_last_recv_rank = m_send_rank + m_fileset_size;
125 if (m_last_recv_rank>nb_rank)
126 m_last_recv_rank = nb_rank;
127 --m_last_recv_rank;
128 }
129 }
130 sd->traceMng()->info() << " INFOS PARALLEL: my_rank=" << m_my_rank
131 << " send_rank=" << m_send_rank
132 << " last_recv_rank=" << m_last_recv_rank
133 << " filename=" << filename;
134}
135
136
137/*---------------------------------------------------------------------------*/
138/*---------------------------------------------------------------------------*/
/*!
 * \brief Opens the HDF5 file and the destination/source group.
 *
 * Idempotent: only the first call does anything. In read mode the file and
 * sub-group are opened and the variable names stored in the group are
 * collected via H5Giterate(). In write mode only the gathering rank
 * (m_send_rank) opens the file: truncate mode creates it with a tuned file
 * access property list, append mode reopens it, and the sub-group is
 * (re)created.
 *
 * \throws ReaderWriterException if the file or the group cannot be opened.
 */
void Hdf5ReaderWriter::
initialize()
{
  if (m_is_initialized)
    return;
  m_is_initialized = true;

  HInit();
  HInit::useMutex(m_parallel_mng->isThreadImplementation(), m_parallel_mng);

  info() << "INIT HDF5 READER/WRITER";
  {
    unsigned vmajor = 0;
    unsigned vminor = 0;
    unsigned vrel = 0;
    ARCANE_HDF5_MUTEX;
    ::H5get_libversion(&vmajor,&vminor,&vrel);
    info() << "HDF5 version = " << vmajor << '.' << vminor << '.' << vrel;
  }
  info() << "SubGroup is '" << m_sub_group_name <<"'";
  if (m_open_mode==OpenModeRead){
    m_file_id.openRead(m_filename);
    m_sub_group_id.recursiveOpen(m_file_id,m_sub_group_name);
  }
  else{
    // If this rank is not the one that writes, do not open the file.
    if (m_send_rank!=m_my_rank)
      return;
    if (m_open_mode == OpenModeTruncate) {
      hid_t plist_id = -1;
      {
        ARCANE_HDF5_MUTEX;
        plist_id = H5Pcreate(H5P_FILE_ACCESS);
#ifdef ARCANE_TEST_HDF5MPI
        void* arcane_comm = subDomain()->parallelMng()->getMPICommunicator();
        if (!arcane_comm)
          ARCANE_FATAL("No MPI environment available");
        MPI_Comm mpi_comm = *((MPI_Comm*)arcane_comm);
        MPI_Info mpi_info = MPI_INFO_NULL;
        //H5Pset_fapl_mpiposix(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
        H5Pset_fapl_mpio(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
        H5Pset_fclose_degree(plist_id, H5F_CLOSE_STRONG);
#endif
        // Enlarge the HDF5 metadata/raw-data chunk caches and the sieve and
        // small-data buffers to reduce the number of low-level I/O calls
        // during the checkpoint write.
        int mdc_nelmts;
        size_t rdcc_nelmts;
        size_t rdcc_nbytes;
        double rdcc_w0;
        herr_t r = H5Pget_cache(plist_id, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0);
        info() << " CACHE SIZE r=" << r << " mdc=" << mdc_nelmts
               << " rdcc=" << rdcc_nelmts << " rdcc_bytes=" << rdcc_nbytes << " w0=" << rdcc_w0;
        mdc_nelmts *= 10;
        rdcc_nelmts *= 10;
        rdcc_nbytes = 10000000;
        r = H5Pset_cache(plist_id, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
        info() << " SET CACHE SIZE R1=" << r;
        //r = H5Pset_fapl_stdio(plist_id);
        //info() << " R2=" << r;
        hsize_t sieve_buf = (1024 << 12);
        r = H5Pset_sieve_buf_size(plist_id, sieve_buf);
        info() << " SIEVE_BUF=" << sieve_buf << " r=" << r;
        hsize_t small_block_size = 0;
        r = H5Pget_small_data_block_size(plist_id, &small_block_size);
        info() << " SMALL BLOCK SIZE=" << small_block_size;
        small_block_size <<= 10;
        r = H5Pset_small_data_block_size(plist_id, small_block_size);
        info() << " SET SMALL BLOCK SIZE s=" << small_block_size << " r=" << r;
      }

      m_file_id.openTruncate(m_filename,plist_id);
    }
    else if (m_open_mode==OpenModeAppend){
      m_file_id.openAppend(m_filename);
    }
    if (m_sub_group_name!="/"){
      // Recreate the sub-group from scratch: delete a possible previous
      // incarnation (index wrap-around), then create it.
      m_sub_group_id.checkDelete(m_file_id,m_sub_group_name);
      m_sub_group_id.recursiveCreate(m_file_id,m_sub_group_name);
    }
    else
      m_sub_group_id.open(m_file_id,m_sub_group_name);
  }
  if (m_file_id.isBad())
    ARCANE_THROW(ReaderWriterException,"Unable to open file '{0}'",m_filename);

  if (m_sub_group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",m_sub_group_name);

  if (m_open_mode==OpenModeRead){
    // Collect the names of the variables stored in the sub-group; each name
    // is appended to m_variables_name by iterateMe().
    int index = 0;
    ARCANE_HDF5_MUTEX;
    //H5Giterate(m_sub_group_id.id(),"Variables",&index,_Hdf5ReaderWriterIterateMe,this);
    H5Giterate(m_file_id.id(),m_sub_group_name.localstr(),&index,_Hdf5ReaderWriterIterateMe,this);
  }
}
232
233
234/*---------------------------------------------------------------------------*/
235/*---------------------------------------------------------------------------*/
236
237Hdf5ReaderWriter::
238~Hdf5ReaderWriter()
239{
240}
241
242/*---------------------------------------------------------------------------*/
243/*---------------------------------------------------------------------------*/
244
245void Hdf5ReaderWriter::
246_checkValid()
247{
248 if (m_is_initialized)
249 return;
250 fatal() << "Use of a Hdf5ReaderWriter instance not initialized";
251}
252
253/*---------------------------------------------------------------------------*/
254/*---------------------------------------------------------------------------*/
255
256String Hdf5ReaderWriter::
257_variableGroupName(IVariable* var)
258{
259 return var->fullName();
260}
261
262/*---------------------------------------------------------------------------*/
263/*---------------------------------------------------------------------------*/
269void Hdf5ReaderWriter::
270_writeValParallel(IVariable* v,const ISerializedData* sdata)
271{
273 sb.setMode(ISerializer::ModeReserve);
274 sb.reserve(DT_Int32,1); // Pour indiquer la fin des envois
275 sb.reserve(v->fullName());
277 sb.reserve(DT_Int32,1); // Pour indiquer le rand duquel le message provient
278 sdata->serialize(&sb);
279 sb.allocateBuffer();
281 sb.putInt32(1); // Indique qu'il s'agit d'un message non vide
282 sb.put(v->fullName());
284 sb.put(m_my_rank);
285 sdata->serialize(&sb);
286 m_parallel_mng->sendSerializer(&sb,m_send_rank);
287}
288
289/*---------------------------------------------------------------------------*/
290/*---------------------------------------------------------------------------*/
291
/*!
 * \brief Reads variable \a v into \a data after checking initialization.
 */
void Hdf5ReaderWriter::
_directReadVal(IVariable* v,IData* data)
{
  _checkValid();
  info(4) << "DIRECT READ VAL v=" << v->name();
  _readVal(v,data);
}
299
300/*---------------------------------------------------------------------------*/
301/*---------------------------------------------------------------------------*/
302
303void Hdf5ReaderWriter::
304_directWriteVal(IVariable* v,IData* data)
305{
306 _checkValid();
307 Ref<ISerializedData> sdata(data->createSerializedDataRef(false));
308 if (m_is_parallel && m_send_rank!=m_my_rank){
309 _writeValParallel(v,sdata.get());
310 }
311 else{
312 _writeVal(v->fullName(),m_sub_group_name,sdata.get());
313 }
314}
315
316/*---------------------------------------------------------------------------*/
317/*---------------------------------------------------------------------------*/
318
319static herr_t
320_Hdf5ReaderWriterIterateMe(hid_t g,const char* mn,void* ptr)
321{
322 Hdf5ReaderWriter* rw = reinterpret_cast<Hdf5ReaderWriter*>(ptr);
323 return rw->iterateMe(g,mn);
324}
325
326/*---------------------------------------------------------------------------*/
327/*---------------------------------------------------------------------------*/
328
329herr_t Hdf5ReaderWriter::
330iterateMe(hid_t group_id,const char* member_name)
331{
332 ARCANE_UNUSED(group_id);
333 m_variables_name.add(StringView(member_name));
334 return 0;
335}
336
337/*---------------------------------------------------------------------------*/
338/*---------------------------------------------------------------------------*/
339
/*!
 * \brief Writes the serialized data \a sdata under group \a var_group_name.
 *
 * Layout per variable: a "Dims" attribute of VARIABLE_INFO_SIZE Int64 values
 * (fixed fields then shape extents), an optional "Dim2" dataset with the
 * extents array, and a "Values" dataset with the raw data.
 *
 * \param from_rank rank the data comes from (0 when written locally).
 * \throws ReaderWriterException on any HDF5 failure.
 */
void Hdf5ReaderWriter::
_writeVal(const String& var_group_name,
          const String& sub_group_name,
          const ISerializedData* sdata,
          const Int32 from_rank)
{
  // True when the write index wraps around (modulo active): previous dumps
  // already created the groups/attributes, which then need to be replaced.
  const bool hits_modulo=(m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
  Timer::Sentry ts(&m_io_timer); // accumulate the I/O time in m_io_timer

  info(4) << " SDATA name=" << var_group_name << " nb_element=" << sdata->nbElement()
          << " dim=" << sdata->nbDimension() << " datatype=" << sdata->baseDataType()
          << " nb_basic_element=" << sdata->nbBaseElement()
          << " is_multi=" << sdata->isMultiSize()
          << " dimensions_size=" << sdata->extents().size()
          << " memory_size=" << sdata->memorySize()
          << " bytes_size=" << sdata->constBytes().size()
          << " shape=" << sdata->shape().dimensions();

  Integer nb_dimension = sdata->nbDimension();
  Int64ConstArrayView dimensions = sdata->extents();

  hid_t save_typeid = m_types.saveType(sdata->baseDataType());
  hid_t trueid = m_types.nativeType(sdata->baseDataType());
  const void* ptr = sdata->constBytes().data();
  Int64 nb_base_element = sdata->nbBaseElement();

  HGroup var_base_group;
  var_base_group.recursiveCreate(m_file_id,sub_group_name);

  // Create the group containing the variable's information
  HGroup group_id;
  group_id.recursiveCreate(var_base_group,var_group_name);
  if (group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",var_group_name);

  Int64 nb_element = sdata->nbElement();
  bool is_multi_size = sdata->isMultiSize();
  Int64 dim2_size = 0;
  Int64 dim1_size = 0;
  if (nb_dimension==2 && !is_multi_size){
    dim1_size = dimensions[0];
    dim2_size = dimensions[1];
  }
  Integer dimension_array_size = dimensions.size();

  // Save the information about the variable sizes and dimensions.
  // The layout here must stay in sync with what _readDim2() reads back.
  {
    hsize_t att_dims[1];
    att_dims[0] = VARIABLE_INFO_SIZE;
    HSpace space_id;
    space_id.createSimple(1,att_dims);
    std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
    SmallSpan<Int64> dim_val(dim_val_buf);
    dim_val.fill(0);

    dim_val[0] = nb_dimension;
    dim_val[1] = dim1_size;
    dim_val[2] = dim2_size;
    dim_val[3] = nb_element;
    dim_val[4] = nb_base_element;
    dim_val[5] = dimension_array_size;
    dim_val[6] = is_multi_size ? 1 : 0;
    dim_val[7] = sdata->baseDataType();
    dim_val[8] = sdata->memorySize();
    {
      // Slot 9 holds the shape rank, slots 10.. the shape extents.
      ArrayShape shape = sdata->shape();
      Int32 shape_nb_dim = shape.nbDimension();
      auto shape_dims = shape.dimensions();
      dim_val[9] = shape_nb_dim;
      for (Integer i=0; i<shape_nb_dim; ++i )
        dim_val[10+i] = shape_dims[i];
    }
    HAttribute att_id;
    // On index wrap-around a previous dump left a "Dims" attribute behind:
    // remove it before recreating it (only needed for remote data).
    if (m_is_parallel && hits_modulo && (from_rank!=0))
      att_id.remove(group_id,"Dims");
    // The Int64 arguments below (dim1_size/dim2_size) only select the
    // Int64 overload of saveType()/nativeType(); their value is irrelevant.
    att_id.create(group_id,"Dims",m_types.saveType(dim1_size),space_id);
    herr_t herr = att_id.write(m_types.nativeType(dim2_size),dim_val.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
  }

  // If the variable is a two-dimension array, save the sizes of the
  // second dimension of each element.
  if (dimension_array_size!=0){
    hsize_t att_dims[1];
    att_dims[0] = dimension_array_size;
    HSpace space_id;
    HDataset array_id;
    space_id.createSimple(1,att_dims);
    array_id.recursiveCreate(group_id,"Dim2",m_types.saveType(dim1_size),space_id,H5P_DEFAULT);
    herr_t herr = array_id.write(m_types.nativeType(dim1_size),dimensions.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
  }

  // Now save the values if necessary
  if (nb_base_element!=0 && ptr!=nullptr){
    debug(Trace::High) << "Variable " << var_group_name << " begin dumped (nb_base_element=" << nb_base_element << ").";
    hsize_t dims[1];
    dims[0] = nb_base_element;
    HSpace space_id;
    space_id.createSimple(1,dims);
    if (space_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",var_group_name);

    HDataset dataset_id;
    hid_t plist_id = H5P_DEFAULT;

#if 0
    // Disabled experiment: chunked storage for large datasets.
    if (nb_element>=10000){
      ARCANE_HDF5_MUTEX
      plist_id = H5Pcreate(H5P_DATASET_CREATE);
      hsize_t chunk_dim[1];
      chunk_dim[0] = (4096 << 1);
      herr_t r = H5Pset_chunk(plist_id,1,chunk_dim);
      info() << " SET CHUNK FOR " << var_group_name << " s=" << nb_element;
    }
#endif
    dataset_id.recursiveCreate(group_id,"Values",save_typeid,space_id,plist_id);
    if (dataset_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);

    herr_t herr = dataset_id.write(trueid,ptr);
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dataset written for variable '{0}'",var_group_name);
  }
}
467
468/*---------------------------------------------------------------------------*/
469/*---------------------------------------------------------------------------*/
470
/*!
 * \brief Reads the size/dimension information of variable \a var.
 *
 * Reads the "Dims" attribute (fixed layout written by _writeVal()) and,
 * when present, the "Dim2" dataset holding the extents array, then builds
 * the corresponding ISerializedData (values are not read here).
 *
 * \throws ReaderWriterException if the variable was not dumped or the
 * stored information is inconsistent.
 */
Ref<ISerializedData> Hdf5ReaderWriter::
_readDim2(IVariable* var)
{
  const int max_dim = 256; // Maximum number of dimensions of HDF arrays
  String vname = _variableGroupName(var);
  info(4) << " READ DIM name=" << vname;
  Int64 dimension_array_size = 0;
  Int64 nb_element = 0;
  Integer nb_dimension = -1;
  // Check that the corresponding name is in the list of variables.
  // If it is not, it means the array was not saved and therefore
  // its dimensions are null.
  {
    bool is_found = false;
    // StringList::Enumerator idiom: ++i both advances and reports validity.
    for( StringList::Enumerator i(m_variables_name); ++i; )
      if (*i==vname){
        is_found = true;
        break;
      }
    if (!is_found)
      ARCANE_THROW(ReaderWriterException,"No HDF5 group named '{0} exists",vname);
  }

  // Open the group containing the variable's information
  HGroup group_id;
  //group_id.open(m_variable_group_id,vname);
  group_id.open(m_sub_group_id,vname);
  if (group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",vname);

  bool is_multi_size = false;
  eDataType data_type = DT_Unknown;
  Int64 memory_size = 0;
  Int64 nb_base_element = 0;
  Int64 dim1_size = 0;
  Int64 dim2_size = 0;
  UniqueArray<Int64> dims;
  ArrayShape data_shape;

  // Fetch the information about the variable sizes and dimensions
  {
    HAttribute att_id;
    att_id.open(group_id,"Dims");
    HSpace space_id = att_id.getSpace();

    // A single dimension is expected and the number of elements of the
    // attribute (hdf_dims[0]) must be equal to VARIABLE_INFO_SIZE.
    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    {
      ARCANE_HDF5_MUTEX;
      H5Sget_simple_extent_dims(space_id.id(), hdf_dims, max_dims);
    }

    if (hdf_dims[0]!=VARIABLE_INFO_SIZE)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions for variable '{0}' (found={1} expected={2})",
                   vname, hdf_dims[0], VARIABLE_INFO_SIZE);

    std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
    att_id.read(m_types.nativeType(Int64()),dim_val_buf.data());

    SmallSpan<const Int64> dim_val(dim_val_buf);

    // Fixed slot layout: must mirror the writes in _writeVal().
    nb_dimension = CheckedConvert::toInteger(dim_val[0]);
    dim1_size = dim_val[1];
    dim2_size = dim_val[2];
    nb_element = dim_val[3];
    nb_base_element = dim_val[4];
    dimension_array_size = dim_val[5];
    is_multi_size = dim_val[6]!=0;
    data_type = (eDataType)dim_val[7];
    memory_size = dim_val[8];
    Int32 shape_nb_dim = CheckedConvert::toInt32(dim_val[9]);
    data_shape.setNbDimension(shape_nb_dim);
    for (Integer i=0; i<shape_nb_dim; ++i )
      data_shape.setDimension(i,CheckedConvert::toInt32(dim_val[10+i]));
  }

  info(4) << " READ DIM name=" << vname
          << " nb_dim=" << nb_dimension << " dim1_size=" << dim1_size
          << " dim2_size=" << dim2_size << " nb_element=" << nb_element
          << " dimension_size=" << dimension_array_size
          << " is_multi_size=" << is_multi_size
          << " data_type" << data_type
          << " shape=" << data_shape.dimensions();

  if (dimension_array_size>0){
    // Read the extents array stored in the "Dim2" dataset.
    HDataset array_id;
    array_id.open(group_id,"Dim2");
    if (array_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",vname);

    HSpace space_id = array_id.getSpace();
    if (space_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",vname);

    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    {
      ARCANE_HDF5_MUTEX;
      H5Sget_simple_extent_dims(space_id.id(), hdf_dims, max_dims);
    }
    // Check that the number of elements of the dataset matches the
    // expected one.
    if ((Int64)hdf_dims[0]!=dimension_array_size){
      ARCANE_THROW(ReaderWriterException,"Wrong number of elements in 'Dim2' for variable '{0}' (found={1} expected={2})",
                   vname, hdf_dims[0], dimension_array_size);

    }
    dim2_size = 0;
    dims.resize(dimension_array_size);
    herr_t herr = array_id.read(m_types.nativeType(Int64()),dims.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dataset read for variable '{0}'",vname);
  }
  Ref<ISerializedData> sdata = arcaneCreateSerializedDataRef(data_type,memory_size,nb_dimension,nb_element,
                                                             nb_base_element,is_multi_size,dims,data_shape);
  return sdata;
}
590
591/*---------------------------------------------------------------------------*/
592/*---------------------------------------------------------------------------*/
593
594/*---------------------------------------------------------------------------*/
595/*---------------------------------------------------------------------------*/
596
//! IDataWriter entry point: writes variable \a v (delegates to _directWriteVal()).
void Hdf5ReaderWriter::
write(IVariable* v,IData* data)
{
  _directWriteVal(v,data);
}
602
603/*---------------------------------------------------------------------------*/
604/*---------------------------------------------------------------------------*/
605
606void Hdf5ReaderWriter::
607_readVal(IVariable* v,IData* data)
608{
609 String var_group_name = _variableGroupName(v);
610 info(4) << " TRY TO READ var_group=" << var_group_name;
611 Ref<ISerializedData> sd(_readDim2(v));
612 Int64 storage_size = sd->memorySize();
613 info(4) << " READ DATA n=" << storage_size;
614 data->allocateBufferForSerializedData(sd.get());
615 if (storage_size!=0){
616 // Récupère le groupe contenant les informations de la variable
617 HGroup group_id;
618 //group_id.open(m_variable_group_id,var_group_name);
619 group_id.open(m_sub_group_id,var_group_name);
620 if (group_id.isBad())
621 ARCANE_THROW(ReaderWriterException,"No HDF5 group with name '{0}' exists",var_group_name);
622 HDataset dataset_id;
623 dataset_id.open(group_id,"Values");
624 if (dataset_id.isBad())
625 ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);
626 void* ptr = sd->writableBytes().data();
627 info() << "READ Variable " << var_group_name << " ptr=" << ptr;;
628 hid_t trueid = m_types.nativeType(sd->baseDataType());
629 dataset_id.read(trueid,ptr);
630 }
631 data->assignSerializedData(sd.get());
632}
633
634
635/*---------------------------------------------------------------------------*/
636/*---------------------------------------------------------------------------*/
637
//! IDataReader entry point: reads variable \a var (delegates to _directReadVal()).
void Hdf5ReaderWriter::
read(IVariable* var,IData* data)
{
  _directReadVal(var,data);
}
643
644/*---------------------------------------------------------------------------*/
645/*---------------------------------------------------------------------------*/
646
647void Hdf5ReaderWriter::
648setMetaData(const String& meta_data)
649{
650 if (m_is_parallel){
652 //Integer nb_rank = pm->commSize();
653 if (m_send_rank!=m_my_rank){
654 // Envoie le groupe et les meta donnees
656 sb.setMode(ISerializer::ModeReserve);
658 sb.reserve(meta_data);
659 sb.allocateBuffer();
662 sb.put(meta_data);
663 m_parallel_mng->sendSerializer(&sb,m_send_rank);
664 }
665 else{
666 _setMetaData(meta_data,m_sub_group_name);
667 for( Integer i=m_send_rank+1; i<=m_last_recv_rank; ++i ){
669 pm->recvSerializer(&sb,i);
671 String remote_group_name;
672 String remote_meta_data;
673 sb.get(remote_group_name);
674 sb.get(remote_meta_data);
675 _setMetaData(remote_meta_data,remote_group_name);
676 }
677 }
678 }
679 else
680 _setMetaData(meta_data,m_sub_group_name);
681}
682
683/*---------------------------------------------------------------------------*/
684/*---------------------------------------------------------------------------*/
685
/*!
 * \brief Writes \a meta_data as a byte array in the "MetaData" dataset of
 * group \a sub_group_name.
 *
 * \throws ReaderWriterException on any HDF5 failure.
 */
void Hdf5ReaderWriter::
_setMetaData(const String& meta_data,const String& sub_group_name)
{
  // When the write index wraps around (modulo active) a previous dump
  // already created the group and its dataset: reopen and recreate instead
  // of creating from scratch.
  const bool hits_modulo=(m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
  HGroup base_group;
  if (hits_modulo)
    base_group.recursiveOpen(m_file_id,sub_group_name);
  else
    base_group.recursiveCreate(m_file_id,sub_group_name);

  Span<const Byte> meta_data_bytes = meta_data.bytes();
  const Byte* _meta_data = meta_data_bytes.data();
  hsize_t dims[1];
  dims[0] = meta_data_bytes.size();

  HSpace space_id;
  space_id.createSimple(1,dims);
  if (space_id.isBad())
    throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");

  HDataset dataset_id;
  if (hits_modulo)
    dataset_id.recursiveCreate(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
  else
    dataset_id.create(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
  if (dataset_id.isBad())
    throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");

  herr_t herr = dataset_id.write(m_types.nativeType(Byte()),_meta_data);
  if (herr<0)
    throw ReaderWriterException(A_FUNCINFO,"Unable to write meta-data ('MetaData')");
}
718
719/*---------------------------------------------------------------------------*/
720/*---------------------------------------------------------------------------*/
721
/*!
 * \brief Reads back the meta-data stored in the "MetaData" dataset.
 *
 * \return the meta-data bytes converted to a String.
 * \throws ReaderWriterException if the dataset is missing or empty.
 */
String Hdf5ReaderWriter::
metaData()
{
  HDataset dataset_id;
  dataset_id.open(m_sub_group_id,"MetaData");
  if (dataset_id.isBad()){
    throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");
  }
  HSpace space_id = dataset_id.getSpace();
  if (space_id.isBad()){
    throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");
  }
  const int max_dim = 256; // Maximum number of dimensions of HDF arrays
  hsize_t hdf_dims[max_dim];
  hsize_t max_dims[max_dim];
  {
    ARCANE_HDF5_MUTEX;
    H5Sget_simple_extent_dims(space_id.id(), hdf_dims, max_dims);
  }
  // An empty dataset means the meta-data was never written.
  if (hdf_dims[0]<=0)
    throw ReaderWriterException(A_FUNCINFO,"Wrong number of elements for meta-data ('MetaData')");
  Integer nb_byte = static_cast<Integer>(hdf_dims[0]);
  ByteUniqueArray uchars(nb_byte);
  dataset_id.read(m_types.nativeType(Byte()),uchars.data());
  String s(uchars);
  return s;
}
749
750/*---------------------------------------------------------------------------*/
751/*---------------------------------------------------------------------------*/
752
753void Hdf5ReaderWriter::
754endWrite()
755{
756 if (m_is_parallel){
757 if (m_my_rank==m_send_rank){
758 _receiveRemoteVariables();
759 }
760 else{
761 // Envoie un message de fin
762 SerializeBuffer sb;
763 sb.setMode(ISerializer::ModeReserve);
764 sb.reserve(DT_Int32,1); // Pour indiquer la fin des envoies
765 sb.allocateBuffer();
766 sb.setMode(ISerializer::ModePut);
767 sb.putInt32(0); // Indique qu'il s'agit d'un message de fin
768 m_parallel_mng->sendSerializer(&sb,m_send_rank);
769 }
770 }
771 {
772 info() << " Hdf5Timer: nb_activated=" << m_io_timer.nbActivated()
773 << " time=" << m_io_timer.totalTime();
774 }
775}
776
777/*---------------------------------------------------------------------------*/
778/*---------------------------------------------------------------------------*/
779
780void Hdf5ReaderWriter::
781_receiveRemoteVariables()
782{
783 IParallelMng* pm = m_parallel_mng;
784 Integer nb_remaining = m_last_recv_rank - m_send_rank;
785 info() << "NB REMAINING = " << nb_remaining;
786 Ref<ISerializeMessageList> m_messages(pm->createSerializeMessageListRef());
787 while(nb_remaining>0){
788 ScopedPtrT<ISerializeMessage> sm(new SerializeMessage(m_my_rank,NULL_SUB_DOMAIN_ID,ISerializeMessage::MT_Recv));
789 m_messages->addMessage(sm.get());
790 m_messages->processPendingMessages();
791 m_messages->waitMessages(Parallel::WaitAll);
792 ISerializer* sb = sm->serializer();
793 sb->setMode(ISerializer::ModeGet);
794 Int32 id = sb->getInt32();
795 if (id==0)
796 --nb_remaining;
797 else
798 _writeRemoteVariable(sb);
799 }
800}
801
802/*---------------------------------------------------------------------------*/
803/*---------------------------------------------------------------------------*/
804
805void Hdf5ReaderWriter::
806_writeRemoteVariable(ISerializer* sb)
807{
808 String var_name;
809 sb->get(var_name);
810 String group_name;
811 sb->get(group_name);
812 Int32 rank = sb->getInt32();
813 //warning()<<"[\33[46;30m_writeRemoteVariable\33[m] rank="<<rank;
814 Ref<ISerializedData> sdata = arcaneCreateEmptySerializedDataRef();
815 sb->setReadMode(ISerializer::ReadReplace);
816 sdata->serialize(sb);
817 _writeVal(var_name,group_name,sdata.get(),rank);
818}
819
820/*---------------------------------------------------------------------------*/
821/*---------------------------------------------------------------------------*/
822
823/*---------------------------------------------------------------------------*/
824/*---------------------------------------------------------------------------*/
/*!
 * \brief Checkpoint/restart service based on Hdf5ReaderWriter.
 *
 * NOTE(review): this extraction is missing the class base list and the first
 * line of the constructor initializer list (presumably the .axl-generated
 * base class constructed from \a sbi, which also provides info(),
 * subDomain(), options(), fileName(), readerMetaData(), currentIndex(),
 * baseDirectoryName(), ... used below) — confirm against the original source.
 */
class ArcaneHdf5CheckpointService2
{
 public:
  // Constructor: no reader/writer yet; one file per rank by default
  // (m_fileset_size==1), no index modulo.
  ArcaneHdf5CheckpointService2(const ServiceBuildInfo& sbi)
  , m_write_index(0)
  , m_writer(nullptr)
  , m_reader(nullptr)
  , m_fileset_size(1)
  , m_index_modulo(0)
  {}

  virtual IDataWriter* dataWriter() { return m_writer; }
  virtual IDataReader* dataReader() { return m_reader; }

  virtual void notifyBeginWrite();
  virtual void notifyEndWrite();
  virtual void notifyBeginRead();
  virtual void notifyEndRead();
  virtual void close() {}
  virtual String readerServiceName() const { return "ArcaneHdf5CheckpointReader2"; }

 private:

  Integer m_write_index;      // index of the next checkpoint write
  Hdf5ReaderWriter* m_writer; // active writer (owned), null outside a write phase
  Hdf5ReaderWriter* m_reader; // active reader (owned), null outside a read phase
  Integer m_fileset_size;     // number of ranks grouped per file (0 = single file)
  Integer m_index_modulo;     // wrap-around period for checkpoint indexes (0 = none)

 private:

  // Builds the default dump file name: "arcanedump[.<group id>][_r<replica>].h5".
  String _defaultFileName()
  {
    info() << "USE DEFAULT FILE NAME";
    IParallelMng* pm = subDomain()->parallelMng();
    Integer rank = pm->commRank();
    StringBuilder buf;

    // Add the processor number if needed
    if (pm->isParallel()){
      // All ranks of a fileset share the file of the first rank of the group.
      Integer file_id = rank;
      if (m_fileset_size!=0)
        file_id = (rank / m_fileset_size) * m_fileset_size;
      buf = "arcanedump.";
      buf += file_id;
    }
    else{
      buf = "arcanedump";
    }

    // Add the replica number if needed
    IParallelReplication* pr = subDomain()->parallelMng()->replication();
    if (pr->hasReplication()){
      buf += "_r";
      buf += pr->replicationRank();
    }

    buf += ".h5";
    return buf.toString();
  }

  // Default dump directory (the service's base directory).
  Directory _defaultDirectory(){
    return Directory(baseDirectoryName());
  }
  void _parseMetaData(String meta_data);
};
896
897/*---------------------------------------------------------------------------*/
898/*---------------------------------------------------------------------------*/
899
900void ArcaneHdf5CheckpointService2::
901_parseMetaData(String meta_data)
902{
903 IIOMng* io_mng = subDomain()->ioMng();
904 ScopedPtrT<IXmlDocumentHolder> xml_doc(io_mng->parseXmlBuffer(meta_data.utf8(),"MetaData"));
905 XmlNode root = xml_doc->documentNode().documentElement();
906 Integer version = root.attr("version").valueAsInteger();
907 if (version!=1){
908 throw ReaderWriterException(A_FUNCINFO,"Bad version (expected 1)");
909 }
910 {
911 Integer fileset_size = root.child("fileset-size").valueAsInteger();
912 if (fileset_size<0) fileset_size = 0;
913 m_fileset_size = fileset_size;
914 }
915 {
916 Integer index_modulo = root.child("index-modulo").valueAsInteger();
917 if (index_modulo<0) index_modulo = 0;
918 m_index_modulo=index_modulo;
919 }
920 info() << " FileSet size=" << m_fileset_size;
921 info() << " Index modulo=" << m_index_modulo;
922}
923
924/*---------------------------------------------------------------------------*/
925/*---------------------------------------------------------------------------*/
926
927void ArcaneHdf5CheckpointService2::
928notifyBeginRead()
929{
930 String meta_data = readerMetaData();
931 _parseMetaData(meta_data);
932
933 info() << " GET META DATA READER " << readerMetaData()
934 << " filename=" << fileName();
935
936 if (fileName().null()){
937 Directory dump_dir(_defaultDirectory());
938 setFileName(dump_dir.file(_defaultFileName()));
939 }
940 info() << " READ CHECKPOINT FILENAME = " << fileName();
941 StringBuilder sub_group;
942 sub_group = "SubDomain";
943 sub_group += subDomain()->subDomainId();
944 sub_group += "/Index";
945
946 Integer index = currentIndex();
947 if (m_index_modulo!=0)
948 index %= m_index_modulo;
949 sub_group += index;
950
951 m_reader = new Hdf5ReaderWriter(subDomain(),
952 fileName(),
953 sub_group.toString(),
954 0,
955 currentIndex(),
956 m_index_modulo,
957 Hdf5ReaderWriter::OpenModeRead);
958 m_reader->initialize();
959}
960
961/*---------------------------------------------------------------------------*/
962/*---------------------------------------------------------------------------*/
963
964void ArcaneHdf5CheckpointService2::
965notifyEndRead()
966{
967 delete m_reader;
968 m_reader = 0;
969}
970
971/*---------------------------------------------------------------------------*/
972/*---------------------------------------------------------------------------*/
973
974void ArcaneHdf5CheckpointService2::
975notifyBeginWrite()
976{
977 if (options()){
978 // Récupération du nombre de fichiers par groupe
979 m_fileset_size = options()->filesetSize();
980 // Récupération du nombre d'indexes au maximum par fichiers
981 m_index_modulo = options()->indexModulo();
982 }
983
984 if (fileName().null()){
985 Directory dump_dir(_defaultDirectory());
986 setFileName(dump_dir.file(_defaultFileName()));
987 }
988 Hdf5ReaderWriter::eOpenMode open_mode = Hdf5ReaderWriter::OpenModeAppend;
989 Integer write_index = checkpointTimes().size();
990 --write_index;
991
992 if (write_index==0)
993 open_mode = Hdf5ReaderWriter::OpenModeTruncate;
994
995 // Test de l'option m_index_modulo pour savoir la profondeur du modulo
996 if (m_index_modulo!=0)
997 write_index%=m_index_modulo;
998
999 StringBuilder sub_group;
1000 sub_group = "SubDomain";
1001 sub_group += subDomain()->parallelMng()->commRank();
1002 sub_group += "/Index";
1003 sub_group += write_index;
1004
1005 m_writer = new Hdf5ReaderWriter(subDomain(),
1006 fileName(),
1007 sub_group,
1008 m_fileset_size,
1009 checkpointTimes().size()-1,
1010 m_index_modulo,
1011 open_mode);
1012 m_writer->initialize();
1013}
1014
1015
1016/*---------------------------------------------------------------------------*/
1017/*---------------------------------------------------------------------------*/
1018
1019void ArcaneHdf5CheckpointService2::
1020notifyEndWrite()
1021{
1022 OStringStream ostr;
1023 ostr() << "<infos version='1'>\n";
1024 ostr() << " <fileset-size>" << m_fileset_size << "</fileset-size>\n";
1025 ostr() << " <index-modulo>" << m_index_modulo << "</index-modulo>\n";
1026 ostr() << "</infos>\n";
1027 setReaderMetaData(ostr.str());
1028 ++m_write_index;
1029 delete m_writer;
1030 m_writer = 0;
1031}
1032
1033/*---------------------------------------------------------------------------*/
1034/*---------------------------------------------------------------------------*/
1035
1037 ServiceProperty("ArcaneHdf5CheckpointReader2",ST_SubDomain),
1039
1041 ServiceProperty("ArcaneHdf5CheckpointWriter2",ST_SubDomain),
1043
1044ARCANE_REGISTER_SERVICE_HDF5READERWRITER(ArcaneHdf5Checkpoint2,
1046
1047/*---------------------------------------------------------------------------*/
1048/*---------------------------------------------------------------------------*/
1049
1050} // End namespace Arcane
1051
1052/*---------------------------------------------------------------------------*/
1053/*---------------------------------------------------------------------------*/
#define ARCANE_THROW(exception_class,...)
Macro pour envoyer une exception avec formatage.
#define ARCANE_FATAL(...)
Macro envoyant une exception FatalErrorException.
#define ARCANE_SERVICE_INTERFACE(ainterface)
Macro pour déclarer une interface lors de l'enregistrement d'un service.
Protection/reprise au format ArcaneHdf5.
virtual IDataReader * dataReader()
Retourne le lecteur associé
virtual String readerServiceName() const
Nom du service du lecteur associé à cet écrivain.
virtual IDataWriter * dataWriter()
Retourne l'écrivain associé.
virtual void close()
Ferme les protections.
CaseOptionsHdf5ReaderWriter * options() const
Options du jeu de données du service.
ArcaneHdf5ReaderWriterObject(const Arcane::ServiceBuildInfo &sbi)
Constructeur.
const T * data() const
Accès à la racine du tableau hors toute protection.
void put(Span< const Real > values) override
Ajoute le tableau values.
void reserve(eBasicDataType dt, Int64 n) override
Réserve de la mémoire pour n objets de type dt.
void putInt32(Int32 value) override
Ajoute l'entier value.
void allocateBuffer() override
Alloue la mémoire du sérialiseur.
void get(ArrayView< Real > values) override
Récupère le tableau values.
void setMode(eMode new_mode) override
Positionne le fonctionnement actuel.
String readerMetaData() const override
Méta données pour le lecteur associé à cet écrivain.
void setReaderMetaData(const String &s) override
Méta données associées à ce lecteur.
void setFileName(const String &file_name) override
Positionne le nom du fichier de la protection.
RealConstArrayView checkpointTimes() const override
Temps des protections.
String fileName() const override
Nom du fichier de la protection.
constexpr const_pointer data() const noexcept
Pointeur sur la mémoire allouée.
Classe gérant un répertoire.
Definition Directory.h:35
String file(const String &file_name) const override
Retourne le chemin complet du fichier file_name dans le répertoire.
Definition Directory.cc:120
Lecture/Ecriture au format HDF5.
bool m_is_parallel
Mode parallèle actif: ATTENTION: en cours de test uniquement.
String m_sub_group_name
Nom du fichier.
IParallelMng * m_parallel_mng
Gestionnaire du parallélisme;.
Hdf5Utils::HGroup m_sub_group_id
Identifiant HDF du groupe contenant la protection.
Encapsule un hid_t pour un dataset.
Encapsule un hid_t pour un groupe.
Encapsule un hid_t pour un dataspace.
Interface du service de lecture d'une protection/reprise.
Interface du service d'écriture d'une protection/reprise.
Interface de lecture des données d'une variable.
Definition IDataReader.h:34
Interface d'écriture des données d'une variable.
Definition IDataWriter.h:44
Interface d'une donnée.
Definition IData.h:33
virtual void allocateBufferForSerializedData(ISerializedData *sdata)=0
Alloue la mémoire pour lire les valeurs sérialisées sdata.
virtual void assignSerializedData(const ISerializedData *sdata)=0
Assigne à la donnée les valeurs sérialisées sdata.
Interface du gestionnaire de parallélisme pour un sous-domaine.
virtual Int32 commRank() const =0
Rang de cette instance dans le communicateur.
virtual bool isParallel() const =0
Retourne true si l'exécution est parallèle.
Interface d'une donnée sérialisée.
virtual void serialize(ISerializer *buffer)=0
Serialize en lecture ou écriture la donnée.
@ ModePut
Le sérialiseur attend des reserve()
Interface du gestionnaire d'un sous-domaine.
Definition ISubDomain.h:74
Interface d'une variable.
Definition IVariable.h:39
virtual String fullName() const =0
Nom complet de la variable (avec le préfixe de la famille)
virtual String name() const =0
Nom de la variable.
Flot de sortie lié à une String.
Exception dans un lecteur ou écrivain.
Référence à une instance.
Implémentation d'un tampon pour la sérialisation.
Structure contenant les informations pour créer un service.
Propriétés de création d'un service.
constexpr __host__ __device__ pointer data() const noexcept
Pointeur sur le début de la vue.
Definition Span.h:537
constexpr __host__ __device__ SizeType size() const noexcept
Retourne la taille du tableau.
Definition Span.h:325
Vue d'un tableau d'éléments de type T.
Definition Span.h:633
Constructeur de chaîne de caractère unicode.
String toString() const
Retourne la chaîne de caractères construite.
Chaîne de caractères unicode.
Span< const Byte > bytes() const
Retourne la conversion de l'instance dans l'encodage UTF-8.
Definition String.cc:292
Gestion d'un timer.
Definition Timer.h:62
TraceMessage info() const
Flot pour un message d'information.
ITraceMng * traceMng() const
Gestionnaire de trace.
#define ARCANE_REGISTER_SERVICE(aclass, a_service_property,...)
Macro pour enregistrer un service.
Fonctions utilitaires pour Hdf5.
Definition Hdf5Utils.cc:34
-*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
Ref< ISerializedData > arcaneCreateSerializedDataRef(eDataType data_type, Int64 memory_size, Integer nb_dim, Int64 nb_element, Int64 nb_base_element, bool is_multi_size, Int64ConstArrayView dimensions)
Créé des données sérialisées.
std::int64_t Int64
Type entier signé sur 64 bits.
Int32 Integer
Type représentant un entier.
@ ST_SubDomain
Le service s'utilise au niveau du sous-domaine.
ConstArrayView< Int64 > Int64ConstArrayView
Equivalent C d'un tableau à une dimension d'entiers 64 bits.
Definition UtilsTypes.h:480
UniqueArray< Byte > ByteUniqueArray
Tableau dynamique à une dimension de caractères.
Definition UtilsTypes.h:335
Ref< ISerializedData > arcaneCreateEmptySerializedDataRef()
Créé des données sérialisées.
unsigned char Byte
Type d'un octet.
Definition BaseTypes.h:43
eDataType
Type d'une donnée.
Definition DataTypes.h:39
@ DT_Int32
Donnée de type entier 32 bits.
Definition DataTypes.h:43
@ DT_Unknown
Donnée de type inconnue ou non initialisée.
Definition DataTypes.h:56
std::int32_t Int32
Type entier signé sur 32 bits.