Arcane v3.16.6.0
Developer documentation
Hdf5ReaderWriter.cc
1// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
2//-----------------------------------------------------------------------------
3// Copyright 2000-2025 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
4// See the top-level COPYRIGHT file for details.
5// SPDX-License-Identifier: Apache-2.0
6//-----------------------------------------------------------------------------
7/*---------------------------------------------------------------------------*/
8/* Hdf5ReaderWriter.cc (C) 2000-2025 */
9/* */
10/* Lecture/Ecriture au format HDF5. */
11/*---------------------------------------------------------------------------*/
12/*---------------------------------------------------------------------------*/
13
14#include "arcane/utils/String.h"
15#include "arcane/utils/StringBuilder.h"
16#include "arcane/utils/OStringStream.h"
17#include "arcane/utils/ScopedPtr.h"
18#include "arcane/utils/List.h"
19#include "arcane/utils/ITraceMng.h"
20#include "arcane/utils/CheckedConvert.h"
21#include "arcane/utils/ArrayShape.h"
22
23#include "arcane/core/ISubDomain.h"
24#include "arcane/core/StdNum.h"
25#include "arcane/core/IVariable.h"
26#include "arcane/core/CheckpointService.h"
27#include "arcane/core/Directory.h"
28#include "arcane/core/IParallelMng.h"
29#include "arcane/core/IParallelReplication.h"
30#include "arcane/core/ArcaneException.h"
31#include "arcane/core/VerifierService.h"
32#include "arcane/core/IVariableMng.h"
33#include "arcane/core/FactoryService.h"
34#include "arcane/core/IData.h"
35#include "arcane/core/Timer.h"
36#include "arcane/core/ISerializedData.h"
37#include "arcane/core/IIOMng.h"
38#include "arcane/core/IXmlDocumentHolder.h"
39
40#include "arcane/datatype/DataTypeTraits.h"
41
42#include "arcane/core/SerializeBuffer.h"
43#include "arcane/core/ISerializeMessageList.h"
44#include "arcane/core/internal/SerializeMessage.h"
45
46#include "arcane/hdf5/Hdf5ReaderWriter.h"
47
48#include "arcane/hdf5/Hdf5ReaderWriter_axl.h"
49
50#include <array>
51//#define ARCANE_TEST_HDF5MPI
52
53/*---------------------------------------------------------------------------*/
54/*---------------------------------------------------------------------------*/
55
56namespace Arcane
57{
58
59/*---------------------------------------------------------------------------*/
60/*---------------------------------------------------------------------------*/
61
62using namespace Hdf5Utils;
63
64static herr_t _Hdf5ReaderWriterIterateMe(hid_t,const char*,void*);
65
66/*---------------------------------------------------------------------------*/
67/*---------------------------------------------------------------------------*/
68
69namespace
70{
71constexpr Int32 VARIABLE_INFO_SIZE = 10 + ArrayShape::MAX_NB_DIMENSION;
72}
73
74/*---------------------------------------------------------------------------*/
75/*---------------------------------------------------------------------------*/
76
77Hdf5ReaderWriter::
78Hdf5ReaderWriter(ISubDomain* sd,const String& filename,
79 const String& sub_group_name,
80 Integer fileset_size, Integer currentIndex, Integer index_modulo,
81 eOpenMode open_mode,[[maybe_unused]] bool do_verif)
82: TraceAccessor(sd->traceMng())
83, m_parallel_mng(sd->parallelMng())
84, m_open_mode(open_mode)
85, m_filename(filename)
86, m_sub_group_name(sub_group_name)
87, m_is_initialized(false)
88, m_io_timer(sd,"Hdf5Timer",Timer::TimerReal)
89, m_is_parallel(false)
90, m_my_rank(m_parallel_mng->commRank())
91, m_send_rank(m_my_rank)
92, m_last_recv_rank(m_my_rank)
93, m_fileset_size(fileset_size)
94, m_index_write(currentIndex)
95, m_index_modulo(index_modulo)
96{
97
98 if (m_fileset_size!=1 && m_parallel_mng->isParallel()){
99 m_is_parallel = true;
100 Integer nb_rank = m_parallel_mng->commSize();
101 if (m_fileset_size==0){
102 m_send_rank = 0;
103 m_last_recv_rank = nb_rank;
104 }
105 else{
106 m_send_rank = (m_my_rank / m_fileset_size) * m_fileset_size;
107 m_last_recv_rank = m_send_rank + m_fileset_size;
108 if (m_last_recv_rank>nb_rank)
109 m_last_recv_rank = nb_rank;
110 --m_last_recv_rank;
111 }
112 }
113 sd->traceMng()->info() << " INFOS PARALLEL: my_rank=" << m_my_rank
114 << " send_rank=" << m_send_rank
115 << " last_recv_rank=" << m_last_recv_rank
116 << " filename=" << filename;
117}
118
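// A minimal standalone sketch of the fileset rank grouping computed in the
// constructor above (hypothetical helper name, plain integer arithmetic, no Arcane
// API). With fileset_size==0 a single file is written and rank 0 gathers every
// other rank; otherwise ranks are grouped in blocks of fileset_size and the first
// rank of each block performs the actual I/O.
static void _sketchFilesetRanks(int my_rank, int nb_rank, int fileset_size,
                                int& send_rank, int& last_recv_rank)
{
  send_rank = my_rank;
  last_recv_rank = my_rank;
  if (fileset_size == 1 || nb_rank == 1)
    return; // one file per rank: nothing is forwarded
  if (fileset_size == 0) {
    send_rank = 0;            // single file: rank 0 receives from all other ranks
    last_recv_rank = nb_rank; // mirrors the constructor (not decremented in this branch)
    return;
  }
  send_rank = (my_rank / fileset_size) * fileset_size; // first rank of the block
  last_recv_rank = send_rank + fileset_size;
  if (last_recv_rank > nb_rank)
    last_recv_rank = nb_rank;
  --last_recv_rank; // inclusive last rank of the block
}
// Example: my_rank=9, nb_rank=10, fileset_size=4 gives send_rank=8, last_recv_rank=9.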
119
120/*---------------------------------------------------------------------------*/
121/*---------------------------------------------------------------------------*/
122void Hdf5ReaderWriter::
123initialize()
124{
125 if (m_is_initialized)
126 return;
127 m_is_initialized = true;
128 HInit();
129 info() << "INIT HDF5 READER/WRITER";
130 {
131 unsigned vmajor = 0;
132 unsigned vminor = 0;
133 unsigned vrel = 0;
134 ::H5get_libversion(&vmajor,&vminor,&vrel);
135 info() << "HDF5 version = " << vmajor << '.' << vminor << '.' << vrel;
136 }
137 info() << "SubGroup is '" << m_sub_group_name <<"'";
138 if (m_open_mode==OpenModeRead){
139 m_file_id.openRead(m_filename);
140 m_sub_group_id.recursiveOpen(m_file_id,m_sub_group_name);
141 }
142 else{
143 // Si ce n'est pas moi qui écrit, n'ouvre pas le fichier
144 if (m_send_rank!=m_my_rank)
145 return;
146 if (m_open_mode==OpenModeTruncate){
147 hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
148#ifdef ARCANE_TEST_HDF5MPI
149 void* arcane_comm = subDomain()->parallelMng()->getMPICommunicator();
150 if (!arcane_comm)
151 ARCANE_FATAL("No MPI environment available");
152 MPI_Comm mpi_comm = *((MPI_Comm*)arcane_comm);
153 MPI_Info mpi_info = MPI_INFO_NULL;
154 //H5Pset_fapl_mpiposix(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
155 H5Pset_fapl_mpio(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
156 H5Pset_fclose_degree(plist_id,H5F_CLOSE_STRONG);
157#endif
158 int mdc_nelmts;
159 size_t rdcc_nelmts;
160 size_t rdcc_nbytes;
161 double rdcc_w0;
162 herr_t r = H5Pget_cache(plist_id,&mdc_nelmts,&rdcc_nelmts,&rdcc_nbytes,&rdcc_w0);
163 info() << " CACHE SIZE r=" << r << " mdc=" << mdc_nelmts
164 << " rdcc=" << rdcc_nelmts << " rdcc_bytes=" << rdcc_nbytes << " w0=" << rdcc_w0;
165 mdc_nelmts *= 10;
166 rdcc_nelmts *= 10;
167 rdcc_nbytes = 10000000;
168 r = H5Pset_cache(plist_id,mdc_nelmts,rdcc_nelmts,rdcc_nbytes,rdcc_w0);
169 info() << " SET CACHE SIZE R1=" << r;
170 //r = H5Pset_fapl_stdio(plist_id);
171 //info() << " R2=" << r;
172 hsize_t sieve_buf = (1024 << 12);
173 r = H5Pset_sieve_buf_size(plist_id,sieve_buf);
174 info() << " SIEVE_BUF=" << sieve_buf << " r=" << r;
175 hsize_t small_block_size = 0;
176 r = H5Pget_small_data_block_size(plist_id,&small_block_size);
177 info() << " SMALL BLOCK SIZE=" << small_block_size;
178 small_block_size <<= 10;
179 r = H5Pset_small_data_block_size(plist_id,small_block_size);
180 info() << " SET SMALL BLOCK SIZE s=" << small_block_size << " r=" << r;
181
182 m_file_id.openTruncate(m_filename,plist_id);
183 }
184 else if (m_open_mode==OpenModeAppend){
185 m_file_id.openAppend(m_filename);
186 }
187 if (m_sub_group_name!="/"){
188 m_sub_group_id.checkDelete(m_file_id,m_sub_group_name);
189 m_sub_group_id.recursiveCreate(m_file_id,m_sub_group_name);
190 }
191 else
192 m_sub_group_id.open(m_file_id,m_sub_group_name);
193 }
194 if (m_file_id.isBad())
195 ARCANE_THROW(ReaderWriterException,"Unable to open file '{0}'",m_filename);
196
197 if (m_sub_group_id.isBad())
198 ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",m_sub_group_name);
199
200 if (m_open_mode==OpenModeRead){
201 int index = 0;
202 //H5Giterate(m_sub_group_id.id(),"Variables",&index,_Hdf5ReaderWriterIterateMe,this);
203 H5Giterate(m_file_id.id(),m_sub_group_name.localstr(),&index,_Hdf5ReaderWriterIterateMe,this);
204 }
205}
206
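// A condensed standalone sketch of the file-access tuning done above for
// OpenModeTruncate, using only the raw HDF5 C API (assumes <hdf5.h>, which the
// Arcane HDF5 headers already pull in). The file name and the scaled sizes are
// illustrative values, not the ones computed by Arcane.
static hid_t _sketchCreateTunedFile(const char* path)
{
  hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);

  // Query the default metadata/raw-data cache settings, then enlarge them.
  int mdc_nelmts = 0;
  size_t rdcc_nslots = 0;
  size_t rdcc_nbytes = 0;
  double rdcc_w0 = 0.0;
  H5Pget_cache(fapl, &mdc_nelmts, &rdcc_nslots, &rdcc_nbytes, &rdcc_w0);
  H5Pset_cache(fapl, mdc_nelmts * 10, rdcc_nslots * 10, 10000000, rdcc_w0);

  // Larger sieve buffer and small-data block to reduce the number of small writes.
  H5Pset_sieve_buf_size(fapl, 1024 << 12);
  H5Pset_small_data_block_size(fapl, 2048 << 10);

  hid_t file = H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
  H5Pclose(fapl);
  return file; // the caller is expected to close it with H5Fclose()
}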
207
208/*---------------------------------------------------------------------------*/
209/*---------------------------------------------------------------------------*/
210
211Hdf5ReaderWriter::
212~Hdf5ReaderWriter()
213{
214}
215
216/*---------------------------------------------------------------------------*/
217/*---------------------------------------------------------------------------*/
218
219void Hdf5ReaderWriter::
220_checkValid()
221{
222 if (m_is_initialized)
223 return;
224 fatal() << "Use of a Hdf5ReaderWriter instance not initialized";
225}
226
227/*---------------------------------------------------------------------------*/
228/*---------------------------------------------------------------------------*/
229
230String Hdf5ReaderWriter::
231_variableGroupName(IVariable* var)
232{
233 return var->fullName();
234}
235
236/*---------------------------------------------------------------------------*/
237/*---------------------------------------------------------------------------*/
243void Hdf5ReaderWriter::
244_writeValParallel(IVariable* v,const ISerializedData* sdata)
245{
247 sb.setMode(ISerializer::ModeReserve);
248 sb.reserve(DT_Int32,1); // Pour indiquer la fin des envois
249 sb.reserve(v->fullName());
251 sb.reserve(DT_Int32,1); // Pour indiquer le rand duquel le message provient
252 sdata->serialize(&sb);
253 sb.allocateBuffer();
255 sb.putInt32(1); // Indique qu'il s'agit d'un message non vide
256 sb.put(v->fullName());
258 sb.put(m_my_rank);
259 sdata->serialize(&sb);
260 m_parallel_mng->sendSerializer(&sb,m_send_rank);
261}
262
263/*---------------------------------------------------------------------------*/
264/*---------------------------------------------------------------------------*/
265
266void Hdf5ReaderWriter::
267_directReadVal(IVariable* v,IData* data)
268{
269 _checkValid();
270 info(4) << "DIRECT READ VAL v=" << v->name();
271 _readVal(v,data);
272}
273
274/*---------------------------------------------------------------------------*/
275/*---------------------------------------------------------------------------*/
276
277void Hdf5ReaderWriter::
278_directWriteVal(IVariable* v,IData* data)
279{
280 _checkValid();
281 Ref<ISerializedData> sdata(data->createSerializedDataRef(false));
282 if (m_is_parallel && m_send_rank!=m_my_rank){
283 _writeValParallel(v,sdata.get());
284 }
285 else{
286 _writeVal(v->fullName(),m_sub_group_name,sdata.get());
287 }
288}
289
290/*---------------------------------------------------------------------------*/
291/*---------------------------------------------------------------------------*/
292
293static herr_t
294_Hdf5ReaderWriterIterateMe(hid_t g,const char* mn,void* ptr)
295{
296 Hdf5ReaderWriter* rw = reinterpret_cast<Hdf5ReaderWriter*>(ptr);
297 return rw->iterateMe(g,mn);
298}
299
300/*---------------------------------------------------------------------------*/
301/*---------------------------------------------------------------------------*/
302
303herr_t Hdf5ReaderWriter::
304iterateMe(hid_t group_id,const char* member_name)
305{
306 ARCANE_UNUSED(group_id);
307 m_variables_name.add(StringView(member_name));
308 return 0;
309}
310
311/*---------------------------------------------------------------------------*/
312/*---------------------------------------------------------------------------*/
313
314void Hdf5ReaderWriter::
315_writeVal(const String& var_group_name,
316 const String& sub_group_name,
317 const ISerializedData* sdata,
318 const Int32 from_rank)
319{
320 const bool hits_modulo=(m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
321 Timer::Sentry ts(&m_io_timer);
322
323 info(4) << " SDATA name=" << var_group_name << " nb_element=" << sdata->nbElement()
324 << " dim=" << sdata->nbDimension() << " datatype=" << sdata->baseDataType()
325 << " nb_basic_element=" << sdata->nbBaseElement()
326 << " is_multi=" << sdata->isMultiSize()
327 << " dimensions_size=" << sdata->extents().size()
328 << " memory_size=" << sdata->memorySize()
329 << " bytes_size=" << sdata->constBytes().size()
330 << " shape=" << sdata->shape().dimensions();
331
332 Integer nb_dimension = sdata->nbDimension();
333 Int64ConstArrayView dimensions = sdata->extents();
334
335 hid_t save_typeid = m_types.saveType(sdata->baseDataType());
336 hid_t trueid = m_types.nativeType(sdata->baseDataType());
337 const void* ptr = sdata->constBytes().data();
338 Int64 nb_base_element = sdata->nbBaseElement();
339
340 HGroup var_base_group;
341 var_base_group.recursiveCreate(m_file_id,sub_group_name);
342
343 // Création du groupe contenant les informations de la variable
344 HGroup group_id;
345 group_id.recursiveCreate(var_base_group,var_group_name);
346 if (group_id.isBad())
347 ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",var_group_name);
348
349 Int64 nb_element = sdata->nbElement();
350 bool is_multi_size = sdata->isMultiSize();
351 Int64 dim2_size = 0;
352 Int64 dim1_size = 0;
353 if (nb_dimension==2 && !is_multi_size){
354 dim1_size = dimensions[0];
355 dim2_size = dimensions[1];
356 }
357 Integer dimension_array_size = dimensions.size();
358
359 // Sauve les informations concernant les tailles et dimensions de la variable
360 {
361 hsize_t att_dims[1];
362 att_dims[0] = VARIABLE_INFO_SIZE;
363 HSpace space_id;
364 space_id.createSimple(1,att_dims);
365 std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
366 SmallSpan<Int64> dim_val(dim_val_buf);
367 dim_val.fill(0);
368
369 dim_val[0] = nb_dimension;
370 dim_val[1] = dim1_size;
371 dim_val[2] = dim2_size;
372 dim_val[3] = nb_element;
373 dim_val[4] = nb_base_element;
374 dim_val[5] = dimension_array_size;
375 dim_val[6] = is_multi_size ? 1 : 0;
376 dim_val[7] = sdata->baseDataType();
377 dim_val[8] = sdata->memorySize();
378 {
379 ArrayShape shape = sdata->shape();
380 Int32 shape_nb_dim = shape.nbDimension();
381 auto shape_dims = shape.dimensions();
382 dim_val[9] = shape_nb_dim;
383 for (Integer i=0; i<shape_nb_dim; ++i )
384 dim_val[10+i] = shape_dims[i];
385 }
386 HAttribute att_id;
387 if (m_is_parallel && hits_modulo && (from_rank!=0))
388 att_id.remove(group_id,"Dims");
389 att_id.create(group_id,"Dims",m_types.saveType(dim1_size),space_id);
390 herr_t herr = att_id.write(m_types.nativeType(dim2_size),dim_val.data());
391 if (herr<0)
392 ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
393 }
394
395 // Si la variable est de type tableau à deux dimensions, sauve les
396 // tailles de la deuxième dimension par élément.
397 if (dimension_array_size!=0){
398 hsize_t att_dims[1];
399 att_dims[0] = dimension_array_size;
400 HSpace space_id;
401 HDataset array_id;
402 space_id.createSimple(1,att_dims);
403 array_id.recursiveCreate(group_id,"Dim2",m_types.saveType(dim1_size),space_id,H5P_DEFAULT);
404 herr_t herr = array_id.write(m_types.nativeType(dim1_size),dimensions.data());
405 if (herr<0)
406 ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
407 }
408
409 // Maintenant, sauve les valeurs si necessaire
410 if (nb_base_element!=0 && ptr!=nullptr){
411 debug(Trace::High) << "Variable " << var_group_name << " being dumped (nb_base_element=" << nb_base_element << ").";
412 hsize_t dims[1];
413 dims[0] = nb_base_element;
414 HSpace space_id;
415 space_id.createSimple(1,dims);
416 if (space_id.isBad())
417 ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",var_group_name);
418
419 HDataset dataset_id;
420 hid_t plist_id = H5P_DEFAULT;
421
422#if 0
423 if (nb_element>=10000){
424 plist_id = H5Pcreate(H5P_DATASET_CREATE);
425 hsize_t chunk_dim[1];
426 chunk_dim[0] = (4096 << 1);
427 herr_t r = H5Pset_chunk(plist_id,1,chunk_dim);
428 info() << " SET CHUNK FOR " << var_group_name << " s=" << nb_element;
429 }
430#endif
431 dataset_id.recursiveCreate(group_id,"Values",save_typeid,space_id,plist_id);
432 if (dataset_id.isBad())
433 ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);
434
435 herr_t herr = dataset_id.write(trueid,ptr);
436 if (herr<0)
437 ARCANE_THROW(ReaderWriterException,"Wrong dataset written for variable '{0}'",var_group_name);
438 }
439}
440
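// Slot layout of the fixed-size Int64 "Dims" attribute written above. The enum is
// purely illustrative (it restates the assignments made in _writeVal and read back
// in _readDim2); the shape extents occupy the slots from 10 onwards.
enum SketchDimsSlot
{
  kNbDimension = 0,        // sdata->nbDimension()
  kDim1Size = 1,           // extent 0 (non-zero only for 2D, non multi-size data)
  kDim2Size = 2,           // extent 1 (non-zero only for 2D, non multi-size data)
  kNbElement = 3,          // sdata->nbElement()
  kNbBaseElement = 4,      // sdata->nbBaseElement()
  kDimensionArraySize = 5, // number of entries of the optional "Dim2" dataset
  kIsMultiSize = 6,        // 1 if each item has its own size
  kBaseDataType = 7,       // eDataType of the underlying values
  kMemorySize = 8,         // sdata->memorySize()
  kShapeNbDimension = 9    // rank of the shape stored from slot 10 on
};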
441/*---------------------------------------------------------------------------*/
442/*---------------------------------------------------------------------------*/
443
444Ref<ISerializedData> Hdf5ReaderWriter::
445_readDim2(IVariable* var)
446{
447 const int max_dim = 256; // Nombre maxi de dimensions des tableaux HDF
448 String vname = _variableGroupName(var);
449 info(4) << " READ DIM name=" << vname;
450 Int64 dimension_array_size = 0;
451 Int64 nb_element = 0;
452 Integer nb_dimension = -1;
453 // Regarde si le nom correspondant est dans la liste des variables.
454 // S'il n'y est pas, cela signifie que le tableau n'a pas été sauvé et
455 // donc que ses dimensions sont nulles.
456 {
457 bool is_found = false;
458 for( StringList::Enumerator i(m_variables_name); ++i; )
459 if (*i==vname){
460 is_found = true;
461 break;
462 }
463 if (!is_found)
464 ARCANE_THROW(ReaderWriterException,"No HDF5 group named '{0}' exists",vname);
465 }
466
467 // Récupère le groupe contenant les informations de la variable
468 HGroup group_id;
469 //group_id.open(m_variable_group_id,vname);
470 group_id.open(m_sub_group_id,vname);
471 if (group_id.isBad())
472 ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",vname);
473
474 bool is_multi_size = false;
475 eDataType data_type = DT_Unknown;
476 Int64 memory_size = 0;
477 Int64 nb_base_element = 0;
478 Int64 dim1_size = 0;
479 Int64 dim2_size = 0;
480 UniqueArray<Int64> dims;
481 ArrayShape data_shape;
482
483 // Récupère les informations concernant les tailles et dimensions de la variable
484 {
485 HAttribute att_id;
486 att_id.open(group_id,"Dims");
487 HSpace space_id = att_id.getSpace();
488
489 // On attend une seule dimension, et le nombre d'eléments de
490 // l'attribut (hdf_dims[0]) doit être égal à 1 ou 2.
491 hsize_t hdf_dims[max_dim];
492 hsize_t max_dims[max_dim];
493 H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
494
495 if (hdf_dims[0]!=VARIABLE_INFO_SIZE)
496 ARCANE_THROW(ReaderWriterException,"Wrong dimensions for variable '{0}' (found={1} expected={2})",
497 vname, hdf_dims[0], VARIABLE_INFO_SIZE);
498
499 std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
500 att_id.read(m_types.nativeType(Int64()),dim_val_buf.data());
501
502 SmallSpan<const Int64> dim_val(dim_val_buf);
503
504 nb_dimension = CheckedConvert::toInteger(dim_val[0]);
505 dim1_size = dim_val[1];
506 dim2_size = dim_val[2];
507 nb_element = dim_val[3];
508 nb_base_element = dim_val[4];
509 dimension_array_size = dim_val[5];
510 is_multi_size = dim_val[6]!=0;
511 data_type = (eDataType)dim_val[7];
512 memory_size = dim_val[8];
513 Int32 shape_nb_dim = CheckedConvert::toInt32(dim_val[9]);
514 data_shape.setNbDimension(shape_nb_dim);
515 for (Integer i=0; i<shape_nb_dim; ++i )
516 data_shape.setDimension(i,CheckedConvert::toInt32(dim_val[10+i]));
517 }
518
519 info(4) << " READ DIM name=" << vname
520 << " nb_dim=" << nb_dimension << " dim1_size=" << dim1_size
521 << " dim2_size=" << dim2_size << " nb_element=" << nb_element
522 << " dimension_size=" << dimension_array_size
523 << " is_multi_size=" << is_multi_size
524 << " data_type" << data_type
525 << " shape=" << data_shape.dimensions();
526
527 if (dimension_array_size>0){
528 HDataset array_id;
529 array_id.open(group_id,"Dim2");
530 if (array_id.isBad())
531 ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",vname);
532
533 HSpace space_id = array_id.getSpace();
534 if (space_id.isBad())
535 ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",vname);
536
537 hsize_t hdf_dims[max_dim];
538 hsize_t max_dims[max_dim];
539 H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
540 // Vérifie que le nombre d'éléments du dataset est bien égal à celui
541 // attendu.
542 if ((Int64)hdf_dims[0]!=dimension_array_size){
543 ARCANE_THROW(ReaderWriterException,"Wrong number of elements in 'Dim2' for variable '{0}' (found={1} expected={2})",
544 vname, hdf_dims[0], dimension_array_size);
545
546 }
547 dim2_size = 0;
548 dims.resize(dimension_array_size);
549 herr_t herr = array_id.read(m_types.nativeType(Int64()),dims.data());
550 if (herr<0)
551 ARCANE_THROW(ReaderWriterException,"Wrong dataset read for variable '{0}'",vname);
552 }
553 Ref<ISerializedData> sdata = arcaneCreateSerializedDataRef(data_type,memory_size,nb_dimension,nb_element,
554 nb_base_element,is_multi_size,dims,data_shape);
555 return sdata;
556}
557
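// What _readDim2() does for the "Dims" attribute, reduced to the raw HDF5 C API
// (hypothetical helper; the caller passes the already opened variable group and a
// buffer of VARIABLE_INFO_SIZE elements).
static herr_t _sketchReadDims(hid_t variable_group, long long* out, hsize_t expected_size)
{
  hid_t attr = H5Aopen(variable_group, "Dims", H5P_DEFAULT);
  hid_t space = H5Aget_space(attr);
  hsize_t dims[1] = { 0 };
  H5Sget_simple_extent_dims(space, dims, nullptr);
  // Refuse attributes whose element count does not match the expected layout.
  herr_t err = (dims[0] == expected_size) ? H5Aread(attr, H5T_NATIVE_LLONG, out) : -1;
  H5Sclose(space);
  H5Aclose(attr);
  return err;
}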
558/*---------------------------------------------------------------------------*/
559/*---------------------------------------------------------------------------*/
560
561/*---------------------------------------------------------------------------*/
562/*---------------------------------------------------------------------------*/
563
564void Hdf5ReaderWriter::
565write(IVariable* v,IData* data)
566{
567 _directWriteVal(v,data);
568}
569
570/*---------------------------------------------------------------------------*/
571/*---------------------------------------------------------------------------*/
572
573void Hdf5ReaderWriter::
574_readVal(IVariable* v,IData* data)
575{
576 String var_group_name = _variableGroupName(v);
577 info(4) << " TRY TO READ var_group=" << var_group_name;
578 Ref<ISerializedData> sd(_readDim2(v));
579 Int64 storage_size = sd->memorySize();
580 info(4) << " READ DATA n=" << storage_size;
581 data->allocateBufferForSerializedData(sd.get());
582 if (storage_size!=0){
583 // Récupère le groupe contenant les informations de la variable
584 HGroup group_id;
585 //group_id.open(m_variable_group_id,var_group_name);
586 group_id.open(m_sub_group_id,var_group_name);
587 if (group_id.isBad())
588 ARCANE_THROW(ReaderWriterException,"No HDF5 group with name '{0}' exists",var_group_name);
589 HDataset dataset_id;
590 dataset_id.open(group_id,"Values");
591 if (dataset_id.isBad())
592 ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);
593 void* ptr = sd->writableBytes().data();
594 info() << "READ Variable " << var_group_name << " ptr=" << ptr;;
595 hid_t trueid = m_types.nativeType(sd->baseDataType());
596 dataset_id.read(trueid,ptr);
597 }
598 data->assignSerializedData(sd.get());
599}
600
601
602/*---------------------------------------------------------------------------*/
603/*---------------------------------------------------------------------------*/
604
605void Hdf5ReaderWriter::
606read(IVariable* var,IData* data)
607{
608 _directReadVal(var,data);
609}
610
611/*---------------------------------------------------------------------------*/
612/*---------------------------------------------------------------------------*/
613
614void Hdf5ReaderWriter::
615setMetaData(const String& meta_data)
616{
617 if (m_is_parallel){
618 IParallelMng* pm = m_parallel_mng;
619 //Integer nb_rank = pm->commSize();
620 if (m_send_rank!=m_my_rank){
621 // Envoie le groupe et les meta donnees
622 SerializeBuffer sb;
623 sb.setMode(ISerializer::ModeReserve);
624 sb.reserve(m_sub_group_name);
625 sb.reserve(meta_data);
626 sb.allocateBuffer();
627 sb.setMode(ISerializer::ModePut);
628 sb.put(m_sub_group_name);
629 sb.put(meta_data);
630 m_parallel_mng->sendSerializer(&sb,m_send_rank);
631 }
632 else{
633 _setMetaData(meta_data,m_sub_group_name);
634 for( Integer i=m_send_rank+1; i<=m_last_recv_rank; ++i ){
635 SerializeBuffer sb;
636 pm->recvSerializer(&sb,i);
637 sb.setMode(ISerializer::ModeGet);
638 String remote_group_name;
639 String remote_meta_data;
640 sb.get(remote_group_name);
641 sb.get(remote_meta_data);
642 _setMetaData(remote_meta_data,remote_group_name);
643 }
644 }
645 }
646 else
647 _setMetaData(meta_data,m_sub_group_name);
648}
649
650/*---------------------------------------------------------------------------*/
651/*---------------------------------------------------------------------------*/
652
653void Hdf5ReaderWriter::
654_setMetaData(const String& meta_data,const String& sub_group_name)
655{
656 const bool hits_modulo=(m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
657 HGroup base_group;
658 if (hits_modulo)
659 base_group.recursiveOpen(m_file_id,sub_group_name);
660 else
661 base_group.recursiveCreate(m_file_id,sub_group_name);
662
663 Span<const Byte> meta_data_bytes = meta_data.bytes();
664 const Byte* _meta_data = meta_data_bytes.data();
665 hsize_t dims[1];
666 dims[0] = meta_data_bytes.size();
667
668 HSpace space_id;
669 space_id.createSimple(1,dims);
670 if (space_id.isBad())
671 throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");
672
673 HDataset dataset_id;
674 if (hits_modulo)
675 dataset_id.recursiveCreate(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
676 else
677 dataset_id.create(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
678 if (dataset_id.isBad())
679 throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");
680
681 herr_t herr = dataset_id.write(m_types.nativeType(Byte()),_meta_data);
682 if (herr<0)
683 throw ReaderWriterException(A_FUNCINFO,"Unable to write meta-data ('MetaData')");
684}
685
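// The "MetaData" dataset written above, reduced to the raw HDF5 C API: the XML
// meta-data string is stored as a plain 1D dataset of bytes (hypothetical helper,
// error handling omitted).
static herr_t _sketchWriteMetaData(hid_t group, const unsigned char* bytes, hsize_t nb_byte)
{
  hsize_t dims[1] = { nb_byte };
  hid_t space = H5Screate_simple(1, dims, nullptr);
  hid_t dset = H5Dcreate2(group, "MetaData", H5T_NATIVE_UCHAR, space,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  herr_t err = H5Dwrite(dset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, bytes);
  H5Dclose(dset);
  H5Sclose(space);
  return err;
}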
686/*---------------------------------------------------------------------------*/
687/*---------------------------------------------------------------------------*/
688
689String Hdf5ReaderWriter::
690metaData()
691{
692 HDataset dataset_id;
693 dataset_id.open(m_sub_group_id,"MetaData");
694 if (dataset_id.isBad()){
695 throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");
696 }
697 HSpace space_id = dataset_id.getSpace();
698 if (space_id.isBad()){
699 throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");
700 }
701 const int max_dim = 256;
702 hsize_t hdf_dims[max_dim];
703 hsize_t max_dims[max_dim];
704 H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
705 if (hdf_dims[0]<=0)
706 throw ReaderWriterException(A_FUNCINFO,"Wrong number of elements for meta-data ('MetaData')");
707 Integer nb_byte = static_cast<Integer>(hdf_dims[0]);
708 ByteUniqueArray uchars(nb_byte);
709 dataset_id.read(m_types.nativeType(Byte()),uchars.data());
710 String s(uchars);
711 return s;
712}
713
714/*---------------------------------------------------------------------------*/
715/*---------------------------------------------------------------------------*/
716
717void Hdf5ReaderWriter::
718endWrite()
719{
720 if (m_is_parallel){
721 if (m_my_rank==m_send_rank){
722 _receiveRemoteVariables();
723 }
724 else{
725 // Envoie un message de fin
726 SerializeBuffer sb;
727 sb.setMode(ISerializer::ModeReserve);
728 sb.reserve(DT_Int32,1); // Pour indiquer la fin des envois
729 sb.allocateBuffer();
730 sb.setMode(ISerializer::ModePut);
731 sb.putInt32(0); // Indique qu'il s'agit d'un message de fin
732 m_parallel_mng->sendSerializer(&sb,m_send_rank);
733 }
734 }
735 {
736 info() << " Hdf5Timer: nb_activated=" << m_io_timer.nbActivated()
737 << " time=" << m_io_timer.totalTime();
738 }
739}
740
741/*---------------------------------------------------------------------------*/
742/*---------------------------------------------------------------------------*/
743
744void Hdf5ReaderWriter::
745_receiveRemoteVariables()
746{
747 IParallelMng* pm = m_parallel_mng;
748 Integer nb_remaining = m_last_recv_rank - m_send_rank;
749 info() << "NB REMAINING = " << nb_remaining;
750 Ref<ISerializeMessageList> m_messages(pm->createSerializeMessageListRef());
751 while(nb_remaining>0){
752 ScopedPtrT<ISerializeMessage> sm(new SerializeMessage(m_my_rank,NULL_SUB_DOMAIN_ID,ISerializeMessage::MT_Recv));
753 m_messages->addMessage(sm.get());
754 m_messages->processPendingMessages();
755 m_messages->waitMessages(Parallel::WaitAll);
756 ISerializer* sb = sm->serializer();
757 sb->setMode(ISerializer::ModeGet);
758 Int32 id = sb->getInt32();
759 if (id==0)
760 --nb_remaining;
761 else
762 _writeRemoteVariable(sb);
763 }
764}
765
766/*---------------------------------------------------------------------------*/
767/*---------------------------------------------------------------------------*/
768
769void Hdf5ReaderWriter::
770_writeRemoteVariable(ISerializer* sb)
771{
772 String var_name;
773 sb->get(var_name);
774 String group_name;
775 sb->get(group_name);
776 Int32 rank = sb->getInt32();
777 //warning()<<"[\33[46;30m_writeRemoteVariable\33[m] rank="<<rank;
778 Ref<ISerializedData> sdata = arcaneCreateEmptySerializedDataRef();
779 sb->setReadMode(ISerializer::ReadReplace);
780 sdata->serialize(sb);
781 _writeVal(var_name,group_name,sdata.get(),rank);
782}
783
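// Message layout used between a writer rank and the other ranks of its fileset
// (this restates _writeValParallel, endWrite and _receiveRemoteVariables; the enum
// itself is illustrative). Every received serializer starts with an Int32 tag:
enum SketchRemoteMessageTag
{
  kEndOfVariables = 0,  // sent by endWrite(): the sender has no more variables
  kVariablePayload = 1  // followed by the full variable name, the group name,
                        // the sender rank and the serialized variable data
};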
784/*---------------------------------------------------------------------------*/
785/*---------------------------------------------------------------------------*/
786
787/*---------------------------------------------------------------------------*/
788/*---------------------------------------------------------------------------*/
792class ArcaneHdf5CheckpointService2
793: public ArcaneHdf5ReaderWriterObject
794{
795 public:
796 ArcaneHdf5CheckpointService2(const ServiceBuildInfo& sbi)
797 : ArcaneHdf5ReaderWriterObject(sbi),
798 m_write_index(0),
799 m_writer(nullptr),
800 m_reader(nullptr),
801 m_fileset_size(1),
802 m_index_modulo(0){}
803
804 virtual IDataWriter* dataWriter() { return m_writer; }
805 virtual IDataReader* dataReader() { return m_reader; }
806
807 virtual void notifyBeginWrite();
808 virtual void notifyEndWrite();
809 virtual void notifyBeginRead();
810 virtual void notifyEndRead();
811 virtual void close() {}
812 virtual String readerServiceName() const { return "ArcaneHdf5CheckpointReader2"; }
813
814 private:
815
816 Integer m_write_index;
817 Hdf5ReaderWriter* m_writer;
818 Hdf5ReaderWriter* m_reader;
819 Integer m_fileset_size;
820 Integer m_index_modulo;
821
822 private:
823
824 String _defaultFileName()
825 {
826 info() << "USE DEFAULT FILE NAME";
827 IParallelMng* pm = subDomain()->parallelMng();
828 Integer rank = pm->commRank();
829 StringBuilder buf;
830
831 // Ajoute si besoin le numero du processeur
832 if (pm->isParallel()){
833 Integer file_id = rank;
834 if (m_fileset_size!=0)
835 file_id = (rank / m_fileset_size) * m_fileset_size;
836 buf = "arcanedump.";
837 buf += file_id;
838 }
839 else{
840 buf = "arcanedump";
841 }
842
843 // Ajoute si besoin le numero du replica
844 IParallelReplication* pr = subDomain()->parallelMng()->replication();
845 if (pr->hasReplication()){
846 buf += "_r";
847 buf += pr->replicationRank();
848 }
849
850 buf += ".h5";
851 return buf.toString();
852 }
853
854 Directory _defaultDirectory(){
855 return Directory(baseDirectoryName());
856 }
857 void _parseMetaData(String meta_data);
858};
859
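// A standalone sketch of the default checkpoint file name built by
// _defaultFileName() above (hypothetical helper, std::string instead of Arcane's
// StringBuilder).
#include <string>

static std::string _sketchDefaultFileName(bool is_parallel, int rank, int fileset_size,
                                          bool has_replication, int replication_rank)
{
  std::string name = "arcanedump";
  if (is_parallel) {
    int file_id = rank;
    if (fileset_size != 0)
      file_id = (rank / fileset_size) * fileset_size; // first rank of the fileset
    name += "." + std::to_string(file_id);
  }
  if (has_replication)
    name += "_r" + std::to_string(replication_rank);
  name += ".h5";
  return name;
}
// Example: rank 9, fileset_size=4, replica rank 1 gives "arcanedump.8_r1.h5".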
860/*---------------------------------------------------------------------------*/
861/*---------------------------------------------------------------------------*/
862
863void ArcaneHdf5CheckpointService2::
864_parseMetaData(String meta_data)
865{
866 IIOMng* io_mng = subDomain()->ioMng();
867 ScopedPtrT<IXmlDocumentHolder> xml_doc(io_mng->parseXmlBuffer(meta_data.utf8(),"MetaData"));
868 XmlNode root = xml_doc->documentNode().documentElement();
869 Integer version = root.attr("version").valueAsInteger();
870 if (version!=1){
871 throw ReaderWriterException(A_FUNCINFO,"Bad version (expected 1)");
872 }
873 {
874 Integer fileset_size = root.child("fileset-size").valueAsInteger();
875 if (fileset_size<0) fileset_size = 0;
876 m_fileset_size = fileset_size;
877 }
878 {
879 Integer index_modulo = root.child("index-modulo").valueAsInteger();
880 if (index_modulo<0) index_modulo = 0;
881 m_index_modulo=index_modulo;
882 }
883 info() << " FileSet size=" << m_fileset_size;
884 info() << " Index modulo=" << m_index_modulo;
885}
886
887/*---------------------------------------------------------------------------*/
888/*---------------------------------------------------------------------------*/
889
890void ArcaneHdf5CheckpointService2::
891notifyBeginRead()
892{
893 String meta_data = readerMetaData();
894 _parseMetaData(meta_data);
895
896 info() << " GET META DATA READER " << readerMetaData()
897 << " filename=" << fileName();
898
899 if (fileName().null()){
900 Directory dump_dir(_defaultDirectory());
901 setFileName(dump_dir.file(_defaultFileName()));
902 }
903 info() << " READ CHECKPOINT FILENAME = " << fileName();
904 StringBuilder sub_group;
905 sub_group = "SubDomain";
906 sub_group += subDomain()->subDomainId();
907 sub_group += "/Index";
908
909 Integer index = currentIndex();
910 if (m_index_modulo!=0)
911 index %= m_index_modulo;
912 sub_group += index;
913
914 m_reader = new Hdf5ReaderWriter(subDomain(),
915 fileName(),
916 sub_group.toString(),
917 0,
918 currentIndex(),
919 m_index_modulo,
920 Hdf5ReaderWriter::OpenModeRead);
921 m_reader->initialize();
922}
923
924/*---------------------------------------------------------------------------*/
925/*---------------------------------------------------------------------------*/
926
927void ArcaneHdf5CheckpointService2::
928notifyEndRead()
929{
930 delete m_reader;
931 m_reader = 0;
932}
933
934/*---------------------------------------------------------------------------*/
935/*---------------------------------------------------------------------------*/
936
937void ArcaneHdf5CheckpointService2::
938notifyBeginWrite()
939{
940 if (options()){
941 // Récupération du nombre de fichiers par groupe
942 m_fileset_size = options()->filesetSize();
943 // Récupération du nombre d'indexes au maximum par fichiers
944 m_index_modulo = options()->indexModulo();
945 }
946
947 if (fileName().null()){
948 Directory dump_dir(_defaultDirectory());
949 setFileName(dump_dir.file(_defaultFileName()));
950 }
951 Hdf5ReaderWriter::eOpenMode open_mode = Hdf5ReaderWriter::OpenModeAppend;
952 Integer write_index = checkpointTimes().size();
953 --write_index;
954
955 if (write_index==0)
956 open_mode = Hdf5ReaderWriter::OpenModeTruncate;
957
958 // Test de l'option m_index_modulo pour savoir la profondeur du modulo
959 if (m_index_modulo!=0)
960 write_index%=m_index_modulo;
961
962 StringBuilder sub_group;
963 sub_group = "SubDomain";
964 sub_group += subDomain()->parallelMng()->commRank();
965 sub_group += "/Index";
966 sub_group += write_index;
967
968 m_writer = new Hdf5ReaderWriter(subDomain(),
969 fileName(),
970 sub_group,
971 m_fileset_size,
972 checkpointTimes().size()-1,
973 m_index_modulo,
974 open_mode);
975 m_writer->initialize();
976}
977
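// A sketch of the HDF5 group path used for one checkpoint in notifyBeginWrite()
// above (hypothetical helper; uses std::string / std::to_string from <string>, as
// in the earlier file-name sketch).
static std::string _sketchCheckpointGroupPath(int comm_rank, int nb_checkpoint, int index_modulo)
{
  int write_index = nb_checkpoint - 1; // index of the checkpoint being written
  if (index_modulo != 0)
    write_index %= index_modulo;       // reuse group slots cyclically inside the file
  return "SubDomain" + std::to_string(comm_rank) + "/Index" + std::to_string(write_index);
}
// Example: 12th checkpoint (nb_checkpoint=12), rank 3, index_modulo=10 -> "SubDomain3/Index1".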
978
979/*---------------------------------------------------------------------------*/
980/*---------------------------------------------------------------------------*/
981
982void ArcaneHdf5CheckpointService2::
983notifyEndWrite()
984{
985 OStringStream ostr;
986 ostr() << "<infos version='1'>\n";
987 ostr() << " <fileset-size>" << m_fileset_size << "</fileset-size>\n";
988 ostr() << " <index-modulo>" << m_index_modulo << "</index-modulo>\n";
989 ostr() << "</infos>\n";
990 setReaderMetaData(ostr.str());
991 ++m_write_index;
992 delete m_writer;
993 m_writer = 0;
994}
995
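// For reference, the reader meta-data produced by notifyEndWrite() above is a small
// XML document; with fileset_size=4 and index_modulo=0 the string handed to
// setReaderMetaData() reads:
//
//   <infos version='1'>
//    <fileset-size>4</fileset-size>
//    <index-modulo>0</index-modulo>
//   </infos>
//
// _parseMetaData() on the read side consumes exactly these two child elements.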
996/*---------------------------------------------------------------------------*/
997/*---------------------------------------------------------------------------*/
998
998
999ARCANE_REGISTER_SERVICE(ArcaneHdf5CheckpointService2,
1000 ServiceProperty("ArcaneHdf5CheckpointReader2",ST_SubDomain),
1001 ARCANE_SERVICE_INTERFACE(ICheckpointReader));
1002
1003ARCANE_REGISTER_SERVICE(ArcaneHdf5CheckpointService2,
1004 ServiceProperty("ArcaneHdf5CheckpointWriter2",ST_SubDomain),
1005 ARCANE_SERVICE_INTERFACE(ICheckpointWriter));
1006
1007ARCANE_REGISTER_SERVICE_HDF5READERWRITER(ArcaneHdf5Checkpoint2,
1008 ArcaneHdf5CheckpointService2);
1009
1010/*---------------------------------------------------------------------------*/
1011/*---------------------------------------------------------------------------*/
1012
1013} // End namespace Arcane
1014
1015/*---------------------------------------------------------------------------*/
1016/*---------------------------------------------------------------------------*/