Arcane v3.14.10.0
Developer documentation
Hdf5ReaderWriter.cc
// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
//-----------------------------------------------------------------------------
// Copyright 2000-2023 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
// See the top-level COPYRIGHT file for details.
// SPDX-License-Identifier: Apache-2.0
//-----------------------------------------------------------------------------
/*---------------------------------------------------------------------------*/
/* Hdf5ReaderWriter.cc                                         (C) 2000-2023 */
/*                                                                           */
/* Reading/writing in HDF5 format.                                           */
/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

#include "arcane/utils/String.h"
#include "arcane/utils/StringBuilder.h"
#include "arcane/utils/OStringStream.h"
#include "arcane/utils/ScopedPtr.h"
#include "arcane/utils/List.h"
#include "arcane/utils/ArcanePrecomp.h"
#include "arcane/utils/ITraceMng.h"
#include "arcane/utils/PlatformUtils.h"
#include "arcane/utils/CheckedConvert.h"
#include "arcane/utils/ArrayShape.h"

#include "arcane/Item.h"
#include "arcane/ISubDomain.h"
#include "arcane/StdNum.h"
#include "arcane/IVariable.h"
#include "arcane/CheckpointService.h"
#include "arcane/Directory.h"
#include "arcane/IParallelMng.h"
#include "arcane/IParallelReplication.h"
#include "arcane/Service.h"
#include "arcane/ArcaneException.h"
#include "arcane/ItemGroup.h"
#include "arcane/ItemEnumerator.h"
#include "arcane/VerifierService.h"
#include "arcane/IVariableMng.h"
#include "arcane/FactoryService.h"
#include "arcane/IData.h"
#include "arcane/Timer.h"
#include "arcane/ISerializedData.h"
#include "arcane/IIOMng.h"
#include "arcane/IXmlDocumentHolder.h"

#include "arcane/datatype/DataTypeTraits.h"

#include "arcane/SerializeBuffer.h"
#include "arcane/ISerializeMessageList.h"
#include "arcane/SerializeMessage.h"

#include "arcane/hdf5/Hdf5ReaderWriter.h"

#include "arcane/hdf5/Hdf5ReaderWriter_axl.h"

#include <array>
//#define ARCANE_TEST_HDF5MPI

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace Arcane
{

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

using namespace Hdf5Utils;

static herr_t _Hdf5ReaderWriterIterateMe(hid_t,const char*,void*);

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace
{
constexpr Int32 VARIABLE_INFO_SIZE = 10 + ArrayShape::MAX_NB_DIMENSION;
}
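
// The "Dims" attribute written for every variable holds VARIABLE_INFO_SIZE
// Int64 values: dimension count, dim1 and dim2 sizes, number of elements,
// number of basic elements, size of the extents array, multi-size flag,
// data type, memory size, shape rank, then up to ArrayShape::MAX_NB_DIMENSION
// shape extents (see _writeVal() and _readDim2()).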

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

Hdf5ReaderWriter::
Hdf5ReaderWriter(ISubDomain* sd,const String& filename,
                 const String& sub_group_name,
                 Integer fileset_size, Integer currentIndex, Integer index_modulo,
                 eOpenMode open_mode,[[maybe_unused]] bool do_verif)
: TraceAccessor(sd->traceMng())
, m_parallel_mng(sd->parallelMng())
, m_open_mode(open_mode)
, m_filename(filename)
, m_sub_group_name(sub_group_name)
, m_is_initialized(false)
, m_io_timer(sd,"Hdf5Timer",Timer::TimerReal)
, m_is_parallel(false)
, m_my_rank(m_parallel_mng->commRank())
, m_send_rank(m_my_rank)
, m_last_recv_rank(m_my_rank)
, m_fileset_size(fileset_size)
, m_index_write(currentIndex)
, m_index_modulo(index_modulo)
{

  if (m_fileset_size!=1 && m_parallel_mng->isParallel()){
    m_is_parallel = true;
    Integer nb_rank = m_parallel_mng->commSize();
    if (m_fileset_size==0){
      m_send_rank = 0;
      m_last_recv_rank = nb_rank;
    }
    else{
      m_send_rank = (m_my_rank / m_fileset_size) * m_fileset_size;
      m_last_recv_rank = m_send_rank + m_fileset_size;
      if (m_last_recv_rank>nb_rank)
        m_last_recv_rank = nb_rank;
      --m_last_recv_rank;
    }
  }
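  // Example of the grouping above: with fileset_size==4 and 10 ranks,
  // ranks 0-3 send to rank 0 (last_recv_rank=3), ranks 4-7 send to rank 4
  // (last_recv_rank=7) and ranks 8-9 send to rank 8 (last_recv_rank=9);
  // with fileset_size==0 every rank sends to rank 0.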
  sd->traceMng()->info() << " INFOS PARALLEL: my_rank=" << m_my_rank
                         << " send_rank=" << m_send_rank
                         << " last_recv_rank=" << m_last_recv_rank
                         << " filename=" << filename;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
void Hdf5ReaderWriter::
initialize()
{
  if (m_is_initialized)
    return;
  m_is_initialized = true;
  HInit();
  info() << "INIT HDF5 READER/WRITER";
  {
    unsigned vmajor = 0;
    unsigned vminor = 0;
    unsigned vrel = 0;
    ::H5get_libversion(&vmajor,&vminor,&vrel);
    info() << "HDF5 version = " << vmajor << '.' << vminor << '.' << vrel;
  }
  info() << "SubGroup is '" << m_sub_group_name << "'";
  if (m_open_mode==OpenModeRead){
    m_file_id.openRead(m_filename);
    m_sub_group_id.recursiveOpen(m_file_id,m_sub_group_name);
  }
  else{
    // If this rank is not the one that writes, do not open the file
    if (m_send_rank!=m_my_rank)
      return;
    if (m_open_mode==OpenModeTruncate){
      hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
#ifdef ARCANE_TEST_HDF5MPI
      void* arcane_comm = subDomain()->parallelMng()->getMPICommunicator();
      if (!arcane_comm)
        ARCANE_FATAL("No MPI environment available");
      MPI_Comm mpi_comm = *((MPI_Comm*)arcane_comm);
      MPI_Info mpi_info = MPI_INFO_NULL;
      //H5Pset_fapl_mpiposix(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
      H5Pset_fapl_mpio(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
      H5Pset_fclose_degree(plist_id,H5F_CLOSE_STRONG);
#endif
      int mdc_nelmts;
      size_t rdcc_nelmts;
      size_t rdcc_nbytes;
      double rdcc_w0;
      herr_t r = H5Pget_cache(plist_id,&mdc_nelmts,&rdcc_nelmts,&rdcc_nbytes,&rdcc_w0);
      info() << " CACHE SIZE r=" << r << " mdc=" << mdc_nelmts
             << " rdcc=" << rdcc_nelmts << " rdcc_bytes=" << rdcc_nbytes << " w0=" << rdcc_w0;
      mdc_nelmts *= 10;
      rdcc_nelmts *= 10;
      rdcc_nbytes = 10000000;
      r = H5Pset_cache(plist_id,mdc_nelmts,rdcc_nelmts,rdcc_nbytes,rdcc_w0);
      info() << " SET CACHE SIZE R1=" << r;
      //r = H5Pset_fapl_stdio(plist_id);
      //info() << " R2=" << r;
      hsize_t sieve_buf = (1024 << 12);
      r = H5Pset_sieve_buf_size(plist_id,sieve_buf);
      info() << " SIEVE_BUF=" << sieve_buf << " r=" << r;
      hsize_t small_block_size = 0;
      r = H5Pget_small_data_block_size(plist_id,&small_block_size);
      info() << " SMALL BLOCK SIZE=" << small_block_size;
      small_block_size <<= 10;
      r = H5Pset_small_data_block_size(plist_id,small_block_size);
      info() << " SET SMALL BLOCK SIZE s=" << small_block_size << " r=" << r;

      m_file_id.openTruncate(m_filename,plist_id);
    }
    else if (m_open_mode==OpenModeAppend){
      m_file_id.openAppend(m_filename);
    }
    if (m_sub_group_name!="/"){
      m_sub_group_id.checkDelete(m_file_id,m_sub_group_name);
      m_sub_group_id.recursiveCreate(m_file_id,m_sub_group_name);
    }
    else
      m_sub_group_id.open(m_file_id,m_sub_group_name);
  }
  if (m_file_id.isBad())
    ARCANE_THROW(ReaderWriterException,"Unable to open file '{0}'",m_filename);

  if (m_sub_group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",m_sub_group_name);

  if (m_open_mode==OpenModeRead){
    int index = 0;
    //H5Giterate(m_sub_group_id.id(),"Variables",&index,_Hdf5ReaderWriterIterateMe,this);
    H5Giterate(m_file_id.id(),m_sub_group_name.localstr(),&index,_Hdf5ReaderWriterIterateMe,this);
  }
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

Hdf5ReaderWriter::
~Hdf5ReaderWriter()
{
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_checkValid()
{
  if (m_is_initialized)
    return;
  fatal() << "Use of a Hdf5ReaderWriter instance not initialized";
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

String Hdf5ReaderWriter::
_variableGroupName(IVariable* var)
{
  return var->fullName();
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
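
// Layout of the serialized message built below and read back by
// _receiveRemoteVariables() and _writeRemoteVariable(): an Int32 flag
// (1 = a variable follows, 0 = end of the sends, see endWrite()), the
// variable full name, the HDF5 sub-group name, the sender rank, then the
// serialized variable data.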
void Hdf5ReaderWriter::
_writeValParallel(IVariable* v,const ISerializedData* sdata)
{
  SerializeBuffer sb;
  sb.setMode(ISerializer::ModeReserve);
  sb.reserve(DT_Int32,1); // To indicate the end of the sends
  sb.reserve(v->fullName());
  sb.reserve(m_sub_group_name);
  sb.reserve(DT_Int32,1); // To indicate the rank the message comes from
  sdata->serialize(&sb);
  sb.allocateBuffer();
  sb.setMode(ISerializer::ModePut);
  sb.putInt32(1); // Indicates that this is a non-empty message
  sb.put(v->fullName());
  sb.put(m_sub_group_name);
  sb.put(m_my_rank);
  sdata->serialize(&sb);
  m_parallel_mng->sendSerializer(&sb,m_send_rank);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_directReadVal(IVariable* v,IData* data)
{
  _checkValid();
  info(4) << "DIRECT READ VAL v=" << v->name();
  _readVal(v,data);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_directWriteVal(IVariable* v,IData* data)
{
  _checkValid();
  Ref<ISerializedData> sdata(data->createSerializedDataRef(false));
  if (m_is_parallel && m_send_rank!=m_my_rank){
    _writeValParallel(v,sdata.get());
  }
  else{
    _writeVal(v->fullName(),m_sub_group_name,sdata.get());
  }
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

static herr_t
_Hdf5ReaderWriterIterateMe(hid_t g,const char* mn,void* ptr)
{
  Hdf5ReaderWriter* rw = reinterpret_cast<Hdf5ReaderWriter*>(ptr);
  return rw->iterateMe(g,mn);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

herr_t Hdf5ReaderWriter::
iterateMe(hid_t group_id,const char* member_name)
{
  ARCANE_UNUSED(group_id);
  m_variables_name.add(StringView(member_name));
  return 0;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_writeVal(const String& var_group_name,
          const String& sub_group_name,
          const ISerializedData* sdata,
          const Int32 from_rank)
{
  const bool hits_modulo = (m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
  Timer::Sentry ts(&m_io_timer);

  info(4) << " SDATA name=" << var_group_name << " nb_element=" << sdata->nbElement()
          << " dim=" << sdata->nbDimension() << " datatype=" << sdata->baseDataType()
          << " nb_basic_element=" << sdata->nbBaseElement()
          << " is_multi=" << sdata->isMultiSize()
          << " dimensions_size=" << sdata->extents().size()
          << " memory_size=" << sdata->memorySize()
          << " bytes_size=" << sdata->constBytes().size()
          << " shape=" << sdata->shape().dimensions();

  Integer nb_dimension = sdata->nbDimension();
  Int64ConstArrayView dimensions = sdata->extents();

  hid_t save_typeid = m_types.saveType(sdata->baseDataType());
  hid_t trueid = m_types.nativeType(sdata->baseDataType());
  const void* ptr = sdata->constBytes().data();
  Int64 nb_base_element = sdata->nbBaseElement();

  HGroup var_base_group;
  var_base_group.recursiveCreate(m_file_id,sub_group_name);

  // Create the group holding the variable information
  HGroup group_id;
  group_id.recursiveCreate(var_base_group,var_group_name);
  if (group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",var_group_name);

  Int64 nb_element = sdata->nbElement();
  bool is_multi_size = sdata->isMultiSize();
  Int64 dim2_size = 0;
  Int64 dim1_size = 0;
  if (nb_dimension==2 && !is_multi_size){
    dim1_size = dimensions[0];
    dim2_size = dimensions[1];
  }
  Integer dimension_array_size = dimensions.size();

  // Save the information about the sizes and dimensions of the variable
  {
    hsize_t att_dims[1];
    att_dims[0] = VARIABLE_INFO_SIZE;
    HSpace space_id;
    space_id.createSimple(1,att_dims);
    std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
    SmallSpan<Int64> dim_val(dim_val_buf);
    dim_val.fill(0);

    dim_val[0] = nb_dimension;
    dim_val[1] = dim1_size;
    dim_val[2] = dim2_size;
    dim_val[3] = nb_element;
    dim_val[4] = nb_base_element;
    dim_val[5] = dimension_array_size;
    dim_val[6] = is_multi_size ? 1 : 0;
    dim_val[7] = sdata->baseDataType();
    dim_val[8] = sdata->memorySize();
    {
      ArrayShape shape = sdata->shape();
      Int32 shape_nb_dim = shape.nbDimension();
      auto shape_dims = shape.dimensions();
      dim_val[9] = shape_nb_dim;
      for (Integer i=0; i<shape_nb_dim; ++i )
        dim_val[10+i] = shape_dims[i];
    }
    HAttribute att_id;
    if (m_is_parallel && hits_modulo && (from_rank!=0))
      att_id.remove(group_id,"Dims");
    att_id.create(group_id,"Dims",m_types.saveType(dim1_size),space_id);
    herr_t herr = att_id.write(m_types.nativeType(dim2_size),dim_val.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
  }

  // If the variable is a two-dimension array, save the sizes of the
  // second dimension for each element.
  if (dimension_array_size!=0){
    hsize_t att_dims[1];
    att_dims[0] = dimension_array_size;
    HSpace space_id;
    HDataset array_id;
    space_id.createSimple(1,att_dims);
    array_id.recursiveCreate(group_id,"Dim2",m_types.saveType(dim1_size),space_id,H5P_DEFAULT);
    herr_t herr = array_id.write(m_types.nativeType(dim1_size),dimensions.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions written for variable '{0}'",var_group_name);
  }

  // Now save the values if needed
  if (nb_base_element!=0 && ptr!=nullptr){
    debug(Trace::High) << "Variable " << var_group_name << " begin dumped (nb_base_element=" << nb_base_element << ").";
    hsize_t dims[1];
    dims[0] = nb_base_element;
    HSpace space_id;
    space_id.createSimple(1,dims);
    if (space_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",var_group_name);

    HDataset dataset_id;
    hid_t plist_id = H5P_DEFAULT;

#if 0
    if (nb_element>=10000){
      plist_id = H5Pcreate(H5P_DATASET_CREATE);
      hsize_t chunk_dim[1];
      chunk_dim[0] = (4096 << 1);
      herr_t r = H5Pset_chunk(plist_id,1,chunk_dim);
      info() << " SET CHUNK FOR " << var_group_name << " s=" << nb_element;
    }
#endif
    dataset_id.recursiveCreate(group_id,"Values",save_typeid,space_id,plist_id);
    if (dataset_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);

    herr_t herr = dataset_id.write(trueid,ptr);
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dataset written for variable '{0}'",var_group_name);
  }
}
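
// On-disk layout written above: for every variable, a group
// '<sub_group_name>/<variable full name>' containing the "Dims" attribute
// (sizes, dimensions, data type and shape), an optional dataset "Dim2"
// holding the extents array (per-item sizes for multi-size variables) and
// the dataset "Values" holding the raw values. _readDim2() and _readVal()
// read these back in the same order.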

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

Ref<ISerializedData> Hdf5ReaderWriter::
_readDim2(IVariable* var)
{
  const int max_dim = 256; // Maximum number of dimensions of HDF arrays
  String vname = _variableGroupName(var);
  info(4) << " READ DIM name=" << vname;
  Int64 dimension_array_size = 0;
  Int64 nb_element = 0;
  Integer nb_dimension = -1;
  // Check whether the corresponding name is in the list of variables.
  // If it is not, it means the array has not been saved and therefore
  // that its dimensions are null.
  {
    bool is_found = false;
    for( StringList::Enumerator i(m_variables_name); ++i; )
      if (*i==vname){
        is_found = true;
        break;
      }
    if (!is_found)
      ARCANE_THROW(ReaderWriterException,"No HDF5 group named '{0}' exists",vname);
  }

  // Get the group holding the variable information
  HGroup group_id;
  //group_id.open(m_variable_group_id,vname);
  group_id.open(m_sub_group_id,vname);
  if (group_id.isBad())
    ARCANE_THROW(ReaderWriterException,"HDF5 group '{0}' not found",vname);

  bool is_multi_size = false;
  eDataType data_type = DT_Unknown;
  Int64 memory_size = 0;
  Int64 nb_base_element = 0;
  Int64 dim1_size = 0;
  Int64 dim2_size = 0;
  UniqueArray<Int64> dims;
  ArrayShape data_shape;

  // Get the information about the sizes and dimensions of the variable
  {
    HAttribute att_id;
    att_id.open(group_id,"Dims");
    HSpace space_id = att_id.getSpace();

    // A single dimension is expected, and the number of elements of the
    // attribute (hdf_dims[0]) must be equal to 1 or 2.
    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);

    if (hdf_dims[0]!=VARIABLE_INFO_SIZE)
      ARCANE_THROW(ReaderWriterException,"Wrong dimensions for variable '{0}' (found={1} expected={2})",
                   vname, hdf_dims[0], VARIABLE_INFO_SIZE);

    std::array<Int64,VARIABLE_INFO_SIZE> dim_val_buf;
    att_id.read(m_types.nativeType(Int64()),dim_val_buf.data());

    SmallSpan<const Int64> dim_val(dim_val_buf);

    nb_dimension = CheckedConvert::toInteger(dim_val[0]);
    dim1_size = dim_val[1];
    dim2_size = dim_val[2];
    nb_element = dim_val[3];
    nb_base_element = dim_val[4];
    dimension_array_size = dim_val[5];
    is_multi_size = dim_val[6]!=0;
    data_type = (eDataType)dim_val[7];
    memory_size = dim_val[8];
    Int32 shape_nb_dim = CheckedConvert::toInt32(dim_val[9]);
    data_shape.setNbDimension(shape_nb_dim);
    for (Integer i=0; i<shape_nb_dim; ++i )
      data_shape.setDimension(i,CheckedConvert::toInt32(dim_val[10+i]));
  }

  info(4) << " READ DIM name=" << vname
          << " nb_dim=" << nb_dimension << " dim1_size=" << dim1_size
          << " dim2_size=" << dim2_size << " nb_element=" << nb_element
          << " dimension_size=" << dimension_array_size
          << " is_multi_size=" << is_multi_size
          << " data_type=" << data_type
          << " shape=" << data_shape.dimensions();

  if (dimension_array_size>0){
    HDataset array_id;
    array_id.open(group_id,"Dim2");
    if (array_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",vname);

    HSpace space_id = array_id.getSpace();
    if (space_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataspace for variable '{0}'",vname);

    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
    // Check that the number of elements of the dataset is equal to the
    // expected one.
    if ((Int64)hdf_dims[0]!=dimension_array_size){
      ARCANE_THROW(ReaderWriterException,"Wrong number of elements in 'Dim2' for variable '{0}' (found={1} expected={2})",
                   vname, hdf_dims[0], dimension_array_size);
    }
    dim2_size = 0;
    dims.resize(dimension_array_size);
    herr_t herr = array_id.read(m_types.nativeType(Int64()),dims.data());
    if (herr<0)
      ARCANE_THROW(ReaderWriterException,"Wrong dataset read for variable '{0}'",vname);
  }
  Ref<ISerializedData> sdata = arcaneCreateSerializedDataRef(data_type,memory_size,nb_dimension,nb_element,
                                                             nb_base_element,is_multi_size,dims,data_shape);
  return sdata;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
write(IVariable* v,IData* data)
{
  _directWriteVal(v,data);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_readVal(IVariable* v,IData* data)
{
  String var_group_name = _variableGroupName(v);
  info(4) << " TRY TO READ var_group=" << var_group_name;
  Ref<ISerializedData> sd(_readDim2(v));
  Int64 storage_size = sd->memorySize();
  info(4) << " READ DATA n=" << storage_size;

  if (storage_size!=0){
    // Get the group holding the variable information
    HGroup group_id;
    //group_id.open(m_variable_group_id,var_group_name);
    group_id.open(m_sub_group_id,var_group_name);
    if (group_id.isBad())
      ARCANE_THROW(ReaderWriterException,"No HDF5 group with name '{0}' exists",var_group_name);
    HDataset dataset_id;
    dataset_id.open(group_id,"Values");
    if (dataset_id.isBad())
      ARCANE_THROW(ReaderWriterException,"Wrong dataset for variable '{0}'",var_group_name);
    void* ptr = sd->writableBytes().data();
    info() << "READ Variable " << var_group_name << " ptr=" << ptr;
    hid_t trueid = m_types.nativeType(sd->baseDataType());
    dataset_id.read(trueid,ptr);
  }
  data->assignSerializedData(sd.get());
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
read(IVariable* var,IData* data)
{
  _directReadVal(var,data);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
setMetaData(const String& meta_data)
{
  if (m_is_parallel){
    IParallelMng* pm = m_parallel_mng;
    //Integer nb_rank = pm->commSize();
    if (m_send_rank!=m_my_rank){
      // Send the group and the meta-data
      SerializeBuffer sb;
      sb.setMode(ISerializer::ModeReserve);
      sb.reserve(m_sub_group_name);
      sb.reserve(meta_data);
      sb.allocateBuffer();
      sb.setMode(ISerializer::ModePut);
      sb.put(m_sub_group_name);
      sb.put(meta_data);
      m_parallel_mng->sendSerializer(&sb,m_send_rank);
    }
    else{
      _setMetaData(meta_data,m_sub_group_name);
      for( Integer i=m_send_rank+1; i<=m_last_recv_rank; ++i ){
        SerializeBuffer sb;
        pm->recvSerializer(&sb,i);
        sb.setMode(ISerializer::ModeGet);
        String remote_sub_group_name;
        String remote_meta_data;
        sb.get(remote_sub_group_name);
        sb.get(remote_meta_data);
        _setMetaData(remote_meta_data,remote_sub_group_name);
      }
    }
  }
  else
    _setMetaData(meta_data,m_sub_group_name);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_setMetaData(const String& meta_data,const String& sub_group_name)
{
  const bool hits_modulo = (m_index_modulo!=0) && (m_index_write!=0) && ((m_index_write%m_index_modulo)==0);
  HGroup base_group;
  if (hits_modulo)
    base_group.recursiveOpen(m_file_id,sub_group_name);
  else
    base_group.recursiveCreate(m_file_id,sub_group_name);

  ByteConstArrayView meta_data_utf8 = meta_data.utf8();
  const Byte* _meta_data = meta_data_utf8.data();
  hsize_t dims[1];
  dims[0] = meta_data_utf8.size() + 1;

  HSpace space_id;
  space_id.createSimple(1,dims);
  if (space_id.isBad())
    throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");

  HDataset dataset_id;
  if (hits_modulo)
    dataset_id.recursiveCreate(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
  else
    dataset_id.create(base_group,"MetaData", m_types.nativeType(Byte()), space_id, H5P_DEFAULT);
  if (dataset_id.isBad())
    throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");

  herr_t herr = dataset_id.write(m_types.nativeType(Byte()),_meta_data);
  if (herr<0)
    throw ReaderWriterException(A_FUNCINFO,"Unable to write meta-data ('MetaData')");
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

String Hdf5ReaderWriter::
metaData()
{
  HDataset dataset_id;
  dataset_id.open(m_sub_group_id,"MetaData");
  if (dataset_id.isBad()){
    throw ReaderWriterException(A_FUNCINFO,"Wrong dataset for meta-data ('MetaData')");
  }
  HSpace space_id = dataset_id.getSpace();
  if (space_id.isBad()){
    throw ReaderWriterException(A_FUNCINFO,"Wrong space for meta-data ('MetaData')");
  }
  const int max_dim = 256;
  hsize_t hdf_dims[max_dim];
  hsize_t max_dims[max_dim];
  H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
  if (hdf_dims[0]<=0)
    throw ReaderWriterException(A_FUNCINFO,"Wrong number of elements for meta-data ('MetaData')");
  Integer nb_byte = static_cast<Integer>(hdf_dims[0]);
  ByteUniqueArray uchars(nb_byte);
  dataset_id.read(m_types.nativeType(Byte()),uchars.data());
  String s(uchars);
  return s;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
endWrite()
{
  if (m_is_parallel){
    if (m_my_rank==m_send_rank){
      _receiveRemoteVariables();
    }
    else{
      // Send an end message
      SerializeBuffer sb;
      sb.setMode(ISerializer::ModeReserve);
      sb.reserve(DT_Int32,1); // To indicate the end of the sends
      sb.allocateBuffer();
      sb.setMode(ISerializer::ModePut);
      sb.putInt32(0); // Indicates that this is an end message
      m_parallel_mng->sendSerializer(&sb,m_send_rank);
    }
  }
  {
    info() << " Hdf5Timer: nb_activated=" << m_io_timer.nbActivated()
           << " time=" << m_io_timer.totalTime();
  }
}
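
// Gathering protocol used above: each writer rank (m_my_rank==m_send_rank)
// loops in _receiveRemoteVariables() until the expected number of terminating
// messages (flag 0) has been received; messages with flag 1 carry a
// serialized variable and are written through _writeRemoteVariable().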

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_receiveRemoteVariables()
{
  IParallelMng* pm = m_parallel_mng;
  Integer nb_remaining = m_last_recv_rank - m_send_rank;
  info() << "NB REMAINING = " << nb_remaining;
  Ref<ISerializeMessageList> m_messages(pm->createSerializeMessageListRef());
  while(nb_remaining>0){
    ScopedPtrT<ISerializeMessage> sm(new SerializeMessage(m_my_rank,NULL_SUB_DOMAIN_ID,ISerializeMessage::MT_Recv));
    m_messages->addMessage(sm.get());
    m_messages->processPendingMessages();
    m_messages->waitMessages(Parallel::WaitAll);
    ISerializer* sb = sm->serializer();
    sb->setMode(ISerializer::ModeGet);
    Int32 id = sb->getInt32();
    if (id==0)
      --nb_remaining;
    else
      _writeRemoteVariable(sb);
  }
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void Hdf5ReaderWriter::
_writeRemoteVariable(ISerializer* sb)
{
  String var_name;
  sb->get(var_name);
  String group_name;
  sb->get(group_name);
  Int32 rank = sb->getInt32();
  //warning()<<"[\33[46;30m_writeRemoteVariable\33[m] rank="<<rank;
  Ref<ISerializedData> sdata = arcaneCreateEmptySerializedDataRef();
  sb->setReadMode(ISerializer::ReadReplace);
  sdata->serialize(sb);
  _writeVal(var_name,group_name,sdata.get(),rank);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
/*!
 * \brief Checkpoint/restart in the ArcaneHdf5 format.
 */
class ArcaneHdf5CheckpointService2
: public ArcaneHdf5ReaderWriterObject
{
 public:

  ArcaneHdf5CheckpointService2(const ServiceBuildInfo& sbi)
  : ArcaneHdf5ReaderWriterObject(sbi),
  m_write_index(0),
  m_writer(nullptr),
  m_reader(nullptr),
  m_fileset_size(1),
  m_index_modulo(0){}

  virtual IDataWriter* dataWriter() { return m_writer; }
  virtual IDataReader* dataReader() { return m_reader; }

  virtual void notifyBeginWrite();
  virtual void notifyEndWrite();
  virtual void notifyBeginRead();
  virtual void notifyEndRead();
  virtual void close() {}
  virtual String readerServiceName() const { return "ArcaneHdf5CheckpointReader2"; }

 private:

  Integer m_write_index;
  Hdf5ReaderWriter* m_writer;
  Hdf5ReaderWriter* m_reader;
  Integer m_fileset_size;
  Integer m_index_modulo;

 private:

  String _defaultFileName()
  {
    info() << "USE DEFAULT FILE NAME";
    IParallelMng* pm = subDomain()->parallelMng();
    Integer rank = pm->commRank();
    StringBuilder buf;

    // Add the processor number if needed
    if (pm->isParallel()){
      Integer file_id = rank;
      if (m_fileset_size!=0)
        file_id = (rank / m_fileset_size) * m_fileset_size;
      buf = "arcanedump.";
      buf += file_id;
    }
    else{
      buf = "arcanedump";
    }

    // Add the replica number if needed
    IParallelReplication* pr = subDomain()->parallelMng()->replication();
    if (pr->hasReplication()){
      buf += "_r";
      buf += pr->replicationRank();
    }

    buf += ".h5";
    return buf.toString();
  }
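  // Examples of names built above: rank 5 with m_fileset_size==4 gives
  // "arcanedump.4.h5" (ranks 4-7 share the file of rank 4); a sequential run
  // gives "arcanedump.h5"; with replication an "_r<replica rank>" suffix is
  // inserted before ".h5", e.g. "arcanedump.4_r1.h5".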

  Directory _defaultDirectory(){
    return Directory(baseDirectoryName());
  }
  void _parseMetaData(String meta_data);
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void ArcaneHdf5CheckpointService2::
_parseMetaData(String meta_data)
{
  IIOMng* io_mng = subDomain()->ioMng();
  ScopedPtrT<IXmlDocumentHolder> xml_doc(io_mng->parseXmlBuffer(meta_data.utf8(),"MetaData"));
  XmlNode root = xml_doc->documentNode().documentElement();
  Integer version = root.attr("version").valueAsInteger();
  if (version!=1){
    throw ReaderWriterException(A_FUNCINFO,"Bad version (expected 1)");
  }
  {
    Integer fileset_size = root.child("fileset-size").valueAsInteger();
    if (fileset_size<0) fileset_size = 0;
    m_fileset_size = fileset_size;
  }
  {
    Integer index_modulo = root.child("index-modulo").valueAsInteger();
    if (index_modulo<0) index_modulo = 0;
    m_index_modulo = index_modulo;
  }
  info() << " FileSet size=" << m_fileset_size;
  info() << " Index modulo=" << m_index_modulo;
}
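
// The reader meta-data parsed above is the XML document produced by
// notifyEndWrite(), for example:
//   <infos version='1'>
//     <fileset-size>4</fileset-size>
//     <index-modulo>2</index-modulo>
//   </infos>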

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void ArcaneHdf5CheckpointService2::
notifyBeginRead()
{
  String meta_data = readerMetaData();
  _parseMetaData(meta_data);

  info() << " GET META DATA READER " << readerMetaData()
         << " filename=" << fileName();

  if (fileName().null()){
    Directory dump_dir(_defaultDirectory());
    setFileName(dump_dir.file(_defaultFileName()));
  }
  info() << " READ CHECKPOINT FILENAME = " << fileName();
  StringBuilder sub_group;
  sub_group = "SubDomain";
  sub_group += subDomain()->subDomainId();
  sub_group += "/Index";

  Integer index = currentIndex();
  if (m_index_modulo!=0)
    index %= m_index_modulo;
  sub_group += index;

  m_reader = new Hdf5ReaderWriter(subDomain(),
                                  fileName(),
                                  sub_group.toString(),
                                  0,
                                  currentIndex(),
                                  m_index_modulo,
                                  Hdf5ReaderWriter::OpenModeRead);
  m_reader->initialize();
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void ArcaneHdf5CheckpointService2::
notifyEndRead()
{
  delete m_reader;
  m_reader = 0;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void ArcaneHdf5CheckpointService2::
notifyBeginWrite()
{
  if (options()){
    // Get the number of files per group
    m_fileset_size = options()->filesetSize();
    // Get the maximum number of indexes per file
    m_index_modulo = options()->indexModulo();
  }

  if (fileName().null()){
    Directory dump_dir(_defaultDirectory());
    setFileName(dump_dir.file(_defaultFileName()));
  }
  Hdf5ReaderWriter::eOpenMode open_mode = Hdf5ReaderWriter::OpenModeAppend;
  Integer write_index = checkpointTimes().size();
  --write_index;

  if (write_index==0)
    open_mode = Hdf5ReaderWriter::OpenModeTruncate;

  // Use the m_index_modulo option to know the depth of the modulo
  if (m_index_modulo!=0)
    write_index %= m_index_modulo;

  StringBuilder sub_group;
  sub_group = "SubDomain";
  sub_group += subDomain()->parallelMng()->commRank();
  sub_group += "/Index";
  sub_group += write_index;

  m_writer = new Hdf5ReaderWriter(subDomain(),
                                  fileName(),
                                  sub_group,
                                  m_fileset_size,
                                  checkpointTimes().size()-1,
                                  m_index_modulo,
                                  open_mode);
  m_writer->initialize();
}
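
// Layout of a checkpoint file: one HDF5 group per sub-domain and checkpoint
// index, named "SubDomain<rank>/Index<write_index>", containing a "MetaData"
// dataset (see _setMetaData()) plus one group per variable with its "Dims"
// attribute and "Dim2"/"Values" datasets (see Hdf5ReaderWriter::_writeVal()).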

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void ArcaneHdf5CheckpointService2::
notifyEndWrite()
{
  OStringStream ostr;
  ostr() << "<infos version='1'>\n";
  ostr() << " <fileset-size>" << m_fileset_size << "</fileset-size>\n";
  ostr() << " <index-modulo>" << m_index_modulo << "</index-modulo>\n";
  ostr() << "</infos>\n";
  setReaderMetaData(ostr.str());
  ++m_write_index;
  delete m_writer;
  m_writer = 0;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

ARCANE_REGISTER_SERVICE(ArcaneHdf5CheckpointService2,
                        ServiceProperty("ArcaneHdf5CheckpointReader2",ST_SubDomain),
                        ARCANE_SERVICE_INTERFACE(ICheckpointReader));

ARCANE_REGISTER_SERVICE(ArcaneHdf5CheckpointService2,
                        ServiceProperty("ArcaneHdf5CheckpointWriter2",ST_SubDomain),
                        ARCANE_SERVICE_INTERFACE(ICheckpointWriter));

ARCANE_REGISTER_SERVICE_HDF5READERWRITER(ArcaneHdf5Checkpoint2,
                                         ArcaneHdf5CheckpointService2);

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

} // End namespace Arcane

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/
#define ARCANE_THROW(exception_class,...)
Macro to throw an exception with formatting.
#define ARCANE_FATAL(...)
Macro throwing a FatalErrorException exception.
#define ARCANE_SERVICE_INTERFACE(ainterface)
Macro to declare an interface when registering a service.
Checkpoint/restart in the ArcaneHdf5 format.
virtual IDataReader * dataReader()
Returns the associated reader.
virtual String readerServiceName() const
Name of the reader service associated with this writer.
virtual IDataWriter * dataWriter()
Returns the associated writer.
virtual void close()
Closes the checkpoints.
Generation of the base class of the Service.
Class managing a directory.
Definition Directory.h:33
Reading/writing in HDF5 format.
Wraps a hid_t for a dataset.
Wraps a hid_t for a group.
Wraps a hid_t for a dataspace.
Interface of the checkpoint/restart reading service.
Interface of the checkpoint/restart writing service.
Interface for reading the data of a variable.
Definition IDataReader.h:41
Interface for writing the data of a variable.
Definition IDataWriter.h:49
Interface of a data item.
Definition IData.h:33
virtual void allocateBufferForSerializedData(ISerializedData *sdata)=0
Allocates the memory to read the serialized values sdata.
virtual void assignSerializedData(const ISerializedData *sdata)=0
Assigns the serialized values sdata to the data.
Interface of the parallelism manager for a sub-domain.
virtual Int32 commRank() const =0
Rank of this instance in the communicator.
virtual Ref< ISerializeMessageList > createSerializeMessageListRef()=0
Creates a list to manage 'ISerializeMessage' instances.
virtual bool isParallel() const =0
Returns true if the execution is parallel.
Interface of a serialized data item.
Interface of a variable.
Definition IVariable.h:54
virtual String fullName() const =0
Full name of the variable (with the family prefix).
virtual String name() const =0
Name of the variable.
Reader of mesh files through the LIMA library.
Definition Lima.cc:120
Output stream bound to a String.
Exception in a reader or a writer.
Implementation of a buffer for serialization.
Structure holding the information needed to create a service.
Creation properties of a service.
Constant view of an array of type T.
constexpr const_pointer data() const noexcept
Pointer to the allocated memory.
Builder for a Unicode character string.
String toString() const
Returns the built character string.
Unicode character string.
1D vector of data with value semantics (STL style).
#define ARCANE_REGISTER_SERVICE(aclass, a_service_property,...)
Macro to register a service.
Ref< ISerializedData > arcaneCreateSerializedDataRef(eDataType data_type, Int64 memory_size, Integer nb_dim, Int64 nb_element, Int64 nb_base_element, bool is_multi_size, Int64ConstArrayView dimensions)
Creates serialized data.
Ref< ISerializedData > arcaneCreateEmptySerializedDataRef()
Creates serialized data.
eDataType
Type of a data item.
Definition DataTypes.h:39
@ DT_Int32
Data of 32-bit integer type.
Definition DataTypes.h:43
@ DT_Unknown
Data of unknown or uninitialized type.
Definition DataTypes.h:54
ConstArrayView< Int64 > Int64ConstArrayView
C equivalent of a one-dimensional array of 64-bit integers.
Definition UtilsTypes.h:638
Int32 Integer
Type representing an integer.