Arcane  v3.14.10.0
Documentation développeur
Chargement...
Recherche...
Aucune correspondance
Hdf5MpiReaderWriter.cc
1// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
2//-----------------------------------------------------------------------------
3// Copyright 2000-2023 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
4// See the top-level COPYRIGHT file for details.
5// SPDX-License-Identifier: Apache-2.0
6//-----------------------------------------------------------------------------
7/*---------------------------------------------------------------------------*/
8/* Hdf5MpiReaderWriter.cc (C) 2000-2023 */
9/* */
10/* Lecture/Ecriture au format HDF5. */
11/*---------------------------------------------------------------------------*/
12/*---------------------------------------------------------------------------*/
13
14#include "arcane/utils/String.h"
15#include "arcane/utils/StringBuilder.h"
16#include "arcane/utils/OStringStream.h"
17#include "arcane/utils/ScopedPtr.h"
18#include "arcane/utils/List.h"
19#include "arcane/utils/ArcanePrecomp.h"
20#include "arcane/utils/ITraceMng.h"
21#include "arcane/utils/PlatformUtils.h"
22#include "arcane/utils/StringBuilder.h"
23
24#include "arcane/Item.h"
25#include "arcane/IDataReader.h"
26#include "arcane/IDataWriter.h"
27#include "arcane/ISubDomain.h"
28#include "arcane/StdNum.h"
29#include "arcane/IVariable.h"
30#include "arcane/CheckpointService.h"
31#include "arcane/Directory.h"
32#include "arcane/IParallelMng.h"
33#include "arcane/Service.h"
34#include "arcane/ArcaneException.h"
35#include "arcane/ItemGroup.h"
36#include "arcane/ItemEnumerator.h"
37#include "arcane/VerifierService.h"
38#include "arcane/IVariableMng.h"
39#include "arcane/FactoryService.h"
40#include "arcane/IData.h"
41#include "arcane/Timer.h"
42#include "arcane/ISerializedData.h"
43#include "arcane/IIOMng.h"
44#include "arcane/IXmlDocumentHolder.h"
45#include "arcane/VariableCollection.h"
46
47#include "arcane/datatype/DataTypeTraits.h"
48
49#include "arcane/SerializeBuffer.h"
50#include "arcane/ISerializeMessageList.h"
51#include "arcane/SerializeMessage.h"
52
53#include "arcane/hdf5/Hdf5MpiReaderWriter.h"
54
55#include "arcane/hdf5/Hdf5MpiReaderWriter_axl.h"
56
57#include "arcane_packages.h"
58
59#ifdef ARCANE_HAS_PACKAGE_MPI
60#ifndef OMPI_SKIP_MPICXX
61#define OMPI_SKIP_MPICXX
62#endif
63#ifndef MPICH_SKIP_MPICXX
64#define MPICH_SKIP_MPICXX
65#endif
66#include <mpi.h>
67
68//#define ARCANE_TEST_HDF5MPI
69
70// Pour l'instant (1.8.0 beta 2), cela ne fonctionne pas sur tera 10
71// #define ARCANE_TEST_HDF5DIRECT
72
73/*---------------------------------------------------------------------------*/
74/*---------------------------------------------------------------------------*/
75
76namespace Arcane
77{
78
79/*---------------------------------------------------------------------------*/
80/*---------------------------------------------------------------------------*/
81
82using namespace Hdf5Utils;
83
84static herr_t _Hdf5MpiReaderWriterIterateMe(hid_t,const char*,void*);
85
86/*---------------------------------------------------------------------------*/
87/*---------------------------------------------------------------------------*/
88
/*!
 * \brief Builds the HDF5/MPI reader-writer.
 *
 * \param sd sub-domain providing the trace and parallel managers.
 * \param filename path of the HDF5 file to read or write.
 * \param sub_group_name HDF5 group under which variables are stored.
 * \param fileset_size rank-aggregation factor: 1 disables aggregation,
 *        0 makes rank 0 the single writer for all ranks, any other
 *        value groups that many consecutive ranks per writer.
 * \param open_mode read, truncate or append mode.
 * \param do_verif unused.
 */
Hdf5MpiReaderWriter::
Hdf5MpiReaderWriter(ISubDomain* sd,const String& filename,
                    const String& sub_group_name,Integer fileset_size,
                    eOpenMode open_mode,bool do_verif)
: TraceAccessor(sd->traceMng())
, m_sub_domain(sd)
, m_parallel_mng(sd->parallelMng())
, m_open_mode(open_mode)
, m_filename(filename)
, m_sub_group_name(sub_group_name)
, m_is_initialized(false)
, m_io_timer(sd,"Hdf5TimerHd",Timer::TimerReal)
, m_write_timer(sd,"Hdf5TimerWrite",Timer::TimerReal)
, m_is_parallel(false)
, m_my_rank(m_parallel_mng->commRank())
, m_send_rank(m_my_rank)
, m_last_recv_rank(m_my_rank)
, m_fileset_size(fileset_size)
{
  ARCANE_UNUSED(do_verif);
  // Aggregation is only active in parallel and when fileset_size!=1.
  if (m_fileset_size!=1 && m_parallel_mng->isParallel()){
    m_is_parallel = true;
    Integer nb_rank = m_parallel_mng->commSize();
    if (m_fileset_size==0){
      // Single writer: rank 0 collects from every rank.
      m_send_rank = 0;
      m_last_recv_rank = nb_rank;
      --m_last_recv_rank;
    }
    else{
      // Writer is the first rank of each group of m_fileset_size ranks;
      // the last group may be truncated at nb_rank.
      m_send_rank = (m_my_rank / m_fileset_size) * m_fileset_size;
      m_last_recv_rank = m_send_rank + m_fileset_size;
      if (m_last_recv_rank>nb_rank)
        m_last_recv_rank = nb_rank;
      --m_last_recv_rank;
    }
  }
  sd->traceMng()->info() << " INFOS PARALLEL: my_rank=" << m_my_rank
                         << " send_rank=" << m_send_rank
                         << " last_recv_rank=" << m_last_recv_rank
                         << " filename=" << filename
                         << " fileset_size=" << m_fileset_size;
}
131
132/*---------------------------------------------------------------------------*/
133/*---------------------------------------------------------------------------*/
134
135void Hdf5MpiReaderWriter::
136initialize()
137{
138 if (m_is_initialized)
139 return;
140
141 m_is_initialized = true;
142
143 const char* func_name = "Hdf5MpiReaderWriter::initialize()";
144
145 HInit();
146
147 if (m_open_mode==OpenModeRead){
148 m_file_id.openRead(m_filename);
149 m_sub_group_id.recursiveOpen(m_file_id,m_sub_group_name);
150 //m_variable_group_id.open(m_sub_group_id,"Variables");
151 }
152 else{
153 void* arcane_comm = m_sub_domain->parallelMng()->getMPICommunicator();
154 if (!arcane_comm)
155 throw FatalErrorException("No MPI environment available");
156 MPI_Comm mpi_comm = *((MPI_Comm*)arcane_comm);
157 Integer nb_rank = m_parallel_mng->commSize();
158 if (m_fileset_size>1){
159 UniqueArray<int> senders;
160 for( Integer i=0; i<nb_rank; ++i ){
161 Integer modulo = i % m_fileset_size;
162 if (modulo==0){
163 info() << " ADD SENDER n=" << i;
164 senders.add(i);
165 }
166 }
167 MPI_Group all_group;
168 if (MPI_Comm_group(mpi_comm,&all_group)!=MPI_SUCCESS)
169 fatal() << "Error in MPI_Comm_group";
170 MPI_Group writer_group;
171 if (MPI_Group_incl(all_group,senders.size(),senders.data(),&writer_group)!=MPI_SUCCESS)
172 fatal() << "Error in MPI_Group_incl";
173 if (MPI_Comm_create(mpi_comm,writer_group,&mpi_comm)!=MPI_SUCCESS)
174 fatal() << "Error in MPI_Comm_create";
175 }
176
177 // Si ce n'est pas moi qui écrit, n'ouvre pas le fichier
178 if (m_send_rank!=m_my_rank)
179 return;
180 if (m_open_mode==OpenModeTruncate || m_open_mode==OpenModeAppend){
181 hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
182 //bool use_gpfs = false;
183 info() << " USE MPI-POSIX";
184 //H5Pset_fapl_mpiposix(plist_id, mpi_comm, true);
185#ifdef H5_HAVE_PARALLEL
186 H5Pset_fapl_mpio(plist_id, mpi_comm, MPI_INFO_NULL); //mpi_info);
187#endif
188
189#ifdef ARCANE_TEST_HDF5DIRECT
190# ifdef H5_HAVE_DIRECT
191 info() << " HAVE DIRECT DRIVER";
192 H5Pset_fapl_direct(plist_id,4096,512,16*1024*1024);
193# endif
194#endif
195 int mdc_nelmts;
196 size_t rdcc_nelmts;
197 size_t rdcc_nbytes;
198 double rdcc_w0;
199 herr_t r = H5Pget_cache(plist_id,&mdc_nelmts,&rdcc_nelmts,&rdcc_nbytes,&rdcc_w0);
200 info() << " CACHE SIZE r=" << r << " mdc=" << mdc_nelmts
201 << " rdcc=" << rdcc_nelmts << " rdcc_bytes=" << rdcc_nbytes << " w0=" << rdcc_w0;
202 mdc_nelmts *= 10;
203 rdcc_nelmts *= 10;
204 rdcc_nbytes = 10000000;
205 r = H5Pset_cache(plist_id,mdc_nelmts,rdcc_nelmts,rdcc_nbytes,rdcc_w0);
206 info() << " SET CACHE SIZE R1=" << r;
207 //r = H5Pset_fapl_stdio(plist_id);
208 //info() << " R2=" << r;
209 hsize_t sieve_buf = (1024 << 12);
210 r = H5Pset_sieve_buf_size(plist_id,sieve_buf);
211 info() << " SIEVE_BUF=" << sieve_buf << " r=" << r;
212 hsize_t small_block_size = 0;
213 r = H5Pget_small_data_block_size(plist_id,&small_block_size);
214 info() << " SMALL BLOCK SIZE=" << small_block_size;
215 small_block_size <<= 10;
216 r = H5Pset_small_data_block_size(plist_id,small_block_size);
217 info() << " SET SMALL BLOCK SIZE s=" << small_block_size << " r=" << r;
218 //hsize_t block_size = 0;
219 //block_size = H5Pget_buffer(plist_id,0,0);
220 //info() << " BLOCK SIZE s=" << block_size;
221 //block_size = 10000000;
222 //herr_t r = H5Pset_buffer(plist_id,block_size,0,0);
223 //info() << " BLOCK SIZE r=" << r << " s=" << block_size;
224 //if (m_parallel_mng->commRank()==0){
225 //herr_t r = H5Pset_fapl_core(plist_id,1000000,1);
226 //else
227 //herr_t r = H5Pset_fapl_core(plist_id,1000000,0);
228 //m_file_id.openTruncate("toto",plist_id);
229 //}
230 //else
231 if (m_open_mode==OpenModeTruncate){
232 info() << " BEGIN OPEN TRUNCATE";
233 m_file_id.openTruncate(m_filename,plist_id);
234 info() << " END OPEN TRUNCATE";
235 }
236 else if (m_open_mode==OpenModeAppend){
237 info() << " BEGIN OPEN ADD";
238 m_file_id.openAppend(m_filename,plist_id);
239 info() << " END OPEN ADD";
240 }
241 }
242 if (m_sub_group_name!="/"){
243 info() << " CHECK CREATE GROUP name=" << m_sub_group_name;
244 //m_sub_group_id.checkDelete(m_file_id,m_sub_group_name);
245 m_sub_group_id.recursiveCreate(m_file_id,m_sub_group_name);
246 info() << " END CHECK CREATE GROUP name=" << m_sub_group_name;
247 }
248 else
249 m_sub_group_id.open(m_file_id,m_sub_group_name);
250 m_variable_group_id.create(m_sub_group_id,"Variables");
251 }
252
253 if (m_file_id.isBad()){
254 OStringStream ostr;
255 ostr() << "Unable to open file <" << m_filename << ">";
256 throw ReaderWriterException(func_name,ostr.str());
257 }
258 if (m_sub_group_id.isBad()){
259 OStringStream ostr;
260 ostr() << "HDF5 group '" << m_sub_group_name << "' not found";
261 throw ReaderWriterException(func_name,ostr.str());
262 }
263#if 0
264 if (m_variable_group_id.isBad()){
265 OStringStream ostr;
266 ostr() << "Group HDF5 'Variables' not found";
267 throw ReaderWriterException(func_name,ostr.str());
268 }
269#endif
270
271 info() << " INFO END INITIALIZE";
272
273
274 if (m_open_mode==OpenModeRead){
275 int index = 0;
276 //H5Giterate(m_sub_group_id.id(),"Variables",&index,_Hdf5MpiReaderWriterIterateMe,this);
277 H5Giterate(m_file_id.id(),m_sub_group_name.localstr(),&index,_Hdf5MpiReaderWriterIterateMe,this);
278 }
279}
280
281/*---------------------------------------------------------------------------*/
282/*---------------------------------------------------------------------------*/
283
284Hdf5MpiReaderWriter::
285~Hdf5MpiReaderWriter()
286{
287}
288
289/*---------------------------------------------------------------------------*/
290/*---------------------------------------------------------------------------*/
291
292void Hdf5MpiReaderWriter::
293_checkValid()
294{
295 if (m_is_initialized)
296 return;
297 fatal() << "Use of a Hdf5MpiReaderWriter instance not initialized";
298}
299
300/*---------------------------------------------------------------------------*/
301/*---------------------------------------------------------------------------*/
302
/*!
 * \brief Name of the HDF5 group associated with variable \a var.
 *
 * The variable full name is used as the group name.
 */
String Hdf5MpiReaderWriter::
_variableGroupName(IVariable* var)
{
  return var->fullName();
}
308
309/*---------------------------------------------------------------------------*/
310/*---------------------------------------------------------------------------*/
311
312void Hdf5MpiReaderWriter::
313beginWrite(const VariableCollection& vars)
314{
315 IParallelMng* pm = m_parallel_mng;
316 Integer nb_rank = pm->commSize();
317
318 pwarning() << "Implementation of this checkpoint format is not operational yet";
319
320 for( VariableCollection::Enumerator i(vars); ++i; ){
321 IVariable* v = *i;
322 if (v->itemKind()==IK_Unknown)
323 continue;
324
325 Ref<ISerializedData> sdata(v->data()->createSerializedDataRef(false));
326 Int64 nb_base_element = sdata->nbBaseElement();
327
328 Int64 my_size = nb_base_element;
329 Int64ConstArrayView a_my_size(1,&my_size);
330 SharedArray<Int64> all_sizes(nb_rank);
331 pm->allGather(a_my_size,all_sizes);
332
333 Int64 total_size = 0;
334 for( Integer i=0; i<nb_rank; ++i )
335 total_size += all_sizes[i];
336 Int64 my_index = 0;
337 for( Integer i=0; i<m_my_rank; ++i )
338 my_index += all_sizes[i];
339 m_variables_offset.insert(std::make_pair(v->fullName(),VarOffset(my_index,total_size,all_sizes)));
340 info() << " ADD OFFSET v=" << v->fullName() << " offset=" << my_index
341 << " total_size=" << total_size;
342 }
343
344}
345
346/*---------------------------------------------------------------------------*/
347/*---------------------------------------------------------------------------*/
/*!
 * \brief Packs the serialized values of \a v and sends them to the
 * referent rank (m_send_rank).
 *
 * The message contains, in order: a non-empty marker, the variable full
 * name, the sub-group name and the serialized data. The reserve pass
 * and the put pass must stay in exactly the same order.
 */
void Hdf5MpiReaderWriter::
_writeValParallel(IVariable* v,const ISerializedData* sdata)
{
  SerializeBuffer sb;
  // First pass: compute the required buffer size.
  sb.setMode(ISerializer::ModeReserve);
  sb.reserve(DT_Int32,1); // To indicate the end of the sends
  sb.reserve(v->fullName());
  sb.reserve(m_sub_group_name);
  //sb.reserveInteger(1); // For the data type
  //sb.reserveInteger(1); // For the dimension
  //v->serialize(&sb,0);
  sdata->serialize(&sb);
  sb.allocateBuffer();
  // Second pass: fill the buffer in the same order.
  sb.setMode(ISerializer::ModePut);
  sb.putInt32(1); // Indicates this is a non-empty message
  sb.put(v->fullName());
  sb.put(m_sub_group_name);
  //sb.putInteger(v->dataType()); // For the data type
  //sb.putInteger(v->dimension()); // For the dimension
  //v->serialize(&sb,0);
  sdata->serialize(&sb);

  m_parallel_mng->sendSerializer(&sb,m_send_rank);
}
377
378/*---------------------------------------------------------------------------*/
379/*---------------------------------------------------------------------------*/
380
/*!
 * \brief Reads variable \a v into \a data after validity check.
 */
void Hdf5MpiReaderWriter::
_directReadVal(IVariable* v,IData* data)
{
  // Fatal error if initialize() has not been called.
  _checkValid();

  info() << "DIRECT READ VAL v=" << v->name();
  _readVal(v,data);
}
389
390/*---------------------------------------------------------------------------*/
391/*---------------------------------------------------------------------------*/
392
393void Hdf5MpiReaderWriter::
394_directWriteVal(IVariable* v,IData* data)
395{
396 _checkValid();
397
398 Ref<ISerializedData> sdata(data->createSerializedDataRef(false));
399
400 _writeVal(v->fullName(),m_sub_group_name,sdata.get());
401}
402
403/*---------------------------------------------------------------------------*/
404/*---------------------------------------------------------------------------*/
405
406static herr_t
407_Hdf5MpiReaderWriterIterateMe(hid_t g,const char* mn,void* ptr)
408{
409 Hdf5MpiReaderWriter* rw = reinterpret_cast<Hdf5MpiReaderWriter*>(ptr);
410 return rw->iterateMe(g,mn);
411}
412
413/*---------------------------------------------------------------------------*/
414/*---------------------------------------------------------------------------*/
415
/*!
 * \brief H5Giterate() callback: records the name of each member of the
 * iterated group into m_variables_name.
 */
herr_t Hdf5MpiReaderWriter::
iterateMe(hid_t group_id,const char* member_name)
{
  ARCANE_UNUSED(group_id);

  m_variables_name.add(std::string_view(member_name));
  return 0; // 0 tells HDF5 to continue the iteration
}
424
425/*---------------------------------------------------------------------------*/
426/*---------------------------------------------------------------------------*/
429void Hdf5MpiReaderWriter::
430_writeVal(const String& var_group_name,const String& sub_group_name,
431 const ISerializedData* sdata)
432{
433 ARCANE_UNUSED(sub_group_name);
434 const char* func_name = "Hdf5MpiReaderWriter::_writeVal() ";
435 Timer::Sentry ts(&m_io_timer);
436 double v0 = ::MPI_Wtime();
437 info() << " SDATA name=" << var_group_name << " nb_element=" << sdata->nbElement()
438 << " dim=" << sdata->nbDimension() << " datatype=" << sdata->baseDataType()
439 << " nb_basic_element=" << sdata->nbBaseElement()
440 << " is_multi=" << sdata->isMultiSize()
441 << " dimensions_size=" << sdata->extents().size()
442 << " memory_size=" << sdata->memorySize()
443 << " bytes_size=" << sdata->constBytes().size();
444
445 hid_t save_typeid = m_types.saveType(sdata->baseDataType());
446 hid_t trueid = m_types.nativeType(sdata->baseDataType());
447 const void* ptr = sdata->constBytes().data();
448 Int64 nb_base_element = sdata->nbBaseElement();
449
450 OffsetMap::const_iterator offset_info = m_variables_offset.find(var_group_name);
451 if (offset_info==m_variables_offset.end()){
452 fatal() << "Can not find offset informations for ->" << var_group_name;
453 }
454 Int64 nb_element_to_write = nb_base_element;
455
456 //String var_group_name = _variableGroupName(v);
457 RealUniqueArray real_array;
458 Real3UniqueArray real3_array;
459 Real3x3UniqueArray real3x3_array;
460 Int32UniqueArray int32_array;
461 if (m_is_parallel && m_fileset_size!=1){
462 if (m_send_rank==m_my_rank){
463 // Je recois les valeurs des autres
464 nb_element_to_write = 0;
465 for( Integer i=m_send_rank; i<=m_last_recv_rank; ++i ){
466 nb_element_to_write += offset_info->second.m_all_sizes[i];
467 //info() << "ADD TO WRITE n=" << nb_element_to_write << " add=" << offset_info->second.m_all_sizes[i];
468 }
469 switch(sdata->baseDataType()){
470 case DT_Real:
471 real_array.resize(nb_element_to_write);
472 ptr = real_array.data();
473 break;
474 case DT_Real3:
475 real3_array.resize(nb_element_to_write);
476 ptr = real3_array.data();
477 break;
478 case DT_Real3x3:
479 real3x3_array.resize(nb_element_to_write);
480 ptr = real3x3_array.data();
481 break;
482 case DT_Int32:
483 int32_array.resize(nb_element_to_write);
484 ptr = int32_array.data();
485 break;
486 default:
487 fatal() << "Type not handled "<< dataTypeName(sdata->baseDataType());
488 }
489 }
490 else{
491 return;
492 // J'envoie à mon référent
493 //switch(sdata->baseDataType()){
494 //case DT_Real:
495 //_send(sdata,Real());
496 // break;
497 //}
498 }
499 }
500
501#if 0
502 HGroup var_base_group;
503 var_base_group.recursiveCreate(m_file_id,sub_group_name);
504
505 // Création du groupe contenant les informations de la variable
506 HGroup group_id;
507 //group_id.create(m_variable_group_id,var_group_name);
508 group_id.create(var_base_group,var_group_name);
509 if (group_id.isBad()){
510 OStringStream ostr;
511 ostr() << "Group HDF5 '" << var_group_name << "' not found";
512 throw ReaderWriterException(func_name,ostr.str());
513 }
514#endif
515
516 //Integer dim2 = dim2_array.size();
517 //Integer nb_element = sdata->nbElement();
518#if 0
519 bool is_multi_size = sdata->isMultiSize();
520 Integer dim2_size = 0;
521 Integer dim1_size = 0;
522 if (nb_dimension==2 && !is_multi_size){
523 dim1_size = dimensions[0];
524 dim2_size = dimensions[1];
525 }
526#endif
527 //Integer dimension_array_size = dimensions.size();
528
529#if 0
530 // Sauve les informations concernant les tailles et dimensions de la variable
531 {
532 hsize_t att_dims[1];
533 att_dims[0] = 9;
534 HSpace space_id;
535 space_id.createSimple(1,att_dims);
536 Integer dim_val[9];
537
538 dim_val[0] = nb_dimension;
539 dim_val[1] = dim1_size;
540 dim_val[2] = dim2_size;
541 dim_val[3] = nb_element;
542 dim_val[4] = nb_base_element;
543 dim_val[5] = dimension_array_size;
544 dim_val[6] = is_multi_size ? 1 : 0;
545 dim_val[7] = (Integer)sdata->baseDataType();
546 dim_val[8] = sdata->memorySize();
547
548 HAttribute att_id;
549
550 att_id.create(group_id,"Dims",m_types.saveType(dim1_size),space_id);
551 herr_t herr = att_id.write(m_types.nativeType(dim2_size),dim_val);
552 if (herr<0){
553 OStringStream ostr;
554 ostr() << "Bad writing of the dimensions for the variable '" << var_group_name << "'";
555 throw ReaderWriterException(func_name,ostr.str());
556 }
557 }
558#endif
559
560#if 0
561 // Si la variable est de type tableau à deux dimensions, sauve les
562 // tailles de la deuxième dimension par élément.
563 if (dimension_array_size!=0){
564 hsize_t att_dims[1];
565 att_dims[0] = dimension_array_size;
566 HSpace space_id;
567 HDataset array_id;
568
569 space_id.createSimple(1,att_dims);
570
571 array_id.create(group_id,"Dim2",m_types.saveType(dim1_size),space_id,H5P_DEFAULT);
572 herr_t herr = array_id.write(m_types.nativeType(dim1_size),dimensions.begin());
573 if (herr<0){
574 OStringStream ostr;
575 ostr() << "Bad writing of the dimensions for the variable '" << var_group_name << "'";
576 throw ReaderWriterException(func_name,ostr.str());
577 }
578 }
579#endif
580
581 //IParallelMng* pm = m_parallel_mng;
582 //Integer nb_rank = pm->commSize();
583
584 // Maintenant, sauve les valeurs si necessaire
585 if (nb_base_element!=0 && ptr!=0){
586 debug(Trace::High) << "Variable " << var_group_name << " begin dumped (nb_base_element=" << nb_base_element << ").";
587
588 hsize_t offset[1];
589 hsize_t count[1];
590 offset[0] = 0;
591 count[0] = nb_element_to_write;
592
593 //Int64UniqueArray all_sizes(nb_rank);
594 //Int64 my_size = nb_base_element;
595 //Int64ConstArrayView a_my_size(1,&my_size);
596 //double v1 = MPI_Wtime();
597 //pm->allGather(a_my_size,all_sizes);
598 //info() << " CLOCK GATHER = " << (MPI_Wtime() - v1);
599
600 //Int64 total_size = 0;
601 //for( Integer i=0; i<nb_rank; ++i )
602 //total_size += all_sizes[i];
603 //Int64 my_index = 0;
604 // for( Integer i=0; i<m_my_rank; ++i )
605 //my_index += all_sizes[i];
606 //my_index -= nb_base_element;
607 Int64 my_index = offset_info->second.m_offset;
608 Int64 total_size = offset_info->second.m_total_size;
609 offset[0] = my_index;
610
611 double v1 = MPI_Wtime();
612 hsize_t dims[1];
613 dims[0] = total_size;
614 HSpace filespace_id;
615 filespace_id.createSimple(1,dims);
616 HSpace memspace_id;
617 memspace_id.createSimple(1,count);
618 if (memspace_id.isBad()){
619 OStringStream ostr;
620 ostr() << "Wrong dataspace for variable '" << var_group_name << "'";
621 throw ReaderWriterException(func_name,ostr.str());
622 }
623
624
625 HDataset dataset_id;
626
627 //hid_t plist_id = H5P_DEFAULT;
628 hid_t write_plist_id = H5Pcreate(H5P_DATASET_XFER);
629#ifdef H5_HAVE_PARALLEL
630 H5Pset_dxpl_mpio(write_plist_id, H5FD_MPIO_COLLECTIVE);
631#endif
632 //H5Pset_dxpl_mpio(write_plist_id, H5FD_MPIO_INDEPENDENT);
633
634 hid_t create_dataset_plist_id = H5P_DEFAULT;
635#if 0
636 Integer chunk_size = (4096 << 9);
637 if (total_size>chunk_size){
638 create_dataset_plist_id = H5Pcreate(H5P_DATASET_CREATE);
639 H5Pcreate(H5P_DATASET_CREATE);
640 hsize_t chunk_dim[1];
641 chunk_dim[0] = chunk_size;
642 herr_t r = H5Pset_chunk(create_dataset_plist_id,1,chunk_dim);
643 info() << " SET CHUNK FOR " << var_group_name << " total=" << total_size << " chunk=" << chunk_dim[0];
644 }
645#endif
646
647 //dataset_id.create(group_id,"Values",save_typeid,filespace_id,plist_id);
648 v1 = MPI_Wtime();
649 dataset_id.create(m_variable_group_id,var_group_name,save_typeid,filespace_id,create_dataset_plist_id);
650 if (dataset_id.isBad()){
651 OStringStream ostr;
652 ostr() << "Wrong dataset for variable '" << var_group_name << "'";
653 throw ReaderWriterException(func_name,ostr.str());
654 }
655 H5Sselect_hyperslab(filespace_id.id(), H5S_SELECT_SET, offset, NULL, count, NULL);
656
657
658 v1 = MPI_Wtime();
659 {
660 Timer::Sentry ts(&m_write_timer);
661 herr_t herr = dataset_id.write(trueid,ptr,memspace_id,filespace_id,write_plist_id);
662 if (herr<0){
663 OStringStream ostr;
664 ostr() << "Wrong dataset written for variable '" << var_group_name << "'";
665 throw ReaderWriterException(func_name,ostr.str());
666 }
667 }
668 if (create_dataset_plist_id!=H5P_DEFAULT)
669 H5Pclose(create_dataset_plist_id);
670 H5Pclose(write_plist_id);
671
672 info() << " WRITE DATASET name=" << var_group_name
673 << " offset=" << offset[0]
674 << " mysize=" << nb_base_element
675 << " write_size=" << count[0]
676 << " total=" << total_size
677 << " rank=" << m_my_rank
678 << " clock=" << (MPI_Wtime() - v1);
679
680 //pinfo() << " CLOCK WRITE = " << << " CPU=" << m_my_rank;
681 //pm->barrier();
682 //info() << " CLOCK BARRIER = " << (MPI_Wtime() - v1);
683
684 dataset_id.close();
685 }
686 info() << "TOTAL = " << (MPI_Wtime()-v0);
687}
688
689/*---------------------------------------------------------------------------*/
690/*---------------------------------------------------------------------------*/
691
/*!
 * \brief Reads the size/dimension informations of variable \a var.
 *
 * Reads the 'Dims' attribute (9 integers) of the variable group and,
 * when present, the 'Dim2' dataset containing the per-element sizes,
 * then builds the corresponding ISerializedData (without the values).
 *
 * \throws ReaderWriterException if the variable is absent from the file
 * or if any HDF5 object is invalid or inconsistent.
 */
Ref<ISerializedData> Hdf5MpiReaderWriter::
_readDim2(IVariable* var)
{
  const char* func_name = "Hdf5MpiReaderWriter::_readDim2()";

  const int max_dim = 256; // Maximum number of dimensions of HDF arrays

  String vname = _variableGroupName(var);

  info() << " READ DIM name=" << vname;

  Integer dimension_array_size = 0;
  Integer nb_element = 0;
  Integer nb_dimension = -1;
  // Check whether the name is present in the list of variables read
  // from the file. If it is not, it means the array was not saved and
  // thus that its dimensions are null.
  {
    bool is_found = false;
    for( StringList::Enumerator i(m_variables_name); ++i; )
      if (*i==vname){
        is_found = true;
        break;
      }
    if (!is_found){
      OStringStream ostr;
      ostr() << "No HDF5 group with name '" << vname << "' exists";
      throw ReaderWriterException(func_name,ostr.str());
    }
  }

  // Open the group holding the variable informations
  HGroup group_id;
  //group_id.open(m_variable_group_id,vname);
  group_id.open(m_sub_group_id,vname);
  if (group_id.isBad()){
    OStringStream ostr;
    ostr() << "No HDF5 with name '" << vname << "' exists";
    throw ReaderWriterException(func_name,ostr.str());
  }
  bool is_multi_size = false;
  eDataType data_type = DT_Unknown;
  Integer memory_size = 0;
  Integer nb_base_element = 0;
  Integer dim1_size = 0;
  Integer dim2_size = 0;
  Int64UniqueArray dims;
  // Read the informations about the sizes and dimensions of the variable
  {
    HAttribute att_id;
    att_id.open(group_id,"Dims");
    HSpace space_id = att_id.getSpace();

    // A single dimension is expected, and the number of elements of
    // the attribute (hdf_dims[0]) must be 9 (checked below).
    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);

    Integer dim_val[9];
    //herr_t herr = H5Aread(att_id,nativeType(Integer()),dim_val);
    att_id.read(m_types.nativeType(Integer()),dim_val);
    if (hdf_dims[0]!=9){
      OStringStream ostr;
      ostr() << "Wrong dimensions for variable '" << vname
             << "' (found: " << (int)hdf_dims[0] << " expected 9)";
      throw ReaderWriterException(func_name,ostr.str());
    }
    // Layout of the 9 integers of the 'Dims' attribute:
    nb_dimension = dim_val[0];
    dim1_size = dim_val[1];
    dim2_size = dim_val[2];
    nb_element = dim_val[3];
    nb_base_element = dim_val[4];
    dimension_array_size = dim_val[5];
    is_multi_size = dim_val[6]!=0;
    data_type = (eDataType)dim_val[7];
    memory_size = dim_val[8];
  }

  info() << " READ DIM name=" << vname
         << " nb_dim=" << nb_dimension << " dim1_size=" << dim1_size
         << " dim2_size=" << dim2_size << " nb_element=" << nb_element
         << " dimension_size=" << dimension_array_size
         << " is_multi_size=" << is_multi_size
         << " data_type" << data_type;

  // Variable-size (multi) arrays: read the per-element sizes.
  if (dimension_array_size>0){
    HDataset array_id;
    array_id.open(group_id,"Dim2");
    //hid_t array_id = H5Dopen(group_id.id(),"Dim2");
    if (array_id.isBad()){
      OStringStream ostr;
      ostr() << "Wrong dataset for variable '" << vname << "'";
      throw ReaderWriterException(func_name,ostr.str());
    }
    HSpace space_id = array_id.getSpace();
    if (space_id.isBad()){
      OStringStream ostr;
      ostr() << "Wrong dataspace for variable '" << vname << "'";
      throw ReaderWriterException(func_name,ostr.str());
    }
    hsize_t hdf_dims[max_dim];
    hsize_t max_dims[max_dim];
    H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
    // Check that the number of elements of the dataset is indeed the
    // expected one.
    if ((Integer)hdf_dims[0]!=dimension_array_size){
      OStringStream ostr;
      ostr() << "Wrong number of elements in 'Dim2' for variable '"
             << vname << "' (found: " << hdf_dims[0]
             << " expected " << dimension_array_size << ")";
      throw ReaderWriterException(func_name,ostr.str());
    }
    dim2_size = 0;
    dims.resize(dimension_array_size);
    herr_t herr = array_id.read(m_types.nativeType(Integer()),dims.data());
    if (herr<0){
      OStringStream ostr;
      ostr() << "Wrong dataset read for variable '" << vname << "'";
      throw ReaderWriterException(func_name,ostr.str());
    }
  }

  Ref<ISerializedData> sdata = arcaneCreateSerializedDataRef(data_type,memory_size,nb_dimension,nb_element,
                                                             nb_base_element,is_multi_size,dims);
  return sdata;
}
819
820/*---------------------------------------------------------------------------*/
821/*---------------------------------------------------------------------------*/
822
823/*---------------------------------------------------------------------------*/
824/*---------------------------------------------------------------------------*/
825
826void Hdf5MpiReaderWriter::
827write(IVariable* v,IData* data)
828{
829 if (v->itemKind()==IK_Unknown)
830 return;
831 //if (v->dataType()==DT_Real3)
832 //return;
833 _directWriteVal(v,data);
834}
835
836/*---------------------------------------------------------------------------*/
837/*---------------------------------------------------------------------------*/
838
/*!
 * \brief Reads the values of variable \a v into \a data.
 *
 * The sizes/dimensions are read first (_readDim2()), the destination
 * buffer is allocated accordingly, then the 'Values' dataset is read
 * directly into that buffer.
 *
 * \throws ReaderWriterException on any HDF5 failure.
 */
void Hdf5MpiReaderWriter::
_readVal(IVariable* v,IData* data)
{
  const char* func_name = "Hdf5MpiReaderWriter::_readVal() ";

  String var_group_name = _variableGroupName(v);

  info() << " TRY TO READ var_group=" << var_group_name;

  Ref<ISerializedData> sd(_readDim2(v));
  Int64 storage_size = sd->memorySize();
  //ByteUniqueArray byte_values(storage_size);
  info() << " READ DATA n=" << storage_size;

  data->allocateBufferForSerializedData(sd.get());

  //bool no_dump = v.property() & IVariable::PNoDump;
  // Always read: the decision to save or not is made upstream.
  //bool no_dump = false;
  if (storage_size!=0){
    // Open the group holding the variable informations
    HGroup group_id;
    //group_id.open(m_variable_group_id,var_group_name);
    group_id.open(m_sub_group_id,var_group_name);
    if (group_id.isBad()){
      OStringStream ostr;
      ostr() << "No HDF5 group with name '" << var_group_name << "' exists";
      throw ReaderWriterException(func_name,ostr.str());
    }

    HDataset dataset_id;
    dataset_id.open(group_id,"Values");
    if (dataset_id.isBad()){
      OStringStream ostr;
      ostr() << "Wrong dataset for variable '" << var_group_name << "'";
      throw ReaderWriterException(func_name,ostr.str());
    }

    //dataset_id.read(trueid,ptr);
    //debug(Trace::High) << "Variable " << var_group_name << " readed (nb_element=" << nb_element << ").";
    void* ptr = sd->writableBytes().data();
    info() << "READ Variable " << var_group_name << " ptr=" << ptr;;
    hid_t trueid = m_types.nativeType(sd->baseDataType());
    dataset_id.read(trueid,ptr);
  }

  data->assignSerializedData(sd.get());
}
887
888/*---------------------------------------------------------------------------*/
889/*---------------------------------------------------------------------------*/
890
/*!
 * \brief IDataReader entry point: reads variable \a var into \a data.
 */
void Hdf5MpiReaderWriter::
read(IVariable* var,IData* data)
{
  _directReadVal(var,data);
}
896
897/*---------------------------------------------------------------------------*/
898/*---------------------------------------------------------------------------*/
899
/*!
 * \brief Stores the checkpoint meta-data \a meta_data.
 *
 * NOTE(review): the whole implementation is disabled (#if 0), so this
 * service currently stores no meta-data. The disabled code shows the
 * intended scheme: non-writer ranks send their group name and meta-data
 * to their referent rank, which writes its own meta-data and then the
 * received ones.
 */
void Hdf5MpiReaderWriter::
setMetaData(const String& meta_data)
{
  ARCANE_UNUSED(meta_data);
#if 0
  if (m_is_parallel){
    IParallelMng* pm = m_parallel_mng;
    Integer nb_rank = pm->commSize();
    if (m_send_rank!=m_my_rank){
      // Envoie le groupe et les meta donnees
      SerializeBuffer sb;
      sb.setMode(ISerializer::ModeReserve);
      sb.reserve(m_sub_group_name);
      sb.reserve(meta_data);
      sb.allocateBuffer();
      sb.setMode(ISerializer::ModePut);
      sb.put(m_sub_group_name);
      sb.put(meta_data);
      m_parallel_mng->sendSerializer(&sb,m_send_rank);
    }
    else{
      _setMetaData(meta_data,m_sub_group_name);
      for( Integer i=m_send_rank+1; i<=m_last_recv_rank; ++i ){
        SerializeBuffer sb;
        pm->recvSerializer(&sb,i);
        sb.setMode(ISerializer::ModeGet);
        String remote_group_name;
        String remote_meta_data;
        sb.get(remote_group_name);
        sb.get(remote_meta_data);
        _setMetaData(remote_meta_data,remote_group_name);
      }
    }
  }
  else
    _setMetaData(meta_data,m_sub_group_name);
#endif
}
938
939/*---------------------------------------------------------------------------*/
940/*---------------------------------------------------------------------------*/
941
void Hdf5MpiReaderWriter::
_setMetaData(const String& meta_data,const String& sub_group_name)
{
  // Disabled: would store \a meta_data as a byte dataset named "MetaData"
  // under the HDF5 group \a sub_group_name. Kept for reference only.
  ARCANE_UNUSED(meta_data);
  ARCANE_UNUSED(sub_group_name);
#if 0
  const char* func_name ="Hdf5MpiReaderWriter::setMetaData()";

  HGroup base_group;
  base_group.recursiveCreate(m_file_id,sub_group_name);

  ByteConstArrayView meta_data_utf8 = meta_data.utf8();
  const Byte* _meta_data = meta_data_utf8.begin();

  // Dataspace sized to hold the UTF-8 bytes plus a terminating null.
  hsize_t dims[1];
  dims[0] = meta_data_utf8.size() + 1;
  HSpace space_id;
  space_id.createSimple(1,dims);
  if (space_id.isBad())
    throw ReaderWriterException(func_name,"Bad 'space' for the meta-data ('MetaData')");

  HDataset dataset_id;
  dataset_id.create(base_group,"MetaData",m_types.nativeType(Byte()),space_id,H5P_DEFAULT);
  if (dataset_id.isBad())
    throw ReaderWriterException(func_name,"Bad 'dataset' for the meta-data ('MetaData')");

  herr_t herr = dataset_id.write(m_types.nativeType(Byte()),_meta_data);
  if (herr<0)
    throw ReaderWriterException(func_name,"Can't write the meta-data ('MetaData')");
#endif
}
973
974/*---------------------------------------------------------------------------*/
975/*---------------------------------------------------------------------------*/
976
977String Hdf5MpiReaderWriter::
978metaData()
979{
980 const char* func_name ="Hdf5MpiReaderWriter::readMetaData()";
981 HDataset dataset_id;
982 dataset_id.open(m_sub_group_id,"MetaData");
983 if (dataset_id.isBad()){
984 throw ReaderWriterException(func_name,"Wrong dataset for meta-data ('MetaData')");
985 }
986 HSpace space_id = dataset_id.getSpace();
987 if (space_id.isBad()){
988 throw ReaderWriterException(func_name,"Wrong space for meta-data ('MetaData')");
989 }
990 const int max_dim = 256;
991 hsize_t hdf_dims[max_dim];
992 hsize_t max_dims[max_dim];
993 H5Sget_simple_extent_dims(space_id.id(),hdf_dims,max_dims);
994 if (hdf_dims[0]<=0)
995 throw ReaderWriterException(func_name,"Wrong number of elements for meta-data ('MetaData')");
996 Integer nb_byte = static_cast<Integer>(hdf_dims[0]);
997 ByteUniqueArray uchars(nb_byte);
998 dataset_id.read(m_types.nativeType(Byte()),uchars.data());
999 String s(uchars);
1000 return s;
1001}
1002
1003/*---------------------------------------------------------------------------*/
1004/*---------------------------------------------------------------------------*/
1005
1006void Hdf5MpiReaderWriter::
1007endWrite()
1008{
1009#if 0
1010 if (m_is_parallel){
1011 if (m_my_rank==m_send_rank){
1012 _receiveRemoteVariables();
1013 }
1014 else{
1015 // Envoie un message de fin
1016 SerializeBuffer sb;
1017 sb.setMode(ISerializer::ModeReserve);
1018 sb.reserve(DT_Int32,1); // Pour indiquer la fin des envoies
1019 sb.allocateBuffer();
1020 sb.setMode(ISerializer::ModePut);
1021 sb.putInt32(0); // Indique qu'il s'agit d'un message de fin
1022 m_parallel_mng->sendSerializer(&sb,m_send_rank);
1023 }
1024 }
1025#endif
1026 {
1027 info() << " Hdf5Timer: nb_activated=" << m_io_timer.nbActivated()
1028 << " time=" << m_io_timer.totalTime()
1029 << " write=" << m_write_timer.nbActivated()
1030 << " timewrite=" << m_write_timer.totalTime();
1031 }
1032}
1033
1034/*---------------------------------------------------------------------------*/
1035/*---------------------------------------------------------------------------*/
1036
1037void Hdf5MpiReaderWriter::
1038_receiveRemoteVariables()
1039{
1040 IParallelMng* pm = m_parallel_mng;
1041 Integer nb_remaining = m_last_recv_rank - m_send_rank;
1042 info() << "NB REMAINING = " << nb_remaining;
1043 Ref<ISerializeMessageList> m_messages(pm->createSerializeMessageListRef());
1044
1045 while(nb_remaining>0){
1046 ISerializeMessage* sm = new SerializeMessage(m_my_rank,NULL_SUB_DOMAIN_ID,ISerializeMessage::MT_Recv);
1047 m_messages->addMessage(sm);
1048 m_messages->processPendingMessages();
1049 m_messages->waitMessages(Parallel::WaitAll);
1050
1051 ISerializer* sb = sm->serializer();
1052 sb->setMode(ISerializer::ModeGet);
1053 //info() << " RECEIVING BUFFER!";
1054 Int32 id = sb->getInt32();
1055 if (id==0){
1056 //info() << " LAST MESSAGE!";
1057 --nb_remaining;
1058 }
1059 else
1060 _writeRemoteVariable(sb);
1061 delete sm;
1062 }
1063}
1064
1065/*---------------------------------------------------------------------------*/
1066/*---------------------------------------------------------------------------*/
1067
1068void Hdf5MpiReaderWriter::
1069_writeRemoteVariable(ISerializer* sb)
1070{
1071 String var_name;
1072 sb->get(var_name);
1073 String group_name;
1074 sb->get(group_name);
1075 //eDataType data_type = (eDataType)sb->getInteger();
1076 //Integer dim = sb->getInteger();
1077 //info() << " REMOTE VAR = name=" << var_name << " data_type=" << data_type
1078 // << " dim=" << dim << " group=" << group_name;
1079 Ref<ISerializedData> sdata = arcaneCreateEmptySerializedDataRef();
1080 sb->setReadMode(ISerializer::ReadReplace);
1081 sdata->serialize(sb);
1082 _writeVal(var_name,group_name,sdata.get());
1083}
1084
1085/*---------------------------------------------------------------------------*/
1086/*---------------------------------------------------------------------------*/
1087
1088/*---------------------------------------------------------------------------*/
1089/*---------------------------------------------------------------------------*/
1093class ArcaneHdf5MpiCheckpointService2
1094: public ArcaneHdf5MpiReaderWriterObject
1095{
1096 public:
1097
1098 ArcaneHdf5MpiCheckpointService2(const ServiceBuildInfo& sbi)
1099 : ArcaneHdf5MpiReaderWriterObject(sbi), m_write_index(0), m_writer(0), m_reader(0)
1100 , m_fileset_size(0)
1101 {
1102 }
1103 virtual IDataWriter* dataWriter() { return m_writer; }
1104 virtual IDataReader* dataReader() { return m_reader; }
1105
1106 virtual void notifyBeginWrite();
1107 virtual void notifyEndWrite();
1108 virtual void notifyBeginRead();
1109 virtual void notifyEndRead();
1110 virtual void close() {}
1111 virtual String readerServiceName() const { return "ArcaneHdf5MpiCheckpointReader2"; }
1112
1113 private:
1114
1115 Integer m_write_index;
1116 Hdf5MpiReaderWriter* m_writer;
1117 Hdf5MpiReaderWriter* m_reader;
1118 Integer m_fileset_size;
1119
1120 private:
1121
1122 String _defaultFileName()
1123 {
1124 return "arcanedump.mpi.h5";
1125 }
1126 Directory _defaultDirectory()
1127 {
1128 return Directory(baseDirectoryName());
1129 }
1130 void _parseMetaData(String meta_data);
1131};
1132
1133/*---------------------------------------------------------------------------*/
1134/*---------------------------------------------------------------------------*/
1135
1136void ArcaneHdf5MpiCheckpointService2::
1137_parseMetaData(String meta_data)
1138{
1139 IIOMng* io_mng = subDomain()->ioMng();
1140 ScopedPtrT<IXmlDocumentHolder> xml_doc(io_mng->parseXmlBuffer(meta_data.utf8(),"MetaData"));
1141 XmlNode root = xml_doc->documentNode().documentElement();
1142 Integer version = root.attr("version").valueAsInteger();
1143 if (version!=1){
1144 throw ReaderWriterException("ArcaneHdf5MpiCheckpointService2::_parseMetaData","Bad version (expected 1)");
1145 }
1146 m_fileset_size = 0;
1147
1148 info() << " FileSet size=" << m_fileset_size;
1149}
1150
1151/*---------------------------------------------------------------------------*/
1152/*---------------------------------------------------------------------------*/
1153
1154void ArcaneHdf5MpiCheckpointService2::
1155notifyBeginRead()
1156{
1157 String meta_data = readerMetaData();
1158 _parseMetaData(meta_data);
1159
1160 info() << " GET META DATA READER " << readerMetaData()
1161 << " filename=" << fileName();
1162
1163 if (fileName().null()){
1164 Directory dump_dir(_defaultDirectory());
1165 //Directory dump_dir(subDomain()->exportDirectory(),"protection");
1166 //Directory dump_dir("/tmp/grospelx/");
1167 setFileName(dump_dir.file(_defaultFileName()));
1168 //setFileName(dump_dir.file("arcanedump.0.h5"));
1169 //setFileName(_defaultFileName());
1170 }
1171 info() << " READ CHECKPOINT FILENAME = " << fileName();
1172 StringBuilder sub_group;
1173 //sub_group = "SubDomain";
1174 //sub_group += subDomain()->subDomainId();
1175 //sub_group += "/Index";
1176 //sub_group += currentIndex();
1177 sub_group = "Index";
1178 sub_group += currentIndex();
1179 m_reader = new Hdf5MpiReaderWriter(subDomain(),fileName(),sub_group.toString(),0,Hdf5MpiReaderWriter::OpenModeRead);
1180 m_reader->initialize();
1181}
1182
1183/*---------------------------------------------------------------------------*/
1184/*---------------------------------------------------------------------------*/
1185
1186void ArcaneHdf5MpiCheckpointService2::
1187notifyEndRead()
1188{
1189 delete m_reader;
1190 m_reader = 0;
1191}
1192
1193/*---------------------------------------------------------------------------*/
1194/*---------------------------------------------------------------------------*/
1195
1196void ArcaneHdf5MpiCheckpointService2::
1197notifyBeginWrite()
1198{
1199 if (options())
1200 m_fileset_size = options()->filesetSize();
1201
1202 if (fileName().null()){
1203 Directory dump_dir(_defaultDirectory());
1204 //info() << "USE TMP DIRECTORY\n";
1205 //Directory dump_dir("/tmp/grospelx/");
1206 //dump_dir.createDirectory();
1207 setFileName(dump_dir.file(_defaultFileName()));
1208 //setFileName(_defaultFileName());
1209 }
1210 Hdf5MpiReaderWriter::eOpenMode open_mode = Hdf5MpiReaderWriter::OpenModeAppend;
1211 Integer write_index = checkpointTimes().size();
1212 --write_index;
1213 if (write_index==0)
1214 open_mode = Hdf5MpiReaderWriter::OpenModeTruncate;
1215
1216 //IParallelMng* pm = subDomain()->parallelMng();
1217 //Integer sid = pm->commRank();
1218
1219 StringBuilder sub_group;
1220 //sub_group = "SubDomain";
1221 //sub_group += sid;
1222 //sub_group += "/Index";
1223 //sub_group += write_index;
1224
1225 sub_group = "Index";
1226 sub_group += write_index;
1227
1228 m_writer = new Hdf5MpiReaderWriter(subDomain(),fileName(),sub_group.toString(),m_fileset_size,open_mode);
1229 m_writer->initialize();
1230}
1231
1232/*---------------------------------------------------------------------------*/
1233/*---------------------------------------------------------------------------*/
1234
1235void ArcaneHdf5MpiCheckpointService2::
1236notifyEndWrite()
1237{
1238 OStringStream ostr;
1239 ostr() << "<infos version='1'>\n";
1240 ostr() << " <fileset-size>" << m_fileset_size << "</fileset-size>\n";
1241 ostr() << "</infos>\n";
1242 setReaderMetaData(ostr.str());
1243 ++m_write_index;
1244 delete m_writer;
1245 m_writer = 0;
1246}
1247
1248/*---------------------------------------------------------------------------*/
1249/*---------------------------------------------------------------------------*/
1250
// Register the service both as a checkpoint reader and a checkpoint writer.
ARCANE_REGISTER_SUB_DOMAIN_FACTORY(ArcaneHdf5MpiCheckpointService2,
                                   ICheckpointReader,
                                   ArcaneHdf5MpiCheckpointReader2);

ARCANE_REGISTER_SUB_DOMAIN_FACTORY(ArcaneHdf5MpiCheckpointService2,
                                   ICheckpointWriter,
                                   ArcaneHdf5MpiCheckpointWriter2);

// Registration through the service-specific (axl-generated) macro.
ARCANE_REGISTER_SERVICE_HDF5MPIREADERWRITER(ArcaneHdf5MpiCheckpoint2,
                                            ArcaneHdf5MpiCheckpointService2);
1261
1262/*---------------------------------------------------------------------------*/
1263/*---------------------------------------------------------------------------*/
1264
1265} // End namespace Arcane
1266
1267/*---------------------------------------------------------------------------*/
1268/*---------------------------------------------------------------------------*/
1269
1270#endif
#define ARCANE_REGISTER_SUB_DOMAIN_FACTORY(aclass, ainterface, aname)
Enregistre un service de fabrique pour la classe aclass.
virtual Ref< ISerializeMessageList > createSerializeMessageListRef()=0
Créé une liste pour gérer les 'ISerializeMessage'.
virtual Int32 commSize() const =0
Nombre d'instance dans le communicateur.
virtual void allGather(ConstArrayView< char > send_buf, ArrayView< char > recv_buf)=0
Effectue un regroupement sur tous les processeurs. Il s'agit d'une opération collective....
Lecteur des fichiers de maillage via la bibliothèque LIMA.
Definition Lima.cc:120
-*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
Ref< ISerializedData > arcaneCreateSerializedDataRef(eDataType data_type, Int64 memory_size, Integer nb_dim, Int64 nb_element, Int64 nb_base_element, bool is_multi_size, Int64ConstArrayView dimensions)
Créé des données sérialisées.
UniqueArray< Real3x3 > Real3x3UniqueArray
Tableau dynamique à une dimension de tenseurs de rang 3.
Definition UtilsTypes.h:533
UniqueArray< Int64 > Int64UniqueArray
Tableau dynamique à une dimension d'entiers 64 bits.
Definition UtilsTypes.h:513
UniqueArray< Real3 > Real3UniqueArray
Tableau dynamique à une dimension de vecteurs de rang 3.
Definition UtilsTypes.h:529
UniqueArray< Byte > ByteUniqueArray
Tableau dynamique à une dimension de caractères.
Definition UtilsTypes.h:509
ConstArrayView< Byte > ByteConstArrayView
Equivalent C d'un tableau à une dimension de caractères.
Definition UtilsTypes.h:634
UniqueArray< Int32 > Int32UniqueArray
Tableau dynamique à une dimension d'entiers 32 bits.
Definition UtilsTypes.h:515
UniqueArray< Real > RealUniqueArray
Tableau dynamique à une dimension de réels.
Definition UtilsTypes.h:521
Ref< ISerializedData > arcaneCreateEmptySerializedDataRef()
Créé des données sérialisées.
unsigned char Byte
Type d'un octet.
Definition UtilsTypes.h:142
eDataType
Type d'une donnée.
Definition DataTypes.h:39
@ DT_Real3x3
Donnée de type tenseur 3x3.
Definition DataTypes.h:49
@ DT_Int32
Donnée de type entier 32 bits.
Definition DataTypes.h:43
@ DT_Real3
Donnée de type vecteur 3.
Definition DataTypes.h:47
@ DT_Unknown
Donnée de type inconnue ou non initialisée.
Definition DataTypes.h:54
@ DT_Real
Donnée de type réel.
Definition DataTypes.h:41
ConstArrayView< Int64 > Int64ConstArrayView
Equivalent C d'un tableau à une dimension d'entiers 64 bits.
Definition UtilsTypes.h:638
const char * dataTypeName(eDataType type)
Nom du type de donnée.
Definition DataTypes.cc:70
Int32 Integer
Type représentant un entier.