Arcane v3.15.3.0
Developer documentation
AlephPETSc.cc
// -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8-with-signature -*-
//-----------------------------------------------------------------------------
// Copyright 2000-2025 CEA (www.cea.fr) IFPEN (www.ifpenergiesnouvelles.com)
// See the top-level COPYRIGHT file for details.
// SPDX-License-Identifier: Apache-2.0
//-----------------------------------------------------------------------------
/*---------------------------------------------------------------------------*/
/* AlephPETSc.cc                                               (C) 2000-2025 */
/*                                                                           */
/* PETSc implementation of Aleph.                                            */
/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

#include "arcane/aleph/AlephArcane.h"

#include "arcane/utils/StringList.h"

#define MPI_COMM_SUB *(MPI_Comm*)(m_kernel->subParallelMng(m_index)->getMPICommunicator())
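// Note: MPI_COMM_SUB expands to the MPI communicator of the solver's sub
// parallel manager; it is only meaningful inside members of the classes below
// that provide 'm_kernel' and 'm_index' (the vector and matrix implementations).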

#include "petscksp.h"
#include "petscsys.h"

// TODO: handle PETSc minor versions more cleanly
#if PETSC_VERSION_GE(3,6,1)
#include "petscmat.h"
#elif PETSC_VERSION_(3,3,0)
#include "petscpcmg.h"
#elif PETSC_VERSION_(3,0,0)
#include "petscmg.h"
#else
#error "Bad version of PETSc: should be 3.0.0, 3.3.0 or greater than 3.6.1."
#endif

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace Arcane
{

namespace
{
// To convert from AlephInt data to PetscInt data
PetscInt* _toPetscInt(AlephInt* v)
{
  static_assert(sizeof(PetscInt)==sizeof(AlephInt),"AlephInt and PetscInt should have the same size");
  return reinterpret_cast<PetscInt*>(v);
}
// To convert from AlephInt data to PetscInt data
const PetscInt* _toPetscInt(const AlephInt* v)
{
  static_assert(sizeof(PetscInt)==sizeof(AlephInt),"AlephInt and PetscInt should have the same size");
  return reinterpret_cast<const PetscInt*>(v);
}

inline void
petscCheck(PetscErrorCode error_code)
{
  // Note GG: it is not clear exactly from which PETSc version the value
  // 'PETSC_SUCCESS' and the function 'PetscCallVoid' are available, but
  // they are in 3.18.0.
#if PETSC_VERSION_GE(3,19,0)
  if (error_code==PETSC_SUCCESS)
    return;
  PetscCallVoid(error_code);
  ARCANE_FATAL("Error in PETSc backend errcode={0}",error_code);
#else
  if (error_code!=0)
    ARCANE_FATAL("Error in PETSc backend errcode={0}",error_code);
#endif
}
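
// Typical use: wrap a PETSc call that returns a PetscErrorCode, e.g.
// petscCheck(VecAssemblyBegin(v)); so that a non-zero return code turns into
// an Arcane fatal error instead of being silently ignored.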

}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

class AlephTopologyPETSc
: public IAlephTopology
{
 public:

  AlephTopologyPETSc(ITraceMng* tm, AlephKernel* kernel, Integer index, Integer nb_row_size);
  ~AlephTopologyPETSc() override
  {
    debug() << "\33[1;5;32m\t\t\t[~AlephTopologyPETSc]\33[0m";
  }

 public:

  void backupAndInitialize() override {}
  void restore() override {}

 private:

  void _checkInitPETSc();
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

class AlephVectorPETSc
: public IAlephVector
{
 public:

  AlephVectorPETSc(ITraceMng* tm, AlephKernel* kernel, Integer index);
  ~AlephVectorPETSc();
  void AlephVectorCreate(void) override;
  void AlephVectorSet(const double*, const AlephInt*, Integer) override;
  int AlephVectorAssemble(void) override;
  void AlephVectorGet(double*, const AlephInt*, Integer) override;
  void writeToFile(const String) override;
  Real LinftyNorm(void);

 public:

  Vec m_petsc_vector = nullptr;
  PetscInt jSize = 0;
  PetscInt jUpper = 0;
  PetscInt jLower = -1;
};
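
// Note on the fields above: [jLower, jUpper) is the range of global rows owned
// by this rank (deduced from the Aleph topology in AlephVectorCreate() below),
// and jSize = jUpper - jLower is the local size passed to VecCreateMPI().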

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

class AlephMatrixPETSc
: public IAlephMatrix
{
 public:

  AlephMatrixPETSc(ITraceMng* tm, AlephKernel* kernel, Integer index);
  ~AlephMatrixPETSc();
  void AlephMatrixCreate(void) override;
  void AlephMatrixSetFilled(bool) override;
  int AlephMatrixAssemble(void) override;
  void AlephMatrixFill(int, AlephInt*, AlephInt*, double*) override;
  Real LinftyNormVectorProductAndSub(AlephVector*, AlephVector*);
  bool isAlreadySolved(AlephVectorPETSc*, AlephVectorPETSc*,
                       AlephVectorPETSc*, Real*, AlephParams*);
  int AlephMatrixSolve(AlephVector*, AlephVector*,
                       AlephVector*, Integer&, Real*, AlephParams*) override;
  void writeToFile(const String) override;

 private:

  Mat m_petsc_matrix = nullptr;
  KSP m_ksp_solver = nullptr;
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

namespace
{
bool global_need_petsc_finalize = false;
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

class PETScAlephFactoryImpl
: public AbstractService
, public IAlephFactoryImpl
{
 public:
  PETScAlephFactoryImpl(const ServiceBuildInfo& sbi)
  : AbstractService(sbi)
  {}
  ~PETScAlephFactoryImpl() override
  {
    for (auto* v : m_IAlephVectors)
      delete v;
    for (auto* v : m_IAlephMatrixs)
      delete v;
    for (auto* v : m_IAlephTopologys)
      delete v;
    if (global_need_petsc_finalize) {
      // TODO: PetscFinalize() should be called here, but it has to be called
      // before MPI_Finalize(), which is not always the case depending on when
      // this factory is destroyed. For now the call is left out.
      //PetscFinalize();
      global_need_petsc_finalize = false;
    }
  }

 public:

  void initialize() override {}
  IAlephTopology* createTopology(ITraceMng* tm,
                                 AlephKernel* kernel,
                                 Integer index,
                                 Integer nb_row_size) override
  {
    IAlephTopology* new_topology = new AlephTopologyPETSc(tm, kernel, index, nb_row_size);
    m_IAlephTopologys.add(new_topology);
    return new_topology;
  }
  IAlephVector* createVector(ITraceMng* tm,
                             AlephKernel* kernel,
                             Integer index) override
  {
    IAlephVector* new_vector = new AlephVectorPETSc(tm, kernel, index);
    m_IAlephVectors.add(new_vector);
    return new_vector;
  }

  IAlephMatrix* createMatrix(ITraceMng* tm,
                             AlephKernel* kernel,
                             Integer index) override
  {
    IAlephMatrix* new_matrix = new AlephMatrixPETSc(tm, kernel, index);
    m_IAlephMatrixs.add(new_matrix);
    return new_matrix;
  }

 private:

  UniqueArray<IAlephVector*> m_IAlephVectors;
  UniqueArray<IAlephMatrix*> m_IAlephMatrixs;
  UniqueArray<IAlephTopology*> m_IAlephTopologys;
};

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

AlephTopologyPETSc::
AlephTopologyPETSc(ITraceMng* tm, AlephKernel* kernel, Integer index, Integer nb_row_size)
: IAlephTopology(tm, kernel, index, nb_row_size)
{
  ARCANE_CHECK_POINTER(kernel);
  if (!m_participating_in_solver) {
    debug() << "\33[1;32m\t[AlephTopologyPETSc] Not concerned with this solver, returning\33[0m";
    return;
  }
  debug() << "\33[1;32m\t\t[AlephTopologyPETSc] @" << this << "\33[0m";
  if (!m_kernel->isParallel()) {
    PETSC_COMM_WORLD = PETSC_COMM_SELF;
  }
  else {
    PETSC_COMM_WORLD = *(MPI_Comm*)(kernel->subParallelMng(index)->getMPICommunicator());
  }

  _checkInitPETSc();
}
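
// Note: PETSC_COMM_WORLD is assigned above *before* PETSc is initialized;
// PETSc only honors a user-provided world communicator when it is set prior
// to PetscInitialize(), which is why _checkInitPETSc() is called last.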

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

void AlephTopologyPETSc::
_checkInitPETSc()
{
  // Do nothing if PETSc has already been initialized.
  PetscBool is_petsc_initialized = PETSC_FALSE;
  PetscInitialized(&is_petsc_initialized);
  if (is_petsc_initialized==PETSC_TRUE)
    return;

  AlephKernelSolverInitializeArguments& init_args = m_kernel->solverInitializeArgs();
  bool has_args = init_args.hasValues();
  debug() << Trace::Color::cyan() << "[AlephTopologyPETSc] Initializing PETSc. UseArg=" << has_args;

  if (has_args){
    const CommandLineArguments& args = init_args.commandLineArguments();
    PetscInitialize(args.commandLineArgc(),args.commandLineArgv(),nullptr,nullptr);
  }
  else{
    PetscInitializeNoArguments();
  }
  global_need_petsc_finalize = true;
}
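
// When command-line arguments are forwarded to PetscInitialize() here, the
// standard PETSc options database becomes available (for instance
// -ksp_monitor or -ksp_converged_reason), and those options are later picked
// up by KSPSetFromOptions()/PCSetFromOptions() in AlephMatrixSolve().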

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

// ****************************************************************************
// * AlephVectorPETSc
// ****************************************************************************
AlephVectorPETSc::
AlephVectorPETSc(ITraceMng *tm,AlephKernel *kernel,Integer index)
: IAlephVector(tm,kernel,index)
, m_petsc_vector()
, jSize(0)
, jUpper(0)
, jLower(-1)
{
  debug()<<"\t\t[AlephVectorPETSc::AlephVectorPETSc] new SolverVectorPETSc";
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

AlephVectorPETSc::
~AlephVectorPETSc()
{
  VecDestroy(&m_petsc_vector);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

// ****************************************************************************
// * AlephVectorCreate
// ****************************************************************************
void AlephVectorPETSc::
AlephVectorCreate(void)
{
  for( int iCpu=0;iCpu<m_kernel->size();++iCpu){
    if (m_kernel->rank()!=m_kernel->solverRanks(m_index)[iCpu])
      continue;
    if (jLower==-1)
      jLower=m_kernel->topology()->gathered_nb_row(iCpu);
    jUpper=m_kernel->topology()->gathered_nb_row(iCpu+1);
  }
  // Update the local buffer size, used later for instance to compute the max norm
  jSize=jUpper-jLower;
  VecCreateMPI(MPI_COMM_SUB,
               jSize, // n
               m_kernel->topology()->nb_row_size(), // N
               &m_petsc_vector);
  debug()<<"\t\t[AlephVectorPETSc::AlephVectorCreate] PETSC VectorCreate"
         <<" of local size=" <<jSize<<"/"<<m_kernel->topology()->nb_row_size()
         <<", done";
}
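
// Illustrative example (hypothetical numbers): with a global system of 10 rows
// split 6/4 over two solver ranks, the second rank ends up with jLower=6 and
// jUpper=10, hence jSize=4, and VecCreateMPI() is called with local size 4 and
// global size 10.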

// ****************************************************************************
// * AlephVectorSet
// ****************************************************************************
void AlephVectorPETSc::
AlephVectorSet(const double *bfr_val, const AlephInt *bfr_idx, Integer size)
{
  VecSetValues(m_petsc_vector,size,_toPetscInt(bfr_idx),bfr_val,INSERT_VALUES);
  debug()<<"\t\t[AlephVectorPETSc::AlephVectorSet] "<<size<<" values inserted!";
  // Even in sequential mode, PETSc requires the assembly step
  AlephVectorAssemble();
}


// ****************************************************************************
// * AlephVectorAssemble
// ****************************************************************************
int AlephVectorPETSc::
AlephVectorAssemble(void)
{
  VecAssemblyBegin(m_petsc_vector);
  VecAssemblyEnd(m_petsc_vector);
  debug()<<"\t\t[AlephVectorPETSc::AlephVectorAssemble]";
  return 0;
}


// ****************************************************************************
// * AlephVectorGet
// ****************************************************************************
void AlephVectorPETSc::
AlephVectorGet(double* bfr_val, const AlephInt* bfr_idx, Integer size)
{
  VecGetValues(m_petsc_vector,size,_toPetscInt(bfr_idx),bfr_val);
  debug()<<"\t\t[AlephVectorPETSc::AlephVectorGet] fetched "<< size <<" values!";
}
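
// Note: VecSetValues()/VecGetValues() use global indices, and VecGetValues()
// can only read entries owned by the local rank, i.e. indices within
// [jLower, jUpper); values set with INSERT_VALUES only become visible after
// the VecAssemblyBegin()/VecAssemblyEnd() pair above has completed.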


// ****************************************************************************
// * writeToFile
// ****************************************************************************
void AlephVectorPETSc::
writeToFile(const String filename)
{
  ARCANE_UNUSED(filename);
  debug()<<"\t\t[AlephVectorPETSc::writeToFile]";
}

// ****************************************************************************
// * LinftyNorm
// ****************************************************************************
Real AlephVectorPETSc::
LinftyNorm(void)
{
  PetscReal val;
  VecNorm(m_petsc_vector,NORM_INFINITY,&val);
  return val;
}

// ****************************************************************************
// * AlephMatrixPETSc
// ****************************************************************************

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

AlephMatrixPETSc::
AlephMatrixPETSc(ITraceMng *tm, AlephKernel *kernel, Integer index)
: IAlephMatrix(tm,kernel,index)
{
  debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixPETSc] new AlephMatrixPETSc";
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

AlephMatrixPETSc::
~AlephMatrixPETSc()
{
  if (m_petsc_matrix)
    MatDestroy(&m_petsc_matrix);
  if (m_ksp_solver)
    KSPDestroy(&m_ksp_solver);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

// ****************************************************************************
// * AlephMatrixCreate
// ****************************************************************************
void AlephMatrixPETSc::
AlephMatrixCreate(void)
{
  AlephInt ilower=-1;
  AlephInt iupper=0;

  for( int iCpu=0;iCpu<m_kernel->size();++iCpu ){
    if (m_kernel->rank()!=m_kernel->solverRanks(m_index)[iCpu]) continue;
    if (ilower==-1) ilower=m_kernel->topology()->gathered_nb_row(iCpu);
    iupper=m_kernel->topology()->gathered_nb_row(iCpu+1);
  }
  AlephInt size = iupper-ilower;
  AlephInt jlower=ilower;
  AlephInt jupper=iupper;

#if PETSC_VERSION_GE(3,3,0)
 #define PETSC_VERSION_MatCreate MatCreateAIJ
#elif PETSC_VERSION_(3,0,0)
 #define PETSC_VERSION_MatCreate MatCreateMPIAIJ
#endif
  PetscInt* row_data = _toPetscInt(m_kernel->topology()->gathered_nb_row_elements().subView(ilower,size).data());
  PETSC_VERSION_MatCreate(MPI_COMM_SUB,
                          iupper-ilower, // m = number of rows
                          jupper-jlower, // n = number of columns
                          m_kernel->topology()->nb_row_size(), // M = number of global rows
                          m_kernel->topology()->nb_row_size(), // N = number of global columns
                          0, // ignored (number of nonzeros per row in DIAGONAL portion of local submatrix)
                          // array containing the number of nonzeros in the various rows of the DIAGONAL portion of local submatrix
                          row_data,
                          0, // ignored (number of nonzeros per row in the OFF-DIAGONAL portion of local submatrix)
                          // array containing the number of nonzeros in the various rows of the OFF-DIAGONAL portion of local submatrix
                          row_data,
                          &m_petsc_matrix);
  MatSetOption(m_petsc_matrix, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE);
  MatSetUp(m_petsc_matrix);
  debug()<<"\t\t[AlephMatrixPetsc::AlephMatrixCreate] PETSC MatrixCreate idx:"<<m_index
         <<", ("<<ilower<<"->"<<(iupper-1)<<")";
}
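
// Note on preallocation: since explicit d_nnz/o_nnz arrays are supplied, the
// scalar d_nz/o_nz arguments (the two zeros above) are ignored by PETSc. The
// same per-row count is passed for both the diagonal and off-diagonal blocks,
// so it acts as an upper bound rather than an exact split.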


// ****************************************************************************
// * AlephMatrixSetFilled
// ****************************************************************************
void AlephMatrixPETSc::
AlephMatrixSetFilled(bool)
{
  debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixSetFilled] done";
  MatSetOption(m_petsc_matrix, MAT_NEW_NONZERO_LOCATIONS, PETSC_FALSE);
}


// ****************************************************************************
// * AlephMatrixAssemble
// ****************************************************************************
int AlephMatrixPETSc::
AlephMatrixAssemble(void)
{
  debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixAssemble] AlephMatrixAssemble";
  MatAssemblyBegin(m_petsc_matrix,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(m_petsc_matrix,MAT_FINAL_ASSEMBLY);
  return 0;
}


// ****************************************************************************
// * AlephMatrixFill
// ****************************************************************************
void AlephMatrixPETSc::
AlephMatrixFill(int size, AlephInt* rows, AlephInt* cols, double *values)
{
  debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixFill] size="<<size;
  for(int i=0;i<size;i++){
    //debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixFill] i="<<i;
    MatSetValue(m_petsc_matrix, rows[i], cols[i], values[i], INSERT_VALUES);
  }
  debug()<<"\t\t[AlephMatrixPETSc::AlephMatrixFill] done";
  // PETSc always requires the assembly step
  AlephMatrixAssemble();
}
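
// Note: MatSetValue() inserts one coefficient at a global (row, column)
// position; with INSERT_VALUES a repeated (row, column) pair overwrites the
// previous value instead of accumulating, and nothing is visible to the
// solver until MatAssemblyBegin()/MatAssemblyEnd() have completed.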



// ****************************************************************************
// * LinftyNormVectorProductAndSub
// ****************************************************************************
Real AlephMatrixPETSc::
LinftyNormVectorProductAndSub(AlephVector* x,AlephVector* b)
{
  ARCANE_UNUSED(x);
  ARCANE_UNUSED(b);
  throw FatalErrorException("LinftyNormVectorProductAndSub", "error");
}


// ****************************************************************************
// * isAlreadySolved
// ****************************************************************************
bool AlephMatrixPETSc::
isAlreadySolved(AlephVectorPETSc* x,
                AlephVectorPETSc* b,
                AlephVectorPETSc* tmp,
                Real* residual_norm,
                AlephParams* params)
{
  ARCANE_UNUSED(x);
  ARCANE_UNUSED(b);
  ARCANE_UNUSED(tmp);
  ARCANE_UNUSED(residual_norm);
  ARCANE_UNUSED(params);
  return false;
}


// ****************************************************************************
// * AlephMatrixSolve
// ****************************************************************************
int AlephMatrixPETSc::
AlephMatrixSolve(AlephVector* x,AlephVector* b,AlephVector* t,
                 Integer& nb_iteration,Real* residual_norm,
                 AlephParams* solver_param)
{
  ARCANE_UNUSED(t);

  PC prec;
  PetscInt its;
  PetscReal norm;
  KSPConvergedReason reason;

  AlephVectorPETSc* x_petsc = dynamic_cast<AlephVectorPETSc*>(x->implementation());
  AlephVectorPETSc* b_petsc = dynamic_cast<AlephVectorPETSc*>(b->implementation());

  ARCANE_CHECK_POINTER(x_petsc);
  ARCANE_CHECK_POINTER(b_petsc);
  const bool is_parallel = m_kernel->subParallelMng(m_index)->isParallel();

  Vec solution = x_petsc->m_petsc_vector;
  Vec RHS = b_petsc->m_petsc_vector;

  debug()<<"[AlephMatrixSolve]";
  if (m_ksp_solver)
    KSPDestroy(&m_ksp_solver);
  KSPCreate(MPI_COMM_SUB,&m_ksp_solver);
#if PETSC_VERSION_GE(3,6,1)
  KSPSetOperators(m_ksp_solver,m_petsc_matrix,m_petsc_matrix); //SAME_PRECONDITIONER
#else
  KSPSetOperators(m_ksp_solver,m_petsc_matrix,m_petsc_matrix,SAME_NONZERO_PATTERN); //SAME_PRECONDITIONER
#endif
  // Builds KSP for a particular solver
  switch(solver_param->method()){
  // Preconditioned conjugate gradient (PCG) iterative method
  case TypesSolver::PCG : KSPSetType(m_ksp_solver,KSPCG); break;
  // BiCGStab (Stabilized version of BiConjugate Gradient Squared) method
  case TypesSolver::BiCGStab : KSPSetType(m_ksp_solver,KSPBCGS); break;
  // IBiCGStab (Improved Stabilized version of BiConjugate Gradient Squared) method
  // in an alternative form to have only a single global reduction operation instead of the usual 3 (or 4)
  case TypesSolver::BiCGStab2: KSPSetType(m_ksp_solver,KSPIBCGS); break;
  // Generalized Minimal Residual method (Saad and Schultz, 1986) with restart
  case TypesSolver::GMRES : KSPSetType(m_ksp_solver,KSPGMRES); break;
  default : throw ArgumentException("AlephMatrixPETSc::AlephMatrixSolve", "Unknown solver method");
  }

  KSPGetPC(m_ksp_solver,&prec);

  switch(solver_param->precond()){
  case TypesSolver::NONE : PCSetType(prec,PCNONE); break;
  // Jacobi (i.e. diagonal scaling preconditioning)
  case TypesSolver::DIAGONAL : PCSetType(prec,PCJACOBI); break;
  // Incomplete factorization preconditioners.
  case TypesSolver::ILU:
    // ILU does not work in parallel with PETSc (unless PETSc is built with Hypre)
    // ILU can be set up in parallel via a sub-preconditioner to Block-Jacobi preconditioner
    if (is_parallel)
      PCSetType(prec,PCBJACOBI);
    else
      PCSetType(prec,PCILU);
    break;
  // Incomplete Cholesky factorization preconditioners.
  case TypesSolver::IC:
    // IC can be set up in parallel via a sub-preconditioner to Block-Jacobi preconditioner
    if (is_parallel) {
      PCSetType(prec,PCBJACOBI); // set PC to Block Jacobi
      PetscInt nlocal, first;
      KSP *subksp; // KSP context for subdomain
      PC subpc; // PC context for subdomain
      KSPSetUp(m_ksp_solver);
      PCBJacobiGetSubKSP(prec, &nlocal, &first, &subksp); // Extract the KSP context array for the local blocks
      for (int i = 0; i < nlocal; i++) {
        KSPGetPC(subksp[i], &subpc);
        PCSetType(subpc, PCICC); // set subdomain PC to IC
      }
    }
    else
      PCSetType(prec,PCICC);
    break;
  // Sparse Approximate Inverse method of Grote and Barnard as a preconditioner (SIAM J. Sci. Comput.; vol 18, nr 3)
  case TypesSolver::SPAIstat : PCSetType(prec,PCSPAI); break;
  // Use multigrid preconditioning.
  // This preconditioner requires you provide additional information
  // about the coarser grid matrices and restriction/interpolation operators.
  case TypesSolver::AMG:
    // By default PCMG uses GMRES on the fine grid smoother so this should be used with KSPFGMRES
    // or the smoother changed to not use GMRES
    // Implements the Flexible Generalized Minimal Residual method, developed by Saad, with restart
    KSPSetType(m_ksp_solver,KSPFGMRES);
    PCSetType(prec,PCMG);
    // Sets the number of levels to use with MG. Must be called before any other MG routine.
    PCMGSetLevels(prec,1,
                  (MPI_Comm*)(m_kernel->subParallelMng(m_index)->getMPICommunicator()));
    // Determines the form of multigrid to use: multiplicative, additive, full, or the Kaskade algorithm.
    PCMGSetType(prec,PC_MG_MULTIPLICATIVE); // PC_MG_MULTIPLICATIVE, PC_MG_ADDITIVE, PC_MG_FULL, PC_MG_KASKADE
    // Sets the type of cycles to use. Use PCMGSetCycleTypeOnLevel() for more complicated cycling.
    PCMGSetCycleType(prec,PC_MG_CYCLE_V); // PC_MG_CYCLE_V or PC_MG_CYCLE_W
    // Sets the number of pre-smoothing steps to use on all levels
#if PETSC_VERSION_GE(3,10,2)
    PCMGSetNumberSmooth(prec, 1);
#else
    PCMGSetNumberSmoothDown(prec, 1);
    PCMGSetNumberSmoothUp(prec, 1);
#endif
    // PCMGSetResidual: Sets the function to be used to calculate the residual on the lth level
    // PCMGSetInterpolation: Sets the function to be used to calculate the interpolation from l-1 to the lth level
    // PCMGSetRestriction: Sets the function to be used to restrict vector from level l to l-1
    // PCMGSetRhs: Sets the vector space to be used to store the right-hand side on a particular level
    // PCMGSetX: Sets the vector space to be used to store the solution on a particular level
    // PCMGSetR: Sets the vector space to be used to store the residual on a particular level
    // PCMGSetRestriction, PCMGSetRhs, PCMGSetX, PCMGSetR, etc.
    // PCMGSetLevels
    break;
  case TypesSolver::AINV : throw ArgumentException("AlephMatrixPETSc", "preconditionnement AINV indisponible");
  case TypesSolver::SPAIdyn : throw ArgumentException("AlephMatrixPETSc", "preconditionnement SPAIdyn indisponible");
  case TypesSolver::ILUp : throw ArgumentException("AlephMatrixPETSc", "preconditionnement ILUp indisponible");
  case TypesSolver::POLY : throw ArgumentException("AlephMatrixPETSc", "preconditionnement POLY indisponible");
  default : throw ArgumentException("AlephMatrixPETSc", "preconditionnement inconnu");
  }

  if (solver_param->xoUser())
    KSPSetInitialGuessNonzero(m_ksp_solver,PETSC_TRUE);

  KSPSetTolerances(m_ksp_solver,
                   solver_param->epsilon(),
                   PETSC_DEFAULT,
                   PETSC_DEFAULT,
                   solver_param->maxIter());

  // override KSP options from command line if present
  KSPSetFromOptions(m_ksp_solver);

  // override PC options from command line if present
  PCSetFromOptions(prec);

  // set up KSP solver and preconditioner
  KSPSetUp(m_ksp_solver);
  PCSetUp(prec);

  // print which KSP solver and PC are used
  KSPType kt;
  KSPGetType(m_ksp_solver,&kt);
  PCType pt;
  PCGetType(prec,&pt);
  info(4) << "[AlephMatrixSolve] Will use KSP type :" << kt << " PC type : " << pt
          << " is_parallel=" << is_parallel;

  // solve the linear system
  debug()<<"[AlephMatrixSolve] All set up, now solving";
  petscCheck(KSPSolve(m_ksp_solver,RHS,solution));
  debug()<<"[AlephMatrixSolve] solved";
  petscCheck(KSPGetConvergedReason(m_ksp_solver,&reason));

  switch(reason){
#if !PETSC_VERSION_(3,0,0)
  case(KSP_CONVERGED_RTOL_NORMAL):{break;}
  case(KSP_CONVERGED_ATOL_NORMAL):{break;}
#endif
  case(KSP_CONVERGED_RTOL):{break;}
  case(KSP_CONVERGED_ATOL):{break;}
  case(KSP_CONVERGED_ITS):{break;}
#if !PETSC_VERSION_GE(3,19,0)
  case(KSP_CONVERGED_CG_NEG_CURVE):{break;}
  case(KSP_CONVERGED_CG_CONSTRAINED):{break;}
#endif
  case(KSP_CONVERGED_STEP_LENGTH):{break;}
  case(KSP_CONVERGED_HAPPY_BREAKDOWN):{break;}
  /* diverged */
  case(KSP_DIVERGED_NULL):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_NULL");}
  case(KSP_DIVERGED_ITS):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_ITS");}
  case(KSP_DIVERGED_DTOL):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_DTOL");}
  case(KSP_DIVERGED_BREAKDOWN):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_BREAKDOWN");}
  case(KSP_DIVERGED_BREAKDOWN_BICG):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_BREAKDOWN_BICG");}
  case(KSP_DIVERGED_NONSYMMETRIC):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_NONSYMMETRIC");}
  case(KSP_DIVERGED_INDEFINITE_PC):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_INDEFINITE_PC");}
#if PETSC_VERSION_GE(3,6,1)
  case(KSP_DIVERGED_NANORINF):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_NANORINF");}
#else
  case(KSP_DIVERGED_NAN):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_NAN");}
#endif
  case(KSP_DIVERGED_INDEFINITE_MAT):{ throw Exception("AlephMatrixPETSc::Solve", "KSP_DIVERGED_INDEFINITE_MAT");}

  case(KSP_CONVERGED_ITERATING):{break;}
  default: throw Exception("AlephMatrixPETSc::Solve", "");
  }

  KSPGetIterationNumber(m_ksp_solver,&its);
  nb_iteration=its;

  KSPGetResidualNorm(m_ksp_solver,&norm);
  *residual_norm=norm;

  return 0;
}
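
// For reference, a minimal standalone sketch of the same KSP workflow,
// stripped of the Aleph plumbing (assumes an already assembled Mat A and
// Vec b, x, and an initialized PETSc; illustration only):
//
//   KSP ksp;
//   PC pc;
//   KSPCreate(PETSC_COMM_WORLD, &ksp);
//   KSPSetOperators(ksp, A, A);
//   KSPSetType(ksp, KSPCG);           // solver: conjugate gradient
//   KSPGetPC(ksp, &pc);
//   PCSetType(pc, PCJACOBI);          // preconditioner: diagonal scaling
//   KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1000);
//   KSPSolve(ksp, b, x);
//   KSPDestroy(&ksp);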


// ****************************************************************************
// * writeToFile
// ****************************************************************************

void AlephMatrixPETSc::
writeToFile(const String filename)
{
  ARCANE_UNUSED(filename);
}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

ARCANE_REGISTER_APPLICATION_FACTORY(PETScAlephFactoryImpl,IAlephFactoryImpl,PETScAlephFactory);

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/

}

/*---------------------------------------------------------------------------*/
/*---------------------------------------------------------------------------*/