/*
This file implements FGMRES, the Flexible Generalized Minimal Residual method.
Reference: Saad, "A flexible inner-outer preconditioned GMRES algorithm", 1993.

Preconditioning: If the preconditioner is constant then this FGMRES
code is equivalent to RIGHT-PRECONDITIONED GMRES.
FGMRES is a modification of GMRES that allows the preconditioner to change
at each iteration.

Restarts: Restarts are basically solves with x0 not equal to zero.
*/
12
13 #include <../src/ksp/ksp/impls/gmres/fgmres/fgmresimpl.h> /*I "petscksp.h" I*/
14 #define FGMRES_DELTA_DIRECTIONS 10
15 #define FGMRES_DEFAULT_MAXK 30
16 static PetscErrorCode KSPFGMRESGetNewVectors(KSP, PetscInt);
17 static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP, PetscInt, PetscBool, PetscReal *);
18 static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *, Vec, Vec, KSP, PetscInt);
19
KSPSetUp_FGMRES(KSP ksp)20 static PetscErrorCode KSPSetUp_FGMRES(KSP ksp)
21 {
22 PetscInt max_k, k;
23 KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
24
25 PetscFunctionBegin;
26 max_k = fgmres->max_k;
27
28 PetscCall(KSPSetUp_GMRES(ksp));
29
30 PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs));
31 PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs_user_work));
32
33 /* fgmres->vv_allocated includes extra work vectors, which are not used in the additional
34 block of vectors used to store the preconditioned directions, hence the -VEC_OFFSET
35 term for this first allocation of vectors holding preconditioned directions */
36 PetscCall(KSPCreateVecs(ksp, fgmres->vv_allocated - VEC_OFFSET, &fgmres->prevecs_user_work[0], 0, NULL));
37 for (k = 0; k < fgmres->vv_allocated - VEC_OFFSET; k++) fgmres->prevecs[k] = fgmres->prevecs_user_work[0][k];
38 PetscFunctionReturn(PETSC_SUCCESS);
39 }
40
KSPFGMRESResidual(KSP ksp)41 static PetscErrorCode KSPFGMRESResidual(KSP ksp)
42 {
43 KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
44 Mat Amat, Pmat;
45
46 PetscFunctionBegin;
47 PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));
48
49 /* put A*x into VEC_TEMP */
50 PetscCall(KSP_MatMult(ksp, Amat, ksp->vec_sol, VEC_TEMP));
51 /* now put residual (-A*x + f) into vec_vv(0) */
52 PetscCall(VecWAXPY(VEC_VV(0), -1.0, VEC_TEMP, ksp->vec_rhs));
53 PetscFunctionReturn(PETSC_SUCCESS);
54 }
55
/*
  KSPFGMRESCycle - Run one FGMRES cycle: generate up to max_k Krylov
  directions (the restart length) or stop earlier on convergence/breakdown.

  Input Parameter:
.  ksp - the solver context; on entry VEC_VV(0) must contain the current
         (unpreconditioned) residual

  Output Parameter:
.  itcount - number of directions generated in this cycle (may be NULL)

  On return ksp->vec_sol holds the updated solution; a restart is simply
  another call to this routine with the updated solution as initial guess.
*/
static PetscErrorCode KSPFGMRESCycle(PetscInt *itcount, KSP ksp)
{
  KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
  PetscReal   res_norm;
  PetscReal   hapbnd, tt;
  PetscBool   hapend = PETSC_FALSE;  /* indicates happy breakdown ending */
  PetscInt    loc_it;                /* local count of # of dir. in Krylov space */
  PetscInt    max_k = fgmres->max_k; /* max # of directions Krylov space */
  Mat         Amat, Pmat;

  PetscFunctionBegin;
  /* Number of pseudo iterations since last restart is the number
     of prestart directions */
  loc_it = 0;

  /* note: (fgmres->it) is always set one less than (loc_it) It is used in
     KSPBUILDSolution_FGMRES, where it is passed to KSPFGMRESBuildSoln.
     Note that when KSPFGMRESBuildSoln is called from this function,
     (loc_it -1) is passed, so the two are equivalent */
  fgmres->it = (loc_it - 1);

  /* initial residual is in VEC_VV(0) - compute its norm*/
  PetscCall(VecNorm(VEC_VV(0), NORM_2, &res_norm));
  KSPCheckNorm(ksp, res_norm); /* trap NaN/Inf produced by a failed PC/operator */

  /* The first entry in the right-hand side of the Hessenberg system is just
     the initial residual norm */
  *RS(0) = res_norm;

  ksp->rnorm = res_norm;
  PetscCall(KSPLogResidualHistory(ksp, res_norm));
  PetscCall(KSPMonitor(ksp, ksp->its, res_norm));

  /* check for the convergence - maybe the current guess is good enough */
  PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));
  if (ksp->reason) {
    if (itcount) *itcount = 0;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scale VEC_VV (the initial residual) so it has unit norm */
  PetscCall(VecScale(VEC_VV(0), 1.0 / res_norm));

  /* MAIN ITERATION LOOP BEGINNING*/
  /* keep iterating until we have converged OR generated the max number
     of directions OR reached the max number of iterations for the method */
  while (!ksp->reason && loc_it < max_k && ksp->its < ksp->max_it) {
    if (loc_it) {
      PetscCall(KSPLogResidualHistory(ksp, res_norm));
      PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
    }
    fgmres->it = (loc_it - 1);

    /* see if more space is needed for work vectors */
    if (fgmres->vv_allocated <= loc_it + VEC_OFFSET + 1) {
      PetscCall(KSPFGMRESGetNewVectors(ksp, loc_it + 1));
      /* (loc_it+1) is passed in as number of the first vector that should
         be allocated */
    }

    /* CHANGE THE PRECONDITIONER? */
    /* ModifyPC is the callback function that can be used to
       change the PC or its attributes before it is applied */
    PetscCall((*fgmres->modifypc)(ksp, ksp->its, loc_it, res_norm, fgmres->modifyctx));

    /* apply PRECONDITIONER to direction vector and store with
       preconditioned vectors in prevec (this per-direction storage is what
       makes FGMRES "flexible": the PC may differ at every iteration) */
    PetscCall(KSP_PCApply(ksp, VEC_VV(loc_it), PREVEC(loc_it)));

    PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));
    /* Multiply preconditioned vector by operator - put in VEC_VV(loc_it+1) */
    PetscCall(KSP_MatMult(ksp, Amat, PREVEC(loc_it), VEC_VV(1 + loc_it)));

    /* update Hessenberg matrix and do Gram-Schmidt - new direction is in
       VEC_VV(1+loc_it)*/
    PetscCall((*fgmres->orthog)(ksp, loc_it));

    /* new entry in Hessenberg matrix is the 2-norm of our new direction */
    PetscCall(VecNorm(VEC_VV(loc_it + 1), NORM_2, &tt));
    KSPCheckNorm(ksp, tt);

    *HH(loc_it + 1, loc_it)  = tt;
    *HES(loc_it + 1, loc_it) = tt;

    /* Happy Breakdown Check */
    hapbnd = PetscAbsScalar((tt) / *RS(loc_it));
    /* RS(loc_it) contains the res_norm from the last iteration */
    hapbnd = PetscMin(fgmres->haptol, hapbnd);
    if (tt > hapbnd) {
      /* scale new direction by its norm */
      PetscCall(VecScale(VEC_VV(loc_it + 1), 1.0 / tt));
    } else {
      /* This happens when the solution is exactly reached. */
      /* So there is no new direction... */
      PetscCall(VecSet(VEC_TEMP, 0.0)); /* set VEC_TEMP to 0 */
      hapend = PETSC_TRUE;
    }
    /* note that for FGMRES we could get HES(loc_it+1, loc_it) = 0 and the
       current solution would not be exact if HES was singular. Note that
       HH non-singular implies that HES is not singular, and HES is guaranteed
       to be nonsingular when PREVECS are linearly independent and A is
       nonsingular (in GMRES, the nonsingularity of A implies the nonsingularity
       of HES). So we should really add a check to verify that HES is nonsingular.*/

    /* Now apply rotations to the new column of Hessenberg (and the right-hand side of the system),
       calculate new rotation, and get new residual norm at the same time*/
    PetscCall(KSPFGMRESUpdateHessenberg(ksp, loc_it, hapend, &res_norm));
    if (ksp->reason) break;

    loc_it++;
    fgmres->it = (loc_it - 1); /* Add this here in case it has converged */

    PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
    ksp->its++;
    ksp->rnorm = res_norm;
    PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));

    PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));

    /* Catch error in happy breakdown and signal convergence and break from loop */
    if (hapend) {
      if (!ksp->reason) {
        PetscCheck(!ksp->errorifnotconverged, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "Reached happy break down, but convergence was not indicated. Residual norm = %g", (double)res_norm);
        ksp->reason = KSP_DIVERGED_BREAKDOWN;
        break;
      }
    }
  }
  /* END OF ITERATION LOOP */

  if (itcount) *itcount = loc_it;

  /*
    Down here we have to solve for the "best" coefficients of the Krylov
    columns, add the solution values together, and possibly unwind the
    preconditioning from the solution
   */

  /* Form the solution (or the solution so far) */
  /* Note: must pass in (loc_it-1) for iteration count so that KSPFGMRESBuildSoln
     properly navigates */

  PetscCall(KSPFGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1));

  /* Monitor if we know that we will not return for a restart */
  if (ksp->reason == KSP_CONVERGED_ITERATING && ksp->its >= ksp->max_it) ksp->reason = KSP_DIVERGED_ITS;
  if (loc_it && ksp->reason) {
    PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
    PetscCall(KSPLogResidualHistory(ksp, res_norm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
208
/*
  KSPSolve_FGMRES - Top-level FGMRES driver: computes the initial residual,
  then runs KSPFGMRESCycle() repeatedly (each repeat after the first is a
  restart) until convergence, divergence, or the iteration limit is hit.
*/
static PetscErrorCode KSPSolve_FGMRES(KSP ksp)
{
  PetscInt    cycle_its = 0; /* iterations done in a call to KSPFGMRESCycle */
  KSP_FGMRES *fgmres    = (KSP_FGMRES *)ksp->data;
  PetscBool   diagonalscale;

  PetscFunctionBegin;
  PetscCall(PCGetDiagonalScale(ksp->pc, &diagonalscale));
  PetscCheck(!diagonalscale, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "Krylov method %s does not support diagonal scaling", ((PetscObject)ksp)->type_name);

  PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
  ksp->its = 0;
  PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));

  /* Compute the initial (NOT preconditioned) residual */
  if (!ksp->guess_zero) {
    PetscCall(KSPFGMRESResidual(ksp));
  } else { /* guess is 0 so residual is F (which is in ksp->vec_rhs) */
    PetscCall(VecCopy(ksp->vec_rhs, VEC_VV(0)));
  }
  /* This may be true only on a subset of MPI ranks; setting it here so it will be detected by the first norm computation in the Krylov method */
  PetscCall(VecFlag(VEC_VV(0), ksp->reason == KSP_DIVERGED_PC_FAILED));

  /* now the residual is in VEC_VV(0) - which is what
     KSPFGMRESCycle expects... */

  PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
  /* restart loop: recompute the true residual, then run another cycle.
     Note the residual is recomputed BEFORE the max_it check so that the
     final solution reflects the last completed cycle. */
  while (!ksp->reason) {
    PetscCall(KSPFGMRESResidual(ksp));
    if (ksp->its >= ksp->max_it) break;
    PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
  }
  /* mark lack of convergence */
  if (ksp->its >= ksp->max_it && !ksp->reason) ksp->reason = KSP_DIVERGED_ITS;
  PetscFunctionReturn(PETSC_SUCCESS);
}
245
246 extern PetscErrorCode KSPReset_FGMRES(KSP);
247
/* Destroy the FGMRES context: release FGMRES-specific work storage, remove
   the composed method registered in KSPCreate_FGMRES(), then delegate the
   remaining cleanup to the GMRES base implementation. */
static PetscErrorCode KSPDestroy_FGMRES(KSP ksp)
{
  PetscFunctionBegin;
  PetscCall(KSPReset_FGMRES(ksp));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFlexibleSetModifyPC_C", NULL));
  PetscCall(KSPDestroy_GMRES(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
256
/*
  KSPFGMRESBuildSoln - Build the current solution from the initial guess and
  the Krylov directions computed so far.

  Input Parameters:
+  nrs    - work array of length at least it+1 for the triangular solve
.  vguess - the initial guess
.  ksp    - the solver context
-  it     - index of the last direction computed (loc_it - 1); it < 0 means
            no FGMRES steps have been taken and vguess is returned unchanged

  Output Parameter:
.  vdest  - the solution (may be the same vector as vguess)
*/
static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *nrs, Vec vguess, Vec vdest, KSP ksp, PetscInt it)
{
  PetscScalar tt;
  PetscInt    ii, k, j;
  KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;

  PetscFunctionBegin;
  /* Solve for solution vector that minimizes the residual */

  /* If it is < 0, no fgmres steps have been performed */
  if (it < 0) {
    PetscCall(VecCopy(vguess, vdest)); /* VecCopy() is smart, exits immediately if vguess == vdest */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* so fgmres steps HAVE been performed */

  /* solve the upper triangular system - RS is the right side and HH is
     the upper triangular matrix - put soln in nrs */
  if (*HH(it, it) != 0.0) {
    nrs[it] = *RS(it) / *HH(it, it);
  } else {
    /* guard against a zero diagonal (singular HH); see the note in
       KSPFGMRESCycle about HES possibly being singular */
    nrs[it] = 0.0;
  }
  /* back substitution, from row it-1 up to row 0 */
  for (ii = 1; ii <= it; ii++) {
    k  = it - ii;
    tt = *RS(k);
    for (j = k + 1; j <= it; j++) tt = tt - *HH(k, j) * nrs[j];
    nrs[k] = tt / *HH(k, k);
  }

  /* Accumulate the correction to the soln of the preconditioned prob. in
     VEC_TEMP - note that we use the preconditioned vectors (PREVEC), which is
     what distinguishes FGMRES from right-preconditioned GMRES */
  PetscCall(VecMAXPBY(VEC_TEMP, it + 1, nrs, 0, &PREVEC(0)));

  /* put updated solution into vdest.*/
  if (vdest != vguess) {
    PetscCall(VecCopy(VEC_TEMP, vdest));
    PetscCall(VecAXPY(vdest, 1.0, vguess));
  } else { /* replace guess with solution */
    PetscCall(VecAXPY(vdest, 1.0, VEC_TEMP));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
301
/*
  KSPFGMRESUpdateHessenberg - Apply the previously computed Givens rotations
  to the newest column of the Hessenberg matrix, compute and apply a new
  rotation to zero its subdiagonal entry, and return the updated estimate of
  the residual norm in *res.

  Input Parameters:
+  ksp    - the solver context
.  it     - index of the column being updated
-  hapend - PETSC_TRUE if a happy breakdown occurred (HH(it+1,it) == 0)

  Output Parameter:
.  res    - the new residual norm estimate (|RS(it+1)|, or 0 on happy breakdown)
*/
static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP ksp, PetscInt it, PetscBool hapend, PetscReal *res)
{
  PetscScalar *hh, *cc, *ss, tt;
  PetscInt     j;
  KSP_FGMRES  *fgmres = (KSP_FGMRES *)ksp->data;

  PetscFunctionBegin;
  hh = HH(0, it); /* pointer to beginning of column to update - so
                     incrementing hh "steps down" the (it+1)th col of HH*/
  cc = CC(0);     /* beginning of cosine rotations */
  ss = SS(0);     /* beginning of sine rotations */

  /* Apply all the previously computed plane rotations to the new column
     of the Hessenberg matrix */
  /* Note: this uses the rotation [conj(c) s ; -s c], c= cos(theta), s= sin(theta),
     and some refs have [c s ; -conj(s) c] (don't be confused!) */

  for (j = 1; j <= it; j++) {
    tt  = *hh;
    *hh = PetscConj(*cc) * tt + *ss * *(hh + 1);
    hh++;
    *hh = *cc++ * *hh - (*ss++ * tt);
    /* hh, cc, and ss have all been incremented one by end of loop */
  }

  /*
    compute the new plane rotation, and apply it to:
     1) the right-hand side of the Hessenberg system (RS)
        note: it affects RS(it) and RS(it+1)
     2) the new column of the Hessenberg matrix
        note: it affects HH(it,it) which is currently pointed to
        by hh and HH(it+1, it) (*(hh+1))
    thus obtaining the updated value of the residual...
  */

  /* compute new plane rotation */

  if (!hapend) {
    /* tt = sqrt(|HH(it,it)|^2 + |HH(it+1,it)|^2), the norm of the 2-vector
       being rotated; zero means the column vanished entirely */
    tt = PetscSqrtScalar(PetscConj(*hh) * *hh + PetscConj(*(hh + 1)) * *(hh + 1));
    if (tt == 0.0) {
      ksp->reason = KSP_DIVERGED_NULL;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    *cc = *hh / tt;       /* new cosine value */
    *ss = *(hh + 1) / tt; /* new sine value */

    /* apply to 1) and 2) */
    *RS(it + 1) = -(*ss * *RS(it));
    *RS(it)     = PetscConj(*cc) * *RS(it);
    *hh         = PetscConj(*cc) * *hh + *ss * *(hh + 1);

    /* residual is the last element (it+1) of right-hand side! */
    *res = PetscAbsScalar(*RS(it + 1));

  } else { /* happy breakdown: HH(it+1, it) = 0, therefore we don't need to apply
              another rotation matrix (so RS doesn't change). The new residual is
              always the new sine term times the residual from last time (RS(it)),
              but now the new sine rotation would be zero...so the residual should
              be zero...so we will multiply "zero" by the last residual. This might
              not be exactly what we want to do here -could just return "zero". */

    *res = 0.0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
368
/*
  KSPFGMRESGetNewVectors - Allocate another chunk of work vectors: both the
  Krylov directions (fgmres->vecs) and the matching preconditioned directions
  (fgmres->prevecs).

  Input Parameters:
+  ksp - the solver context
-  it  - index of the first new direction vector to be allocated
*/
static PetscErrorCode KSPFGMRESGetNewVectors(KSP ksp, PetscInt it)
{
  KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
  PetscInt    nwork  = fgmres->nwork_alloc; /* number of work vector chunks allocated */
  PetscInt    nalloc;                       /* number to allocate */
  PetscInt    k;

  PetscFunctionBegin;
  nalloc = fgmres->delta_allocate; /* number of vectors to allocate
                                      in a single chunk */

  /* Adjust the number to allocate to make sure that we don't exceed the
     number of available slots (fgmres->vecs_allocated)*/
  if (it + VEC_OFFSET + nalloc >= fgmres->vecs_allocated) nalloc = fgmres->vecs_allocated - it - VEC_OFFSET;
  if (!nalloc) PetscFunctionReturn(PETSC_SUCCESS);

  fgmres->vv_allocated += nalloc; /* vv_allocated is the number of vectors allocated */

  /* work vectors (Krylov directions); note these are indexed with a VEC_OFFSET
     shift, unlike prevecs below */
  PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->user_work[nwork], 0, NULL));
  for (k = 0; k < nalloc; k++) fgmres->vecs[it + VEC_OFFSET + k] = fgmres->user_work[nwork][k];
  /* specify size of chunk allocated */
  fgmres->mwork_alloc[nwork] = nalloc;

  /* preconditioned vectors (not offset: there are no extra work slots here) */
  PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->prevecs_user_work[nwork], 0, NULL));
  for (k = 0; k < nalloc; k++) fgmres->prevecs[it + k] = fgmres->prevecs_user_work[nwork][k];

  /* increment the number of work vector chunks */
  fgmres->nwork_alloc++;
  PetscFunctionReturn(PETSC_SUCCESS);
}
401
KSPBuildSolution_FGMRES(KSP ksp,Vec ptr,Vec * result)402 static PetscErrorCode KSPBuildSolution_FGMRES(KSP ksp, Vec ptr, Vec *result)
403 {
404 KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
405
406 PetscFunctionBegin;
407 if (!ptr) {
408 if (!fgmres->sol_temp) PetscCall(VecDuplicate(ksp->vec_sol, &fgmres->sol_temp));
409 ptr = fgmres->sol_temp;
410 }
411 if (!fgmres->nrs) {
412 /* allocate the work area */
413 PetscCall(PetscMalloc1(fgmres->max_k, &fgmres->nrs));
414 }
415
416 PetscCall(KSPFGMRESBuildSoln(fgmres->nrs, ksp->vec_sol, ptr, ksp, fgmres->it));
417 if (result) *result = ptr;
418 PetscFunctionReturn(PETSC_SUCCESS);
419 }
420
KSPSetFromOptions_FGMRES(KSP ksp,PetscOptionItems PetscOptionsObject)421 static PetscErrorCode KSPSetFromOptions_FGMRES(KSP ksp, PetscOptionItems PetscOptionsObject)
422 {
423 PetscBool flg;
424
425 PetscFunctionBegin;
426 PetscCall(KSPSetFromOptions_GMRES(ksp, PetscOptionsObject));
427 PetscOptionsHeadBegin(PetscOptionsObject, "KSP flexible GMRES Options");
428 PetscCall(PetscOptionsBoolGroupBegin("-ksp_fgmres_modifypcnochange", "do not vary the preconditioner", "KSPFlexibleSetModifyPC", &flg));
429 if (flg) PetscCall(KSPFlexibleSetModifyPC(ksp, KSPFlexibleModifyPCNoChange, NULL, NULL));
430 PetscCall(PetscOptionsBoolGroupEnd("-ksp_fgmres_modifypcksp", "vary the KSP based preconditioner", "KSPFlexibleSetModifyPC", &flg));
431 if (flg) PetscCall(KSPFlexibleSetModifyPC(ksp, KSPFlexibleModifyPCKSP, NULL, NULL));
432 PetscOptionsHeadEnd();
433 PetscFunctionReturn(PETSC_SUCCESS);
434 }
435
KSPFlexibleSetModifyPC_FGMRES(KSP ksp,KSPFlexibleModifyPCFn * fcn,PetscCtx ctx,PetscCtxDestroyFn * d)436 static PetscErrorCode KSPFlexibleSetModifyPC_FGMRES(KSP ksp, KSPFlexibleModifyPCFn *fcn, PetscCtx ctx, PetscCtxDestroyFn *d)
437 {
438 PetscFunctionBegin;
439 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 1);
440 ((KSP_FGMRES *)ksp->data)->modifypc = fcn;
441 ((KSP_FGMRES *)ksp->data)->modifydestroy = d;
442 ((KSP_FGMRES *)ksp->data)->modifyctx = ctx;
443 PetscFunctionReturn(PETSC_SUCCESS);
444 }
445
/* Release all FGMRES-specific work storage (the preconditioned-direction
   vectors and their pointer arrays), then reset the GMRES base class. */
PetscErrorCode KSPReset_FGMRES(KSP ksp)
{
  KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscFree(fgmres->prevecs));
  if (fgmres->nwork_alloc > 0) {
    i = 0;
    /* In the first allocation we allocated VEC_OFFSET fewer vectors in prevecs
       (see KSPSetUp_FGMRES), so the first chunk must be destroyed with the
       smaller count; subsequent chunks use mwork_alloc[i] directly */
    PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i] - VEC_OFFSET, &fgmres->prevecs_user_work[i]));
    for (i = 1; i < fgmres->nwork_alloc; i++) PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i], &fgmres->prevecs_user_work[i]));
  }
  PetscCall(PetscFree(fgmres->prevecs_user_work));
  /* let the user callback clean up its context, if a destroy routine was given */
  if (fgmres->modifydestroy) PetscCall((*fgmres->modifydestroy)(&fgmres->modifyctx));
  PetscCall(KSPReset_GMRES(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
464
KSPGMRESSetRestart_FGMRES(KSP ksp,PetscInt max_k)465 static PetscErrorCode KSPGMRESSetRestart_FGMRES(KSP ksp, PetscInt max_k)
466 {
467 KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;
468
469 PetscFunctionBegin;
470 PetscCheck(max_k >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Restart must be positive");
471 if (!ksp->setupstage) {
472 gmres->max_k = max_k;
473 } else if (gmres->max_k != max_k) {
474 gmres->max_k = max_k;
475 ksp->setupstage = KSP_SETUP_NEW;
476 /* free the data structures, then create them again */
477 PetscCall(KSPReset_FGMRES(ksp));
478 }
479 PetscFunctionReturn(PETSC_SUCCESS);
480 }
481
KSPGMRESGetRestart_FGMRES(KSP ksp,PetscInt * max_k)482 static PetscErrorCode KSPGMRESGetRestart_FGMRES(KSP ksp, PetscInt *max_k)
483 {
484 KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;
485
486 PetscFunctionBegin;
487 *max_k = gmres->max_k;
488 PetscFunctionReturn(PETSC_SUCCESS);
489 }
490
491 /*MC
492 KSPFGMRES - Implements the Flexible Generalized Minimal Residual method, flexible GMRES. [](sec_flexibleksp)
493
494 Options Database Keys:
495 + -ksp_gmres_restart <restart> - the number of Krylov directions to orthogonalize against
496 . -ksp_gmres_haptol <tol> - sets the tolerance for "happy ending" (exact convergence)
497 . -ksp_gmres_preallocate - preallocate all the Krylov search directions initially (otherwise groups of vectors are allocated as needed)
498 . -ksp_gmres_classicalgramschmidt - use classical (unmodified) Gram-Schmidt to orthogonalize against the Krylov space (fast) (the default)
499 . -ksp_gmres_modifiedgramschmidt - use modified Gram-Schmidt in the orthogonalization (more stable, but slower)
500 . -ksp_gmres_cgs_refinement_type <refine_never,refine_ifneeded,refine_always> - determine if iterative refinement is used to increase the
501 stability of the classical Gram-Schmidt orthogonalization.
502 . -ksp_gmres_krylov_monitor - plot the Krylov space generated
503 . -ksp_fgmres_modifypcnochange - do not change the preconditioner between iterations
504 - -ksp_fgmres_modifypcksp - modify the preconditioner using `KSPFlexibleModifyPCKSP()`
505
506 Level: beginner
507
508 Notes:
509 See `KSPFlexibleSetModifyPC()` for how to vary the preconditioner between iterations
510
511 GMRES requires that the preconditioner used is a linear operator. Flexible GMRES allows the preconditioner to be a nonlinear operator. This
512 allows, for example, Flexible GMRES to use GMRES solvers or other Krylov solvers (which are nonlinear operators in general) inside the preconditioner
513 used by `KSPFGMRES`. For example, the options `-ksp_type fgmres -pc_type ksp -ksp_ksp_type bcgs -ksp_view -ksp_pc_type jacobi` make the preconditioner
514 (or inner solver) be bi-CG-stab with a preconditioner of `PCJACOBI`. `KSPFCG` provides a flexible version of the preconditioned conjugate gradient method.
515
516 Only right preconditioning is supported.
517
518 The following options `-ksp_type fgmres -pc_type ksp -ksp_ksp_type bcgs -ksp_view -ksp_pc_type jacobi` make the preconditioner (or inner solver)
519 be bi-CG-stab with a preconditioner of `PCJACOBI`
520
521 Developer Note:
522 This object is subclassed off of `KSPGMRES`, see the source code in src/ksp/ksp/impls/gmres for comments on the structure of the code
523
524 Contributed by:
525 Allison Baker
526
527 .seealso: [](ch_ksp), [](sec_flexibleksp), `KSPCreate()`, `KSPSetType()`, `KSPType`, `KSP`, `KSPGMRES`, `KSPLGMRES`, `KSPFCG`,
528 `KSPGMRESSetRestart()`, `KSPGMRESSetHapTol()`, `KSPGMRESSetPreAllocateVectors()`, `KSPGMRESSetOrthogonalization()`, `KSPGMRESGetOrthogonalization()`,
529 `KSPGMRESClassicalGramSchmidtOrthogonalization()`, `KSPGMRESModifiedGramSchmidtOrthogonalization()`,
530 `KSPGMRESCGSRefinementType`, `KSPGMRESSetCGSRefinementType()`, `KSPGMRESGetCGSRefinementType()`, `KSPGMRESMonitorKrylov()`, `KSPFlexibleSetModifyPC()`,
531 `KSPFlexibleModifyPCKSP()`
532 M*/
533
KSPCreate_FGMRES(KSP ksp)534 PETSC_EXTERN PetscErrorCode KSPCreate_FGMRES(KSP ksp)
535 {
536 KSP_FGMRES *fgmres;
537
538 PetscFunctionBegin;
539 PetscCall(PetscNew(&fgmres));
540
541 ksp->data = (void *)fgmres;
542 ksp->ops->buildsolution = KSPBuildSolution_FGMRES;
543 ksp->ops->setup = KSPSetUp_FGMRES;
544 ksp->ops->solve = KSPSolve_FGMRES;
545 ksp->ops->reset = KSPReset_FGMRES;
546 ksp->ops->destroy = KSPDestroy_FGMRES;
547 ksp->ops->view = KSPView_GMRES;
548 ksp->ops->setfromoptions = KSPSetFromOptions_FGMRES;
549 ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_GMRES;
550 ksp->ops->computeeigenvalues = KSPComputeEigenvalues_GMRES;
551
552 PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 3));
553 PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_RIGHT, 1));
554
555 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetPreAllocateVectors_C", KSPGMRESSetPreAllocateVectors_GMRES));
556 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetOrthogonalization_C", KSPGMRESSetOrthogonalization_GMRES));
557 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetOrthogonalization_C", KSPGMRESGetOrthogonalization_GMRES));
558 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetRestart_C", KSPGMRESSetRestart_FGMRES));
559 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetRestart_C", KSPGMRESGetRestart_FGMRES));
560 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFlexibleSetModifyPC_C", KSPFlexibleSetModifyPC_FGMRES));
561 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetCGSRefinementType_C", KSPGMRESSetCGSRefinementType_GMRES));
562 PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetCGSRefinementType_C", KSPGMRESGetCGSRefinementType_GMRES));
563
564 fgmres->haptol = 1.0e-30;
565 fgmres->q_preallocate = PETSC_FALSE;
566 fgmres->delta_allocate = FGMRES_DELTA_DIRECTIONS;
567 fgmres->orthog = KSPGMRESClassicalGramSchmidtOrthogonalization;
568 fgmres->nrs = NULL;
569 fgmres->sol_temp = NULL;
570 fgmres->max_k = FGMRES_DEFAULT_MAXK;
571 fgmres->Rsvd = NULL;
572 fgmres->orthogwork = NULL;
573 fgmres->modifypc = KSPFlexibleModifyPCNoChange;
574 fgmres->modifyctx = NULL;
575 fgmres->modifydestroy = NULL;
576 fgmres->cgstype = KSP_GMRES_CGS_REFINE_NEVER;
577 PetscFunctionReturn(PETSC_SUCCESS);
578 }
579