Actual source code: fgmres.c

  1: /*
  2:     This file implements FGMRES (the Flexible Generalized Minimal Residual method).
  3:     Reference:  Saad, 1993.

  5:     Preconditioning:  If the preconditioner is constant then this FGMRES
  6:     code is equivalent to RIGHT-PRECONDITIONED GMRES.
  7:     FGMRES is a modification of GMRES that allows the preconditioner to change
  8:     at each iteration.

 10:     Restarts:  Restarts are simply solves with a nonzero initial guess x0.
 11: */
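
     /* A brief sketch of the flexible update (using the notation of the Saad reference above;
        x_m, Z_m, y_m, v_j and M_j are notational shorthand, not identifiers in this file):

            x_m = x_0 + Z_m y_m,    Z_m = [ M_1^{-1} v_1, ..., M_m^{-1} v_m ]

        Here the v_j are the Arnoldi basis vectors (the VEC_VV work vectors below), M_j^{-1} is
        whichever preconditioner was in effect at step j, and y_m solves the small Hessenberg
        least-squares problem.  Because M_j may change at every step, the preconditioned
        directions Z_m (the PREVEC vectors below) must all be stored, unlike in ordinary
        right-preconditioned GMRES where only the v_j are kept. */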

 13: #include <../src/ksp/ksp/impls/gmres/fgmres/fgmresimpl.h>
 14: #define FGMRES_DELTA_DIRECTIONS 10
 15: #define FGMRES_DEFAULT_MAXK     30
 16: static PetscErrorCode KSPFGMRESGetNewVectors(KSP, PetscInt);
 17: static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP, PetscInt, PetscBool, PetscReal *);
 18: static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *, Vec, Vec, KSP, PetscInt);

 20: static PetscErrorCode KSPSetUp_FGMRES(KSP ksp)
 21: {
 22:   PetscInt    max_k, k;
 23:   KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;

 25:   PetscFunctionBegin;
 26:   max_k = fgmres->max_k;

 28:   PetscCall(KSPSetUp_GMRES(ksp));

 30:   PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs));
 31:   PetscCall(PetscMalloc1(max_k + 2, &fgmres->prevecs_user_work));

 33:   /* fgmres->vv_allocated also counts the extra work vectors, which are not part of the
 34:      additional block of vectors used to store the preconditioned directions, hence the -VEC_OFFSET
 35:      term in this first allocation of vectors holding preconditioned directions */
 36:   PetscCall(KSPCreateVecs(ksp, fgmres->vv_allocated - VEC_OFFSET, &fgmres->prevecs_user_work[0], 0, NULL));
 37:   for (k = 0; k < fgmres->vv_allocated - VEC_OFFSET; k++) fgmres->prevecs[k] = fgmres->prevecs_user_work[0][k];
 38:   PetscFunctionReturn(PETSC_SUCCESS);
 39: }

 41: static PetscErrorCode KSPFGMRESResidual(KSP ksp)
 42: {
 43:   KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
 44:   Mat         Amat, Pmat;

 46:   PetscFunctionBegin;
 47:   PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));

 49:   /* put A*x into VEC_TEMP */
 50:   PetscCall(KSP_MatMult(ksp, Amat, ksp->vec_sol, VEC_TEMP));
 51:   /* now put residual (-A*x + f) into vec_vv(0) */
 52:   PetscCall(VecWAXPY(VEC_VV(0), -1.0, VEC_TEMP, ksp->vec_rhs));
 53:   PetscFunctionReturn(PETSC_SUCCESS);
 54: }

 56: static PetscErrorCode KSPFGMRESCycle(PetscInt *itcount, KSP ksp)
 57: {
 58:   KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);
 59:   PetscReal   res_norm;
 60:   PetscReal   hapbnd, tt;
 61:   PetscBool   hapend = PETSC_FALSE;  /* indicates happy breakdown ending */
 62:   PetscInt    loc_it;                /* local count of # of dir. in Krylov space */
 63:   PetscInt    max_k = fgmres->max_k; /* max # of directions in Krylov space */
 64:   Mat         Amat, Pmat;

 66:   PetscFunctionBegin;
 67:   /* loc_it counts the number of directions (pseudo iterations) generated
 68:      since the last restart */
 69:   loc_it = 0;

 71:   /* note: (fgmres->it) is always set one less than (loc_it).  It is used in
 72:      KSPBuildSolution_FGMRES, where it is passed to KSPFGMRESBuildSoln.
 73:      Note that when KSPFGMRESBuildSoln is called from this function,
 74:      (loc_it -1) is passed, so the two are equivalent */
 75:   fgmres->it = (loc_it - 1);

 77:   /* initial residual is in VEC_VV(0) - compute its norm */
 78:   PetscCall(VecNorm(VEC_VV(0), NORM_2, &res_norm));
 79:   KSPCheckNorm(ksp, res_norm);

 81:   /* first entry in the right-hand side of the Hessenberg system is just
 82:      the initial residual norm */
 83:   *RS(0) = res_norm;

 85:   ksp->rnorm = res_norm;
 86:   PetscCall(KSPLogResidualHistory(ksp, res_norm));
 87:   PetscCall(KSPMonitor(ksp, ksp->its, res_norm));

 89:   /* check for the convergence - maybe the current guess is good enough */
 90:   PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));
 91:   if (ksp->reason) {
 92:     if (itcount) *itcount = 0;
 93:     PetscFunctionReturn(PETSC_SUCCESS);
 94:   }

 96:   /* scale VEC_VV(0) (the initial residual) */
 97:   PetscCall(VecScale(VEC_VV(0), 1.0 / res_norm));

 99:   /* MAIN ITERATION LOOP BEGINNING */
100:   /* keep iterating until we have converged OR generated the max number
101:      of directions OR reached the max number of iterations for the method */
102:   while (!ksp->reason && loc_it < max_k && ksp->its < ksp->max_it) {
103:     if (loc_it) {
104:       PetscCall(KSPLogResidualHistory(ksp, res_norm));
105:       PetscCall(KSPMonitor(ksp, ksp->its, res_norm));
106:     }
107:     fgmres->it = (loc_it - 1);

109:     /* see if more space is needed for work vectors */
110:     if (fgmres->vv_allocated <= loc_it + VEC_OFFSET + 1) {
111:       PetscCall(KSPFGMRESGetNewVectors(ksp, loc_it + 1));
112:       /* (loc_it+1) is passed in as the index of the first vector that should
113:          be allocated */
114:     }

116:     /* CHANGE THE PRECONDITIONER? */
117:     /* ModifyPC is the callback function that can be used to
118:        change the PC or its attributes before it is applied */
119:     PetscCall((*fgmres->modifypc)(ksp, ksp->its, loc_it, res_norm, fgmres->modifyctx));

121:     /* apply PRECONDITIONER to direction vector and store with
122:        preconditioned vectors in prevec */
123:     PetscCall(KSP_PCApply(ksp, VEC_VV(loc_it), PREVEC(loc_it)));

125:     PetscCall(PCGetOperators(ksp->pc, &Amat, &Pmat));
126:     /* Multiply preconditioned vector by operator - put in VEC_VV(loc_it+1) */
127:     PetscCall(KSP_MatMult(ksp, Amat, PREVEC(loc_it), VEC_VV(1 + loc_it)));

129:     /* update the Hessenberg matrix and do Gram-Schmidt - the new direction is in
130:        VEC_VV(1+loc_it) */
131:     PetscCall((*fgmres->orthog)(ksp, loc_it));

133:     /* new entry in the Hessenberg matrix is the 2-norm of our new direction */
134:     PetscCall(VecNorm(VEC_VV(loc_it + 1), NORM_2, &tt));
135:     KSPCheckNorm(ksp, tt);

137:     *HH(loc_it + 1, loc_it)  = tt;
138:     *HES(loc_it + 1, loc_it) = tt;

140:     /* Happy Breakdown Check */
141:     hapbnd = PetscAbsScalar((tt) / *RS(loc_it));
142:     /* RS(loc_it) contains the res_norm from the last iteration  */
143:     hapbnd = PetscMin(fgmres->haptol, hapbnd);
144:     if (tt > hapbnd) {
145:       /* scale new direction by its norm */
146:       PetscCall(VecScale(VEC_VV(loc_it + 1), 1.0 / tt));
147:     } else {
148:       /* This happens when the solution is exactly reached. */
149:       /* So there is no new direction... */
150:       PetscCall(VecSet(VEC_TEMP, 0.0)); /* set VEC_TEMP to 0 */
151:       hapend = PETSC_TRUE;
152:     }
153:     /* note that for FGMRES we could get HES(loc_it+1, loc_it) = 0 and the
154:        current solution would not be exact if HES were singular.  Note that
155:        HH being nonsingular implies that HES is nonsingular, and HES is guaranteed
156:        to be nonsingular when the PREVECS are linearly independent and A is
157:        nonsingular (in GMRES, the nonsingularity of A implies the nonsingularity
158:        of HES). So we should really add a check to verify that HES is nonsingular. */

160:     /* Now apply rotations to the new column of the Hessenberg matrix (and the right side of the system),
161:        calculate the new rotation, and get the new residual norm at the same time */
162:     PetscCall(KSPFGMRESUpdateHessenberg(ksp, loc_it, hapend, &res_norm));
163:     if (ksp->reason) break;

165:     loc_it++;
166:     fgmres->it = (loc_it - 1); /* Add this here in case it has converged */

168:     PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
169:     ksp->its++;
170:     ksp->rnorm = res_norm;
171:     PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));

173:     PetscCall((*ksp->converged)(ksp, ksp->its, res_norm, &ksp->reason, ksp->cnvP));

175:     /* Catch error in happy breakdown and signal convergence and break from loop */
176:     if (hapend) {
177:       if (!ksp->reason) {
178:         PetscCheck(!ksp->errorifnotconverged, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "Reached happy break down, but convergence was not indicated. Residual norm = %g", (double)res_norm);
179:         ksp->reason = KSP_DIVERGED_BREAKDOWN;
180:         break;
181:       }
182:     }
183:   }
184:   /* END OF ITERATION LOOP */
185:   PetscCall(KSPLogResidualHistory(ksp, res_norm));

187:   /*
188:      Monitor if we know that we will not return for a restart */
189:   if (loc_it && (ksp->reason || ksp->its >= ksp->max_it)) PetscCall(KSPMonitor(ksp, ksp->its, res_norm));

191:   if (itcount) *itcount = loc_it;

193:   /*
194:     Down here we have to solve for the "best" coefficients of the Krylov
195:     columns, add the solution values together, and possibly unwind the
196:     preconditioning from the solution
197:    */

199:   /* Form the solution (or the solution so far) */
200:   /* Note: must pass in (loc_it-1) for the iteration count so that KSPFGMRESBuildSoln
201:      indexes the Hessenberg system correctly */

203:   PetscCall(KSPFGMRESBuildSoln(RS(0), ksp->vec_sol, ksp->vec_sol, ksp, loc_it - 1));
204:   PetscFunctionReturn(PETSC_SUCCESS);
205: }

207: static PetscErrorCode KSPSolve_FGMRES(KSP ksp)
208: {
209:   PetscInt    cycle_its = 0; /* iterations done in a call to KSPFGMRESCycle */
210:   KSP_FGMRES *fgmres    = (KSP_FGMRES *)ksp->data;
211:   PetscBool   diagonalscale;

213:   PetscFunctionBegin;
214:   PetscCall(PCGetDiagonalScale(ksp->pc, &diagonalscale));
215:   PetscCheck(!diagonalscale, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "Krylov method %s does not support diagonal scaling", ((PetscObject)ksp)->type_name);

217:   PetscCall(PetscObjectSAWsTakeAccess((PetscObject)ksp));
218:   ksp->its = 0;
219:   PetscCall(PetscObjectSAWsGrantAccess((PetscObject)ksp));

221:   /* Compute the initial (NOT preconditioned) residual */
222:   if (!ksp->guess_zero) {
223:     PetscCall(KSPFGMRESResidual(ksp));
224:   } else { /* guess is 0 so residual is F (which is in ksp->vec_rhs) */
225:     PetscCall(VecCopy(ksp->vec_rhs, VEC_VV(0)));
226:   }
227:   /* This may be true only on a subset of MPI ranks; setting it here so it will be detected by the first norm computation in the Krylov method */
228:   if (ksp->reason == KSP_DIVERGED_PC_FAILED) PetscCall(VecSetInf(VEC_VV(0)));

230:   /* now the residual is in VEC_VV(0) - which is what
231:      KSPFGMRESCycle expects... */

233:   PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
234:   while (!ksp->reason) {
235:     PetscCall(KSPFGMRESResidual(ksp));
236:     if (ksp->its >= ksp->max_it) break;
237:     PetscCall(KSPFGMRESCycle(&cycle_its, ksp));
238:   }
239:   /* mark lack of convergence */
240:   if (ksp->its >= ksp->max_it && !ksp->reason) ksp->reason = KSP_DIVERGED_ITS;
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }

244: extern PetscErrorCode KSPReset_FGMRES(KSP);

246: static PetscErrorCode KSPDestroy_FGMRES(KSP ksp)
247: {
248:   PetscFunctionBegin;
249:   PetscCall(KSPReset_FGMRES(ksp));
250:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFGMRESSetModifyPC_C", NULL));
251:   PetscCall(KSPDestroy_GMRES(ksp));
252:   PetscFunctionReturn(PETSC_SUCCESS);
253: }

255: static PetscErrorCode KSPFGMRESBuildSoln(PetscScalar *nrs, Vec vguess, Vec vdest, KSP ksp, PetscInt it)
256: {
257:   PetscScalar tt;
258:   PetscInt    ii, k, j;
259:   KSP_FGMRES *fgmres = (KSP_FGMRES *)(ksp->data);

261:   PetscFunctionBegin;
262:   /* Solve for solution vector that minimizes the residual */

264:   /* If it is < 0, no fgmres steps have been performed */
265:   if (it < 0) {
266:     PetscCall(VecCopy(vguess, vdest)); /* VecCopy() is smart, exits immediately if vguess == vdest */
267:     PetscFunctionReturn(PETSC_SUCCESS);
268:   }

270:   /* so fgmres steps HAVE been performed */

272:   /* solve the upper triangular system - RS is the right side and HH is
273:      the upper triangular matrix  - put soln in nrs */
274:   if (*HH(it, it) != 0.0) {
275:     nrs[it] = *RS(it) / *HH(it, it);
276:   } else {
277:     nrs[it] = 0.0;
278:   }
279:   for (ii = 1; ii <= it; ii++) {
280:     k  = it - ii;
281:     tt = *RS(k);
282:     for (j = k + 1; j <= it; j++) tt = tt - *HH(k, j) * nrs[j];
283:     nrs[k] = tt / *HH(k, k);
284:   }

286:   /* Accumulate the correction to the soln of the preconditioned prob. in
287:      VEC_TEMP - note that we use the preconditioned vectors  */
288:   PetscCall(VecMAXPBY(VEC_TEMP, it + 1, nrs, 0, &PREVEC(0)));

290:   /* put updated solution into vdest.*/
291:   if (vdest != vguess) {
292:     PetscCall(VecCopy(VEC_TEMP, vdest));
293:     PetscCall(VecAXPY(vdest, 1.0, vguess));
294:   } else { /* replace guess with solution */
295:     PetscCall(VecAXPY(vdest, 1.0, VEC_TEMP));
296:   }
297:   PetscFunctionReturn(PETSC_SUCCESS);
298: }

300: static PetscErrorCode KSPFGMRESUpdateHessenberg(KSP ksp, PetscInt it, PetscBool hapend, PetscReal *res)
301: {
302:   PetscScalar *hh, *cc, *ss, tt;
303:   PetscInt     j;
304:   KSP_FGMRES  *fgmres = (KSP_FGMRES *)(ksp->data);

306:   PetscFunctionBegin;
307:   hh = HH(0, it); /* pointer to beginning of column to update - so
308:                       incrementing hh "steps down" the (it+1)th col of HH*/
309:   cc = CC(0);     /* beginning of cosine rotations */
310:   ss = SS(0);     /* beginning of sine rotations */

312:   /* Apply all the previously computed plane rotations to the new column
313:      of the Hessenberg matrix */
314:   /* Note: this uses the rotation [conj(c)  s ; -s   c], c= cos(theta), s= sin(theta),
315:      and some refs have [c   s ; -conj(s)  c] (don't be confused!) */

317:   for (j = 1; j <= it; j++) {
318:     tt  = *hh;
319:     *hh = PetscConj(*cc) * tt + *ss * *(hh + 1);
320:     hh++;
321:     *hh = *cc++ * *hh - (*ss++ * tt);
322:     /* hh, cc, and ss have all been incremented by one by the end of the loop */
323:   }

325:   /*
326:     compute the new plane rotation, and apply it to:
327:      1) the right-hand-side of the Hessenberg system (RS)
328:         note: it affects RS(it) and RS(it+1)
329:      2) the new column of the Hessenberg matrix
330:         note: it affects HH(it,it) which is currently pointed to
331:         by hh and HH(it+1, it) (*(hh+1))
332:     thus obtaining the updated value of the residual...
333:   */

335:   /* compute new plane rotation */

337:   if (!hapend) {
338:     tt = PetscSqrtScalar(PetscConj(*hh) * *hh + PetscConj(*(hh + 1)) * *(hh + 1));
339:     if (tt == 0.0) {
340:       ksp->reason = KSP_DIVERGED_NULL;
341:       PetscFunctionReturn(PETSC_SUCCESS);
342:     }

344:     *cc = *hh / tt;       /* new cosine value */
345:     *ss = *(hh + 1) / tt; /* new sine value */

347:     /* apply to 1) and 2) */
348:     *RS(it + 1) = -(*ss * *RS(it));
349:     *RS(it)     = PetscConj(*cc) * *RS(it);
350:     *hh         = PetscConj(*cc) * *hh + *ss * *(hh + 1);

352:     /* residual is the last element (it+1) of right-hand side! */
353:     *res = PetscAbsScalar(*RS(it + 1));

355:   } else { /* happy breakdown: HH(it+1, it) = 0, therefore we don't need to apply
356:             another rotation matrix (so RS doesn't change).  The new residual is
357:             always the new sine term times the residual from last time (RS(it)),
358:             but now the new sine rotation would be zero...so the residual should
359:             be zero...so we will multiply "zero" by the last residual.  This might
360:             not be exactly what we want to do here - we could just return "zero". */

362:     *res = 0.0;
363:   }
364:   PetscFunctionReturn(PETSC_SUCCESS);
365: }

367: static PetscErrorCode KSPFGMRESGetNewVectors(KSP ksp, PetscInt it)
368: {
369:   KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
370:   PetscInt    nwork  = fgmres->nwork_alloc; /* number of work vector chunks allocated */
371:   PetscInt    nalloc;                       /* number to allocate */
372:   PetscInt    k;

374:   PetscFunctionBegin;
375:   nalloc = fgmres->delta_allocate; /* number of vectors to allocate
376:                                       in a single chunk */

378:   /* Adjust the number to allocate to make sure that we don't exceed the
379:      number of available slots (fgmres->vecs_allocated)*/
380:   if (it + VEC_OFFSET + nalloc >= fgmres->vecs_allocated) nalloc = fgmres->vecs_allocated - it - VEC_OFFSET;
381:   if (!nalloc) PetscFunctionReturn(PETSC_SUCCESS);

383:   fgmres->vv_allocated += nalloc; /* vv_allocated is the number of vectors allocated */

385:   /* work vectors */
386:   PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->user_work[nwork], 0, NULL));
387:   for (k = 0; k < nalloc; k++) fgmres->vecs[it + VEC_OFFSET + k] = fgmres->user_work[nwork][k];
388:   /* specify size of chunk allocated */
389:   fgmres->mwork_alloc[nwork] = nalloc;

391:   /* preconditioned vectors */
392:   PetscCall(KSPCreateVecs(ksp, nalloc, &fgmres->prevecs_user_work[nwork], 0, NULL));
393:   for (k = 0; k < nalloc; k++) fgmres->prevecs[it + k] = fgmres->prevecs_user_work[nwork][k];

395:   /* increment the number of work vector chunks */
396:   fgmres->nwork_alloc++;
397:   PetscFunctionReturn(PETSC_SUCCESS);
398: }

400: static PetscErrorCode KSPBuildSolution_FGMRES(KSP ksp, Vec ptr, Vec *result)
401: {
402:   KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;

404:   PetscFunctionBegin;
405:   if (!ptr) {
406:     if (!fgmres->sol_temp) PetscCall(VecDuplicate(ksp->vec_sol, &fgmres->sol_temp));
407:     ptr = fgmres->sol_temp;
408:   }
409:   if (!fgmres->nrs) {
410:     /* allocate the work area */
411:     PetscCall(PetscMalloc1(fgmres->max_k, &fgmres->nrs));
412:   }

414:   PetscCall(KSPFGMRESBuildSoln(fgmres->nrs, ksp->vec_sol, ptr, ksp, fgmres->it));
415:   if (result) *result = ptr;
416:   PetscFunctionReturn(PETSC_SUCCESS);
417: }

419: static PetscErrorCode KSPSetFromOptions_FGMRES(KSP ksp, PetscOptionItems *PetscOptionsObject)
420: {
421:   PetscBool flg;

423:   PetscFunctionBegin;
424:   PetscCall(KSPSetFromOptions_GMRES(ksp, PetscOptionsObject));
425:   PetscOptionsHeadBegin(PetscOptionsObject, "KSP flexible GMRES Options");
426:   PetscCall(PetscOptionsBoolGroupBegin("-ksp_fgmres_modifypcnochange", "do not vary the preconditioner", "KSPFGMRESSetModifyPC", &flg));
427:   if (flg) PetscCall(KSPFGMRESSetModifyPC(ksp, KSPFGMRESModifyPCNoChange, NULL, NULL));
428:   PetscCall(PetscOptionsBoolGroupEnd("-ksp_fgmres_modifypcksp", "vary the KSP based preconditioner", "KSPFGMRESSetModifyPC", &flg));
429:   if (flg) PetscCall(KSPFGMRESSetModifyPC(ksp, KSPFGMRESModifyPCKSP, NULL, NULL));
430:   PetscOptionsHeadEnd();
431:   PetscFunctionReturn(PETSC_SUCCESS);
432: }

434: typedef PetscErrorCode (*FCN1)(KSP, PetscInt, PetscInt, PetscReal, void *); /* force argument to next function to not be extern C*/
435: typedef PetscErrorCode (*FCN2)(void *);

437: static PetscErrorCode KSPFGMRESSetModifyPC_FGMRES(KSP ksp, FCN1 fcn, void *ctx, FCN2 d)
438: {
439:   PetscFunctionBegin;
441:   ((KSP_FGMRES *)ksp->data)->modifypc      = fcn;
442:   ((KSP_FGMRES *)ksp->data)->modifydestroy = d;
443:   ((KSP_FGMRES *)ksp->data)->modifyctx     = ctx;
444:   PetscFunctionReturn(PETSC_SUCCESS);
445: }

447: PetscErrorCode KSPReset_FGMRES(KSP ksp)
448: {
449:   KSP_FGMRES *fgmres = (KSP_FGMRES *)ksp->data;
450:   PetscInt    i;

452:   PetscFunctionBegin;
453:   PetscCall(PetscFree(fgmres->prevecs));
454:   if (fgmres->nwork_alloc > 0) {
455:     i = 0;
456:     /* In the first allocation we allocated VEC_OFFSET fewer vectors in prevecs */
457:     PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i] - VEC_OFFSET, &fgmres->prevecs_user_work[i]));
458:     for (i = 1; i < fgmres->nwork_alloc; i++) PetscCall(VecDestroyVecs(fgmres->mwork_alloc[i], &fgmres->prevecs_user_work[i]));
459:   }
460:   PetscCall(PetscFree(fgmres->prevecs_user_work));
461:   if (fgmres->modifydestroy) PetscCall((*fgmres->modifydestroy)(fgmres->modifyctx));
462:   PetscCall(KSPReset_GMRES(ksp));
463:   PetscFunctionReturn(PETSC_SUCCESS);
464: }

466: static PetscErrorCode KSPGMRESSetRestart_FGMRES(KSP ksp, PetscInt max_k)
467: {
468:   KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;

470:   PetscFunctionBegin;
471:   PetscCheck(max_k >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Restart must be positive");
472:   if (!ksp->setupstage) {
473:     gmres->max_k = max_k;
474:   } else if (gmres->max_k != max_k) {
475:     gmres->max_k    = max_k;
476:     ksp->setupstage = KSP_SETUP_NEW;
477:     /* free the data structures, then create them again */
478:     PetscCall(KSPReset_FGMRES(ksp));
479:   }
480:   PetscFunctionReturn(PETSC_SUCCESS);
481: }

483: static PetscErrorCode KSPGMRESGetRestart_FGMRES(KSP ksp, PetscInt *max_k)
484: {
485:   KSP_FGMRES *gmres = (KSP_FGMRES *)ksp->data;

487:   PetscFunctionBegin;
488:   *max_k = gmres->max_k;
489:   PetscFunctionReturn(PETSC_SUCCESS);
490: }

492: /*MC
493:    KSPFGMRES - Implements the Flexible Generalized Minimal Residual method. [](sec_flexibleksp)

495:    Options Database Keys:
496: +   -ksp_gmres_restart <restart> - the number of Krylov directions to orthogonalize against
497: .   -ksp_gmres_haptol <tol> - sets the tolerance for "happy ending" (exact convergence)
498: .   -ksp_gmres_preallocate - preallocate all the Krylov search directions initially (otherwise groups of vectors are allocated as needed)
499: .   -ksp_gmres_classicalgramschmidt - use classical (unmodified) Gram-Schmidt to orthogonalize against the Krylov space (fast) (the default)
500: .   -ksp_gmres_modifiedgramschmidt - use modified Gram-Schmidt in the orthogonalization (more stable, but slower)
501: .   -ksp_gmres_cgs_refinement_type <refine_never,refine_ifneeded,refine_always> - determine if iterative refinement is used to increase the
502:                                    stability of the classical Gram-Schmidt orthogonalization.
503: .   -ksp_gmres_krylov_monitor - plot the Krylov space generated
504: .   -ksp_fgmres_modifypcnochange - do not change the preconditioner between iterations
505: -   -ksp_fgmres_modifypcksp - modify the preconditioner using `KSPFGMRESModifyPCKSP()`

507:    Level: beginner

509:    Notes:
510:    See `KSPFGMRESSetModifyPC()` for how to vary the preconditioner between iterations
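
        A callback for varying the preconditioner is registered with `KSPFGMRESSetModifyPC()`.  As a rough
        sketch (the name MyModifyPC is illustrative only, and it assumes the preconditioner is a `PCKSP`
        whose inner tolerance is tightened as the outer residual norm drops), such a callback could look like
     .vb
        PetscErrorCode MyModifyPC(KSP ksp, PetscInt total_its, PetscInt loc_its, PetscReal res_norm, void *ctx)
        {
          KSP subksp;
          PC  pc;

          PetscFunctionBegin;
          PetscCall(KSPGetPC(ksp, &pc));       /* the outer preconditioner */
          PetscCall(PCKSPGetKSP(pc, &subksp)); /* the KSP inside the PCKSP */
          /* make the inner solve more accurate as the outer residual norm decreases */
          PetscCall(KSPSetTolerances(subksp, PetscMin(0.1, res_norm), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
          PetscFunctionReturn(PETSC_SUCCESS);
        }
     .ve
        and is attached with `KSPFGMRESSetModifyPC(ksp, MyModifyPC, NULL, NULL)`.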

512:    Only right preconditioning is supported.

514:    The following options `-ksp_type fgmres -pc_type ksp -ksp_ksp_type bcgs -ksp_view -ksp_pc_type jacobi` make the preconditioner (or inner solver)
515:    be Bi-CG-Stab with a preconditioner of `PCJACOBI`.
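
        A minimal programmatic setup (a sketch; `A`, `b`, and `x` here stand for an already assembled
        matrix and vectors) that selects this method with a restart length of 30 could look like
     .vb
        KSPCreate(PETSC_COMM_WORLD, &ksp);
        KSPSetOperators(ksp, A, A);
        KSPSetType(ksp, KSPFGMRES);
        KSPGMRESSetRestart(ksp, 30);
        KSPSetFromOptions(ksp);
        KSPSolve(ksp, b, x);
     .ve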

517:    Developer Note:
518:    This object is subclassed off of `KSPGMRES`, see the source code in src/ksp/ksp/impls/gmres for comments on the structure of the code

520:    Contributed by:
521:    Allison Baker

523: .seealso: [](ch_ksp), [](sec_flexibleksp), `KSPCreate()`, `KSPSetType()`, `KSPType`, `KSP`, `KSPGMRES`, `KSPLGMRES`,
524:           `KSPGMRESSetRestart()`, `KSPGMRESSetHapTol()`, `KSPGMRESSetPreAllocateVectors()`, `KSPGMRESSetOrthogonalization()`, `KSPGMRESGetOrthogonalization()`,
525:           `KSPGMRESClassicalGramSchmidtOrthogonalization()`, `KSPGMRESModifiedGramSchmidtOrthogonalization()`,
526:           `KSPGMRESCGSRefinementType`, `KSPGMRESSetCGSRefinementType()`, `KSPGMRESGetCGSRefinementType()`, `KSPGMRESMonitorKrylov()`, `KSPFGMRESSetModifyPC()`,
527:           `KSPFGMRESModifyPCKSP()`
528: M*/

530: PETSC_EXTERN PetscErrorCode KSPCreate_FGMRES(KSP ksp)
531: {
532:   KSP_FGMRES *fgmres;

534:   PetscFunctionBegin;
535:   PetscCall(PetscNew(&fgmres));

537:   ksp->data                              = (void *)fgmres;
538:   ksp->ops->buildsolution                = KSPBuildSolution_FGMRES;
539:   ksp->ops->setup                        = KSPSetUp_FGMRES;
540:   ksp->ops->solve                        = KSPSolve_FGMRES;
541:   ksp->ops->reset                        = KSPReset_FGMRES;
542:   ksp->ops->destroy                      = KSPDestroy_FGMRES;
543:   ksp->ops->view                         = KSPView_GMRES;
544:   ksp->ops->setfromoptions               = KSPSetFromOptions_FGMRES;
545:   ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_GMRES;
546:   ksp->ops->computeeigenvalues           = KSPComputeEigenvalues_GMRES;

548:   PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 3));
549:   PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_RIGHT, 1));

551:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetPreAllocateVectors_C", KSPGMRESSetPreAllocateVectors_GMRES));
552:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetOrthogonalization_C", KSPGMRESSetOrthogonalization_GMRES));
553:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetOrthogonalization_C", KSPGMRESGetOrthogonalization_GMRES));
554:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetRestart_C", KSPGMRESSetRestart_FGMRES));
555:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetRestart_C", KSPGMRESGetRestart_FGMRES));
556:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFGMRESSetModifyPC_C", KSPFGMRESSetModifyPC_FGMRES));
557:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESSetCGSRefinementType_C", KSPGMRESSetCGSRefinementType_GMRES));
558:   PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPGMRESGetCGSRefinementType_C", KSPGMRESGetCGSRefinementType_GMRES));

560:   fgmres->haptol         = 1.0e-30;
561:   fgmres->q_preallocate  = 0;
562:   fgmres->delta_allocate = FGMRES_DELTA_DIRECTIONS;
563:   fgmres->orthog         = KSPGMRESClassicalGramSchmidtOrthogonalization;
564:   fgmres->nrs            = NULL;
565:   fgmres->sol_temp       = NULL;
566:   fgmres->max_k          = FGMRES_DEFAULT_MAXK;
567:   fgmres->Rsvd           = NULL;
568:   fgmres->orthogwork     = NULL;
569:   fgmres->modifypc       = KSPFGMRESModifyPCNoChange;
570:   fgmres->modifyctx      = NULL;
571:   fgmres->modifydestroy  = NULL;
572:   fgmres->cgstype        = KSP_GMRES_CGS_REFINE_NEVER;
573:   PetscFunctionReturn(PETSC_SUCCESS);
574: }