xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision bcee047adeeb73090d7e36cc71e39fc287cdbb97)
1 
2 /*
3   This file defines a "solve the problem redundantly on each subgroup of processor" preconditioner.
4 */
5 #include <petsc/private/pcimpl.h>
6 #include <petscksp.h> /*I "petscksp.h" I*/
7 
typedef struct {
  KSP                ksp;                    /* inner solver run redundantly by each subcommunicator */
  PC                 pc;                     /* actual preconditioner used on each processor */
  Vec                xsub, ysub;             /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup, ydup;             /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat                pmats;                  /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter         scatterin, scatterout;  /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;         /* PETSC_FALSE when the (p)mat is sequential; then no gathering is needed */
  PetscSubcomm       psubcomm;               /* decomposition of the PC's communicator into nsubcomm subcommunicators */
  PetscInt           nsubcomm;               /* num of data structure PetscSubcomm */
  PetscBool          shifttypeset;           /* a shift type was requested before the inner KSP existed; replay it on creation */
  MatFactorShiftType shifttype;              /* the stashed shift type, valid only when shifttypeset is PETSC_TRUE */
} PC_Redundant;
21 
22 PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
23 {
24   PC_Redundant *red = (PC_Redundant *)pc->data;
25 
26   PetscFunctionBegin;
27   if (red->ksp) {
28     PC pc;
29     PetscCall(KSPGetPC(red->ksp, &pc));
30     PetscCall(PCFactorSetShiftType(pc, shifttype));
31   } else {
32     red->shifttypeset = PETSC_TRUE;
33     red->shifttype    = shifttype;
34   }
35   PetscFunctionReturn(PETSC_SUCCESS);
36 }
37 
/* Viewer for PCREDUNDANT: on an ASCII viewer prints the setup state and, once set up,
   views the inner KSP of the first (color==0) subcommunicator; on a string viewer
   emits a one-line description. Other viewer types are ignored. */
static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscBool     iascii, isstring;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    if (!red->psubcomm) {
      /* PCSetUp_Redundant has not run, so there is no inner solver to show yet */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Not yet setup\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
      /* Get/RestoreSubViewer are called by every rank of the outer viewer, while only
         the color==0 group actually views -- do not move these inside the if below */
      PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscCall(PetscViewerASCIIPushTab(subviewer));
        PetscCall(KSPView(red->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
    }
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
65 
66 #include <../src/mat/impls/aij/mpi/mpiaij.h>
67 static PetscErrorCode PCSetUp_Redundant(PC pc)
68 {
69   PC_Redundant *red = (PC_Redundant *)pc->data;
70   PetscInt      mstart, mend, mlocal, M;
71   PetscMPIInt   size;
72   MPI_Comm      comm, subcomm;
73   Vec           x;
74 
75   PetscFunctionBegin;
76   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
77 
78   /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
79   PetscCallMPI(MPI_Comm_size(comm, &size));
80   if (size == 1) red->useparallelmat = PETSC_FALSE;
81 
82   if (!pc->setupcalled) {
83     PetscInt mloc_sub;
84     if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
85       KSP ksp;
86       PetscCall(PCRedundantGetKSP(pc, &ksp));
87     }
88     subcomm = PetscSubcommChild(red->psubcomm);
89 
90     if (red->useparallelmat) {
91       /* grab the parallel matrix and put it into processors of a subcomminicator */
92       PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));
93 
94       PetscCallMPI(MPI_Comm_size(subcomm, &size));
95       if (size > 1) {
96         PetscBool foundpack, issbaij;
97         PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
98         if (!issbaij) {
99           PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
100         } else {
101           PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
102         }
103         if (!foundpack) { /* reset default ksp and pc */
104           PetscCall(KSPSetType(red->ksp, KSPGMRES));
105           PetscCall(PCSetType(red->pc, PCBJACOBI));
106         } else {
107           PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
108         }
109       }
110 
111       PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
112 
113       /* get working vectors xsub and ysub */
114       PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));
115 
116       /* create working vectors xdup and ydup.
117        xdup concatenates all xsub's contigously to form a mpi vector over dupcomm  (see PetscSubcommCreate_interlaced())
118        ydup concatenates all ysub and has empty local arrays because ysub's arrays will be place into it.
119        Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
120       PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
121       PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
122       PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));
123 
124       /* create vecscatters */
125       if (!red->scatterin) { /* efficiency of scatterin is independent from psubcomm_type! */
126         IS        is1, is2;
127         PetscInt *idx1, *idx2, i, j, k;
128 
129         PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
130         PetscCall(VecGetSize(x, &M));
131         PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
132         mlocal = mend - mstart;
133         PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
134         j = 0;
135         for (k = 0; k < red->psubcomm->n; k++) {
136           for (i = mstart; i < mend; i++) {
137             idx1[j]   = i;
138             idx2[j++] = i + M * k;
139           }
140         }
141         PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
142         PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
143         PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
144         PetscCall(ISDestroy(&is1));
145         PetscCall(ISDestroy(&is2));
146 
147         /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
148         PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
149         PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
150         PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
151         PetscCall(ISDestroy(&is1));
152         PetscCall(ISDestroy(&is2));
153         PetscCall(PetscFree2(idx1, idx2));
154         PetscCall(VecDestroy(&x));
155       }
156     } else { /* !red->useparallelmat */
157       PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
158     }
159   } else { /* pc->setupcalled */
160     if (red->useparallelmat) {
161       MatReuse reuse;
162       /* grab the parallel matrix and put it into processors of a subcomminicator */
163       /*--------------------------------------------------------------------------*/
164       if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
165         /* destroy old matrices */
166         PetscCall(MatDestroy(&red->pmats));
167         reuse = MAT_INITIAL_MATRIX;
168       } else {
169         reuse = MAT_REUSE_MATRIX;
170       }
171       PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
172       PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
173     } else { /* !red->useparallelmat */
174       PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
175     }
176   }
177 
178   if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
179   PetscCall(KSPSetUp(red->ksp));
180 
181   /* Detect failure */
182   KSPConvergedReason redreason;
183   PetscCall(KSPGetConvergedReason(red->ksp, &redreason));
184   if (redreason) pc->failedreason = PC_SUBPC_ERROR;
185   PetscFunctionReturn(PETSC_SUCCESS);
186 }
187 
/* Apply the redundant preconditioner: gather the global vector x onto every
   subcommunicator, solve redundantly with the inner KSP, then scatter this
   subcommunicator's copy of the solution back into the global vector y.
   Note: the VecPlaceArray/VecResetArray pairs below let xsub/ysub alias the
   storage of xdup/ydup without copying; their ordering must not be changed. */
static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    /* sequential case: no gathering needed, solve directly on the original operators */
    PetscCall(KSPSolve(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub (aliased, no copy) */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  /* reset must precede restore: xsub must stop referencing xdup's array first */
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup (ydup was created with a NULL array) */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
225 
/* Transpose application of the redundant preconditioner; identical data movement to
   PCApply_Redundant but solves with KSPSolveTranspose on each subcommunicator.
   The VecPlaceArray/VecResetArray pairing and ordering are intentional -- see
   the matching comments in PCApply_Redundant. */
static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscScalar  *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    /* sequential case: solve directly on the original operators */
    PetscCall(KSPSolveTranspose(red->ksp, x, y));
    PetscCall(KSPCheckSolve(red->ksp, pc, y));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* scatter x to xdup */
  PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));

  /* place xdup's local array into xsub (aliased, no copy) */
  PetscCall(VecGetArray(red->xdup, &array));
  PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));

  /* apply preconditioner on each processor */
  PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
  /* reset must precede restore: xsub must stop referencing xdup's array first */
  PetscCall(VecResetArray(red->xsub));
  PetscCall(VecRestoreArray(red->xdup, &array));

  /* place ysub's local array into ydup */
  PetscCall(VecGetArray(red->ysub, &array));
  PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));

  /* scatter ydup to y */
  PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecResetArray(red->ydup));
  PetscCall(VecRestoreArray(red->ysub, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
263 
264 static PetscErrorCode PCReset_Redundant(PC pc)
265 {
266   PC_Redundant *red = (PC_Redundant *)pc->data;
267 
268   PetscFunctionBegin;
269   if (red->useparallelmat) {
270     PetscCall(VecScatterDestroy(&red->scatterin));
271     PetscCall(VecScatterDestroy(&red->scatterout));
272     PetscCall(VecDestroy(&red->ysub));
273     PetscCall(VecDestroy(&red->xsub));
274     PetscCall(VecDestroy(&red->xdup));
275     PetscCall(VecDestroy(&red->ydup));
276   }
277   PetscCall(MatDestroy(&red->pmats));
278   PetscCall(KSPReset(red->ksp));
279   PetscFunctionReturn(PETSC_SUCCESS);
280 }
281 
282 static PetscErrorCode PCDestroy_Redundant(PC pc)
283 {
284   PC_Redundant *red = (PC_Redundant *)pc->data;
285 
286   PetscFunctionBegin;
287   PetscCall(PCReset_Redundant(pc));
288   PetscCall(KSPDestroy(&red->ksp));
289   PetscCall(PetscSubcommDestroy(&red->psubcomm));
290   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
292   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
293   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
294   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
295   PetscCall(PetscFree(pc->data));
296   PetscFunctionReturn(PETSC_SUCCESS);
297 }
298 
/* Process the options database: only -pc_redundant_number is handled here; the
   inner KSP picks up its own options (prefix "redundant_") during PCSetUp_Redundant. */
static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
309 
/* Record the requested number of redundant solves; takes effect when the
   PetscSubcomm is created in PCRedundantGetKSP_Redundant(). */
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}
318 
319 /*@
320    PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.
321 
322    Logically Collective
323 
324    Input Parameters:
325 +  pc - the preconditioner context
326 -  nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
327                               use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
328 
329    Level: advanced
330 
331 .seealso: `PCREDUNDANT`
332 @*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* a non-positive count cannot be split into subcommunicators */
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  /* TryMethod: silently a no-op if pc is not of type PCREDUNDANT */
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
341 
342 static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
343 {
344   PC_Redundant *red = (PC_Redundant *)pc->data;
345 
346   PetscFunctionBegin;
347   PetscCall(PetscObjectReference((PetscObject)in));
348   PetscCall(VecScatterDestroy(&red->scatterin));
349 
350   red->scatterin = in;
351 
352   PetscCall(PetscObjectReference((PetscObject)out));
353   PetscCall(VecScatterDestroy(&red->scatterout));
354   red->scatterout = out;
355   PetscFunctionReturn(PETSC_SUCCESS);
356 }
357 
358 /*@
359    PCRedundantSetScatter - Sets the scatter used to copy values into the
360      redundant local solve and the scatter to move them back into the global
361      vector.
362 
363    Logically Collective
364 
365    Input Parameters:
366 +  pc - the preconditioner context
367 .  in - the scatter to move the values in
368 -  out - the scatter to move them out
369 
370    Level: advanced
371 
372 .seealso: `PCREDUNDANT`
373 @*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* VecScatter is implemented as a PetscSF, hence the PETSCSF_CLASSID checks */
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}
383 
/* Return the inner KSP, lazily creating the subcommunicator decomposition and the
   KSP/PC pair on first call. Defaults: KSPPREONLY with PCLU, or PCCHOLESKY when
   the preconditioning matrix is SBAIJ. */
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    /* split the PC's communicator into red->nsubcomm subcommunicators */
    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    /* SBAIJ stores only one triangle, so only Cholesky (not LU) applies */
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    /* replay a shift type requested before this KSP existed (see PCFactorSetShiftType_Redundant) */
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
428 
429 /*@
430    PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.
431 
432    Not Collective
433 
434    Input Parameter:
435 .  pc - the preconditioner context
436 
437    Output Parameter:
438 .  innerksp - the `KSP` on the smaller set of processes
439 
440    Level: advanced
441 
442 .seealso: `PCREDUNDANT`
443 @*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(innerksp, 2);
  /* UseMethod (not Try): it is an error to call this on a PC that is not PCREDUNDANT */
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
452 
453 static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
454 {
455   PC_Redundant *red = (PC_Redundant *)pc->data;
456 
457   PetscFunctionBegin;
458   if (mat) *mat = red->pmats;
459   if (pmat) *pmat = red->pmats;
460   PetscFunctionReturn(PETSC_SUCCESS);
461 }
462 
463 /*@
464    PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix
465 
466    Not Collective
467 
468    Input Parameter:
469 .  pc - the preconditioner context
470 
471    Output Parameters:
472 +  mat - the matrix
473 -  pmat - the (possibly different) preconditioner matrix
474 
475    Level: advanced
476 
477 .seealso: `PCREDUNDANT`
478 @*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* both output arguments are optional; pass NULL for any not wanted */
  if (mat) PetscValidPointer(mat, 2);
  if (pmat) PetscValidPointer(pmat, 3);
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
488 
489 /*MC
490      PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processors
491 
492   Options Database Key:
493 .  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
494                               use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
495 
496    Level: intermediate
497 
498    Notes:
499    Options for the redundant preconditioners can be set using the options database prefix -redundant_
500 
501    The default `KSP` is preonly and the default `PC` is `PCLU` or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.
502 
503    `PCFactorSetShiftType()` applied to this `PC` will convey the shift type into the inner `PC` if it is factorization based.
504 
505    Developer Note:
506    `PCSetInitialGuessNonzero()` is not used by this class but likely should be.
507 
508 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
509           `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
510 M*/
511 
/* Constructor for PCREDUNDANT: allocates the context, sets defaults, installs the
   virtual table, and composes the type-specific query/set functions. */
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  /* default: one redundant (sequential) solve per MPI process */
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  /* composed functions let the PCRedundantXXX()/PCFactorSetShiftType() API reach
     this implementation; they are removed again in PCDestroy_Redundant() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
540