/*
This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6
7 typedef struct {
8 KSP ksp;
9 PC pc; /* actual preconditioner used on each processor */
10 Vec xsub, ysub; /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
11 Vec xdup, ydup; /* parallel vector that congregates xsub or ysub facilitating vector scattering */
12 Mat pmats; /* matrices belong to a subcommunicator */
13 VecScatter scatterin, scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
14 PetscBool useparallelmat;
15 PetscSubcomm psubcomm;
16 PetscInt nsubcomm; /* num of data structure PetscSubcomm */
17 PetscBool shifttypeset;
18 MatFactorShiftType shifttype;
19 } PC_Redundant;
20
PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)21 static PetscErrorCode PCFactorSetShiftType_Redundant(PC pc, MatFactorShiftType shifttype)
22 {
23 PC_Redundant *red = (PC_Redundant *)pc->data;
24
25 PetscFunctionBegin;
26 if (red->ksp) {
27 PC pc;
28 PetscCall(KSPGetPC(red->ksp, &pc));
29 PetscCall(PCFactorSetShiftType(pc, shifttype));
30 } else {
31 red->shifttypeset = PETSC_TRUE;
32 red->shifttype = shifttype;
33 }
34 PetscFunctionReturn(PETSC_SUCCESS);
35 }
36
PCView_Redundant(PC pc,PetscViewer viewer)37 static PetscErrorCode PCView_Redundant(PC pc, PetscViewer viewer)
38 {
39 PC_Redundant *red = (PC_Redundant *)pc->data;
40 PetscBool isascii, isstring;
41 PetscViewer subviewer;
42
43 PetscFunctionBegin;
44 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
45 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
46 if (isascii) {
47 if (!red->psubcomm) {
48 PetscCall(PetscViewerASCIIPrintf(viewer, " Not yet setup\n"));
49 } else {
50 PetscCall(PetscViewerASCIIPrintf(viewer, " First (color=0) of %" PetscInt_FMT " PCs follows\n", red->nsubcomm));
51 PetscCall(PetscViewerGetSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
52 if (!red->psubcomm->color) { /* only view first redundant pc */
53 PetscCall(PetscViewerASCIIPushTab(subviewer));
54 PetscCall(KSPView(red->ksp, subviewer));
55 PetscCall(PetscViewerASCIIPopTab(subviewer));
56 }
57 PetscCall(PetscViewerRestoreSubViewer(viewer, ((PetscObject)red->pc)->comm, &subviewer));
58 }
59 } else if (isstring) {
60 PetscCall(PetscViewerStringSPrintf(viewer, " Redundant solver preconditioner"));
61 }
62 PetscFunctionReturn(PETSC_SUCCESS);
63 }
64
65 #include <../src/mat/impls/aij/mpi/mpiaij.h>
/*
  PCSetUp_Redundant - Builds everything PCApply_Redundant() needs:
  the PetscSubcomm splitting, the redundant copy of the matrix on each
  subcommunicator, the work vectors xsub/ysub/xdup/ydup, and the
  in/out vector scatters. On repeated setup it only re-gathers the matrix.
*/
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  PetscInt      mstart, mend, mlocal, M;
  PetscMPIInt   size;
  MPI_Comm      comm, subcomm;
  Vec           x;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      /* PCRedundantGetKSP() lazily creates red->psubcomm, red->ksp and red->pc;
         the returned ksp handle itself is not needed here */
      PetscCall(PCRedundantGetKSP(pc, &ksp));
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, subcomm, MAT_INITIAL_MATRIX, &red->pmats));

      PetscCallMPI(MPI_Comm_size(subcomm, &size));
      if (size > 1) {
        /* the subcommunicator is still parallel: a parallel direct factorization
           may not be available, so fall back to GMRES+block Jacobi if not */
        PetscBool foundpack, issbaij;
        PetscCall(PetscObjectTypeCompare((PetscObject)red->pmats, MATMPISBAIJ, &issbaij));
        if (!issbaij) {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_LU, &foundpack));
        } else {
          PetscCall(MatGetFactorAvailable(red->pmats, NULL, MAT_FACTOR_CHOLESKY, &foundpack));
        }
        if (!foundpack) { /* reset default ksp and pc */
          PetscCall(KSPSetType(red->ksp, KSPGMRES));
          PetscCall(PCSetType(red->pc, PCBJACOBI));
        } else {
          PetscCall(PCFactorSetMatSolverType(red->pc, NULL));
        }
      }

      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));

      /* get working vectors xsub and ysub */
      PetscCall(MatCreateVecs(red->pmats, &red->xsub, &red->ysub));

      /* create working vectors xdup and ydup.
         xdup concatenates all xsub's contiguously to form a mpi vector over dupcomm (see PetscSubcommCreate_interlaced())
         ydup concatenates all ysub and has empty local arrays because ysub's arrays will be place into it.
         Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      PetscCall(MatGetLocalSize(red->pmats, &mloc_sub, NULL));
      PetscCall(VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm), mloc_sub, PETSC_DECIDE, &red->xdup));
      PetscCall(VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm), 1, mloc_sub, PETSC_DECIDE, NULL, &red->ydup));

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent from psubcomm_type! */
        IS        is1, is2;
        PetscInt *idx1, *idx2, i, j, k;

        PetscCall(MatCreateVecs(pc->pmat, &x, NULL));
        PetscCall(VecGetSize(x, &M));
        PetscCall(VecGetOwnershipRange(x, &mstart, &mend));
        mlocal = mend - mstart;
        PetscCall(PetscMalloc2(red->psubcomm->n * mlocal, &idx1, red->psubcomm->n * mlocal, &idx2));
        /* each of the n subcommunicators receives a full copy of this process's
           local block: idx1 holds the global source indices (repeated n times),
           idx2 the destination indices offset by M per copy */
        j = 0;
        for (k = 0; k < red->psubcomm->n; k++) {
          for (i = mstart; i < mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M * k;
          }
        }
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx1, PETSC_COPY_VALUES, &is1));
        PetscCall(ISCreateGeneral(comm, red->psubcomm->n * mlocal, idx2, PETSC_COPY_VALUES, &is2));
        PetscCall(VecScatterCreate(x, is1, red->xdup, is2, &red->scatterin));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        PetscCall(ISCreateStride(comm, mlocal, mstart + red->psubcomm->color * M, 1, &is1));
        PetscCall(ISCreateStride(comm, mlocal, mstart, 1, &is2));
        PetscCall(VecScatterCreate(red->xdup, is1, x, is2, &red->scatterout));
        PetscCall(ISDestroy(&is1));
        PetscCall(ISDestroy(&is2));
        PetscCall(PetscFree2(idx1, idx2));
        PetscCall(VecDestroy(&x));
      }
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        PetscCall(MatDestroy(&red->pmats));
        reuse = MAT_INITIAL_MATRIX;
      } else {
        /* sparsity pattern unchanged: reuse the gathered matrix in place */
        reuse = MAT_REUSE_MATRIX;
      }
      PetscCall(MatCreateRedundantMatrix(pc->pmat, red->psubcomm->n, PetscSubcommChild(red->psubcomm), reuse, &red->pmats));
      PetscCall(KSPSetOperators(red->ksp, red->pmats, red->pmats));
    } else { /* !red->useparallelmat */
      PetscCall(KSPSetOperators(red->ksp, pc->mat, pc->pmat));
    }
  }

  if (pc->setfromoptionscalled) PetscCall(KSPSetFromOptions(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
179
PCApply_Redundant(PC pc,Vec x,Vec y)180 static PetscErrorCode PCApply_Redundant(PC pc, Vec x, Vec y)
181 {
182 PC_Redundant *red = (PC_Redundant *)pc->data;
183 PetscScalar *array;
184
185 PetscFunctionBegin;
186 if (!red->useparallelmat) {
187 PetscCall(KSPSolve(red->ksp, x, y));
188 PetscCall(KSPCheckSolve(red->ksp, pc, y));
189 PetscFunctionReturn(PETSC_SUCCESS);
190 }
191
192 /* scatter x to xdup */
193 PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
194 PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
195
196 /* place xdup's local array into xsub */
197 PetscCall(VecGetArray(red->xdup, &array));
198 PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));
199
200 /* apply preconditioner on each processor */
201 PetscCall(KSPSolve(red->ksp, red->xsub, red->ysub));
202 PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
203 PetscCall(VecResetArray(red->xsub));
204 PetscCall(VecRestoreArray(red->xdup, &array));
205
206 /* place ysub's local array into ydup */
207 PetscCall(VecGetArray(red->ysub, &array));
208 PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));
209
210 /* scatter ydup to y */
211 PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
212 PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
213 PetscCall(VecResetArray(red->ydup));
214 PetscCall(VecRestoreArray(red->ysub, &array));
215 PetscFunctionReturn(PETSC_SUCCESS);
216 }
217
PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)218 static PetscErrorCode PCApplyTranspose_Redundant(PC pc, Vec x, Vec y)
219 {
220 PC_Redundant *red = (PC_Redundant *)pc->data;
221 PetscScalar *array;
222
223 PetscFunctionBegin;
224 if (!red->useparallelmat) {
225 PetscCall(KSPSolveTranspose(red->ksp, x, y));
226 PetscCall(KSPCheckSolve(red->ksp, pc, y));
227 PetscFunctionReturn(PETSC_SUCCESS);
228 }
229
230 /* scatter x to xdup */
231 PetscCall(VecScatterBegin(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
232 PetscCall(VecScatterEnd(red->scatterin, x, red->xdup, INSERT_VALUES, SCATTER_FORWARD));
233
234 /* place xdup's local array into xsub */
235 PetscCall(VecGetArray(red->xdup, &array));
236 PetscCall(VecPlaceArray(red->xsub, (const PetscScalar *)array));
237
238 /* apply preconditioner on each processor */
239 PetscCall(KSPSolveTranspose(red->ksp, red->xsub, red->ysub));
240 PetscCall(KSPCheckSolve(red->ksp, pc, red->ysub));
241 PetscCall(VecResetArray(red->xsub));
242 PetscCall(VecRestoreArray(red->xdup, &array));
243
244 /* place ysub's local array into ydup */
245 PetscCall(VecGetArray(red->ysub, &array));
246 PetscCall(VecPlaceArray(red->ydup, (const PetscScalar *)array));
247
248 /* scatter ydup to y */
249 PetscCall(VecScatterBegin(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
250 PetscCall(VecScatterEnd(red->scatterout, red->ydup, y, INSERT_VALUES, SCATTER_FORWARD));
251 PetscCall(VecResetArray(red->ydup));
252 PetscCall(VecRestoreArray(red->ysub, &array));
253 PetscFunctionReturn(PETSC_SUCCESS);
254 }
255
PCReset_Redundant(PC pc)256 static PetscErrorCode PCReset_Redundant(PC pc)
257 {
258 PC_Redundant *red = (PC_Redundant *)pc->data;
259
260 PetscFunctionBegin;
261 if (red->useparallelmat) {
262 PetscCall(VecScatterDestroy(&red->scatterin));
263 PetscCall(VecScatterDestroy(&red->scatterout));
264 PetscCall(VecDestroy(&red->ysub));
265 PetscCall(VecDestroy(&red->xsub));
266 PetscCall(VecDestroy(&red->xdup));
267 PetscCall(VecDestroy(&red->ydup));
268 }
269 PetscCall(MatDestroy(&red->pmats));
270 PetscCall(KSPReset(red->ksp));
271 PetscFunctionReturn(PETSC_SUCCESS);
272 }
273
PCDestroy_Redundant(PC pc)274 static PetscErrorCode PCDestroy_Redundant(PC pc)
275 {
276 PC_Redundant *red = (PC_Redundant *)pc->data;
277
278 PetscFunctionBegin;
279 PetscCall(PCReset_Redundant(pc));
280 PetscCall(KSPDestroy(&red->ksp));
281 PetscCall(PetscSubcommDestroy(&red->psubcomm));
282 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", NULL));
283 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", NULL));
284 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", NULL));
285 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", NULL));
286 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", NULL));
287 PetscCall(PetscFree(pc->data));
288 PetscFunctionReturn(PETSC_SUCCESS);
289 }
290
/*
  PCSetFromOptions_Redundant - Processes command-line options for PCREDUNDANT.
  Currently only -pc_redundant_number, which overwrites red->nsubcomm
  (the inner KSP's options are handled separately in PCSetUp_Redundant()).
*/
static PetscErrorCode PCSetFromOptions_Redundant(PC pc, PetscOptionItems PetscOptionsObject)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Redundant options");
  PetscCall(PetscOptionsInt("-pc_redundant_number", "Number of redundant pc", "PCRedundantSetNumber", red->nsubcomm, &red->nsubcomm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
301
/*
  PCRedundantSetNumber_Redundant - Records the requested number of redundant
  solves; consumed when the PetscSubcomm is created in PCRedundantGetKSP_Redundant().
*/
static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc, PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(PETSC_SUCCESS);
}
310
311 /*@
312 PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.
313
314 Logically Collective
315
316 Input Parameters:
317 + pc - the preconditioner context
318 - nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
319 use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
320
321 Level: advanced
322
323 .seealso: [](ch_ksp), `PCREDUNDANT`
324 @*/
PetscErrorCode PCRedundantSetNumber(PC pc, PetscInt nredundant)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* a non-positive number of redundant solves is meaningless */
  PetscCheck(nredundant > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "num of redundant pc %" PetscInt_FMT " must be positive", nredundant);
  /* dispatches to PCRedundantSetNumber_Redundant() when pc is a PCREDUNDANT; no-op for other PC types */
  PetscTryMethod(pc, "PCRedundantSetNumber_C", (PC, PetscInt), (pc, nredundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
333
PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)334 static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc, VecScatter in, VecScatter out)
335 {
336 PC_Redundant *red = (PC_Redundant *)pc->data;
337
338 PetscFunctionBegin;
339 PetscCall(PetscObjectReference((PetscObject)in));
340 PetscCall(VecScatterDestroy(&red->scatterin));
341
342 red->scatterin = in;
343
344 PetscCall(PetscObjectReference((PetscObject)out));
345 PetscCall(VecScatterDestroy(&red->scatterout));
346 red->scatterout = out;
347 PetscFunctionReturn(PETSC_SUCCESS);
348 }
349
350 /*@
351 PCRedundantSetScatter - Sets the scatter used to copy values into the
352 redundant local solve and the scatter to move them back into the global
353 vector.
354
355 Logically Collective
356
357 Input Parameters:
358 + pc - the preconditioner context
359 . in - the scatter to move the values in
360 - out - the scatter to move them out
361
362 Level: advanced
363
364 .seealso: [](ch_ksp), `PCREDUNDANT`
365 @*/
PetscErrorCode PCRedundantSetScatter(PC pc, VecScatter in, VecScatter out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* VecScatter is implemented on top of PetscSF, hence the PETSCSF_CLASSID checks */
  PetscValidHeaderSpecific(in, PETSCSF_CLASSID, 2);
  PetscValidHeaderSpecific(out, PETSCSF_CLASSID, 3);
  /* dispatches to PCRedundantSetScatter_Redundant() when pc is a PCREDUNDANT; no-op otherwise */
  PetscTryMethod(pc, "PCRedundantSetScatter_C", (PC, VecScatter, VecScatter), (pc, in, out));
  PetscFunctionReturn(PETSC_SUCCESS);
}
375
/*
  PCRedundantGetKSP_Redundant - Returns the inner KSP, lazily creating the
  PetscSubcomm splitting, the subcommunicator KSP, and its PC on first call.
  Also called from PCSetUp_Redundant() to trigger this creation.
*/
static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc, KSP *innerksp)
{
  PC_Redundant *red = (PC_Redundant *)pc->data;
  MPI_Comm      comm, subcomm;
  const char   *prefix;
  PetscBool     issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    PetscCall(PCGetOptionsPrefix(pc, &prefix));

    /* split the PC's communicator into red->nsubcomm subcommunicators */
    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCall(PetscSubcommCreate(comm, &red->psubcomm));
    PetscCall(PetscSubcommSetNumber(red->psubcomm, red->nsubcomm));
    PetscCall(PetscSubcommSetType(red->psubcomm, PETSC_SUBCOMM_CONTIGUOUS));

    PetscCall(PetscSubcommSetOptionsPrefix(red->psubcomm, prefix));
    PetscCall(PetscSubcommSetFromOptions(red->psubcomm));

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = PetscSubcommChild(red->psubcomm);

    PetscCall(KSPCreate(subcomm, &red->ksp));
    PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
    PetscCall(KSPSetType(red->ksp, KSPPREONLY));
    PetscCall(KSPGetPC(red->ksp, &red->pc));
    /* default to a direct solver: Cholesky for (seq/MPI) SBAIJ matrices, LU otherwise */
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATSEQSBAIJ, &issbaij));
    if (!issbaij) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPISBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(PCSetType(red->pc, PCLU));
    } else {
      PetscCall(PCSetType(red->pc, PCCHOLESKY));
    }
    /* apply a shift type cached by PCFactorSetShiftType_Redundant() before the KSP existed */
    if (red->shifttypeset) {
      PetscCall(PCFactorSetShiftType(red->pc, red->shifttype));
      red->shifttypeset = PETSC_FALSE;
    }
    /* inner solver is configured with the "<prefix>redundant_" options prefix */
    PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(red->ksp, "redundant_"));
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
421
422 /*@
423 PCRedundantGetKSP - Gets the less parallel `KSP` created by the redundant `PC`.
424
425 Not Collective
426
427 Input Parameter:
428 . pc - the preconditioner context
429
430 Output Parameter:
431 . innerksp - the `KSP` on the smaller set of processes
432
433 Level: advanced
434
435 .seealso: [](ch_ksp), `PCREDUNDANT`
436 @*/
PetscErrorCode PCRedundantGetKSP(PC pc, KSP *innerksp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(innerksp, 2);
  /* PetscUseMethod (not Try): errors if pc is not a PCREDUNDANT */
  PetscUseMethod(pc, "PCRedundantGetKSP_C", (PC, KSP *), (pc, innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
445
PCRedundantGetOperators_Redundant(PC pc,Mat * mat,Mat * pmat)446 static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc, Mat *mat, Mat *pmat)
447 {
448 PC_Redundant *red = (PC_Redundant *)pc->data;
449
450 PetscFunctionBegin;
451 if (mat) *mat = red->pmats;
452 if (pmat) *pmat = red->pmats;
453 PetscFunctionReturn(PETSC_SUCCESS);
454 }
455
456 /*@
457 PCRedundantGetOperators - gets the sequential linear system matrix and matrix used to construct the preconditioner
458
459 Not Collective
460
461 Input Parameter:
462 . pc - the preconditioner context
463
464 Output Parameters:
465 + mat - the matrix
466 - pmat - the (possibly different) matrix used to construct the preconditioner
467
468 Level: advanced
469
470 .seealso: [](ch_ksp), `PCREDUNDANT`
471 @*/
PetscErrorCode PCRedundantGetOperators(PC pc, Mat *mat, Mat *pmat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* both outputs are optional; validate only those requested */
  if (mat) PetscAssertPointer(mat, 2);
  if (pmat) PetscAssertPointer(pmat, 3);
  /* PetscUseMethod (not Try): errors if pc is not a PCREDUNDANT */
  PetscUseMethod(pc, "PCRedundantGetOperators_C", (PC, Mat *, Mat *), (pc, mat, pmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
481
482 /*MC
483 PCREDUNDANT - Runs a `KSP` solver with preconditioner for the entire problem on subgroups of processors
484
485 Options Database Key:
486 . -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
487 use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
488
489 Level: intermediate
490
491 Notes:
492 Options for the redundant preconditioners can be set using the options database prefix -redundant_
493
494 The default `KSP` is preonly and the default `PC` is `PCLU` or `PCCHOLESKY` if Pmat is of type `MATSBAIJ`.
495
   `PCFactorSetShiftType()` applied to this `PC` will convey the shift type to the inner `PC` if it is factorization based.
497
498 Developer Note:
499 `PCSetInitialGuessNonzero()` is not used by this class but likely should be.
500
501 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PCRedundantSetScatter()`,
502 `PCRedundantGetKSP()`, `PCRedundantGetOperators()`, `PCRedundantSetNumber()`, `PCREDISTRIBUTE`
503 M*/
504
/*
  PCCreate_Redundant - Creates the PCREDUNDANT context: allocates the private
  data, sets defaults, installs the function table, and composes the
  type-specific methods used by the public PCRedundant*() API.
*/
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant *red;
  PetscMPIInt   size;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  /* default: one redundant solve per MPI process; gather the parallel matrix */
  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void *)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  /* compose the type-specific methods dispatched by the public API (removed in PCDestroy_Redundant()) */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetScatter_C", PCRedundantSetScatter_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantSetNumber_C", PCRedundantSetNumber_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetKSP_C", PCRedundantGetKSP_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCRedundantGetOperators_C", PCRedundantGetOperators_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFactorSetShiftType_C", PCFactorSetShiftType_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}
533