xref: /petsc/src/ksp/pc/impls/redistribute/redistribute.c (revision a4e35b1925eceef64945ea472b84f2bf06a67b5e)
1 
/*
  This file defines a preconditioner that redistributes the problem (eliminating
  diagonal-only rows and load balancing the rest) and solves the reduced system.
*/
5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
6 #include <petscksp.h>
7 
typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
/* Linked-list node recording one field split (name + index set) registered via
   PCFieldSplitSetIS() before setup; the list is replayed onto the inner
   PCFIELDSPLIT during PCSetUp_Redistribute(). */
struct _PC_FieldSplitLink {
  char             *splitname; /* heap-allocated split name, owned by this node */
  IS                is;        /* index set for this split in the ORIGINAL (pre-redistribution) numbering; reference held */
  PC_FieldSplitLink next, previous; /* NOTE(review): 'previous' is never assigned in this file -- appears unused, confirm before removing */
};
14 
/* Private context for PCREDISTRIBUTE: holds the reduced (diagonal-only rows removed,
   load-balanced) linear system, the scatter between original and reduced layouts,
   and the inverse of the eliminated diagonal entries. */
typedef struct {
  KSP          ksp;          /* inner solver applied to the reduced, redistributed system */
  Vec          x, b;         /* work vectors in the reduced/redistributed layout */
  VecScatter   scatter;      /* original layout <-> reduced layout */
  IS           is;           /* rows (original numbering) this process owns in the reduced system */
  PetscInt     dcnt, *drows; /* these are the local rows that have only diagonal entry */
  PetscScalar *diag;         /* 1/diagonal for each eliminated row (0.0 where the diagonal is zero) */
  Vec          work;         /* residual work vector in the original layout, lazily created in PCApply */
  PetscBool    zerodiag;     /* PETSC_TRUE if any eliminated row has a zero diagonal entry */

  PetscInt          nsplits;    /* number of registered field splits */
  PC_FieldSplitLink splitlinks; /* head of the list of field splits to forward to the inner PC */
} PC_Redistribute;
28 
29 static PetscErrorCode PCFieldSplitSetIS_Redistribute(PC pc, const char splitname[], IS is)
30 {
31   PC_Redistribute   *red  = (PC_Redistribute *)pc->data;
32   PC_FieldSplitLink *next = &red->splitlinks;
33 
34   PetscFunctionBegin;
35   while (*next) next = &(*next)->next;
36   PetscCall(PetscNew(next));
37   if (splitname) {
38     PetscCall(PetscStrallocpy(splitname, &(*next)->splitname));
39   } else {
40     PetscCall(PetscMalloc1(8, &(*next)->splitname));
41     PetscCall(PetscSNPrintf((*next)->splitname, 7, "%" PetscInt_FMT, red->nsplits++));
42   }
43   PetscCall(PetscObjectReference((PetscObject)is));
44   PetscCall(ISDestroy(&(*next)->is));
45   (*next)->is = is;
46   PetscFunctionReturn(PETSC_SUCCESS);
47 }
48 
49 static PetscErrorCode PCView_Redistribute(PC pc, PetscViewer viewer)
50 {
51   PC_Redistribute *red = (PC_Redistribute *)pc->data;
52   PetscBool        iascii, isstring;
53   PetscInt         ncnt, N;
54 
55   PetscFunctionBegin;
56   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
57   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
58   if (iascii) {
59     PetscCall(MPIU_Allreduce(&red->dcnt, &ncnt, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
60     PetscCall(MatGetSize(pc->pmat, &N, NULL));
61     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number rows eliminated %" PetscInt_FMT " Percentage rows eliminated %g\n", ncnt, (double)(100.0 * ((PetscReal)ncnt) / ((PetscReal)N))));
62     PetscCall(PetscViewerASCIIPrintf(viewer, "  Redistribute preconditioner: \n"));
63     PetscCall(KSPView(red->ksp, viewer));
64   } else if (isstring) {
65     PetscCall(PetscViewerStringSPrintf(viewer, " Redistribute preconditioner"));
66     PetscCall(KSPView(red->ksp, viewer));
67   }
68   PetscFunctionReturn(PETSC_SUCCESS);
69 }
70 
/*
  Builds the reduced, load-balanced linear system solved by the inner KSP.

  First setup: classifies each local row as "diagonal-only" (eliminated and
  handled directly in PCApply) or "kept", computes a load-balanced layout for
  the kept rows, moves row indices between processes (point-to-point MPI,
  pattern taken from VecScatterCreate_PtoS()), builds red->is / red->scatter /
  the reduced submatrix, and forwards any registered field splits -- remapped
  to the reduced numbering -- to an inner PCFIELDSPLIT.

  Subsequent setups: only refresh the reduced submatrix values; a change in the
  nonzero structure is not supported.

  Always (re)computes red->diag, the inverse of the eliminated diagonal entries.
*/
static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute         *red = (PC_Redistribute *)pc->data;
  MPI_Comm                 comm;
  PetscInt                 rstart, rend, nrstart, nrend, i, nz, cnt, *rows, ncnt, dcnt, *drows;
  PetscLayout              map, nmap;
  PetscMPIInt              size, tag, n;
  PETSC_UNUSED PetscMPIInt imdex;
  PetscInt                *source = NULL;
  PetscMPIInt             *sizes  = NULL, nrecvs;
  PetscInt                 j, nsends;
  PetscInt                *owner = NULL, *starts = NULL, count, slen;
  PetscInt                *rvalues, *svalues, recvtotal;
  PetscMPIInt             *onodes1, *olengths1;
  MPI_Request             *send_waits = NULL, *recv_waits = NULL;
  MPI_Status               recv_status, *send_status;
  Vec                      tvec, diag;
  Mat                      tmat;
  const PetscScalar       *d, *values;
  const PetscInt          *cols;
  PC_FieldSplitLink       *next = &red->splitlinks;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    /* repeated setup: the row classification and scatter are reused; only the submatrix values change */
    PetscCheck(pc->flag == SAME_NONZERO_PATTERN, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PC is not supported for a change in the nonzero structure of the matrix");
    PetscCall(KSPGetOperators(red->ksp, NULL, &tmat));
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_REUSE_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
  } else {
    PetscInt          NN;
    PC                ipc;
    PetscVoidFunction fptr;

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(PetscObjectGetNewTag((PetscObject)pc, &tag));

    /* count non-diagonal rows on process */
    /* a row is "kept" if it has any nonzero off-diagonal entry; explicit zeros are ignored */
    PetscCall(MatGetOwnershipRange(pc->mat, &rstart, &rend));
    cnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          cnt++;
          break;
        }
      }
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }
    PetscCall(PetscMalloc1(cnt, &rows));
    PetscCall(PetscMalloc1(rend - rstart - cnt, &drows));

    /* list non-diagonal rows on process */
    /* rows[] gets kept rows in GLOBAL numbering; drows[] gets diagonal-only rows as LOCAL offsets */
    cnt  = 0;
    dcnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscBool diagonly = PETSC_TRUE;
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          diagonly = PETSC_FALSE;
          break;
        }
      }
      if (!diagonly) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }

    /* create PetscLayout for non-diagonal rows on each process */
    /* 'map' describes where the kept rows currently live (cnt per process) */
    PetscCall(PetscLayoutCreate(comm, &map));
    PetscCall(PetscLayoutSetLocalSize(map, cnt));
    PetscCall(PetscLayoutSetBlockSize(map, 1));
    PetscCall(PetscLayoutSetUp(map));
    nrstart = map->rstart;
    nrend   = map->rend;

    /* create PetscLayout for load-balanced non-diagonal rows on each process */
    /* 'nmap' is the target layout: same global size, PETSc's default even split */
    PetscCall(PetscLayoutCreate(comm, &nmap));
    PetscCall(MPIU_Allreduce(&cnt, &ncnt, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscLayoutSetSize(nmap, ncnt));
    PetscCall(PetscLayoutSetBlockSize(nmap, 1));
    PetscCall(PetscLayoutSetUp(nmap));

    PetscCall(MatGetSize(pc->pmat, &NN, NULL));
    PetscCall(PetscInfo(pc, "Number of diagonal rows eliminated %" PetscInt_FMT ", percentage eliminated %g\n", NN - ncnt, (double)(((PetscReal)(NN - ncnt)) / ((PetscReal)(NN)))));

    if (size > 1) {
      /*
        the following block of code assumes MPI can send messages to self, which is not supported for MPI-uni hence we need to handle
        the size 1 case as a special case

       this code is taken from VecScatterCreate_PtoS()
       Determines what rows need to be moved where to
       load balance the non-diagonal rows
       */
      /*  count number of contributors to each processor */
      PetscCall(PetscMalloc2(size, &sizes, cnt, &owner));
      PetscCall(PetscArrayzero(sizes, size));
      j      = 0;
      nsends = 0;
      /* for each kept row (indexed in 'map' numbering) find the target rank under 'nmap';
         'j' is carried across iterations since the indices i are increasing */
      for (i = nrstart; i < nrend; i++) {
        if (i < nmap->range[j]) j = 0; /* restart the search if the cached rank overshot */
        for (; j < size; j++) {
          if (i < nmap->range[j + 1]) {
            if (!sizes[j]++) nsends++; /* first row for rank j => one more message */
            owner[i - nrstart] = j;
            break;
          }
        }
      }
      /* inform other processors of number of messages and max length*/
      PetscCall(PetscGatherNumberOfMessages(comm, NULL, sizes, &nrecvs));
      PetscCall(PetscGatherMessageLengths(comm, nsends, nrecvs, sizes, &onodes1, &olengths1));
      PetscCall(PetscSortMPIIntWithArray(nrecvs, onodes1, olengths1));
      recvtotal = 0;
      for (i = 0; i < nrecvs; i++) recvtotal += olengths1[i];

      /* post receives:  rvalues - rows I will own; count - nu */
      PetscCall(PetscMalloc3(recvtotal, &rvalues, nrecvs, &source, nrecvs, &recv_waits));
      count = 0;
      for (i = 0; i < nrecvs; i++) {
        PetscCallMPI(MPI_Irecv((rvalues + count), olengths1[i], MPIU_INT, onodes1[i], tag, comm, recv_waits + i));
        count += olengths1[i];
      }

      /* do sends:
       1) starts[i] gives the starting index in svalues for stuff going to
       the ith processor
       */
      PetscCall(PetscMalloc3(cnt, &svalues, nsends, &send_waits, size, &starts));
      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      /* pack the global row indices grouped by destination rank (starts[] is consumed here) */
      for (i = 0; i < cnt; i++) svalues[starts[owner[i]]++] = rows[i];
      for (i = 0; i < cnt; i++) rows[i] = rows[i] - nrstart;
      red->drows = drows;
      red->dcnt  = dcnt;
      PetscCall(PetscFree(rows));

      /* rebuild starts[] (it was destroyed by the packing loop above) and launch the sends */
      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      count = 0;
      for (i = 0; i < size; i++) {
        if (sizes[i]) PetscCallMPI(MPI_Isend(svalues + starts[i], sizes[i], MPIU_INT, i, tag, comm, send_waits + count++));
      }

      /*  wait on receives */
      count = nrecvs;
      slen  = 0;
      while (count) {
        PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
        /* unpack receives into our local space */
        PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &n));
        slen += n;
        count--;
      }
      PetscCheck(slen == recvtotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Total message lengths %" PetscInt_FMT " not expected %" PetscInt_FMT, slen, recvtotal);
      /* rvalues now holds the global indices of the kept rows this process will own */
      PetscCall(ISCreateGeneral(comm, slen, rvalues, PETSC_COPY_VALUES, &red->is));

      /* free all work space */
      PetscCall(PetscFree(olengths1));
      PetscCall(PetscFree(onodes1));
      PetscCall(PetscFree3(rvalues, source, recv_waits));
      PetscCall(PetscFree2(sizes, owner));
      if (nsends) { /* wait on sends */
        PetscCall(PetscMalloc1(nsends, &send_status));
        PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
        PetscCall(PetscFree(send_status));
      }
      PetscCall(PetscFree3(svalues, send_waits, starts));
    } else {
      /* uniprocessor: no redistribution needed, keep the rows where they are */
      PetscCall(ISCreateGeneral(comm, cnt, rows, PETSC_OWN_POINTER, &red->is));
      red->drows = drows;
      red->dcnt  = dcnt;
      slen       = cnt;
    }
    PetscCall(PetscLayoutDestroy(&map));

    /* reduced-layout work vectors and the scatter from the original layout into them */
    PetscCall(VecCreateMPI(comm, slen, PETSC_DETERMINE, &red->b));
    PetscCall(VecDuplicate(red->b, &red->x));
    PetscCall(MatCreateVecs(pc->pmat, &tvec, NULL));
    PetscCall(VecScatterCreate(tvec, red->is, red->b, NULL, &red->scatter));

    /* Map the PCFIELDSPLIT fields to redistributed KSP */
    PetscCall(KSPGetPC(red->ksp, &ipc));
    PetscCall(PetscObjectQueryFunction((PetscObject)ipc, "PCFieldSplitSetIS_C", &fptr));
    if (fptr && *next) {
      PetscScalar       *atvec;
      const PetscScalar *ab;
      PetscInt           primes[] = {2, 3, 5, 7, 11, 13, 17, 19};
      PetscInt           cnt      = 0;

      /* each split is tagged with a distinct prime; a row's membership survives the scatter
         as divisibility of the product, so overlapping splits are representable */
      PetscCheck(red->nsplits <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(primes), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No support for this many fields");
      PetscCall(VecSet(tvec, 1.0));
      PetscCall(VecGetArray(tvec, &atvec));

      while (*next) {
        const PetscInt *indices;
        PetscInt        n;

        PetscCall(ISGetIndices((*next)->is, &indices));
        PetscCall(ISGetLocalSize((*next)->is, &n));
        for (PetscInt i = 0; i < n; i++) atvec[indices[i] - rstart] *= primes[cnt];
        PetscCall(ISRestoreIndices((*next)->is, &indices));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArray(tvec, &atvec));
      /* move the tags into the redistributed layout */
      PetscCall(VecScatterBegin(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      cnt = 0;
      PetscCall(VecGetArrayRead(red->b, &ab));
      next = &red->splitlinks;
      /* second pass: recover each split's rows in the NEW numbering by testing divisibility */
      while (*next) {
        PetscInt  n = 0;
        PetscInt *indices;
        IS        ris;

        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) n++;
        }
        PetscCall(PetscMalloc1(n, &indices));
        n = 0;
        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) indices[n++] = i + nmap->rstart;
        }
        PetscCall(ISCreateGeneral(comm, n, indices, PETSC_OWN_POINTER, &ris));
        PetscCall(PCFieldSplitSetIS(ipc, (*next)->splitname, ris));

        PetscCall(ISDestroy(&ris));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArrayRead(red->b, &ab));
    }
    PetscCall(VecDestroy(&tvec));
    /* extract the reduced operator and hand it to the inner solver */
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_INITIAL_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
    PetscCall(MatDestroy(&tmat));
    PetscCall(PetscLayoutDestroy(&nmap));
  }

  /* get diagonal portion of matrix */
  /* store 1/d for each eliminated row; a zero diagonal is recorded (diag=0, zerodiag flag)
     so PCApply can detect inconsistent right-hand sides */
  PetscCall(PetscFree(red->diag));
  PetscCall(PetscMalloc1(red->dcnt, &red->diag));
  PetscCall(MatCreateVecs(pc->pmat, &diag, NULL));
  PetscCall(MatGetDiagonal(pc->pmat, diag));
  PetscCall(VecGetArrayRead(diag, &d));
  for (i = 0; i < red->dcnt; i++) {
    if (d[red->drows[i]] != 0) red->diag[i] = 1.0 / d[red->drows[i]];
    else {
      red->zerodiag = PETSC_TRUE;
      red->diag[i]  = 0.0;
    }
  }
  PetscCall(VecRestoreArrayRead(diag, &d));
  PetscCall(VecDestroy(&diag));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
332 
333 static PetscErrorCode PCApply_Redistribute(PC pc, Vec b, Vec x)
334 {
335   PC_Redistribute   *red   = (PC_Redistribute *)pc->data;
336   PetscInt           dcnt  = red->dcnt, i;
337   const PetscInt    *drows = red->drows;
338   PetscScalar       *xwork;
339   const PetscScalar *bwork, *diag = red->diag;
340 
341   PetscFunctionBegin;
342   if (!red->work) PetscCall(VecDuplicate(b, &red->work));
343   /* compute the rows of solution that have diagonal entries only */
344   PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */
345   PetscCall(VecGetArray(x, &xwork));
346   PetscCall(VecGetArrayRead(b, &bwork));
347   if (red->zerodiag) {
348     for (i = 0; i < dcnt; i++) {
349       if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) {
350         PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right hand side");
351         PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right hand side\n"));
352         PetscCall(VecSetInf(x));
353         pc->failedreasonrank = PC_INCONSISTENT_RHS;
354       }
355     }
356   }
357   for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]];
358   PetscCall(PetscLogFlops(dcnt));
359   PetscCall(VecRestoreArray(red->work, &xwork));
360   PetscCall(VecRestoreArrayRead(b, &bwork));
361   /* update the right hand side for the reduced system with diagonal rows (and corresponding columns) removed */
362   PetscCall(MatMult(pc->pmat, x, red->work));
363   PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A x */
364 
365   PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
366   PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
367   PetscCall(KSPSolve(red->ksp, red->b, red->x));
368   PetscCall(KSPCheckSolve(red->ksp, pc, red->x));
369   PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
370   PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
371   PetscFunctionReturn(PETSC_SUCCESS);
372 }
373 
374 static PetscErrorCode PCDestroy_Redistribute(PC pc)
375 {
376   PC_Redistribute  *red  = (PC_Redistribute *)pc->data;
377   PC_FieldSplitLink next = red->splitlinks;
378 
379   PetscFunctionBegin;
380   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
381 
382   while (next) {
383     PC_FieldSplitLink ilink;
384     PetscCall(PetscFree(next->splitname));
385     PetscCall(ISDestroy(&next->is));
386     ilink = next;
387     next  = next->next;
388     PetscCall(PetscFree(ilink));
389   }
390   PetscCall(VecScatterDestroy(&red->scatter));
391   PetscCall(ISDestroy(&red->is));
392   PetscCall(VecDestroy(&red->b));
393   PetscCall(VecDestroy(&red->x));
394   PetscCall(KSPDestroy(&red->ksp));
395   PetscCall(VecDestroy(&red->work));
396   PetscCall(PetscFree(red->drows));
397   PetscCall(PetscFree(red->diag));
398   PetscCall(PetscFree(pc->data));
399   PetscFunctionReturn(PETSC_SUCCESS);
400 }
401 
402 static PetscErrorCode PCSetFromOptions_Redistribute(PC pc, PetscOptionItems *PetscOptionsObject)
403 {
404   PC_Redistribute *red = (PC_Redistribute *)pc->data;
405 
406   PetscFunctionBegin;
407   PetscCall(KSPSetFromOptions(red->ksp));
408   PetscFunctionReturn(PETSC_SUCCESS);
409 }
410 
411 /*@
412   PCRedistributeGetKSP - Gets the `KSP` created by the `PCREDISTRIBUTE`
413 
414   Not Collective
415 
416   Input Parameter:
417 . pc - the preconditioner context
418 
419   Output Parameter:
420 . innerksp - the inner `KSP`
421 
422   Level: advanced
423 
424 .seealso: `KSP`, `PCREDISTRIBUTE`
425 @*/
426 PetscErrorCode PCRedistributeGetKSP(PC pc, KSP *innerksp)
427 {
428   PC_Redistribute *red = (PC_Redistribute *)pc->data;
429 
430   PetscFunctionBegin;
431   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
432   PetscAssertPointer(innerksp, 2);
433   *innerksp = red->ksp;
434   PetscFunctionReturn(PETSC_SUCCESS);
435 }
436 
437 /*MC
438      PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows (and the corresponding columns) that only have a diagonal entry and then
439      applies a `KSP` to that new smaller matrix
440 
441      Level: intermediate
442 
443      Notes:
444      Options for the redistribute `KSP` and `PC` with the options database prefix -redistribute_
445 
446      Usually run this with `-ksp_type preonly`
447 
448      If you have used `MatZeroRows()` to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, `-ksp_type preonly
449      -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc` to take advantage of the symmetry.
450 
     Supports the function `PCFieldSplitSetIS()`; passes the appropriate reduced field indices to an inner `PCFIELDSPLIT`, set with, for example,
     `-ksp_type preonly -pc_type redistribute -redistribute_pc_type fieldsplit`. Does not support the `PCFIELDSPLIT` options database keys.
453 
454      This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and redistributed matrix is the same. Rows are moved
455      between MPI processes inside the preconditioner to balance the number of rows on each process.
456 
457      Developer Note:
458      Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.
459 
460 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedistributeGetKSP()`, `MatZeroRows()`, `PCFieldSplitSetIS()`, `PCFIELDSPLIT`
461 M*/
462 
463 PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
464 {
465   PC_Redistribute *red;
466   const char      *prefix;
467 
468   PetscFunctionBegin;
469   PetscCall(PetscNew(&red));
470   pc->data = (void *)red;
471 
472   pc->ops->apply          = PCApply_Redistribute;
473   pc->ops->applytranspose = NULL;
474   pc->ops->setup          = PCSetUp_Redistribute;
475   pc->ops->destroy        = PCDestroy_Redistribute;
476   pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
477   pc->ops->view           = PCView_Redistribute;
478 
479   PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &red->ksp));
480   PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
481   PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
482   PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
483   PetscCall(PCGetOptionsPrefix(pc, &prefix));
484   PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
485   PetscCall(KSPAppendOptionsPrefix(red->ksp, "redistribute_"));
486   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_Redistribute));
487   PetscFunctionReturn(PETSC_SUCCESS);
488 }
489