/*
  This file defines a preconditioner that redistributes the rows of the matrix for load balance, removes the rows
  (and corresponding columns) that have only a diagonal entry, and solves the reduced problem.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscksp.h>

typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
struct _PC_FieldSplitLink {
  char             *splitname;
  IS                is;
  PC_FieldSplitLink next, previous;
};

typedef struct {
  KSP          ksp;          /* inner solver applied to the redistributed (load-balanced) system */
  Vec          x, b;         /* solution and right-hand side in the redistributed layout */
  VecScatter   scatter;      /* moves entries between the original and redistributed layouts */
  IS           is;           /* global rows of the original matrix kept in the redistributed system */
  PetscInt     dcnt, *drows; /* these are the local rows that have only diagonal entry */
  PetscScalar *diag;         /* inverses of the diagonal entries of the eliminated rows (0.0 where the diagonal is zero) */
  Vec          work;
  PetscBool    zerodiag;     /* set if any eliminated row has a zero diagonal entry */

  PetscInt          nsplits;
  PC_FieldSplitLink splitlinks;
} PC_Redistribute;

static PetscErrorCode PCFieldSplitSetIS_Redistribute(PC pc, const char splitname[], IS is)
{
  PC_Redistribute   *red  = (PC_Redistribute *)pc->data;
  PC_FieldSplitLink *next = &red->splitlinks;

  PetscFunctionBegin;
  while (*next) next = &(*next)->next;
  PetscCall(PetscNew(next));
  if (splitname) {
    PetscCall(PetscStrallocpy(splitname, &(*next)->splitname));
  } else {
    PetscCall(PetscMalloc1(8, &(*next)->splitname));
    PetscCall(PetscSNPrintf((*next)->splitname, 7, "%" PetscInt_FMT, red->nsplits++));
  }
  PetscCall(PetscObjectReference((PetscObject)is));
  PetscCall(ISDestroy(&(*next)->is));
  (*next)->is = is;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Redistribute(PC pc, PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;
  PetscBool        iascii, isstring;
  PetscInt         ncnt, N;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    PetscCall(MPIU_Allreduce(&red->dcnt, &ncnt, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)pc)));
    PetscCall(MatGetSize(pc->pmat, &N, NULL));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of rows eliminated %" PetscInt_FMT ", percentage of rows eliminated %g\n", ncnt, (double)(100.0 * ((PetscReal)ncnt) / ((PetscReal)N))));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Redistribute preconditioner:\n"));
    PetscCall(KSPView(red->ksp, viewer));
  } else if (isstring) {
    PetscCall(PetscViewerStringSPrintf(viewer, " Redistribute preconditioner"));
    PetscCall(KSPView(red->ksp, viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute         *red = (PC_Redistribute *)pc->data;
  MPI_Comm                 comm;
  PetscInt                 rstart, rend, nrstart, nrend, i, nz, cnt, *rows, ncnt, dcnt, *drows;
  PetscLayout              map, nmap;
  PetscMPIInt              size, tag, n;
  PETSC_UNUSED PetscMPIInt imdex;
  PetscInt                *source = NULL;
  PetscMPIInt             *sizes  = NULL, nrecvs;
  PetscInt                 j, nsends;
  PetscInt                *owner = NULL, *starts = NULL, count, slen;
  PetscInt                *rvalues, *svalues, recvtotal;
  PetscMPIInt             *onodes1, *olengths1;
  MPI_Request             *send_waits = NULL, *recv_waits = NULL;
  MPI_Status               recv_status, *send_status;
  Vec                      tvec, diag;
  Mat                      tmat;
  const PetscScalar       *d, *values;
  const PetscInt          *cols;
  PC_FieldSplitLink       *next = &red->splitlinks;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    PetscCheck(pc->flag == SAME_NONZERO_PATTERN, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCREDISTRIBUTE does not support a change in the nonzero structure of the matrix");
    PetscCall(KSPGetOperators(red->ksp, NULL, &tmat));
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_REUSE_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
  } else {
    PetscInt          NN;
    PC                ipc;
    PetscVoidFunction fptr;

    PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(PetscObjectGetNewTag((PetscObject)pc, &tag));

    /* count the rows on this process that have at least one nonzero off-diagonal entry */
    PetscCall(MatGetOwnershipRange(pc->mat, &rstart, &rend));
    cnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          cnt++;
          break;
        }
      }
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }
    PetscCall(PetscMalloc1(cnt, &rows));
    PetscCall(PetscMalloc1(rend - rstart - cnt, &drows));

    /* record the global indices of those rows (rows[]) and the local indices of the diagonal-only rows (drows[]) */
    cnt  = 0;
    dcnt = 0;
    for (i = rstart; i < rend; i++) {
      PetscBool diagonly = PETSC_TRUE;
      PetscCall(MatGetRow(pc->mat, i, &nz, &cols, &values));
      for (PetscInt j = 0; j < nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          diagonly = PETSC_FALSE;
          break;
        }
      }
      if (!diagonly) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      PetscCall(MatRestoreRow(pc->mat, i, &nz, &cols, &values));
    }

    /* create PetscLayout for non-diagonal rows on each process */
    PetscCall(PetscLayoutCreate(comm, &map));
    PetscCall(PetscLayoutSetLocalSize(map, cnt));
    PetscCall(PetscLayoutSetBlockSize(map, 1));
    PetscCall(PetscLayoutSetUp(map));
    nrstart = map->rstart;
    nrend   = map->rend;

    /* create PetscLayout for load-balanced non-diagonal rows on each process */
    PetscCall(PetscLayoutCreate(comm, &nmap));
    PetscCall(MPIU_Allreduce(&cnt, &ncnt, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscLayoutSetSize(nmap, ncnt));
    PetscCall(PetscLayoutSetBlockSize(nmap, 1));
    PetscCall(PetscLayoutSetUp(nmap));
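    /* map holds the current distribution of the non-diagonal rows; nmap holds the balanced distribution they will be moved to */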

    PetscCall(MatGetSize(pc->pmat, &NN, NULL));
    PetscCall(PetscInfo(pc, "Number of diagonal rows eliminated %" PetscInt_FMT ", percentage eliminated %g\n", NN - ncnt, (double)(100.0 * ((PetscReal)(NN - ncnt)) / ((PetscReal)NN))));

    if (size > 1) {
      /*
        The following block of code assumes MPI can send messages to itself, which is not supported by MPI-uni,
        hence the size 1 case is handled separately below.

        This code is adapted from VecScatterCreate_PtoS(); it determines which rows need to be moved where in order
        to load balance the non-diagonal rows across the processes.
       */
      /* count the number of contributors to each process */
      PetscCall(PetscMalloc2(size, &sizes, cnt, &owner));
      PetscCall(PetscArrayzero(sizes, size));
      j      = 0;
      nsends = 0;
      for (i = nrstart; i < nrend; i++) {
        if (i < nmap->range[j]) j = 0;
        for (; j < size; j++) {
          if (i < nmap->range[j + 1]) {
            if (!sizes[j]++) nsends++;
            owner[i - nrstart] = j;
            break;
          }
        }
      }
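      /* for illustration: with 2 ranks, cnt = 8 on rank 0 and cnt = 2 on rank 1, the balanced layout nmap assigns 5
         rows to each rank, so rank 0 keeps its first 5 non-diagonal rows and sends the remaining 3 to rank 1 */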
      /* inform the other processes of the number of messages and their maximum length */
      PetscCall(PetscGatherNumberOfMessages(comm, NULL, sizes, &nrecvs));
      PetscCall(PetscGatherMessageLengths(comm, nsends, nrecvs, sizes, &onodes1, &olengths1));
      PetscCall(PetscSortMPIIntWithArray(nrecvs, onodes1, olengths1));
      recvtotal = 0;
      for (i = 0; i < nrecvs; i++) recvtotal += olengths1[i];

      /* post receives: rvalues - rows I will own; count - running offset into rvalues */
      PetscCall(PetscMalloc3(recvtotal, &rvalues, nrecvs, &source, nrecvs, &recv_waits));
      count = 0;
      for (i = 0; i < nrecvs; i++) {
        PetscCallMPI(MPI_Irecv((rvalues + count), olengths1[i], MPIU_INT, onodes1[i], tag, comm, recv_waits + i));
        count += olengths1[i];
      }

      /* do the sends: starts[i] gives the starting index in svalues of the values going to the ith process */
      PetscCall(PetscMalloc3(cnt, &svalues, nsends, &send_waits, size, &starts));
      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      for (i = 0; i < cnt; i++) svalues[starts[owner[i]]++] = rows[i];
      for (i = 0; i < cnt; i++) rows[i] = rows[i] - nrstart;
      red->drows = drows;
      red->dcnt  = dcnt;
      PetscCall(PetscFree(rows));

      starts[0] = 0;
      for (i = 1; i < size; i++) starts[i] = starts[i - 1] + sizes[i - 1];
      count = 0;
      for (i = 0; i < size; i++) {
        if (sizes[i]) PetscCallMPI(MPI_Isend(svalues + starts[i], sizes[i], MPIU_INT, i, tag, comm, send_waits + count++));
      }

      /* wait on receives */
      count = nrecvs;
      slen  = 0;
      while (count) {
        PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
        /* unpack receives into our local space */
        PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &n));
        slen += n;
        count--;
      }
      PetscCheck(slen == recvtotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Total message lengths %" PetscInt_FMT " not expected %" PetscInt_FMT, slen, recvtotal);
      PetscCall(ISCreateGeneral(comm, slen, rvalues, PETSC_COPY_VALUES, &red->is));

      /* free all work space */
      PetscCall(PetscFree(olengths1));
      PetscCall(PetscFree(onodes1));
      PetscCall(PetscFree3(rvalues, source, recv_waits));
      PetscCall(PetscFree2(sizes, owner));
      if (nsends) { /* wait on sends */
        PetscCall(PetscMalloc1(nsends, &send_status));
        PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
        PetscCall(PetscFree(send_status));
      }
      PetscCall(PetscFree3(svalues, send_waits, starts));
    } else {
      PetscCall(ISCreateGeneral(comm, cnt, rows, PETSC_OWN_POINTER, &red->is));
      red->drows = drows;
      red->dcnt  = dcnt;
      slen       = cnt;
    }
    PetscCall(PetscLayoutDestroy(&map));

    PetscCall(VecCreateMPI(comm, slen, PETSC_DETERMINE, &red->b));
    PetscCall(VecDuplicate(red->b, &red->x));
    PetscCall(MatCreateVecs(pc->pmat, &tvec, NULL));
    PetscCall(VecScatterCreate(tvec, red->is, red->b, NULL, &red->scatter));
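    /* red->scatter moves the kept (non-diagonal) entries of a full-length vector into the redistributed layout */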

    /* Map the PCFIELDSPLIT fields to the redistributed KSP */
    PetscCall(KSPGetPC(red->ksp, &ipc));
    PetscCall(PetscObjectQueryFunction((PetscObject)ipc, "PCFieldSplitSetIS_C", &fptr));
    if (fptr && *next) {
      PetscScalar       *atvec;
      const PetscScalar *ab;
      PetscInt           primes[] = {2, 3, 5, 7, 11, 13, 17, 19};
      PetscInt           cnt      = 0;

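      /*
         Encode split membership multiplicatively: tvec starts at 1 and each entry is multiplied by the kth prime for
         every split k that contains its row. After the scatter, a redistributed row belongs to split k exactly when
         its (integer-valued) entry is divisible by primes[k]. With at most 8 splits the largest possible product is
         2*3*5*7*11*13*17*19 = 9699690, small enough to be represented exactly even in single precision.
      */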
      PetscCheck(red->nsplits <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(primes), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No support for this many fields");
      PetscCall(VecSet(tvec, 1.0));
      PetscCall(VecGetArray(tvec, &atvec));

      while (*next) {
        const PetscInt *indices;
        PetscInt        n;

        PetscCall(ISGetIndices((*next)->is, &indices));
        PetscCall(ISGetLocalSize((*next)->is, &n));
        for (PetscInt i = 0; i < n; i++) atvec[indices[i] - rstart] *= primes[cnt];
        PetscCall(ISRestoreIndices((*next)->is, &indices));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArray(tvec, &atvec));
      PetscCall(VecScatterBegin(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(red->scatter, tvec, red->b, INSERT_VALUES, SCATTER_FORWARD));
      cnt = 0;
      PetscCall(VecGetArrayRead(red->b, &ab));
      next = &red->splitlinks;
      while (*next) {
        PetscInt  n = 0;
        PetscInt *indices;
        IS        ris;

        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) n++;
        }
        PetscCall(PetscMalloc1(n, &indices));
        n = 0;
        for (PetscInt i = 0; i < nmap->rend - nmap->rstart; i++) {
          if (!(((PetscInt)PetscRealPart(ab[i])) % primes[cnt])) indices[n++] = i + nmap->rstart;
        }
        PetscCall(ISCreateGeneral(comm, n, indices, PETSC_OWN_POINTER, &ris));
        PetscCall(PCFieldSplitSetIS(ipc, (*next)->splitname, ris));

        PetscCall(ISDestroy(&ris));
        cnt++;
        next = &(*next)->next;
      }
      PetscCall(VecRestoreArrayRead(red->b, &ab));
    }
    PetscCall(VecDestroy(&tvec));
    PetscCall(MatCreateSubMatrix(pc->pmat, red->is, red->is, MAT_INITIAL_MATRIX, &tmat));
    PetscCall(KSPSetOperators(red->ksp, tmat, tmat));
    PetscCall(MatDestroy(&tmat));
    PetscCall(PetscLayoutDestroy(&nmap));
  }

  /* get the diagonal portion of the matrix and invert the diagonal entries of the eliminated rows */
  PetscCall(PetscFree(red->diag));
  PetscCall(PetscMalloc1(red->dcnt, &red->diag));
  PetscCall(MatCreateVecs(pc->pmat, &diag, NULL));
  PetscCall(MatGetDiagonal(pc->pmat, diag));
  PetscCall(VecGetArrayRead(diag, &d));
  for (i = 0; i < red->dcnt; i++) {
    if (d[red->drows[i]] != 0) red->diag[i] = 1.0 / d[red->drows[i]];
    else {
      red->zerodiag = PETSC_TRUE;
      red->diag[i]  = 0.0;
    }
  }
  PetscCall(VecRestoreArrayRead(diag, &d));
  PetscCall(VecDestroy(&diag));
  PetscCall(KSPSetUp(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_Redistribute(PC pc, Vec b, Vec x)
{
  PC_Redistribute   *red   = (PC_Redistribute *)pc->data;
  PetscInt           dcnt  = red->dcnt, i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork, *diag = red->diag;
  PetscBool          nonzero_guess;

  PetscFunctionBegin;
  if (!red->work) PetscCall(VecDuplicate(b, &red->work));
  PetscCall(KSPGetInitialGuessNonzero(red->ksp, &nonzero_guess));
  if (nonzero_guess) {
    PetscCall(VecScatterBegin(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(red->scatter, x, red->x, INSERT_VALUES, SCATTER_FORWARD));
  }

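  /*
     Order the diagonal-only rows D first and the remaining rows R last; because the D rows contain only their
     diagonal entry the system has the block form

        [ A_DD   0   ] [ x_D ]   [ b_D ]
        [ A_RD  A_RR ] [ x_R ] = [ b_R ]

     with A_DD diagonal. Below, x_D = A_DD^{-1} b_D is computed directly and then x_R is obtained by solving the
     redistributed system A_RR x_R = b_R - A_RD x_D with the inner KSP.
  */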
  /* fill in the entries of the solution for the rows that have only a diagonal entry */
  PetscCall(VecSet(x, 0.0)); /* x = diag(A)^{-1} b */
  PetscCall(VecGetArray(x, &xwork));
  PetscCall(VecGetArrayRead(b, &bwork));
  if (red->zerodiag) {
    for (i = 0; i < dcnt; i++) {
      if (diag[i] == 0.0 && bwork[drows[i]] != 0.0) {
        PetscCheck(!pc->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_CONV_FAILED, "Linear system is inconsistent, zero matrix row but nonzero right-hand side");
        PetscCall(PetscInfo(pc, "Linear system is inconsistent, zero matrix row but nonzero right-hand side\n"));
        PetscCall(VecSetInf(x));
        pc->failedreasonrank = PC_INCONSISTENT_RHS;
      }
    }
  }
  for (i = 0; i < dcnt; i++) xwork[drows[i]] = diag[i] * bwork[drows[i]];
  PetscCall(PetscLogFlops(dcnt));
  PetscCall(VecRestoreArray(x, &xwork));
  PetscCall(VecRestoreArrayRead(b, &bwork));
  /* update the right-hand side for the reduced system, with the diagonal rows (and corresponding columns) removed */
  PetscCall(MatMult(pc->pmat, x, red->work));
  PetscCall(VecAYPX(red->work, -1.0, b)); /* red->work = b - A x */

  PetscCall(VecScatterBegin(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(red->scatter, red->work, red->b, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(KSPSolve(red->ksp, red->b, red->x));
  PetscCall(KSPCheckSolve(red->ksp, pc, red->x));
  PetscCall(VecScatterBegin(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(red->scatter, red->x, x, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute  *red  = (PC_Redistribute *)pc->data;
  PC_FieldSplitLink next = red->splitlinks;

  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));

  while (next) {
    PC_FieldSplitLink ilink;
    PetscCall(PetscFree(next->splitname));
    PetscCall(ISDestroy(&next->is));
    ilink = next;
    next  = next->next;
    PetscCall(PetscFree(ilink));
  }
  PetscCall(VecScatterDestroy(&red->scatter));
  PetscCall(ISDestroy(&red->is));
  PetscCall(VecDestroy(&red->b));
  PetscCall(VecDestroy(&red->x));
  PetscCall(KSPDestroy(&red->ksp));
  PetscCall(VecDestroy(&red->work));
  PetscCall(PetscFree(red->drows));
  PetscCall(PetscFree(red->diag));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;

  PetscFunctionBegin;
  PetscCall(KSPSetFromOptions(red->ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCRedistributeGetKSP - Gets the `KSP` created by the `PCREDISTRIBUTE` preconditioner

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. innerksp - the inner `KSP`

  Level: advanced

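  Example Usage:
  A minimal sketch; the inner `KSP` can be configured directly in code instead of via the -redistribute_ options prefix
.vb
  KSP inner;

  PetscCall(PCSetType(pc, PCREDISTRIBUTE));
  PetscCall(PCRedistributeGetKSP(pc, &inner));
  PetscCall(KSPSetType(inner, KSPCG));
.ve
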
.seealso: `KSP`, `PCREDISTRIBUTE`
@*/
PetscErrorCode PCRedistributeGetKSP(PC pc, KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute *)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(innerksp, 2);
  *innerksp = red->ksp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows (and the corresponding columns) that have only a diagonal entry, and then
     applies a `KSP` to the new, smaller matrix

     Level: intermediate

     Notes:
     Options for the inner `KSP` and `PC` use the options database prefix -redistribute_

     Usually run this with `-ksp_type preonly`

     If you have used `MatZeroRows()` to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, `-ksp_type preonly
     -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc` to take advantage of the symmetry.

     Supports `PCFieldSplitSetIS()`; the given index sets (defined on the original matrix) are mapped to the corresponding reduced index sets and passed to an inner `PCFIELDSPLIT`, set with, for example,
     `-ksp_type preonly -pc_type redistribute -redistribute_pc_type fieldsplit`. It does not support the `PCFIELDSPLIT` options database keys. The fields are registered on the outer `PC` as sketched below.

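.vb
     // a minimal sketch; is0 and is1 are index sets for the fields of the original, undistributed matrix
     PetscCall(PCSetType(pc, PCREDISTRIBUTE));
     PetscCall(PCFieldSplitSetIS(pc, "0", is0));
     PetscCall(PCFieldSplitSetIS(pc, "1", is1));
.ve
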
     This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and the redistributed matrix is the same. Rows are moved
     between MPI processes inside the preconditioner to balance the number of rows on each process.

     Developer Note:
     Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PCRedistributeGetKSP()`, `MatZeroRows()`, `PCFieldSplitSetIS()`, `PCFIELDSPLIT`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
{
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  pc->data = (void *)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &red->ksp));
  PetscCall(KSPSetNestLevel(red->ksp, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(red->ksp, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)red->ksp, (PetscObject)pc, 1));
  PetscCall(PCGetOptionsPrefix(pc, &prefix));
  PetscCall(KSPSetOptionsPrefix(red->ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(red->ksp, "redistribute_"));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_Redistribute));
  PetscFunctionReturn(PETSC_SUCCESS);
}