xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision aed5ffcbc31e41b96407a92d0c19d6464f10f960)
1 
/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
5 #include <petsc-private/pcimpl.h>
6 #include <petscksp.h>           /*I "petscksp.h" I*/
7 
/* Private context for the PCREDUNDANT preconditioner */
typedef struct {
  KSP          ksp;                  /* inner solver run redundantly on each subcommunicator (default KSPPREONLY, see PCSetUp_Redundant()) */
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;      /* PETSC_FALSE when the communicator has a single process (see PCSetUp_Redundant()) */
  PetscSubcomm psubcomm;            /* description of how the original communicator is split */
  PetscInt     nsubcomm;            /* number of subcommunicators (redundant solves); defaults to the communicator size in PCCreate_Redundant() */
} PC_Redundant;
19 
20 #undef __FUNCT__
21 #define __FUNCT__ "PCView_Redundant"
22 static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
23 {
24   PC_Redundant   *red = (PC_Redundant*)pc->data;
25   PetscErrorCode ierr;
26   PetscBool      iascii,isstring;
27   PetscViewer    subviewer;
28 
29   PetscFunctionBegin;
30   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
31   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
32   if (iascii) {
33     if (!red->psubcomm) {
34       ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet setup\n");CHKERRQ(ierr);
35     } else {
36       ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
37       ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
38       if (!red->psubcomm->color) { /* only view first redundant pc */
39         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
40         ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
41         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
42       }
43       ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
44     }
45   } else if (isstring) {
46     ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
47   }
48   PetscFunctionReturn(0);
49 }
50 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
/* Sets up the redundant preconditioner.  On the first call this splits the
   PC's communicator into nsubcomm subcommunicators, creates the inner KSP
   (default KSPPREONLY with PCLU), gathers the parallel matrix redundantly
   onto each subcommunicator, and builds the scatters moving vector entries
   between the original layout and the duplicated layout.  On later calls only
   the redundant matrix is refreshed. */
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      /* psubcomm may already exist if PCRedundantGetKSP() was called first */
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      /* enable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new PC that processors in each subcomm have copy of */
      subcomm = red->psubcomm->comm;

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      /* the inner objects pick up options via "<outer prefix>redundant_" */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = red->psubcomm->comm;
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatGetVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form a mpi vector over dupcomm  (see PetscSubcommCreate_interlaced())
       ydup concatenates all ysub and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent from psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatGetVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        /* each of the n subcommunicators receives a full copy of the locally owned entries */
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,PetscInt,&idx1,red->psubcomm->n*mlocal,PetscInt,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k; /* the k-th duplicate occupies offset M*k in xdup */
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* Impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into processors of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices; a new sparsity pattern means the gathered matrix cannot be reused in place */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,pc->flag);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
173 
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
/* Applies the redundant preconditioner: scatters the global vector x into the
   duplicated vector xdup (one full copy per subcommunicator), solves with the
   inner KSP on each subcommunicator, then scatters each rank's share of its
   own subcommunicator's solution back into the global vector y. */
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    /* single-process case: no gathering/scattering needed, solve directly */
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub -- avoids copying the data */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  /* reset the borrowed array before restoring it to its owner */
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup (ydup was created with a NULL array in PCSetUp_Redundant()) */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
212 
213 #undef __FUNCT__
214 #define __FUNCT__ "PCReset_Redundant"
215 static PetscErrorCode PCReset_Redundant(PC pc)
216 {
217   PC_Redundant   *red = (PC_Redundant*)pc->data;
218   PetscErrorCode ierr;
219 
220   PetscFunctionBegin;
221   if (red->useparallelmat) {
222     ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
223     ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
224     ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
225     ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
226     ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
227     ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
228   }
229   ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
230   ierr = KSPReset(red->ksp);CHKERRQ(ierr);
231   PetscFunctionReturn(0);
232 }
233 
234 #undef __FUNCT__
235 #define __FUNCT__ "PCDestroy_Redundant"
236 static PetscErrorCode PCDestroy_Redundant(PC pc)
237 {
238   PC_Redundant   *red = (PC_Redundant*)pc->data;
239   PetscErrorCode ierr;
240 
241   PetscFunctionBegin;
242   ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
243   ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
244   ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
245   ierr = PetscFree(pc->data);CHKERRQ(ierr);
246   PetscFunctionReturn(0);
247 }
248 
249 #undef __FUNCT__
250 #define __FUNCT__ "PCSetFromOptions_Redundant"
251 static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
252 {
253   PetscErrorCode ierr;
254   PC_Redundant   *red = (PC_Redundant*)pc->data;
255 
256   PetscFunctionBegin;
257   ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
258   ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
259   ierr = PetscOptionsTail();CHKERRQ(ierr);
260   PetscFunctionReturn(0);
261 }
262 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
/* Implementation for PCRedundantSetNumber(): records the requested number of
   redundant subcommunicators; takes effect at the next PCSetUp_Redundant(). */
static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}
273 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
                              use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* reject non-positive counts before dispatching to the type-specific implementation */
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
300 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
/* Implementation for PCRedundantSetScatter(): stores user-provided scatters,
   taking a reference to each.  The new scatter is referenced BEFORE the old
   one is destroyed so that passing in the currently stored scatter does not
   free it prematurely. */
static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);

  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}
319 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
349 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
/* Implementation for PCRedundantGetKSP(): returns the inner KSP, creating it
   (together with the PetscSubcomm) on demand so callers can configure the
   inner solver before PCSetUp() runs.
   NOTE(review): this creation path duplicates the one in PCSetUp_Redundant()
   but uses PETSC_SUBCOMM_INTERLACED and skips PetscSubcommSetFromOptions(),
   whereas PCSetUp_Redundant() uses PETSC_SUBCOMM_CONTIGUOUS with a runtime
   switch -- confirm this divergence is intentional. */
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = red->psubcomm->comm;

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    /* the inner objects pick up options via "<outer prefix>redundant_" */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
384 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the less parallel KSP created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
412 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
/* Implementation for PCRedundantGetOperators(): both outputs refer to the same
   redundant matrix red->pmats, which is created by PCSetUp_Redundant() only on
   the parallel-matrix path (it is never created when useparallelmat is false). */
static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}
424 
#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* either output may be NULL if the caller does not want it */
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
454 
455 /* -------------------------------------------------------------------------------------*/
456 /*MC
457      PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors
458 
     Options for the redundant preconditioners can be set with -redundant_pc_xxx; options for the redundant KSP can be set with -redundant_ksp_xxx
460 
461   Options Database:
462 .  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
463                               use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
464 
465    Level: intermediate
466 
467    Notes: The default KSP is preonly and the default PC is LU.
468 
469    Developer Notes: Note that PCSetInitialGuessNonzero()  is not used by this class but likely should be.
470 
471 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
472            PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
473 M*/
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "PCCreate_Redundant"
477 PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
478 {
479   PetscErrorCode ierr;
480   PC_Redundant   *red;
481   PetscMPIInt    size;
482 
483   PetscFunctionBegin;
484   ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
485   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
486 
487   red->nsubcomm       = size;
488   red->useparallelmat = PETSC_TRUE;
489   pc->data            = (void*)red;
490 
491   pc->ops->apply          = PCApply_Redundant;
492   pc->ops->applytranspose = 0;
493   pc->ops->setup          = PCSetUp_Redundant;
494   pc->ops->destroy        = PCDestroy_Redundant;
495   pc->ops->reset          = PCReset_Redundant;
496   pc->ops->setfromoptions = PCSetFromOptions_Redundant;
497   pc->ops->view           = PCView_Redundant;
498 
499   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
500   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
501   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
502   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
503   PetscFunctionReturn(0);
504 }
505 
506