
/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator that hold entries of parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that concatenate all the xsub or ysub copies, facilitating vector scattering */
  Mat                pmats;                /* matrix (also used as the preconditioner matrix) that lives on a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) and back */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators in the PetscSubcomm */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
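
/*
  Layout illustration (not used by the code): with 4 MPI ranks, nsubcomm = 2, and a
  global vector x of size M, PCSetUp_Redundant() builds

    xdup - lives on the "contiguous parent" of psubcomm, global size 2*M; entries
           [0,M) hold subcommunicator 0's copy of x and [M,2*M) subcommunicator 1's copy
    xsub - lives on each 2-rank subcommunicator, size M; during PCApply_Redundant()
           its array is replaced by xdup's local array via VecPlaceArray()

  so each subgroup redundantly solves the full system on its own copy of the data.
*/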

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->ksp) {
    PC pc;
    ierr = KSPGetPC(red->ksp,&pc);CHKERRQ(ierr);
    ierr = PCFactorSetShiftType(pc,shifttype);CHKERRQ(ierr);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  PetscFunctionReturn(0);
}
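
/*
  Usage sketch (illustrative only, assuming a PC already set to PCREDUNDANT): the shift
  type is forwarded to the inner factorization PC once it exists, or cached here until
  PCRedundantGetKSP_Redundant() creates it:

    PCSetType(pc,PCREDUNDANT);
    PCFactorSetShiftType(pc,MAT_SHIFT_NONZERO);   cached, applied to the inner PC later
*/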

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the preconditioner matrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, new ksp and pc over subcomm */
      KSP ksp;
      ierr = PCRedundantGetKSP(pc,&ksp);CHKERRQ(ierr);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);

      ierr = MPI_Comm_size(subcomm,&size);CHKERRQ(ierr);
      if (size > 1) {
        PetscBool foundpack,issbaij;
        ierr = PetscObjectTypeCompare((PetscObject)red->pmats,MATMPISBAIJ,&issbaij);CHKERRQ(ierr);
        if (!issbaij) {
          ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);CHKERRQ(ierr);
        } else {
          ierr = MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_CHOLESKY,&foundpack);CHKERRQ(ierr);
        }
        if (!foundpack) { /* reset default ksp and pc */
          ierr = KSPSetType(red->ksp,KSPGMRES);CHKERRQ(ierr);
          ierr = PCSetType(red->pc,PCBJACOBI);CHKERRQ(ierr);
        } else {
          ierr = PCFactorSetMatSolverType(red->pc,NULL);CHKERRQ(ierr);
        }
      }

      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
         xdup concatenates all the xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
         ydup concatenates all the ysub's and has an empty local array because ysub's array will be placed into it.
         Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,NULL);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
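        /* worked illustration: M = 4 and n = 2 subcommunicators; a rank owning entries
           [mstart,mend) = [2,4) of x builds
             idx1 = {2,3, 2,3}   (each owned entry is sent once per copy)
             idx2 = {2,3, 6,7}   (copy k lives at offset M*k = 4*k within xdup)
           so scatterin duplicates x into every subcommunicator's segment of xdup */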
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* the implementation below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart+red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

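/*
  Data flow of the redundant apply (summarizing the steps below):

    x --scatterin--> xdup --VecPlaceArray--> xsub --KSPSolve--> ysub
      --VecPlaceArray--> ydup --scatterout--> y

  Every subcommunicator receives the whole vector and solves redundantly; each process
  then takes its portion of the solution from its own subcommunicator's copy.
*/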
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    ierr = KSPCheckSolve(red->ksp,pc,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, with 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant solves %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
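
/*
  Usage sketch (illustrative only; assumes pc is already a PCREDUNDANT and the
  program runs on, say, 64 MPI processes):

    PCRedundantSetNumber(pc,4);    four subgroups of 16 processes each;
                                   equivalent to the option -pc_redundant_number 4
*/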

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin  = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;
  PetscBool      issbaij;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
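    /* ranks are assigned to subcommunicators in contiguous blocks by default; this can be
       changed (e.g. to PETSC_SUBCOMM_INTERLACED) via PetscSubcommSetFromOptions() below */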

    ierr = PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);CHKERRQ(ierr);
    ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP and PC of which the processes in each subcomm have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
    if (!issbaij) {
      ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPISBAIJ,&issbaij);CHKERRQ(ierr);
    }
    if (!issbaij) {
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);
    } else {
      ierr = PCSetType(red->pc,PCCHOLESKY);CHKERRQ(ierr);
    }
    if (red->shifttypeset) {
      ierr = PCFactorSetShiftType(red->pc,red->shifttype);CHKERRQ(ierr);
      red->shifttypeset = PETSC_FALSE;
    }
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC; it runs on a smaller,
   less parallel communicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
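
/*
  Usage sketch (illustrative only): customize the inner solver programmatically
  instead of through the -redundant_ksp_xxx / -redundant_pc_xxx options:

    KSP innerksp;
    PC  innerpc;
    PCRedundantGetKSP(pc,&innerksp);
    KSPGetPC(innerksp,&innerpc);
    KSPSetType(innerksp,KSPBCGS);    iterate instead of the default KSPPREONLY
    PCSetType(innerpc,PCILU);        approximate factorization instead of the default PCLU
*/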

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

/*@
   PCRedundantGetOperators - gets the matrix and preconditioner matrix used by the
   redundant solve on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
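
/*
  Usage sketch (illustrative only): after the outer solver has been set up, inspect
  the redundant matrix gathered onto this process's subcommunicator:

    Mat subA;
    PCRedundantGetOperators(pc,&subA,NULL);
    MatView(subA,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)subA)));
*/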

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with its preconditioner for the entire problem, redundantly, on subgroups of processes

     Options for the redundant preconditioner can be set with -redundant_pc_xxx, and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, with 64 MPI processes and
                              an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.
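
   Example Usage:
   A minimal sketch (assumes declarations KSP ksp; PC pc; and an assembled matrix A with vectors b and x):
.vb
   KSPCreate(PETSC_COMM_WORLD,&ksp);
   KSPSetOperators(ksp,A,A);
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCREDUNDANT);
   PCRedundantSetNumber(pc,4);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp,b,x);
.ve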

   Level: intermediate

   Notes:
    The default inner KSP is KSPPREONLY, and the default inner PC is PCLU, or PCCHOLESKY if Pmat is of type MATSBAIJ.

   PCFactorSetShiftType() applied to this PC will convey the shift type to the inner PC if it is factorization based.

   Developer Notes:
    Note that KSPSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}