/*
  This file defines a "solve the problem redundantly on each subgroup of processes" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>           /*I "petscksp.h" I*/

typedef struct {
  KSP          ksp;
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors on a subcommunicator that hold the parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec          xdup,ydup;            /* parallel vectors that concatenate all xsub/ysub to facilitate vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool    useparallelmat;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm;             /* number of subcommunicators (redundant copies of the preconditioner) */
} PC_Redundant;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redundant"
static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (!red->psubcomm) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: Not yet set up\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
      if (!red->psubcomm->color) { /* only view the first redundant pc */
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;
  const char     *prefix;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* if the matrix given by the user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) {
      ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      /* enable runtime switch of the psubcomm type, e.g., '-psubcomm_type interlaced' */
      ierr = PetscSubcommSetFromOptions(red->psubcomm);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

      /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
      subcomm = PetscSubcommChild(red->psubcomm);

      ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
      ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
    } else {
      subcomm = PetscSubcommChild(red->psubcomm);
    }

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);

      /* get working vectors xsub and ysub */
      ierr = MatCreateVecs(red->pmats,&red->xsub,&red->ysub);CHKERRQ(ierr);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced()).
       ydup concatenates all ysub's and has an empty local array because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      ierr = MatGetLocalSize(red->pmats,&mloc_sub,NULL);CHKERRQ(ierr);
      ierr = VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);CHKERRQ(ierr);
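
      /* Layout sketch (illustrative, for n = 2 subcommunicators and global size M):
           xdup on dupcomm = [ copy 0 of x (M entries) | copy 1 of x (M entries) ]
         and each subcommunicator's xsub aliases the local segment of its own copy */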

      /* create vecscatters */
      if (!red->scatterin) { /* the efficiency of scatterin is independent of psubcomm_type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        ierr = MatCreateVecs(pc->pmat,&x,0);CHKERRQ(ierr);
        ierr = VecGetSize(x,&M);CHKERRQ(ierr);
        ierr = VecGetOwnershipRange(x,&mstart,&mend);CHKERRQ(ierr);
        mlocal = mend - mstart;
        ierr = PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);CHKERRQ(ierr);
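        /* idx1[] repeats this process's owned global indices once per subcommunicator;
           idx2[] sends the k-th repetition to the k-th concatenated copy inside xdup */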
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
        ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);

        /* The impl below is good for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ierr = ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*M,1,&is1);CHKERRQ(ierr);
        ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
        ierr = VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);CHKERRQ(ierr);
        ierr = ISDestroy(&is1);CHKERRQ(ierr);
        ierr = ISDestroy(&is2);CHKERRQ(ierr);
        ierr = PetscFree2(idx1,idx2);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
      }
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        ierr  = MatDestroy(&red->pmats);CHKERRQ(ierr);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      ierr = MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);CHKERRQ(ierr);
      ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats);CHKERRQ(ierr);
    } else { /* !red->useparallelmat */
      ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolve(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);
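  /* (VecPlaceArray() aliases the storage, so no values are copied between xdup and xsub) */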

  /* apply preconditioner redundantly on each subcommunicator */
  ierr = KSPSolve(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_Redundant"
static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!red->useparallelmat) {
    ierr = KSPSolveTranspose(red->ksp,x,y);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply transposed preconditioner redundantly on each subcommunicator */
  ierr = KSPSolveTranspose(red->ksp,red->xsub,red->ysub);CHKERRQ(ierr);
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_Redundant"
static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (red->useparallelmat) {
    ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ysub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xsub);CHKERRQ(ierr);
    ierr = VecDestroy(&red->xdup);CHKERRQ(ierr);
    ierr = VecDestroy(&red->ydup);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&red->pmats);CHKERRQ(ierr);
  ierr = KSPReset(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redundant"
static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Redundant(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&red->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redundant"
static PetscErrorCode PCSetFromOptions_Redundant(PetscOptions *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Redundant options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber_Redundant"
static PetscErrorCode  PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  red->nsubcomm = nreds;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetNumber"
/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                use an nredundant of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"number of redundant preconditioners %D must be positive",nredundant);
  ierr = PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
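
/* Usage sketch (hypothetical caller code): split a 64-process run into 4 redundant solves:
     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetNumber(pc,4);CHKERRQ(ierr);
   or, equivalently, from the options database: -pc_type redundant -pc_redundant_number 4 */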

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter_Redundant"
static PetscErrorCode  PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterin);CHKERRQ(ierr);
  red->scatterin = in;

  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&red->scatterout);CHKERRQ(ierr);
  red->scatterout = out;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantSetScatter"
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter used to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(in,VEC_SCATTER_CLASSID,2);
  PetscValidHeaderSpecific(out,VEC_SCATTER_CLASSID,3);
  ierr = PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
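
/* Usage sketch (insc and outsc are placeholder scatters the caller must have built;
   insc must map the global vector onto the concatenated redundant layout and outsc
   must map one redundant copy back, as PCSetUp_Redundant() does by default):
     ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);
     ierr = PCRedundantSetScatter(pc,insc,outsc);CHKERRQ(ierr);
*/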

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP_Redundant"
static PetscErrorCode  PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;

  PetscFunctionBegin;
  if (!red->psubcomm) {
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = PetscSubcommCreate(comm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);CHKERRQ(ierr);
    ierr = PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new KSP/PC of which the processes in each subcommunicator have a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    ierr = KSPCreate(subcomm,&red->ksp);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
    ierr = KSPSetType(red->ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(red->ksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(red->ksp,"redundant_");CHKERRQ(ierr);
  }
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetKSP"
/*@
   PCRedundantGetKSP - Gets the KSP created by the redundant PC to perform the solve on the smaller set of processes.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  ierr = PetscTryMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
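
/* Usage sketch (hypothetical caller code): customize the inner redundant solve, e.g. switch
   the default LU factorization to Cholesky for a symmetric problem:
     KSP innerksp;
     PC  innerpc;
     ierr = PCRedundantGetKSP(pc,&innerksp);CHKERRQ(ierr);
     ierr = KSPGetPC(innerksp,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCCHOLESKY);CHKERRQ(ierr);
   the same effect is available at runtime via -redundant_pc_type cholesky */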

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators_Redundant"
static PetscErrorCode  PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  PetscFunctionBegin;
  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedundantGetOperators"
/*@
   PCRedundantGetOperators - Gets the matrix and preconditioner matrix used by the redundant solve on each subcommunicator (sequential when each subcommunicator has a single process)

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
PetscErrorCode  PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (mat)  PetscValidPointer(mat,2);
  if (pmat) PetscValidPointer(pmat,3);
  ierr = PetscTryMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
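
/* Usage sketch (hypothetical caller code): inspect the redundant operator after PCSetUp();
   either output argument may be NULL if it is not needed:
     Mat Ared;
     ierr = PCRedundantGetOperators(pc,&Ared,NULL);CHKERRQ(ierr);
     ierr = MatView(Ared,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)Ared)));CHKERRQ(ierr);
*/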

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processes

     Options for the redundant preconditioners can be set with -redundant_pc_xxx and for the redundant KSP with -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              use an n of 4 there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes: The default KSP is preonly and the default PC is LU.

   Developer Notes: PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
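
/* Usage sketch: run a redundant direct solve on 4 subgroups of a 64-rank job
   (./app stands for a placeholder application executable):
     mpiexec -n 64 ./app -ksp_type gmres -pc_type redundant -pc_redundant_number 4 \
         -redundant_ksp_type preonly -redundant_pc_type lu
*/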

#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redundant"
PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PetscErrorCode ierr;
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&red);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}