xref: /petsc/src/ksp/pc/impls/redundant/redundant.c (revision 7d6bfa3b9d7db0ccd4cc481237114ca8dbb0dbff)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines a "solve the problem redundantly on each subgroup of processor" preconditioner.
5 */
6 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
7 #include "petscksp.h"
8 
/* Private context for the PCREDUNDANT preconditioner */
typedef struct {
  KSP          ksp;                  /* solver run redundantly on each subcommunicator */
  PC           pc;                   /* actual preconditioner used on each processor */
  Vec          xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of ((PetscObject)pc)->comm */
  Vec          xdup,ydup;            /* parallel vector that congregates xsub or ysub facilitating vector scattering */
  Mat          pmats;                /* matrix and optional preconditioner matrix belong to a subcommunicator */
  VecScatter   scatterin,scatterout; /* scatter used to move all values to each processor group (subcommunicator) */
  PetscTruth   useparallelmat;       /* gather a redundant copy of the parallel matrix; forced off on one process */
  PetscSubcomm psubcomm;             /* describes how the processes are split into subcommunicators */
  PetscInt     nsubcomm;             /* num of data structure PetscSubcomm */
} PC_Redundant;
20 
21 #undef __FUNCT__
22 #define __FUNCT__ "PCView_Redundant"
23 static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
24 {
25   PC_Redundant   *red = (PC_Redundant*)pc->data;
26   PetscErrorCode ierr;
27   PetscMPIInt    rank;
28   PetscTruth     iascii,isstring;
29   PetscViewer    sviewer,subviewer;
30   PetscInt       color = red->psubcomm->color;
31 
32   PetscFunctionBegin;
33   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
34   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
35   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
36   if (iascii) {
37     ierr = PetscViewerASCIIPrintf(viewer,"  Redundant preconditioner: First (color=0) of %D PCs follows\n",red->nsubcomm);CHKERRQ(ierr);
38     ierr = PetscViewerGetSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
39     if (!color) { /* only view first redundant pc */
40       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
41       ierr = KSPView(red->ksp,subviewer);CHKERRQ(ierr);
42       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
43     }
44     ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)red->pc)->comm,&subviewer);CHKERRQ(ierr);
45   } else if (isstring) { /* not test it yet! */
46     ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
47     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
48     if (!rank) {
49       ierr = KSPView(red->ksp,sviewer);CHKERRQ(ierr);
50     }
51     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
#include "private/matimpl.h"        /*I "petscmat.h" I*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redundant"
/*
   PCSetUp_Redundant - On the first call, splits the communicator into
   nsubcomm subcommunicators, creates the inner KSP/PC (preonly+LU by
   default), the work vectors xsub/ysub (subcommunicator layout) and
   xdup/ydup (duplicated-parent layout), and the two scatters between the
   global vector and xdup/ydup.  On every call, (re)gathers the redundant
   copy of the matrix on each subcommunicator and sets up the inner KSP.
*/
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscInt       mstart,mend,mlocal,m,mlocal_sub,rstart_sub,rend_sub,mloc_sub;
  PetscMPIInt    size;
  MatReuse       reuse = MAT_INITIAL_MATRIX;
  MatStructure   str = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm       comm = ((PetscObject)pc)->comm,subcomm;
  Vec            vec;
  PetscMPIInt    subsize,subrank;
  const char     *prefix;
  KSP            subksp;

  PetscFunctionBegin;
  /* template vector with the layout of the preconditioner matrix */
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  ierr = VecGetSize(vec,&m);CHKERRQ(ierr);

  if (!pc->setupcalled) {
    ierr = PetscSubcommCreate(comm,red->nsubcomm,&red->psubcomm);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);

    /* create a new PC that processors in each subcomm have copy of */
    subcomm = red->psubcomm->comm;
    ierr = KSPCreate(subcomm,&subksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)subksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,subksp);CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(subksp,&red->pc);CHKERRQ(ierr);
    ierr = PCSetType(red->pc,PCLU);CHKERRQ(ierr);

    /* inner solver options use the outer prefix plus "redundant_" */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(subksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(subksp,"redundant_");CHKERRQ(ierr);
    red->ksp = subksp;

    /* create working vectors xsub/ysub and xdup/ydup */
    ierr = VecGetLocalSize(vec,&mlocal);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&mstart,&mend);CHKERRQ(ierr);

    /* get local size of xsub/ysub */
    ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
    /* NOTE(review): assumes subrank j of each subcomm corresponds to global
       rank n*j in pc->pmat's ownership ranges — verify against the rank
       ordering produced by PetscSubcommCreate */
    rstart_sub = pc->pmat->rmap->range[red->psubcomm->n*subrank]; /* rstart in xsub/ysub */
    if (subrank+1 < subsize){
      rend_sub = pc->pmat->rmap->range[red->psubcomm->n*(subrank+1)];
    } else {
      rend_sub = m;
    }
    mloc_sub = rend_sub - rstart_sub;
    ierr = VecCreateMPI(subcomm,mloc_sub,PETSC_DECIDE,&red->ysub);CHKERRQ(ierr);
    /* create xsub with empty local arrays, because xdup's arrays will be placed into it */
    ierr = VecCreateMPIWithArray(subcomm,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->xsub);CHKERRQ(ierr);

    /* create xdup and ydup. ydup has empty local arrays because ysub's arrays will be place into it.
       Note: we use communicator dupcomm, not ((PetscObject)pc)->comm! */
    ierr = VecCreateMPI(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,&red->xdup);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(red->psubcomm->dupparent,mloc_sub,PETSC_DECIDE,PETSC_NULL,&red->ydup);CHKERRQ(ierr);

    /* create vec scatters */
    if (!red->scatterin){
      IS       is1,is2;
      PetscInt *idx1,*idx2,i,j,k;

      /* scatterin replicates the global vector into each of the n copies in xdup:
         local entry i goes to i + m*k for every subcommunicator k */
      ierr = PetscMalloc(2*red->psubcomm->n*mlocal*sizeof(PetscInt),&idx1);CHKERRQ(ierr);
      idx2 = idx1 + red->psubcomm->n*mlocal;
      j = 0;
      for (k=0; k<red->psubcomm->n; k++){
        for (i=mstart; i<mend; i++){
          idx1[j]   = i;
          idx2[j++] = i + m*k;
        }
      }
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,&is1);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,is1,red->xdup,is2,&red->scatterin);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);

      /* scatterout pulls this process's rows back from its own subcommunicator's copy */
      ierr = ISCreateStride(comm,mlocal,mstart+ red->psubcomm->color*m,1,&is1);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,mlocal,mstart,1,&is2);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->xdup,is1,vec,is2,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(is1);CHKERRQ(ierr);
      ierr = ISDestroy(is2);CHKERRQ(ierr);
      ierr = PetscFree(idx1);CHKERRQ(ierr); /* idx2 lives in the same allocation */
    }
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);

  /* if pmatrix set by user is sequential then we do not need to gather the parallel matrix */
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats) {
        ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      /* same nonzero pattern: reuse the previously gathered matrix */
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /* grab the parallel matrix and put it into processors of a subcomminicator */
    /*--------------------------------------------------------------------------*/
    ierr = VecGetLocalSize(red->ysub,&mlocal_sub);CHKERRQ(ierr);
    ierr = MatGetRedundantMatrix(pc->pmat,red->psubcomm->n,red->psubcomm->comm,mlocal_sub,reuse,&red->pmats);CHKERRQ(ierr);
    /* tell PC of the subcommunicator its operators */
    ierr = KSPSetOperators(red->ksp,red->pmats,red->pmats,str);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(red->ksp,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  if (pc->setfromoptionscalled){
    ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  }
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
182 
#undef __FUNCT__
#define __FUNCT__ "PCApply_Redundant"
/*
   PCApply_Redundant - Applies the preconditioner: replicate x into xdup on
   every subcommunicator, apply the inner PC there, then scatter this
   process's slice of the result back into y.  xsub and ydup were created
   with NULL arrays, so xdup's/ysub's local arrays are temporarily placed
   into them (and must be reset before the arrays are restored).
*/
static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* scatter x to xdup */
  ierr = VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* place xdup's local array into xsub */
  ierr = VecGetArray(red->xdup,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->xsub,(const PetscScalar*)array);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->xsub,red->ysub);CHKERRQ(ierr);
  /* reset before restoring: xsub must not keep a pointer into xdup */
  ierr = VecResetArray(red->xsub);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->xdup,&array);CHKERRQ(ierr);

  /* place ysub's local array into ydup */
  ierr = VecGetArray(red->ysub,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(red->ydup,(const PetscScalar*)array);CHKERRQ(ierr);

  /* scatter ydup to y */
  ierr = VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecResetArray(red->ydup);CHKERRQ(ierr);
  ierr = VecRestoreArray(red->ysub,&array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
216 
217 #undef __FUNCT__
218 #define __FUNCT__ "PCDestroy_Redundant"
219 static PetscErrorCode PCDestroy_Redundant(PC pc)
220 {
221   PC_Redundant   *red = (PC_Redundant*)pc->data;
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
226   if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
227   if (red->ysub)       {ierr = VecDestroy(red->ysub);CHKERRQ(ierr);}
228   if (red->xsub)       {ierr = VecDestroy(red->xsub);CHKERRQ(ierr);}
229   if (red->xdup)       {ierr = VecDestroy(red->xdup);CHKERRQ(ierr);}
230   if (red->ydup)       {ierr = VecDestroy(red->ydup);CHKERRQ(ierr);}
231   if (red->pmats) {
232     ierr = MatDestroy(red->pmats);CHKERRQ(ierr);
233   }
234   if (red->psubcomm) {ierr = PetscSubcommDestroy(red->psubcomm);CHKERRQ(ierr);}
235   if (red->ksp) {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
236   ierr = PetscFree(red);CHKERRQ(ierr);
237   PetscFunctionReturn(0);
238 }
239 
240 #undef __FUNCT__
241 #define __FUNCT__ "PCSetFromOptions_Redundant"
242 static PetscErrorCode PCSetFromOptions_Redundant(PC pc)
243 {
244   PetscErrorCode ierr;
245   PC_Redundant   *red = (PC_Redundant*)pc->data;
246 
247   PetscFunctionBegin;
248   ierr = PetscOptionsHead("Redundant options");CHKERRQ(ierr);
249   ierr = PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);CHKERRQ(ierr);
250   ierr = PetscOptionsTail();CHKERRQ(ierr);
251   PetscFunctionReturn(0);
252 }
253 
254 EXTERN_C_BEGIN
255 #undef __FUNCT__
256 #define __FUNCT__ "PCRedundantSetNumber_Redundant"
257 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
258 {
259   PC_Redundant *red = (PC_Redundant*)pc->data;
260 
261   PetscFunctionBegin;
262   red->nsubcomm = nreds;
263   PetscFunctionReturn(0);
264 }
265 EXTERN_C_END
266 
267 #undef __FUNCT__
268 #define __FUNCT__ "PCRedundantSetNumber"
269 /*@
270    PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.
271 
272    Collective on PC
273 
274    Input Parameters:
275 +  pc - the preconditioner context
276 -  nredundant - number of redundant preconditioner contexts; for example if you are using 64 MPI processes and
277                               use an nredundant of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
278 
279    Level: advanced
280 
281 .keywords: PC, redundant solve
282 @*/
283 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetNumber(PC pc,PetscInt nredundant)
284 {
285   PetscErrorCode ierr,(*f)(PC,PetscInt);
286 
287   PetscFunctionBegin;
288   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
289   if (nredundant <= 0) SETERRQ1(PETSC_ERR_ARG_WRONG, "num of redundant pc %D must be positive",nredundant);
290   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetNumber_C",(void (**)(void))&f);CHKERRQ(ierr);
291   if (f) {
292     ierr = (*f)(pc,nredundant);CHKERRQ(ierr);
293   }
294   PetscFunctionReturn(0);
295 }
296 
297 EXTERN_C_BEGIN
298 #undef __FUNCT__
299 #define __FUNCT__ "PCRedundantSetScatter_Redundant"
300 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
301 {
302   PC_Redundant   *red = (PC_Redundant*)pc->data;
303   PetscErrorCode ierr;
304 
305   PetscFunctionBegin;
306   ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
307   if (red->scatterin) { ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr); }
308   red->scatterin  = in;
309   ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
310   if (red->scatterout) { ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr); }
311   red->scatterout = out;
312   PetscFunctionReturn(0);
313 }
314 EXTERN_C_END
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "PCRedundantSetScatter"
318 /*@
319    PCRedundantSetScatter - Sets the scatter used to copy values into the
320      redundant local solve and the scatter to move them back into the global
321      vector.
322 
323    Collective on PC
324 
325    Input Parameters:
326 +  pc - the preconditioner context
327 .  in - the scatter to move the values in
328 -  out - the scatter to move them out
329 
330    Level: advanced
331 
332 .keywords: PC, redundant solve
333 @*/
334 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
335 {
336   PetscErrorCode ierr,(*f)(PC,VecScatter,VecScatter);
337 
338   PetscFunctionBegin;
339   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
340   PetscValidHeaderSpecific(in,VEC_SCATTER_COOKIE,2);
341   PetscValidHeaderSpecific(out,VEC_SCATTER_COOKIE,3);
342   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)(void))&f);CHKERRQ(ierr);
343   if (f) {
344     ierr = (*f)(pc,in,out);CHKERRQ(ierr);
345   }
346   PetscFunctionReturn(0);
347 }
348 
349 EXTERN_C_BEGIN
350 #undef __FUNCT__
351 #define __FUNCT__ "PCRedundantGetPC_Redundant"
352 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
353 {
354   PC_Redundant *red = (PC_Redundant*)pc->data;
355 
356   PetscFunctionBegin;
357   *innerpc = red->pc;
358   PetscFunctionReturn(0);
359 }
360 EXTERN_C_END
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "PCRedundantGetPC"
364 /*@
365    PCRedundantGetPC - Gets the sequential PC created by the redundant PC.
366 
367    Not Collective
368 
369    Input Parameter:
370 .  pc - the preconditioner context
371 
372    Output Parameter:
373 .  innerpc - the sequential PC
374 
375    Level: advanced
376 
377 .keywords: PC, redundant solve
378 @*/
379 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetPC(PC pc,PC *innerpc)
380 {
381   PetscErrorCode ierr,(*f)(PC,PC*);
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
385   PetscValidPointer(innerpc,2);
386   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)(void))&f);CHKERRQ(ierr);
387   if (f) {
388     ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
389   }
390   PetscFunctionReturn(0);
391 }
392 
393 EXTERN_C_BEGIN
394 #undef __FUNCT__
395 #define __FUNCT__ "PCRedundantGetOperators_Redundant"
396 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
397 {
398   PC_Redundant *red = (PC_Redundant*)pc->data;
399 
400   PetscFunctionBegin;
401   if (mat)  *mat  = red->pmats;
402   if (pmat) *pmat = red->pmats;
403   PetscFunctionReturn(0);
404 }
405 EXTERN_C_END
406 
407 #undef __FUNCT__
408 #define __FUNCT__ "PCRedundantGetOperators"
409 /*@
410    PCRedundantGetOperators - gets the sequential matrix and preconditioner matrix
411 
412    Not Collective
413 
414    Input Parameter:
415 .  pc - the preconditioner context
416 
417    Output Parameters:
418 +  mat - the matrix
419 -  pmat - the (possibly different) preconditioner matrix
420 
421    Level: advanced
422 
423 .keywords: PC, redundant solve
424 @*/
425 PetscErrorCode PETSCKSP_DLLEXPORT PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
426 {
427   PetscErrorCode ierr,(*f)(PC,Mat*,Mat*);
428 
429   PetscFunctionBegin;
430   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
431   if (mat)  PetscValidPointer(mat,2);
432   if (pmat) PetscValidPointer(pmat,3);
433   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)(void))&f);CHKERRQ(ierr);
434   if (f) {
435     ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
436   }
437   PetscFunctionReturn(0);
438 }
439 
440 /* -------------------------------------------------------------------------------------*/
441 /*MC
442      PCREDUNDANT - Runs a preconditioner for the entire problem on subgroups of processors
443 
     Options for the redundant solves can be set with -redundant_ksp_xxx and -redundant_pc_xxx
445 
446   Options Database:
447 .  -pc_redundant_number <n> - number of redundant solves, for example if you are using 64 MPI processes and
448                               use an n of 4 there will be 4 parallel solves each on 16 = 64/4 processes.
449 
450    Level: intermediate
451 
452 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
453            PCRedundantGetPC(), PCRedundantGetOperators(), PCRedundantSetNumber()
454 M*/
455 
456 EXTERN_C_BEGIN
457 #undef __FUNCT__
458 #define __FUNCT__ "PCCreate_Redundant"
459 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Redundant(PC pc)
460 {
461   PetscErrorCode ierr;
462   PC_Redundant   *red;
463   PetscMPIInt    size;
464 
465   PetscFunctionBegin;
466   ierr = PetscNewLog(pc,PC_Redundant,&red);CHKERRQ(ierr);
467   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
468   red->nsubcomm       = size;
469   red->useparallelmat = PETSC_TRUE;
470   pc->data            = (void*)red;
471 
472   pc->ops->apply           = PCApply_Redundant;
473   pc->ops->applytranspose  = 0;
474   pc->ops->setup           = PCSetUp_Redundant;
475   pc->ops->destroy         = PCDestroy_Redundant;
476   pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
477   pc->ops->view            = PCView_Redundant;
478   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
479                     PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
480   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetNumber_C","PCRedundantSetNumber_Redundant",
481                     PCRedundantSetNumber_Redundant);CHKERRQ(ierr);
482   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
483                     PCRedundantGetPC_Redundant);CHKERRQ(ierr);
484   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
485                     PCRedundantGetOperators_Redundant);CHKERRQ(ierr);
486   PetscFunctionReturn(0);
487 }
488 EXTERN_C_END
489