xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision dde8958d96c09bc677f093c6b71afab400f91c19)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* BibTeX entry for the PCTELESCOPE paper; registered once per run via
   PetscCitationsRegister() in PCApply_Telescope(). `cited` guards against
   duplicate registration. */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
28 
29 /*
30  default setup mode
31 
32  [1a] scatter to (FORWARD)
33  x(comm) -> xtmp(comm)
34  [1b] local copy (to) ranks with color = 0
35  xred(subcomm) <- xtmp
36 
37  [2] solve on sub KSP to obtain yred(subcomm)
38 
39  [3a] local copy (from) ranks with color = 0
40  yred(subcomm) --> xtmp
41  [3b] scatter from (REVERSE)
42  xtmp(comm) -> y(comm)
43 */
44 
45 /*
46   Collective[comm_f]
47   Notes
48    * Using comm_f = MPI_COMM_NULL will result in an error
49    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
50    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
51 */
/*
  Determine whether comm_c may be treated as a valid sub-communicator of comm_f.
  Collective on comm_f; the answer (*isvalid) is identical on every rank of comm_f
  (it is produced by an MPI_MIN reduction of per-rank validity flags).
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1; /* local validity flag; combined across ranks via MPI_MIN below */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRMPI(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRMPI(ierr);
  }

  /* check not all comm_c's are NULL (size_c stays 0 on ranks where comm_c is NULL) */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRMPI(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  /* Pre-fill ranks_f with MPI_UNDEFINED: MPI_Group_translate_ranks() only writes the
     first size_c entries, so untouched entries count as "not mapped" in the scan below */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* no rank of comm_c could be mapped into comm_f => invalid on this rank */
  if (count == size_f) valid = 0;

  /* MIN reduction: a single invalid rank invalidates the subcomm everywhere */
  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRMPI(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}
114 
115 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
116 {
117   DM subdm = NULL;
118 
119   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
120   else {
121     switch (sred->sr_type) {
122     case TELESCOPE_DEFAULT: subdm = NULL;
123       break;
124     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
125       break;
126     case TELESCOPE_DMPLEX:  subdm = NULL;
127       break;
128     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
129       break;
130     }
131   }
132   return(subdm);
133 }
134 
/*
  Default setup: builds the machinery used to move vector data between the
  parent communicator and the PetscSubcomm child communicator:
   - xred/yred : rhs/solution vectors on the sub-communicator (active ranks only)
   - xtmp      : parent-comm vector whose local layout mirrors xred
   - scatter   : VecScatter mapping x (parent comm) onto xtmp
  These objects are stashed in sred and used by PCApply_Telescope().
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* query the preconditioning operator for global size, block size and a template vector */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: vector of the same global size M, distributed over the sub-communicator */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-comm vector; local size m matches xred on active ranks and is 0
     on inactive ranks, so local copies xtmp <-> xred need no communication */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* index set selecting, on each rank, the slice of x that lands in this rank's xtmp;
     inactive ranks contribute an empty stride */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* hand ownership of all created objects to the PC context */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
194 
/*
  Default operator creation/update: extracts this rank's rows of B (all columns)
  as a sequential matrix, then concatenates those pieces into a parallel matrix
  Bred distributed over the sub-communicator. With reuse == MAT_REUSE_MATRIX the
  existing *A is updated in place; otherwise a new matrix is created.
  Collective on the parent communicator (MatCreateSubMatrices is collective);
  inactive ranks return *A = NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc,bs;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* rows: the slice computed in PCTelescopeSetUp_default(); columns: everything */
  isrow = sred->isin;
  ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
  ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
  /* preserve the column block size of B on the extracted submatrix */
  ierr = MatGetBlockSizes(B,NULL,&bs);CHKERRQ(ierr);
  ierr = ISSetBlockSize(iscol,bs);CHKERRQ(ierr);
  ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  /* keep the single extracted SeqAIJ matrix, free only the array wrapper */
  Blocal = *_Blocal;
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    /* when reusing, concatenate into the previously created matrix */
    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
233 
/*
  Clone a (near) nullspace of the parent operator onto the sub-communicator.
  Each nullspace vector is scattered through sred->scatter into sred->xtmp and
  then copied locally into a duplicate of sred->xred. Collective on the parent
  communicator; *sub_nullspace is created on active ranks only.
  Custom remove() callbacks cannot be propagated and trigger an error.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* allocate sub-communicator copies of the nullspace vectors (active ranks only) */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries; the scatter is collective, so all ranks participate in the loop */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; xtmp's local layout matches sub_vecs[k]'s
         (established in PCTelescopeSetUp_default), so a direct array copy suffices */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-provided remove callbacks/contexts cannot be transferred to the subcomm */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
286 
287 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
288 {
289   PetscErrorCode ierr;
290   Mat            B;
291 
292   PetscFunctionBegin;
293   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
294   /* Propagate the nullspace if it exists */
295   {
296     MatNullSpace nullspace,sub_nullspace;
297     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
298     if (nullspace) {
299       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
300       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
301       if (PCTelescope_isActiveRank(sred)) {
302         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
303         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
304       }
305     }
306   }
307   /* Propagate the near nullspace if it exists */
308   {
309     MatNullSpace nearnullspace,sub_nearnullspace;
310     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
311     if (nearnullspace) {
312       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
313       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
314       if (PCTelescope_isActiveRank(sred)) {
315         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
316         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
317       }
318     }
319   }
320   PetscFunctionReturn(0);
321 }
322 
323 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
324 {
325   PC_Telescope   sred = (PC_Telescope)pc->data;
326   PetscErrorCode ierr;
327   PetscBool      iascii,isstring;
328   PetscViewer    subviewer;
329 
330   PetscFunctionBegin;
331   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
332   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
333   if (iascii) {
334     {
335       MPI_Comm    comm,subcomm;
336       PetscMPIInt comm_size,subcomm_size;
337       DM          dm = NULL,subdm = NULL;
338 
339       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
340       subdm = private_PCTelescopeGetSubDM(sred);
341 
342       if (sred->psubcomm) {
343         comm = PetscSubcommParent(sred->psubcomm);
344         subcomm = PetscSubcommChild(sred->psubcomm);
345         ierr = MPI_Comm_size(comm,&comm_size);CHKERRMPI(ierr);
346         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRMPI(ierr);
347 
348         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
349         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
350         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
351         switch (sred->subcommtype) {
352         case PETSC_SUBCOMM_INTERLACED :
353           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
354           break;
355         case PETSC_SUBCOMM_CONTIGUOUS :
356           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
357           break;
358         default :
359           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
360         }
361         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
362       } else {
363         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
364         subcomm = sred->subcomm;
365         if (!PCTelescope_isActiveRank(sred)) {
366           subcomm = PETSC_COMM_SELF;
367         }
368 
369         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
370         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
371         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
372       }
373 
374       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
375       if (PCTelescope_isActiveRank(sred)) {
376         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
377 
378         if (dm && sred->ignore_dm) {
379           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
380         }
381         if (sred->ignore_kspcomputeoperators) {
382           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
383         }
384         switch (sred->sr_type) {
385         case TELESCOPE_DEFAULT:
386           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
387           break;
388         case TELESCOPE_DMDA:
389           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
390           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
391           break;
392         case TELESCOPE_DMPLEX:
393           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
394           break;
395         case TELESCOPE_COARSEDM:
396           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
397           break;
398         }
399 
400         if (dm) {
401           PetscObject obj = (PetscObject)dm;
402           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
403           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
404           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
405           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
406           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
407           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
408           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
409         } else {
410           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
411         }
412         if (subdm) {
413           PetscObject obj = (PetscObject)subdm;
414           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
415           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
416           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
417           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
418           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
419           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
420           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
421         } else {
422           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
423         }
424 
425         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
426         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
427       }
428       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
429     }
430   }
431   PetscFunctionReturn(0);
432 }
433 
/*
  Setup/update for PCTELESCOPE. On first call this determines the setup mode
  (default / DMDA / DMPLEX / coarse DM) from the attached DM and user options,
  installs the mode-specific callbacks, builds the sub-communicator and inner
  KSP, and creates the redundant operator. On subsequent calls only the
  redundant operator is refreshed (MAT_REUSE_MATRIX).
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* -pc_telescope_use_coarse_dm overrides the detected DM type */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      /* -pc_telescope_ignore_dm forces the operator-only (default) path; checked last so it wins */
      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* PetscSubcomm-based reduction of the parent communicator */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* count how many ranks actually carry a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRMPI(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRMPI(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRMPI(ierr);
      }

      /* the coarse DM must live on a strictly smaller communicator than the fine DM */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRMPI(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      /* inner solver options are namespaced under -telescope_ */
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: create the redundant operator on first setup, reuse it afterwards */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
596 
/*
  Apply the telescoped preconditioner (default mode), following the algorithm
  sketched at the top of this file:
   [1] scatter x (parent comm) into xtmp, then copy locally into xred (subcomm)
   [2] solve yred = M^{-1} xred with the sub-communicator KSP (active ranks only)
   [3] copy yred locally back into xtmp, then reverse-scatter into y (parent comm)
  The local copies rely on xtmp and xred/yred sharing the same local layout,
  as arranged in PCTelescopeSetUp_default().
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred (xred is NULL on inactive ranks, where xtmp has 0 local entries) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
652 
/*
  Single-iteration Richardson application. When the initial guess y is non-zero,
  y is scattered onto the sub-communicator and loaded into yred so the inner KSP
  can use it as its initial guess; the KSP's initial-guess flag is temporarily
  forced on and restored afterwards. Only its == 1 is supported.
*/
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value; /* saved KSP flag; set/used on active ranks only */

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
    /* pull in vector y->xtmp */
    ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* copy vector entries into xred (yred is NULL on inactive ranks) */
    ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
    if (yred) {
      PetscScalar *LA_yred;
      ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
      ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
    }
    ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  }

  /* temporarily force the non-zero initial guess on the inner KSP */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
    if (!zeroguess) {ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);}
  }

  ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);

  /* restore the inner KSP's original initial-guess setting */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
706 
/*
  Release objects created during setup (index set, scatter, reduced vectors and
  operator), reset the inner KSP, and invoke the setup-mode specific reset hook.
  The KSP object itself and the PetscSubcomm survive a reset; they are released
  in PCDestroy_Telescope().
*/
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
  ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
  ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
  /* mode-specific teardown (e.g. repartitioned DMDA context) */
  if (sred->pctelescope_reset_type) {
    ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
725 
/*
  Full teardown: reset first (releases setup-time objects), then destroy the
  inner KSP, the PetscSubcomm, the DM context, and finally the PC data itself.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
739 
/*
  Process the -pc_telescope_* options: subcomm type, reduction factor (must not
  exceed the communicator size), and the ignore-DM / ignore-KSPComputeOperators /
  use-coarse-DM flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  /* go through the setter so any type-specific validation applies */
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
  /* cannot reduce onto more ranks than the parent communicator has */
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
765 
/* PC implementation specific APIs */
767 
768 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
769 {
770   PC_Telescope red = (PC_Telescope)pc->data;
771   PetscFunctionBegin;
772   if (ksp) *ksp = red->ksp;
773   PetscFunctionReturn(0);
774 }
775 
776 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
777 {
778   PC_Telescope red = (PC_Telescope)pc->data;
779   PetscFunctionBegin;
780   if (subcommtype) *subcommtype = red->subcommtype;
781   PetscFunctionReturn(0);
782 }
783 
784 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
785 {
786   PC_Telescope     red = (PC_Telescope)pc->data;
787 
788   PetscFunctionBegin;
789   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
790   red->subcommtype = subcommtype;
791   PetscFunctionReturn(0);
792 }
793 
794 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
795 {
796   PC_Telescope red = (PC_Telescope)pc->data;
797   PetscFunctionBegin;
798   if (fact) *fact = red->redfactor;
799   PetscFunctionReturn(0);
800 }
801 
802 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
803 {
804   PC_Telescope     red = (PC_Telescope)pc->data;
805   PetscMPIInt      size;
806   PetscErrorCode   ierr;
807 
808   PetscFunctionBegin;
809   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
810   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
811   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
812   red->redfactor = fact;
813   PetscFunctionReturn(0);
814 }
815 
816 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
817 {
818   PC_Telescope red = (PC_Telescope)pc->data;
819   PetscFunctionBegin;
820   if (v) *v = red->ignore_dm;
821   PetscFunctionReturn(0);
822 }
823 
824 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
825 {
826   PC_Telescope red = (PC_Telescope)pc->data;
827   PetscFunctionBegin;
828   red->ignore_dm = v;
829   PetscFunctionReturn(0);
830 }
831 
832 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
833 {
834   PC_Telescope red = (PC_Telescope)pc->data;
835   PetscFunctionBegin;
836   if (v) *v = red->use_coarse_dm;
837   PetscFunctionReturn(0);
838 }
839 
840 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
841 {
842   PC_Telescope red = (PC_Telescope)pc->data;
843   PetscFunctionBegin;
844   red->use_coarse_dm = v;
845   PetscFunctionReturn(0);
846 }
847 
848 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
849 {
850   PC_Telescope red = (PC_Telescope)pc->data;
851   PetscFunctionBegin;
852   if (v) *v = red->ignore_kspcomputeoperators;
853   PetscFunctionReturn(0);
854 }
855 
856 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
857 {
858   PC_Telescope red = (PC_Telescope)pc->data;
859   PetscFunctionBegin;
860   red->ignore_kspcomputeoperators = v;
861   PetscFunctionReturn(0);
862 }
863 
864 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
865 {
866   PC_Telescope red = (PC_Telescope)pc->data;
867   PetscFunctionBegin;
868   *dm = private_PCTelescopeGetSubDM(red);
869   PetscFunctionReturn(0);
870 }
871 
872 /*@
873  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
874 
875  Not Collective
876 
877  Input Parameter:
878 .  pc - the preconditioner context
879 
880  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
882 
883  Level: advanced
884 
885 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
893 
894 /*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.
896 
897  Not Collective
898 
899  Input Parameter:
900 .  pc - the preconditioner context
901 
902  Output Parameter:
903 .  fact - the reduction factor
904 
905  Level: advanced
906 
907 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
915 
916 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.
918 
919  Not Collective
920 
 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
926 
927  Level: advanced
928 
929 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; silently a no-op for other PC types. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
937 
938 /*@
939  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
940 
941  Not Collective
942 
943  Input Parameter:
944 .  pc - the preconditioner context
945 
946  Output Parameter:
947 .  v - the flag
948 
949  Level: advanced
950 
951 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
959 
960 /*@
961  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
962 
963  Not Collective
964 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
970 
971  Level: advanced
972 
973 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; silently a no-op for other PC types. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
981 
982 /*@
983  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.
984 
985  Not Collective
986 
987  Input Parameter:
988 .  pc - the preconditioner context
989 
990  Output Parameter:
991 .  v - the flag
992 
993  Level: advanced
994 
995 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1003 
1004 /*@
1005  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1006 
1007  Not Collective
1008 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1014 
1015  Notes:
1016  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1017  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1018  -pc_telescope_subcomm_type will no longer have any meaning.
1019  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1021  is the same as that of the parent DM.
1022  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1023  This will be checked at the time the preconditioner is setup and an error will occur if
1024  the coarse DM does not define a sub-communicator of that used by the parent DM.
1025 
1026  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1027  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1028 
1029  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1030 
1031  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1032  In the user code, this is achieved via
1033 .vb
1034    {
1035      DM dm_fine;
1036      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1037    }
1038 .ve
1039  The signature of the user provided field scatter method is
1040 .vb
1041    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1042 .ve
1043  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1044  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1045 
1046  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1047  of state variables between the fine and coarse DMs.
1048  In the context of a finite element discretization, an example state variable might be
1049  values associated with quadrature points within each element.
1050  A user provided state scatter method is composed via
1051 .vb
1052    {
1053      DM dm_fine;
1054      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1055    }
1056 .ve
1057  The signature of the user provided state scatter method is
1058 .vb
1059    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1060 .ve
1061  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1062  The user is only required to support mode = SCATTER_FORWARD.
1063  No assumption is made about the data type of the state variables.
1064  These must be managed by the user and must be accessible from the DM.
1065 
1066  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1067  associated with the sub-KSP residing within PCTelescope.
1068  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1069  KSPSetComputeOperators() should be "similar" in type or origin.
1070  Specifically the following rules are used to infer what context on the sub-KSP should be.
1071 
1072  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1073  Note that the special case of a DMSHELL context is queried.
1074 
1075 .vb
1076    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1077    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1078    DMShellGetContext(dm_fine,&dmfine_shellctx);
1079 
1080    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1081    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1082 .ve
1083 
1084  The following rules are then enforced:
1085 
1086  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1087  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1088 
1089  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1090  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1091  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1092 
1093  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1094  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1095  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1096 
1097  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1098  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1099  In this case, an additional mechanism is provided via a composed function which will return
1100  the actual context to be used. To use this feature you must compose the "getter" function
1101  with the coarse DM, e.g.
1102 .vb
1103    {
1104      DM dm_coarse;
1105      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1106    }
1107 .ve
1108  The signature of the user provided method is
1109 .vb
1110    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1111 .ve
1112 
1113  Level: advanced
1114 
1115 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; silently a no-op for other PC types. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1123 
1124 /*@
1125  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
1126 
1127  Not Collective
1128 
1129  Input Parameter:
1130 .  pc - the preconditioner context
1131 
1132  Output Parameter:
1133 .  v - the flag
1134 
1135  Level: advanced
1136 
1137 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1145 
1146 /*@
1147  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1148 
1149  Not Collective
1150 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
1156 
1157  Level: advanced
1158 
1159 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; silently a no-op for other PC types. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1167 
1168 /*@
1169  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1170 
1171  Not Collective
1172 
1173  Input Parameter:
1174 .  pc - the preconditioner context
1175 
1176  Output Parameter:
1177 .  subdm - The re-partitioned DM
1178 
1179  Level: advanced
1180 
1181 @*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1189 
1190 /*@
1191  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1192 
1193  Logically Collective
1194 
1195  Input Parameters:
1196 +  pc - the preconditioner context
1197 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1198 
1199  Level: advanced
1200 
1201 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1202 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; silently a no-op for other PC types. */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1210 
1211 /*@
1212  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1213 
1214  Not Collective
1215 
1216  Input Parameter:
1217 .  pc - the preconditioner context
1218 
1219  Output Parameter:
1220 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1221 
1222  Level: advanced
1223 
1224 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1225 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the type-specific method composed on pc; errors if pc is not a PCTELESCOPE. */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1233 
1234 /* -------------------------------------------------------------------------------------*/
1235 /*MC
1236    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1237 
1238    Options Database:
1239 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1240 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1241 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1242 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1243 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1244 
1245    Level: advanced
1246 
1247    Notes:
1248    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1249    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1250    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1251    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1252    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1253    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1254 
1255    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1256 
1257    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1258    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1259    communicators c and c' respectively.
1260 
1261    [1] Default setup
1262    The sub-communicator c' is created via PetscSubcommCreate().
1263    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1265    No support is provided for KSPSetComputeOperators().
1266    Currently there is no support for the flag -pc_use_amat.
1267 
1268    [2] DM aware setup
1269    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1270    c' is created via PetscSubcommCreate().
1271    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1272    Currently only support for re-partitioning a DMDA is provided.
1273    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1275    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1276    This is fragile since the user must ensure that their user context is valid for use on c'.
1277    Currently there is no support for the flag -pc_use_amat.
1278 
1279    [3] Coarse DM setup
1280    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1281    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1282    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1283    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1284    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1285    available with using multi-grid on unstructured meshes.
1286    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1287    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1289    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1290    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1291    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1292    Currently there is no support for the flag -pc_use_amat.
1293    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1294    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1295 
1296    Developer Notes:
1297    During PCSetup, the B operator is scattered onto c'.
1298    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1299    Then, KSPSolve() is executed on the c' communicator.
1300 
1301    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1302    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1303 
1304    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1305    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1306    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1307 
1308    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1309    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1310    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1312    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1313 
1314    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1315 
1316    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1317    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1318    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1319 
1320    Limitations/improvements include the following.
1321    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1322    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1323 
1324    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1325    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1326    VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1327    consistent manner.
1328 
1329    Mapping of vectors (default setup mode) is performed in the following way.
1330    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1331    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1332    We perform the scatter to the sub-communicator in the following way.
1333    [1] Given a vector x defined on communicator c
1334 
1335 .vb
1336    rank(c)  local values of x
1337    ------- ----------------------------------------
1338         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1339         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1340         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1341         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1342 .ve
1343 
1344    scatter into xtmp defined also on comm c, so that we have the following values
1345 
1346 .vb
1347    rank(c)  local values of xtmp
1348    ------- ----------------------------------------------------------------------------
1349         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1350         1   [ ]
1351         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1352         3   [ ]
1353 .ve
1354 
1355    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1356 
1357    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1358    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1359 
1360 .vb
1361    rank(c')  local values of xred
1362    -------- ----------------------------------------------------------------------------
1363          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1364          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1365 .ve
1366 
1367   Contributed by Dave May
1368 
1369   Reference:
1370   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1371 
1372 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1373 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the implementation context; PetscNewLog() zero-initializes it. */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  /* Defaults: interlaced sub-communicator, no reduction, honour any attached DM,
     honour KSPSetComputeOperators(), do not use a coarse DM. */
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Install the PC method table; transpose application is not supported. */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Start in the default setup mode; PCSetUp_Telescope may swap these for the
     DM-aware or coarse-DM variants. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific methods invoked by the public PCTelescope{Get,Set}Xxx() wrappers. */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1418