xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision ccb4e88a40f0b86eaeca07ff64c64e4de2fae686)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* BibTeX entry registered once (guarded by 'cited') via PetscCitationsRegister() in PCApply_Telescope(). */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
28 
29 /*
30  default setup mode
31 
32  [1a] scatter to (FORWARD)
33  x(comm) -> xtmp(comm)
34  [1b] local copy (to) ranks with color = 0
35  xred(subcomm) <- xtmp
36 
37  [2] solve on sub KSP to obtain yred(subcomm)
38 
39  [3a] local copy (from) ranks with color = 0
40  yred(subcomm) --> xtmp
41  [3b] scatter from (REVERSE)
42  xtmp(comm) -> y(comm)
43 */
44 
45 /*
46   Collective[comm_f]
47   Notes
48    * Using comm_f = MPI_COMM_NULL will result in an error
49    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
50    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
51 */
52 PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
53 {
54   PetscInt       valid = 1;
55   MPI_Group      group_f,group_c;
56   PetscErrorCode ierr;
57   PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
58   PetscMPIInt    *ranks_f,*ranks_c;
59 
60   PetscFunctionBegin;
61   if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");
62 
63   ierr = MPI_Comm_group(comm_f,&group_f);CHKERRMPI(ierr);
64   if (comm_c != MPI_COMM_NULL) {
65     ierr = MPI_Comm_group(comm_c,&group_c);CHKERRMPI(ierr);
66   }
67 
68   ierr = MPI_Comm_size(comm_f,&size_f);CHKERRMPI(ierr);
69   if (comm_c != MPI_COMM_NULL) {
70     ierr = MPI_Comm_size(comm_c,&size_c);CHKERRMPI(ierr);
71   }
72 
73   /* check not all comm_c's are NULL */
74   size_c_sum = size_c;
75   ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRMPI(ierr);
76   if (size_c_sum == 0) valid = 0;
77 
78   /* check we can map at least 1 rank in comm_c to comm_f */
79   ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
80   ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
81   for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
82   for (k=0; k<size_c; k++) ranks_c[k] = k;
83 
84   /*
85    MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
86    I do not want the code to terminate immediately if this occurs, rather I want to throw
87    the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
88    that comm_c is not a valid sub-communicator.
89    Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
90   */
91   count = 0;
92   if (comm_c != MPI_COMM_NULL) {
93     (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
94     for (k=0; k<size_f; k++) {
95       if (ranks_f[k] == MPI_UNDEFINED) {
96         count++;
97       }
98     }
99   }
100   if (count == size_f) valid = 0;
101 
102   ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRMPI(ierr);
103   if (valid == 1) *isvalid = PETSC_TRUE;
104   else *isvalid = PETSC_FALSE;
105 
106   ierr = PetscFree(ranks_f);CHKERRQ(ierr);
107   ierr = PetscFree(ranks_c);CHKERRQ(ierr);
108   ierr = MPI_Group_free(&group_f);CHKERRMPI(ierr);
109   if (comm_c != MPI_COMM_NULL) {
110     ierr = MPI_Group_free(&group_c);CHKERRMPI(ierr);
111   }
112   PetscFunctionReturn(0);
113 }
114 
115 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
116 {
117   DM subdm = NULL;
118 
119   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
120   else {
121     switch (sred->sr_type) {
122     case TELESCOPE_DEFAULT: subdm = NULL;
123       break;
124     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
125       break;
126     case TELESCOPE_DMPLEX:  subdm = NULL;
127       break;
128     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
129       break;
130     }
131   }
132   return(subdm);
133 }
134 
/*
 Default setup: builds the work vectors and the scatter used to move a
 parent-communicator vector onto the sub-communicator ranks.

   x (comm)  --scatter-->  xtmp (comm; all entries owned by active ranks)
   xtmp      --local copy--> xred/yred (subcomm; active ranks only)

 Collective on the parent communicator of sred->psubcomm.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;
  VecType        vectype;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* B is the preconditioner matrix; x is only used as a scatter template below */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);
  ierr = MatGetVecType(B,&vectype);CHKERRQ(ierr);

  /* xred: redundant RHS/solution vector on the sub-communicator (active ranks only).
     m is its local size; inactive ranks keep m = 0 so they own nothing of xtmp. */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetType(xred,vectype);CHKERRQ(ierr); /* Use the preconditioner matrix's vectype by default */
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  /* yred: solution counterpart of xred */
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-communicator staging vector whose local layout matches xred
     on active ranks (local size m), enabling the direct array copy in PCApply */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,vectype);CHKERRQ(ierr);

  /* isin: the global indices of x this rank must receive (empty on inactive ranks) */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything on the context; x was only a template and can go */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
197 
198 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
199 {
200   PetscErrorCode ierr;
201   MPI_Comm       comm,subcomm;
202   Mat            Bred,B;
203   PetscInt       nr,nc,bs;
204   IS             isrow,iscol;
205   Mat            Blocal,*_Blocal;
206 
207   PetscFunctionBegin;
208   ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
209   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
210   subcomm = PetscSubcommChild(sred->psubcomm);
211   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
212   ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
213   isrow = sred->isin;
214   ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
215   ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
216   ierr = MatGetBlockSizes(B,NULL,&bs);CHKERRQ(ierr);
217   ierr = ISSetBlockSize(iscol,bs);CHKERRQ(ierr);
218   ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
219   ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
220   Blocal = *_Blocal;
221   ierr = PetscFree(_Blocal);CHKERRQ(ierr);
222   Bred = NULL;
223   if (PCTelescope_isActiveRank(sred)) {
224     PetscInt mm;
225 
226     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
227 
228     ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
229     ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
230   }
231   *A = Bred;
232   ierr = ISDestroy(&iscol);CHKERRQ(ierr);
233   ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
234   PetscFunctionReturn(0);
235 }
236 
/*
 Builds on the sub-communicator a (near) nullspace equivalent to `nullspace`
 defined on the parent communicator, by scattering each basis vector through
 sred->scatter and copying locally into duplicates of sred->xred.

 Collective on the parent communicator: the scatters inside the copy loop must
 be executed by every rank, which is why the loop is NOT guarded by
 PCTelescope_isActiveRank(). On inactive ranks *sub_nullspace is not set;
 callers must only dereference it on active ranks.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* sub-communicator duplicates exist on active ranks only (sub_vecs stays NULL elsewhere) */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; xtmp's local layout matches the sub vector's
         (see PCTelescopeSetUp_default), so a direct index copy is valid */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-supplied remove callbacks cannot be transplanted onto the sub-communicator */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
289 
290 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
291 {
292   PetscErrorCode ierr;
293   Mat            B;
294 
295   PetscFunctionBegin;
296   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
297   /* Propagate the nullspace if it exists */
298   {
299     MatNullSpace nullspace,sub_nullspace;
300     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
301     if (nullspace) {
302       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
303       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
304       if (PCTelescope_isActiveRank(sred)) {
305         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
306         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
307       }
308     }
309   }
310   /* Propagate the near nullspace if it exists */
311   {
312     MatNullSpace nearnullspace,sub_nearnullspace;
313     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
314     if (nearnullspace) {
315       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
316       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
317       if (PCTelescope_isActiveRank(sred)) {
318         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
319         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
320       }
321     }
322   }
323   PetscFunctionReturn(0);
324 }
325 
326 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
327 {
328   PC_Telescope   sred = (PC_Telescope)pc->data;
329   PetscErrorCode ierr;
330   PetscBool      iascii,isstring;
331   PetscViewer    subviewer;
332 
333   PetscFunctionBegin;
334   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
335   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
336   if (iascii) {
337     {
338       MPI_Comm    comm,subcomm;
339       PetscMPIInt comm_size,subcomm_size;
340       DM          dm = NULL,subdm = NULL;
341 
342       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
343       subdm = private_PCTelescopeGetSubDM(sred);
344 
345       if (sred->psubcomm) {
346         comm = PetscSubcommParent(sred->psubcomm);
347         subcomm = PetscSubcommChild(sred->psubcomm);
348         ierr = MPI_Comm_size(comm,&comm_size);CHKERRMPI(ierr);
349         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRMPI(ierr);
350 
351         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
352         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
353         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
354         switch (sred->subcommtype) {
355         case PETSC_SUBCOMM_INTERLACED :
356           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
357           break;
358         case PETSC_SUBCOMM_CONTIGUOUS :
359           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
360           break;
361         default :
362           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
363         }
364         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
365       } else {
366         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
367         subcomm = sred->subcomm;
368         if (!PCTelescope_isActiveRank(sred)) {
369           subcomm = PETSC_COMM_SELF;
370         }
371 
372         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
373         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
374         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
375       }
376 
377       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
378       if (PCTelescope_isActiveRank(sred)) {
379         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
380 
381         if (dm && sred->ignore_dm) {
382           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
383         }
384         if (sred->ignore_kspcomputeoperators) {
385           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
386         }
387         switch (sred->sr_type) {
388         case TELESCOPE_DEFAULT:
389           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
390           break;
391         case TELESCOPE_DMDA:
392           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
393           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
394           break;
395         case TELESCOPE_DMPLEX:
396           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
397           break;
398         case TELESCOPE_COARSEDM:
399           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
400           break;
401         }
402 
403         if (dm) {
404           PetscObject obj = (PetscObject)dm;
405           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
406           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
407           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
408           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
409           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
410           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
411           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
412         } else {
413           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
414         }
415         if (subdm) {
416           PetscObject obj = (PetscObject)subdm;
417           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
418           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
419           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
420           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
421           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
422           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
423           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
424         } else {
425           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
426         }
427 
428         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
429         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
430       }
431       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
432     }
433   }
434   PetscFunctionReturn(0);
435 }
436 
/*
 Sets up (first call) or updates (subsequent calls) the telescope preconditioner:
   1. on first setup, choose the repartitioning mode (default / DMDA / DMPLEX /
      coarse DM) from the attached DM and user options;
   2. install the mode-specific function pointers (apply, setup, operator
      creation, nullspace propagation, reset);
   3. on first setup, construct the sub-communicator: a PetscSubcomm for the
      default/DMDA modes, otherwise the (validated) coarse DM's communicator;
   4. on first setup, create the inner KSP on active ranks only (prefix
      "<pc prefix>telescope_");
   5. create or reuse the redundant operator and hand it to the inner KSP.
 Collective on the communicator of pc.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* the coarse-DM option overrides the DM-type detection above */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      /* -pc_telescope_ignore_dm wins over everything: fall back to the default mode */
      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    /* updates reuse the mode chosen during the first setup */
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* at least one rank must carry a coarse DM (it need not exist everywhere) */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRMPI(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRMPI(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRMPI(ierr);
      }

      /* the coarse communicator must be strictly smaller than the fine one */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRMPI(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* collective check that comm_coarse_partition really is a sub-communicator */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
599 
/*
 Applies the telescope preconditioner (default mode):
   [1] scatter x (parent comm) into xtmp, whose entries all live on active ranks,
       then copy locally into xred (sub-communicator);
   [2] solve with the inner KSP on active ranks to obtain yred;
   [3] copy yred back into xtmp and reverse-scatter into y (parent comm).
 NOTE: the scatters are collective on the parent communicator, so inactive ranks
 must still execute them; only the KSPSolve() is guarded by the active-rank test.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  /* xred/yred are NULL on inactive ranks (see PCTelescopeSetUp_default) */
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; xtmp was sized in PCTelescopeSetUp_default
     to match xred's local layout, hence the direct index copy */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
655 
/*
 Richardson application: performs exactly one preconditioned iteration
 (its > 1 is rejected). For a non-zero initial guess, y is scattered into yred
 so the inner KSP can be flagged as having a non-zero guess; the inner KSP's
 previous flag value is saved and restored around the call to PCApply_Telescope().
 Always reports PCRICHARDSON_CONVERGED_ITS with *outits = 1.
*/
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
    /* pull in vector y->xtmp (collective: all ranks participate in the scatter) */
    ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* copy vector entries into xred (yred is NULL on inactive ranks) */
    ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
    if (yred) {
      PetscScalar *LA_yred;
      ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
      ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
    }
    ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  }

  /* save the inner KSP's current flag, then force a non-zero guess if needed */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
    if (!zeroguess) {ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);}
  }

  ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);

  /* restore the inner KSP's original flag */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
709 
710 static PetscErrorCode PCReset_Telescope(PC pc)
711 {
712   PC_Telescope   sred = (PC_Telescope)pc->data;
713   PetscErrorCode ierr;
714 
715   PetscFunctionBegin;
716   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
717   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
718   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
719   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
720   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
721   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
722   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
723   if (sred->pctelescope_reset_type) {
724     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
725   }
726   PetscFunctionReturn(0);
727 }
728 
729 static PetscErrorCode PCDestroy_Telescope(PC pc)
730 {
731   PC_Telescope   sred = (PC_Telescope)pc->data;
732   PetscErrorCode ierr;
733 
734   PetscFunctionBegin;
735   ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
736   ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
737   ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
738   ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
739   ierr = PetscFree(pc->data);CHKERRQ(ierr);
740   PetscFunctionReturn(0);
741 }
742 
743 static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
744 {
745   PC_Telescope     sred = (PC_Telescope)pc->data;
746   PetscErrorCode   ierr;
747   MPI_Comm         comm;
748   PetscMPIInt      size;
749   PetscBool        flg;
750   PetscSubcommType subcommtype;
751 
752   PetscFunctionBegin;
753   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
754   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
755   ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
756   ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
757   if (flg) {
758     ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
759   }
760   ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
761   if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
762   ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
763   ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
764   ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
765   ierr = PetscOptionsTail();CHKERRQ(ierr);
766   PetscFunctionReturn(0);
767 }
768 
/* PC implementation specific API's */
770 
771 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
772 {
773   PC_Telescope red = (PC_Telescope)pc->data;
774   PetscFunctionBegin;
775   if (ksp) *ksp = red->ksp;
776   PetscFunctionReturn(0);
777 }
778 
779 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
780 {
781   PC_Telescope red = (PC_Telescope)pc->data;
782   PetscFunctionBegin;
783   if (subcommtype) *subcommtype = red->subcommtype;
784   PetscFunctionReturn(0);
785 }
786 
/* Record the sub-communicator creation type to use during setup.
   Must be called before PCSetUp(): the sub-communicator is built during setup,
   so changing the type afterwards would be inconsistent with the existing subcomm. */
static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
{
  PC_Telescope     red = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
  red->subcommtype = subcommtype;
  PetscFunctionReturn(0);
}
796 
797 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
798 {
799   PC_Telescope red = (PC_Telescope)pc->data;
800   PetscFunctionBegin;
801   if (fact) *fact = red->redfactor;
802   PetscFunctionReturn(0);
803 }
804 
/* Record the communicator-size reduction factor after validating that
   1 <= fact <= size of the communicator on which the PC lives. */
static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
{
  PC_Telescope     red = (PC_Telescope)pc->data;
  PetscMPIInt      size;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
  if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
  if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
  red->redfactor = fact;
  PetscFunctionReturn(0);
}
818 
819 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
820 {
821   PC_Telescope red = (PC_Telescope)pc->data;
822   PetscFunctionBegin;
823   if (v) *v = red->ignore_dm;
824   PetscFunctionReturn(0);
825 }
826 
827 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
828 {
829   PC_Telescope red = (PC_Telescope)pc->data;
830   PetscFunctionBegin;
831   red->ignore_dm = v;
832   PetscFunctionReturn(0);
833 }
834 
835 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
836 {
837   PC_Telescope red = (PC_Telescope)pc->data;
838   PetscFunctionBegin;
839   if (v) *v = red->use_coarse_dm;
840   PetscFunctionReturn(0);
841 }
842 
843 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
844 {
845   PC_Telescope red = (PC_Telescope)pc->data;
846   PetscFunctionBegin;
847   red->use_coarse_dm = v;
848   PetscFunctionReturn(0);
849 }
850 
851 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
852 {
853   PC_Telescope red = (PC_Telescope)pc->data;
854   PetscFunctionBegin;
855   if (v) *v = red->ignore_kspcomputeoperators;
856   PetscFunctionReturn(0);
857 }
858 
859 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
860 {
861   PC_Telescope red = (PC_Telescope)pc->data;
862   PetscFunctionBegin;
863   red->ignore_kspcomputeoperators = v;
864   PetscFunctionReturn(0);
865 }
866 
867 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
868 {
869   PC_Telescope red = (PC_Telescope)pc->data;
870   PetscFunctionBegin;
871   *dm = private_PCTelescopeGetSubDM(red);
872   PetscFunctionReturn(0);
873 }
874 
875 /*@
876  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
877 
878  Not Collective
879 
880  Input Parameter:
881 .  pc - the preconditioner context
882 
883  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
885 
886  Level: advanced
887 
888 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod dispatches to the implementation composed by PCCreate_Telescope;
     it errors if pc does not provide it (i.e. pc is not a PCTELESCOPE) */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
896 
897 /*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.
899 
900  Not Collective
901 
902  Input Parameter:
903 .  pc - the preconditioner context
904 
905  Output Parameter:
906 .  fact - the reduction factor
907 
908  Level: advanced
909 
910 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
918 
919 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
929 
930  Level: advanced
931 
932 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
940 
941 /*@
942  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
943 
944  Not Collective
945 
946  Input Parameter:
947 .  pc - the preconditioner context
948 
949  Output Parameter:
950 .  v - the flag
951 
952  Level: advanced
953 
954 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
962 
963 /*@
964  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
965 
966  Not Collective
967 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
973 
974  Level: advanced
975 
976 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
984 
985 /*@
986  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.
987 
988  Not Collective
989 
990  Input Parameter:
991 .  pc - the preconditioner context
992 
993  Output Parameter:
994 .  v - the flag
995 
996  Level: advanced
997 
998 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1006 
1007 /*@
1008  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1009 
1010  Not Collective
1011 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1017 
1018  Notes:
1019  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1020  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1021  -pc_telescope_subcomm_type will no longer have any meaning.
1022  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1024  is the same as that of the parent DM.
1025  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1026  This will be checked at the time the preconditioner is setup and an error will occur if
1027  the coarse DM does not define a sub-communicator of that used by the parent DM.
1028 
1029  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1030  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1031 
1032  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1033 
1034  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1035  In the user code, this is achieved via
1036 .vb
1037    {
1038      DM dm_fine;
1039      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1040    }
1041 .ve
1042  The signature of the user provided field scatter method is
1043 .vb
1044    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1045 .ve
1046  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1047  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1048 
1049  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1050  of state variables between the fine and coarse DMs.
1051  In the context of a finite element discretization, an example state variable might be
1052  values associated with quadrature points within each element.
1053  A user provided state scatter method is composed via
1054 .vb
1055    {
1056      DM dm_fine;
1057      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1058    }
1059 .ve
1060  The signature of the user provided state scatter method is
1061 .vb
1062    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1063 .ve
1064  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1065  The user is only required to support mode = SCATTER_FORWARD.
1066  No assumption is made about the data type of the state variables.
1067  These must be managed by the user and must be accessible from the DM.
1068 
1069  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1070  associated with the sub-KSP residing within PCTelescope.
1071  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1072  KSPSetComputeOperators() should be "similar" in type or origin.
1073  Specifically the following rules are used to infer what context on the sub-KSP should be.
1074 
1075  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1076  Note that the special case of a DMSHELL context is queried.
1077 
1078 .vb
1079    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1080    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1081    DMShellGetContext(dm_fine,&dmfine_shellctx);
1082 
1083    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1084    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1085 .ve
1086 
1087  The following rules are then enforced:
1088 
1089  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1090  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1091 
1092  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1093  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1094  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1095 
1096  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1097  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1098  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1099 
1100  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1101  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1102  In this case, an additional mechanism is provided via a composed function which will return
1103  the actual context to be used. To use this feature you must compose the "getter" function
1104  with the coarse DM, e.g.
1105 .vb
1106    {
1107      DM dm_coarse;
1108      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1109    }
1110 .ve
1111  The signature of the user provided method is
1112 .vb
1113    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1114 .ve
1115 
1116  Level: advanced
1117 
1118 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1126 
1127 /*@
1128  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
1129 
1130  Not Collective
1131 
1132  Input Parameter:
1133 .  pc - the preconditioner context
1134 
1135  Output Parameter:
1136 .  v - the flag
1137 
1138  Level: advanced
1139 
1140 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1148 
1149 /*@
1150  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1151 
1152  Not Collective
1153 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
1159 
1160  Level: advanced
1161 
1162 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1170 
1171 /*@
1172  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1173 
1174  Not Collective
1175 
1176  Input Parameter:
1177 .  pc - the preconditioner context
1178 
1179  Output Parameter:
1180 .  subdm - The re-partitioned DM
1181 
1182  Level: advanced
1183 
1184 @*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1192 
1193 /*@
1194  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1195 
1196  Logically Collective
1197 
1198  Input Parameters:
1199 +  pc - the preconditioner context
1200 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1201 
1202  Level: advanced
1203 
1204 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1205 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1213 
1214 /*@
1215  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1216 
1217  Not Collective
1218 
1219  Input Parameter:
1220 .  pc - the preconditioner context
1221 
1222  Output Parameter:
1223 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1224 
1225  Level: advanced
1226 
1227 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1228 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1236 
1237 /* -------------------------------------------------------------------------------------*/
1238 /*MC
1239    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1240 
1241    Options Database:
1242 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1243 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1244 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1245 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1246 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1247 
1248    Level: advanced
1249 
1250    Notes:
1251    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1252    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1253    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1254    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1255    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1256    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1257 
1258    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1259 
1260    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1261    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1262    communicators c and c' respectively.
1263 
1264    [1] Default setup
1265    The sub-communicator c' is created via PetscSubcommCreate().
1266    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1268    No support is provided for KSPSetComputeOperators().
1269    Currently there is no support for the flag -pc_use_amat.
1270 
1271    [2] DM aware setup
1272    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1273    c' is created via PetscSubcommCreate().
1274    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1275    Currently only support for re-partitioning a DMDA is provided.
1276    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1278    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1279    This is fragile since the user must ensure that their user context is valid for use on c'.
1280    Currently there is no support for the flag -pc_use_amat.
1281 
1282    [3] Coarse DM setup
1283    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1284    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1285    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1286    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1287    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1288    available with using multi-grid on unstructured meshes.
1289    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1290    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1292    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1293    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1294    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1295    Currently there is no support for the flag -pc_use_amat.
1296    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1297    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1298 
1299    Developer Notes:
1300    During PCSetup, the B operator is scattered onto c'.
1301    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1302    Then, KSPSolve() is executed on the c' communicator.
1303 
1304    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1305    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1306 
1307    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1308    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1309    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1310 
1311    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1312    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1313    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1315    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1316 
1317    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1318 
1319    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1320    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1321    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1322 
1323    Limitations/improvements include the following.
1324    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1325    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1326 
1327    The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1328    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
1329    VecPermute() does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1330    consistent manner.
1331 
1332    Mapping of vectors (default setup mode) is performed in the following way.
1333    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1334    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1335    We perform the scatter to the sub-communicator in the following way.
1336    [1] Given a vector x defined on communicator c
1337 
1338 .vb
1339    rank(c)  local values of x
1340    ------- ----------------------------------------
1341         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1342         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1343         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1344         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1345 .ve
1346 
1347    scatter into xtmp defined also on comm c, so that we have the following values
1348 
1349 .vb
1350    rank(c)  local values of xtmp
1351    ------- ----------------------------------------------------------------------------
1352         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1353         1   [ ]
1354         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1355         3   [ ]
1356 .ve
1357 
1358    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1359 
1360    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1361    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1362 
1363 .vb
1364    rank(c')  local values of xred
1365    -------- ----------------------------------------------------------------------------
1366          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1367          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1368 .ve
1369 
1370   Contributed by Dave May
1371 
1372   Reference:
1373   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1374 
1375 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1376 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  /* Defaults: no subcomm yet, interlaced rank selection, reduction factor 1,
     honour any attached DM and any KSPSetComputeOperators method, no coarse DM */
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Standard PC function table; no transpose application is provided */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Hooks for the active setup mode; the default-mode implementations are installed here.
     NOTE(review): presumably replaced when a DM/coarse-DM setup is selected - confirm in PCSetUp_Telescope. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific methods dispatched to by the public PCTelescopeXXX() wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1421