xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 2f613bf53f46f9356e00a2ca2bd69453be72fc31)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
9 static PetscBool  cited = PETSC_FALSE;
10 static const char citation[] =
11 "@inproceedings{MaySananRuppKnepleySmith2016,\n"
12 "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
13 "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
14 "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
15 "  series    = {PASC '16},\n"
16 "  isbn      = {978-1-4503-4126-4},\n"
17 "  location  = {Lausanne, Switzerland},\n"
18 "  pages     = {5:1--5:12},\n"
19 "  articleno = {5},\n"
20 "  numpages  = {12},\n"
21 "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
22 "  doi       = {10.1145/2929908.2929913},\n"
23 "  acmid     = {2929913},\n"
24 "  publisher = {ACM},\n"
25 "  address   = {New York, NY, USA},\n"
26 "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
27 "  year      = {2016}\n"
28 "}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 /*
47   Collective[comm_f]
48   Notes
49    * Using comm_f = MPI_COMM_NULL will result in an error
50    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
51    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
52 */
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1; /* local verdict; combined over comm_f with MPI_MIN so any rank can veto */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  /* group_c is created, sized and freed only on ranks where comm_c is non-NULL */
  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRMPI(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRMPI(ierr);
  }

  /* check not all comm_c's are NULL: size_c stays 0 on ranks with a NULL comm_c */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRMPI(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  /* pre-fill the translated ranks with MPI_UNDEFINED so untranslated slots are detectable below */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    /* count how many comm_c ranks failed to map into comm_f */
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* if no rank mapped, this comm_c contributes nothing to comm_f -> invalid */
  if (count == size_f) valid = 0;

  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRMPI(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}
115 
116 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
117 {
118   DM subdm = NULL;
119 
120   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
121   else {
122     switch (sred->sr_type) {
123     case TELESCOPE_DEFAULT: subdm = NULL;
124       break;
125     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
126       break;
127     case TELESCOPE_DMPLEX:  subdm = NULL;
128       break;
129     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
130       break;
131     }
132   }
133   return(subdm);
134 }
135 
/*
 Default setup: creates the work vectors xred/yred on the sub-communicator,
 the intermediate vector xtmp on the parent communicator, and the scatter
 mapping x (parent) onto xtmp such that the local part of xtmp coincides
 with the local part of xred on active ranks.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: right-hand side / solution vector on the sub-communicator (active ranks only) */
  xred = NULL;
  m    = 0; /* local size of xred; remains 0 on inactive ranks */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp: parent-communicator vector whose local size matches xred's (0 on inactive ranks) */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* index set selecting, on each rank, the entries of x that land in this rank's slice of xtmp */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    /* inactive ranks contribute an empty stride (length 0) */
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* ownership of isin/scatter/xred/yred/xtmp transfers to sred; released in PCReset_Telescope() */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
196 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
197 {
198   PetscErrorCode ierr;
199   MPI_Comm       comm,subcomm;
200   Mat            Bred,B;
201   PetscInt       nr,nc,bs;
202   IS             isrow,iscol;
203   Mat            Blocal,*_Blocal;
204 
205   PetscFunctionBegin;
206   ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
207   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
208   subcomm = PetscSubcommChild(sred->psubcomm);
209   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
210   ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
211   isrow = sred->isin;
212   ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
213   ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
214   ierr = MatGetBlockSizes(B,NULL,&bs);CHKERRQ(ierr);
215   ierr = ISSetBlockSize(iscol,bs);CHKERRQ(ierr);
216   ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
217   ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
218   Blocal = *_Blocal;
219   ierr = PetscFree(_Blocal);CHKERRQ(ierr);
220   Bred = NULL;
221   if (PCTelescope_isActiveRank(sred)) {
222     PetscInt mm;
223 
224     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
225 
226     ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
227     ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
228   }
229   *A = Bred;
230   ierr = ISDestroy(&iscol);CHKERRQ(ierr);
231   ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
/*
 Clone a (near) nullspace of the parent operator onto the sub-communicator:
 each nullspace vector is scattered into xtmp on the parent communicator and
 its local entries copied into a duplicate of sred->xred on active ranks,
 from which the sub-communicator MatNullSpace is built.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* sub_vecs remain NULL on inactive ranks and when the nullspace has no vectors */
  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries: the scatter is collective over the parent comm, so all ranks
     must enter this loop even though only active ranks fill sub_vecs */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred: local layouts of xtmp and sub_vecs[k]
         coincide by construction (see PCTelescopeSetUp_default) */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-supplied remove callbacks cannot be transplanted across communicators */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
287 
288 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
289 {
290   PetscErrorCode ierr;
291   Mat            B;
292 
293   PetscFunctionBegin;
294   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
295   /* Propagate the nullspace if it exists */
296   {
297     MatNullSpace nullspace,sub_nullspace;
298     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
299     if (nullspace) {
300       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
301       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
302       if (PCTelescope_isActiveRank(sred)) {
303         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
304         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
305       }
306     }
307   }
308   /* Propagate the near nullspace if it exists */
309   {
310     MatNullSpace nearnullspace,sub_nearnullspace;
311     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
312     if (nearnullspace) {
313       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
314       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
315       if (PCTelescope_isActiveRank(sred)) {
316         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
317         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
318       }
319     }
320   }
321   PetscFunctionReturn(0);
322 }
323 
324 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
325 {
326   PC_Telescope   sred = (PC_Telescope)pc->data;
327   PetscErrorCode ierr;
328   PetscBool      iascii,isstring;
329   PetscViewer    subviewer;
330 
331   PetscFunctionBegin;
332   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
333   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
334   if (iascii) {
335     {
336       MPI_Comm    comm,subcomm;
337       PetscMPIInt comm_size,subcomm_size;
338       DM          dm = NULL,subdm = NULL;
339 
340       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
341       subdm = private_PCTelescopeGetSubDM(sred);
342 
343       if (sred->psubcomm) {
344         comm = PetscSubcommParent(sred->psubcomm);
345         subcomm = PetscSubcommChild(sred->psubcomm);
346         ierr = MPI_Comm_size(comm,&comm_size);CHKERRMPI(ierr);
347         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRMPI(ierr);
348 
349         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
350         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
351         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
352         switch (sred->subcommtype) {
353         case PETSC_SUBCOMM_INTERLACED :
354           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
355           break;
356         case PETSC_SUBCOMM_CONTIGUOUS :
357           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
358           break;
359         default :
360           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
361         }
362         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
363       } else {
364         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
365         subcomm = sred->subcomm;
366         if (!PCTelescope_isActiveRank(sred)) {
367           subcomm = PETSC_COMM_SELF;
368         }
369 
370         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
371         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
372         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
373       }
374 
375       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
376       if (PCTelescope_isActiveRank(sred)) {
377         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
378 
379         if (dm && sred->ignore_dm) {
380           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
381         }
382         if (sred->ignore_kspcomputeoperators) {
383           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
384         }
385         switch (sred->sr_type) {
386         case TELESCOPE_DEFAULT:
387           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
388           break;
389         case TELESCOPE_DMDA:
390           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
391           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
392           break;
393         case TELESCOPE_DMPLEX:
394           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
395           break;
396         case TELESCOPE_COARSEDM:
397           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
398           break;
399         }
400 
401         if (dm) {
402           PetscObject obj = (PetscObject)dm;
403           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
404           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
405           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
406           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
407           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
408           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
409           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
410         } else {
411           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
412         }
413         if (subdm) {
414           PetscObject obj = (PetscObject)subdm;
415           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
416           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
417           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
418           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
419           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
420           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
421           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
422         } else {
423           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
424         }
425 
426         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
427         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
428       }
429       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
430     }
431   }
432   PetscFunctionReturn(0);
433 }
434 
/*
 Setup/update for PCTELESCOPE. On first call: classify the setup type from any
 attached DM and user options, install the type-specific function pointers,
 define the sub-communicator, create the inner KSP on active ranks, and build
 the redundant operator and (near) nullspaces. On subsequent calls: only the
 redundant operator is refreshed (MAT_REUSE_MATRIX).
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* -pc_telescope_use_coarse_dm overrides the DM-type classification above */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      /* -pc_telescope_ignore_dm forces the default (operator-only) setup */
      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* PCTelescope owns the subcomm: create it from the reduction factor */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* at least one rank must carry a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRMPI(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRMPI(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRMPI(ierr);
      }

      /* the coarse partition must be strictly smaller than the fine one */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRMPI(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* the deferred error from MPI_Group_translate_ranks() is raised here */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      /* inner KSP is controlled via the -<prefix>telescope_ options database */
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
597 
/*
 Apply the telescoped preconditioner (default setup): scatter x into xtmp on
 the parent comm, copy the local part into xred, solve with the inner KSP on
 active ranks, copy yred back into xtmp and reverse-scatter into y. The
 scatters are collective, so every rank participates in them even though only
 active ranks hold xred/yred and call KSPSolve().
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;   /* NULL on inactive ranks */
  yred    = sred->yred;   /* NULL on inactive ranks */

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred: local layouts of xtmp and xred coincide
     by construction (see PCTelescopeSetUp_default) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
653 
/*
 Single Richardson iteration (max_it = 1 only): when the initial guess is
 nonzero, the current iterate y is scattered into yred so the inner KSP can
 start from it; the inner KSP's initial-guess flag is temporarily forced on
 and restored afterwards. Delegates the actual solve to PCApply_Telescope().
*/
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value; /* only set/read on active ranks */

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;   /* NULL on inactive ranks */

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
    /* pull in vector y->xtmp */
    ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* copy vector entries into xred */
    ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
    if (yred) {
      PetscScalar *LA_yred;
      ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
      ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
    }
    ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  }

  /* remember the inner KSP's flag so it can be restored after the solve */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
    if (!zeroguess) {ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);}
  }

  ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);

  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
707 
708 static PetscErrorCode PCReset_Telescope(PC pc)
709 {
710   PC_Telescope   sred = (PC_Telescope)pc->data;
711   PetscErrorCode ierr;
712 
713   PetscFunctionBegin;
714   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
715   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
716   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
717   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
718   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
719   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
720   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
721   if (sred->pctelescope_reset_type) {
722     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
723   }
724   PetscFunctionReturn(0);
725 }
726 
727 static PetscErrorCode PCDestroy_Telescope(PC pc)
728 {
729   PC_Telescope   sred = (PC_Telescope)pc->data;
730   PetscErrorCode ierr;
731 
732   PetscFunctionBegin;
733   ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
734   ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
735   ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
736   ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
737   ierr = PetscFree(pc->data);CHKERRQ(ierr);
738   PetscFunctionReturn(0);
739 }
740 
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  /* Route the subcomm type through the setter so its "not after setup" check applies */
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  /* The reduction factor is read directly; validate it against the parent comm size */
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
766 
/* PC implementation specific APIs */
768 
769 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
770 {
771   PC_Telescope red = (PC_Telescope)pc->data;
772   PetscFunctionBegin;
773   if (ksp) *ksp = red->ksp;
774   PetscFunctionReturn(0);
775 }
776 
777 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
778 {
779   PC_Telescope red = (PC_Telescope)pc->data;
780   PetscFunctionBegin;
781   if (subcommtype) *subcommtype = red->subcommtype;
782   PetscFunctionReturn(0);
783 }
784 
785 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
786 {
787   PC_Telescope     red = (PC_Telescope)pc->data;
788 
789   PetscFunctionBegin;
790   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
791   red->subcommtype = subcommtype;
792   PetscFunctionReturn(0);
793 }
794 
795 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
796 {
797   PC_Telescope red = (PC_Telescope)pc->data;
798   PetscFunctionBegin;
799   if (fact) *fact = red->redfactor;
800   PetscFunctionReturn(0);
801 }
802 
803 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
804 {
805   PC_Telescope     red = (PC_Telescope)pc->data;
806   PetscMPIInt      size;
807   PetscErrorCode   ierr;
808 
809   PetscFunctionBegin;
810   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
811   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
812   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
813   red->redfactor = fact;
814   PetscFunctionReturn(0);
815 }
816 
817 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
818 {
819   PC_Telescope red = (PC_Telescope)pc->data;
820   PetscFunctionBegin;
821   if (v) *v = red->ignore_dm;
822   PetscFunctionReturn(0);
823 }
824 
825 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
826 {
827   PC_Telescope red = (PC_Telescope)pc->data;
828   PetscFunctionBegin;
829   red->ignore_dm = v;
830   PetscFunctionReturn(0);
831 }
832 
833 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
834 {
835   PC_Telescope red = (PC_Telescope)pc->data;
836   PetscFunctionBegin;
837   if (v) *v = red->use_coarse_dm;
838   PetscFunctionReturn(0);
839 }
840 
841 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
842 {
843   PC_Telescope red = (PC_Telescope)pc->data;
844   PetscFunctionBegin;
845   red->use_coarse_dm = v;
846   PetscFunctionReturn(0);
847 }
848 
849 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
850 {
851   PC_Telescope red = (PC_Telescope)pc->data;
852   PetscFunctionBegin;
853   if (v) *v = red->ignore_kspcomputeoperators;
854   PetscFunctionReturn(0);
855 }
856 
857 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
858 {
859   PC_Telescope red = (PC_Telescope)pc->data;
860   PetscFunctionBegin;
861   red->ignore_kspcomputeoperators = v;
862   PetscFunctionReturn(0);
863 }
864 
865 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
866 {
867   PC_Telescope red = (PC_Telescope)pc->data;
868   PetscFunctionBegin;
869   *dm = private_PCTelescopeGetSubDM(red);
870   PetscFunctionReturn(0);
871 }
872 
/*@
 PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetDM()
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
894 
/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetReductionFactor()
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
916 
/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetReductionFactor()
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
938 
/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreDM()
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
960 
/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreDM()
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
982 
/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetUseCoarseDM()
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1004 
/*@
 PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM

 Notes:
 When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
 will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
 is the same as that of the parent DM.
 Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is setup and an error will occur if
 the coarse DM does not define a sub-communicator of that used by the parent DM.

 The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
 the DM used (e.g. it supports DMSHELL, DMPLEX, etc).

 Support is currently only provided for the case when you are using KSPSetComputeOperators()

 The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
 SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.

 Optionally, the user may also compose a function with the parent DM to facilitate the transfer
 of state variables between the fine and coarse DMs.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
 The user is only required to support mode = SCATTER_FORWARD.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the DM.

 Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
 associated with the sub-KSP residing within PCTelescope.
 In general, PCTelescope assumes that the context on the fine and coarse DM used with
 KSPSetComputeOperators() should be "similar" in type or origin.
 Specifically the following rules are used to infer what context on the sub-KSP should be.

 First the contexts from the KSP and the fine and coarse DMs are retrieved.
 Note that the special case of a DMSHELL context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If none of the above three tests pass, then PCTelescope cannot safely determine what
 context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse DM, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetUseCoarseDM()
@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1124 
/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeSetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1146 
/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetIgnoreKSPComputeOperators()
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1168 
/*@
 PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned DM

 Level: advanced

.seealso: PCTELESCOPE, PCTelescopeGetKSP()
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1190 
/*@
 PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see PetscSubcommType)

 Notes:
 Must be called before the preconditioner is set up; the type cannot be changed afterwards.

 Level: advanced

.seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1211 
/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see PetscSubcommType)

 Level: advanced

.seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1234 
1235 /* -------------------------------------------------------------------------------------*/
1236 /*MC
1237    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1238 
1239    Options Database:
1240 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1241 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1242 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1243 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1244 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1245 
1246    Level: advanced
1247 
1248    Notes:
1249    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1250    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1251    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1252    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1253    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1254    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1255 
1256    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1257 
   There are three setup mechanisms for PCTelescope. Features supported by each type are described below.
1259    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1260    communicators c and c' respectively.
1261 
1262    [1] Default setup
1263    The sub-communicator c' is created via PetscSubcommCreate().
1264    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1266    No support is provided for KSPSetComputeOperators().
1267    Currently there is no support for the flag -pc_use_amat.
1268 
1269    [2] DM aware setup
1270    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1271    c' is created via PetscSubcommCreate().
1272    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1273    Currently only support for re-partitioning a DMDA is provided.
1274    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1276    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1277    This is fragile since the user must ensure that their user context is valid for use on c'.
1278    Currently there is no support for the flag -pc_use_amat.
1279 
1280    [3] Coarse DM setup
1281    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1282    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1283    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1284    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1285    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1286    available with using multi-grid on unstructured meshes.
1287    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1288    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1290    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1291    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1292    Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1293    Currently there is no support for the flag -pc_use_amat.
1294    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1295    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1296 
1297    Developer Notes:
1298    During PCSetup, the B operator is scattered onto c'.
1299    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1300    Then, KSPSolve() is executed on the c' communicator.
1301 
1302    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1303    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1304 
1305    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1306    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1307    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1308 
1309    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1310    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1311    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1313    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1314 
1315    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1316 
1317    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1318    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1319    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1320 
1321    Limitations/improvements include the following.
1322    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1323    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1324 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
1326    and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
1328    consistent manner.
1329 
1330    Mapping of vectors (default setup mode) is performed in the following way.
1331    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1332    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1333    We perform the scatter to the sub-communicator in the following way.
1334    [1] Given a vector x defined on communicator c
1335 
1336 .vb
1337    rank(c)  local values of x
1338    ------- ----------------------------------------
1339         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1340         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1341         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1342         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1343 .ve
1344 
1345    scatter into xtmp defined also on comm c, so that we have the following values
1346 
1347 .vb
1348    rank(c)  local values of xtmp
1349    ------- ----------------------------------------------------------------------------
1350         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1351         1   [ ]
1352         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1353         3   [ ]
1354 .ve
1355 
1356    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1357 
1358    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1359    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1360 
1361 .vb
1362    rank(c')  local values of xred
1363    -------- ----------------------------------------------------------------------------
1364          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1365          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1366 .ve
1367 
1368   Contributed by Dave May
1369 
1370   Reference:
1371   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1372 
1373 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1374 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the private context and fill in the defaults: interlaced subcomm
     selection, a reduction factor of 1, and no DM-related behavior overridden. */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Install the PC virtual methods; applytranspose is not implemented */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Start with the default setup mode; PCSetUp_Telescope may switch these to the
     DM-aware or coarse-DM variants */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Register the type-specific methods reached via PetscUseMethod/PetscTryMethod
     from the public PCTelescopeGet/Set* API */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1419