xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 608e5a7a8a35b40c4e75a0aae313cbdeb3df9fee)
1 
2 #include <petsc/private/petscimpl.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcimpl.h>
5 #include <petscksp.h> /*I "petscksp.h" I*/
6 #include <petscdm.h> /*I "petscdm.h" I*/
7 #include "../src/ksp/pc/impls/telescope/telescope.h"
8 
/* BibTeX entry describing PCTELESCOPE; registered once (guarded by 'cited')
   via PetscCitationsRegister() on the first call to PCApply_Telescope(). */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
29 
30 /*
31  default setup mode
32 
33  [1a] scatter to (FORWARD)
34  x(comm) -> xtmp(comm)
35  [1b] local copy (to) ranks with color = 0
36  xred(subcomm) <- xtmp
37 
38  [2] solve on sub KSP to obtain yred(subcomm)
39 
40  [3a] local copy (from) ranks with color = 0
41  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
43  xtmp(comm) -> y(comm)
44 */
45 
46 /*
47   Collective[comm_f]
48   Notes
49    * Using comm_f = MPI_COMM_NULL will result in an error
50    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
51    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
52 */
/*
 Tests whether comm_c is a valid sub-communicator of comm_f.
 Collective over comm_f. Sets *isvalid to PETSC_TRUE only if (i) at least one
 rank supplies a non-NULL comm_c and (ii) every non-NULL comm_c can map at
 least one of its ranks into comm_f. The result is agreed upon by all ranks
 of comm_f via an MPI_MIN reduction of the local validity flag.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f,MPI_Comm comm_c,PetscBool *isvalid)
{
  PetscInt       valid = 1;            /* local vote: 1 = valid so far */
  MPI_Group      group_f,group_c;
  PetscErrorCode ierr;
  PetscMPIInt    count,k,size_f = 0,size_c = 0,size_c_sum = 0;
  PetscMPIInt    *ranks_f,*ranks_c;

  PetscFunctionBegin;
  if (comm_f == MPI_COMM_NULL) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"comm_f cannot be MPI_COMM_NULL");

  /* group_c is created (and later freed) only on ranks where comm_c is non-NULL */
  ierr = MPI_Comm_group(comm_f,&group_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_group(comm_c,&group_c);CHKERRMPI(ierr);
  }

  ierr = MPI_Comm_size(comm_f,&size_f);CHKERRMPI(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Comm_size(comm_c,&size_c);CHKERRMPI(ierr);
  }

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  ierr = MPI_Allreduce(MPI_IN_PLACE,&size_c_sum,1,MPI_INT,MPI_SUM,comm_f);CHKERRMPI(ierr);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  ierr = PetscMalloc1(size_f,&ranks_f);CHKERRQ(ierr);
  ierr = PetscMalloc1(size_c,&ranks_c);CHKERRQ(ierr);
  /* pre-fill ranks_f with MPI_UNDEFINED so untranslated slots are detectable */
  for (k=0; k<size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k=0; k<size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call CHKERRQ() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c,size_c,ranks_c,group_f,ranks_f);
    for (k=0; k<size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) {
        count++;
      }
    }
  }
  /* every slot still MPI_UNDEFINED => no rank of comm_c maps into comm_f */
  if (count == size_f) valid = 0;

  /* all ranks of comm_f must agree: a single invalid vote invalidates the subcomm */
  ierr = MPI_Allreduce(MPI_IN_PLACE,&valid,1,MPIU_INT,MPI_MIN,comm_f);CHKERRMPI(ierr);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  ierr = PetscFree(ranks_f);CHKERRQ(ierr);
  ierr = PetscFree(ranks_c);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group_f);CHKERRQ(ierr);
  if (comm_c != MPI_COMM_NULL) {
    ierr = MPI_Group_free(&group_c);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
115 
116 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
117 {
118   DM subdm = NULL;
119 
120   if (!PCTelescope_isActiveRank(sred)) { subdm = NULL; }
121   else {
122     switch (sred->sr_type) {
123     case TELESCOPE_DEFAULT: subdm = NULL;
124       break;
125     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
126       break;
127     case TELESCOPE_DMPLEX:  subdm = NULL;
128       break;
129     case TELESCOPE_COARSEDM: if (sred->ksp) { KSPGetDM(sred->ksp,&subdm); }
130       break;
131     }
132   }
133   return(subdm);
134 }
135 
/*
 Default setup: builds the machinery to move vector data between the parent
 communicator and the sub-communicator (see the "default setup mode" diagram
 above): the reduced vectors xred/yred on the subcomm, the intermediate xtmp
 on the parent comm, and the forward scatter x -> xtmp. Collective on pc.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* x is a template vector compatible with the preconditioning matrix B */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred lives on the subcomm only; inactive ranks keep xred = NULL, m = 0 */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp spans the parent comm, sized so active ranks own exactly the entries
     their xred owns (inactive ranks contribute m = 0 local entries) */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* isin selects which global entries of x this rank pulls into xtmp:
     active ranks take the ownership range of xred, inactive ranks take nothing */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* stash everything on the telescope context; ownership transfers to sred */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
196 PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
197 {
198   PetscErrorCode ierr;
199   MPI_Comm       comm,subcomm;
200   Mat            Bred,B;
201   PetscInt       nr,nc;
202   IS             isrow,iscol;
203   Mat            Blocal,*_Blocal;
204 
205   PetscFunctionBegin;
206   ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
207   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
208   subcomm = PetscSubcommChild(sred->psubcomm);
209   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
210   ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
211   isrow = sred->isin;
212   ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&iscol);CHKERRQ(ierr);
213   ierr = ISSetIdentity(iscol);CHKERRQ(ierr);
214   ierr = MatSetOption(B,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
215   ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
216   Blocal = *_Blocal;
217   ierr = PetscFree(_Blocal);CHKERRQ(ierr);
218   Bred = NULL;
219   if (PCTelescope_isActiveRank(sred)) {
220     PetscInt mm;
221 
222     if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
223 
224     ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
225     ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
226   }
227   *A = Bred;
228   ierr = ISDestroy(&iscol);CHKERRQ(ierr);
229   ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
/*
 Builds a (near) nullspace on the sub-communicator by scattering each nullspace
 vector of the parent operator into the reduced layout. Must be called on ALL
 ranks of the parent communicator: the scatter in the copy loop is collective,
 even though only active ranks allocate sub_vecs and create *sub_nullspace.
 NOTE(review): *sub_nullspace is only written on active ranks — callers are
 expected to guard its use with PCTelescope_isActiveRank().
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode ierr;
  PetscBool      has_const;
  const Vec      *vecs;
  Vec            *sub_vecs = NULL;
  PetscInt       i,k,n = 0;
  MPI_Comm       subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  if (PCTelescope_isActiveRank(sred)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp (collective over the parent comm) */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; local ranges of xtmp and sub_vecs[k]
         coincide by construction (see PCTelescopeSetUp_default) */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* user-supplied remove callbacks cannot be transplanted onto the subcomm */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
285 
286 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
287 {
288   PetscErrorCode ierr;
289   Mat            B;
290 
291   PetscFunctionBegin;
292   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
293   /* Propagate the nullspace if it exists */
294   {
295     MatNullSpace nullspace,sub_nullspace;
296     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
297     if (nullspace) {
298       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
299       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
300       if (PCTelescope_isActiveRank(sred)) {
301         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
302         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
303       }
304     }
305   }
306   /* Propagate the near nullspace if it exists */
307   {
308     MatNullSpace nearnullspace,sub_nearnullspace;
309     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
310     if (nearnullspace) {
311       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
312       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
313       if (PCTelescope_isActiveRank(sred)) {
314         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
315         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
316       }
317     }
318   }
319   PetscFunctionReturn(0);
320 }
321 
322 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
323 {
324   PC_Telescope   sred = (PC_Telescope)pc->data;
325   PetscErrorCode ierr;
326   PetscBool      iascii,isstring;
327   PetscViewer    subviewer;
328 
329   PetscFunctionBegin;
330   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
331   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
332   if (iascii) {
333     {
334       MPI_Comm    comm,subcomm;
335       PetscMPIInt comm_size,subcomm_size;
336       DM          dm = NULL,subdm = NULL;
337 
338       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
339       subdm = private_PCTelescopeGetSubDM(sred);
340 
341       if (sred->psubcomm) {
342         comm = PetscSubcommParent(sred->psubcomm);
343         subcomm = PetscSubcommChild(sred->psubcomm);
344         ierr = MPI_Comm_size(comm,&comm_size);CHKERRMPI(ierr);
345         ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRMPI(ierr);
346 
347         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
348         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
349         ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: parent_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
350         switch (sred->subcommtype) {
351         case PETSC_SUBCOMM_INTERLACED :
352           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm: type = interlaced\n",sred->subcommtype);CHKERRQ(ierr);
353           break;
354         case PETSC_SUBCOMM_CONTIGUOUS :
355           ierr = PetscViewerASCIIPrintf(viewer,"petsc subcomm type = contiguous\n",sred->subcommtype);CHKERRQ(ierr);
356           break;
357         default :
358           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
359         }
360         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
361       } else {
362         ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
363         subcomm = sred->subcomm;
364         if (!PCTelescope_isActiveRank(sred)) {
365           subcomm = PETSC_COMM_SELF;
366         }
367 
368         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
369         ierr = PetscViewerASCIIPrintf(viewer,"subcomm: using user provided sub-communicator\n");CHKERRQ(ierr);
370         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
371       }
372 
373       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
374       if (PCTelescope_isActiveRank(sred)) {
375         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
376 
377         if (dm && sred->ignore_dm) {
378           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring DM\n");CHKERRQ(ierr);
379         }
380         if (sred->ignore_kspcomputeoperators) {
381           ierr = PetscViewerASCIIPrintf(subviewer,"ignoring KSPComputeOperators\n");CHKERRQ(ierr);
382         }
383         switch (sred->sr_type) {
384         case TELESCOPE_DEFAULT:
385           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: default\n");CHKERRQ(ierr);
386           break;
387         case TELESCOPE_DMDA:
388           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMDA auto-repartitioning\n");CHKERRQ(ierr);
389           ierr = DMView_DA_Short(subdm,subviewer);CHKERRQ(ierr);
390           break;
391         case TELESCOPE_DMPLEX:
392           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: DMPLEX auto-repartitioning\n");CHKERRQ(ierr);
393           break;
394         case TELESCOPE_COARSEDM:
395           ierr = PetscViewerASCIIPrintf(subviewer,"setup type: coarse DM\n");CHKERRQ(ierr);
396           break;
397         }
398 
399         if (dm) {
400           PetscObject obj = (PetscObject)dm;
401           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object:");CHKERRQ(ierr);
402           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
403           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
404           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
405           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
406           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
407           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
408         } else {
409           ierr = PetscViewerASCIIPrintf(subviewer,"Parent DM object: NULL\n");CHKERRQ(ierr);
410         }
411         if (subdm) {
412           PetscObject obj = (PetscObject)subdm;
413           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object:");CHKERRQ(ierr);
414           PetscViewerASCIIUseTabs(subviewer,PETSC_FALSE);
415           if (obj->type_name) { PetscViewerASCIIPrintf(subviewer," type = %s;",obj->type_name); }
416           if (obj->name) { PetscViewerASCIIPrintf(subviewer," name = %s;",obj->name); }
417           if (obj->prefix) { PetscViewerASCIIPrintf(subviewer," prefix = %s",obj->prefix); }
418           ierr = PetscViewerASCIIPrintf(subviewer,"\n");CHKERRQ(ierr);
419           PetscViewerASCIIUseTabs(subviewer,PETSC_TRUE);
420         } else {
421           ierr = PetscViewerASCIIPrintf(subviewer,"Sub DM object: NULL\n");CHKERRQ(ierr);
422         }
423 
424         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
425         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
426       }
427       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
428     }
429   }
430   PetscFunctionReturn(0);
431 }
432 
/*
 Sets up (or updates) the telescope preconditioner:
   1. on first setup, detect the repartitioning mode (default / DMDA / DMPLEX /
      coarse DM) from the attached DM and the user options;
   2. install the mode-specific function pointers and apply routines;
   3. on first setup, build the sub-communicator (PetscSubcomm, or taken from
      the coarse DM's communicator after validation);
   4. on first setup, create the inner KSP on the active ranks;
   5. create (first setup) or update (subsequent setups) the redundant
      operator Bred and hand it to the inner KSP.
 Collective on pc.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  PetscErrorCode  ierr;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm = PETSC_FALSE;
    ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
      if (same) {
        ierr = PetscInfo(pc,"PCTelescope: found DMPLEX\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DMPLEX;
      }

      /* -pc_telescope_use_coarse_dm overrides the DM-type detection above */
      if (sred->use_coarse_dm) {
        ierr = PetscInfo(pc,"PCTelescope: using coarse DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_COARSEDM;
      }

      /* -pc_telescope_ignore_dm overrides everything: fall back to default mode */
      if (sred->ignore_dm) {
        ierr = PetscInfo(pc,"PCTelescope: ignoring DM\n");CHKERRQ(ierr);
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm,PETSC_ERR_SUP,"Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* build a PetscSubcomm from the reduction factor and subcomm type */
      if (!sred->psubcomm) {
        ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
        ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
        ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
        ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm,dm_coarse_partition = NULL;
      MPI_Comm    comm_fine,comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine=0,csize_coarse_partition=0,cs[2],csg[2],cnt=0;
      PetscBool   isvalidsubcomm;

      ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
      comm_fine = PetscObjectComm((PetscObject)dm);
      ierr = DMGetCoarseDM(dm,&dm_coarse_partition);CHKERRQ(ierr);
      /* count (globally) how many ranks actually carry a coarse DM */
      if (dm_coarse_partition) { cnt = 1; }
      ierr = MPI_Allreduce(MPI_IN_PLACE,&cnt,1,MPI_INT,MPI_SUM,comm_fine);CHKERRMPI(ierr);
      if (cnt == 0) SETERRQ(comm_fine,PETSC_ERR_SUP,"Zero instances of a coarse DM were found");

      ierr = MPI_Comm_size(comm_fine,&csize_fine);CHKERRMPI(ierr);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        ierr = MPI_Comm_size(comm_coarse_partition,&csize_coarse_partition);CHKERRMPI(ierr);
      }

      /* the coarse DM must live on a strictly smaller communicator */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      ierr = MPI_Allreduce(cs,csg,2,MPI_INT,MPI_MAX,comm_fine);CHKERRMPI(ierr);
      if (csg[0] == csg[1]) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM uses the same size communicator as the parent DM attached to the PC");

      /* and its communicator must be a genuine sub-communicator of the fine one */
      ierr = PCTelescopeTestValidSubcomm(comm_fine,comm_coarse_partition,&isvalidsubcomm);CHKERRQ(ierr);
      if (!isvalidsubcomm) SETERRQ(comm_fine,PETSC_ERR_SUP,"Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
      ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
      /* inner solver is controlled via the "<pc prefix>telescope_" options prefix */
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) {
    ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
  }
  /* update: create Bred on first setup, reuse it afterwards */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
    }
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
595 
/*
 Applies the telescope preconditioner (default mode): scatter x into xtmp on
 the parent comm, copy locally into xred on the subcomm, solve with the inner
 KSP on active ranks, then copy/scatter the solution back into y. All ranks of
 the parent communicator must call this (the scatters are collective); inactive
 ranks have xred == yred == NULL and skip the copies and the solve.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred; local layouts of xtmp and xred coincide
     by construction (see PCTelescopeSetUp_default) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve on the sub-communicator */
  if (PCTelescope_isActiveRank(sred)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
    ierr = KSPCheckSolve(sred->ksp,pc,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter into y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
651 
652 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
653 {
654   PC_Telescope      sred = (PC_Telescope)pc->data;
655   PetscErrorCode    ierr;
656   Vec               xtmp,yred;
657   PetscInt          i,st,ed;
658   VecScatter        scatter;
659   const PetscScalar *x_array;
660   PetscBool         default_init_guess_value;
661 
662   PetscFunctionBegin;
663   xtmp    = sred->xtmp;
664   scatter = sred->scatter;
665   yred    = sred->yred;
666 
667   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
668   *reason = (PCRichardsonConvergedReason)0;
669 
670   if (!zeroguess) {
671     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
672     /* pull in vector y->xtmp */
673     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
674     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
675 
676     /* copy vector entries into xred */
677     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
678     if (yred) {
679       PetscScalar *LA_yred;
680       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
681       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
682       for (i=0; i<ed-st; i++) {
683         LA_yred[i] = x_array[i];
684       }
685       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
686     }
687     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
688   }
689 
690   if (PCTelescope_isActiveRank(sred)) {
691     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
692     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
693   }
694 
695   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
696 
697   if (PCTelescope_isActiveRank(sred)) {
698     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
699   }
700 
701   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
702   *outits = 1;
703   PetscFunctionReturn(0);
704 }
705 
/*
 Resets the telescope context: destroys the scatter machinery, the reduced
 vectors and the redundant operator, resets (but does not destroy) the inner
 KSP, then invokes the mode-specific reset hook if one was installed.
 All destroy routines tolerate NULL, so this is safe before first setup.
*/
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
  ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
  ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
  ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
  if (sred->pctelescope_reset_type) {
    ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
724 
/*
 Destroys the telescope context: resets first (releases Bred, vectors,
 scatter), then destroys the inner KSP, the PetscSubcomm, the DM context
 and finally the pc->data allocation itself.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope   sred = (PC_Telescope)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
  ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
738 
/*
 Processes the PCTELESCOPE command-line options:
   -pc_telescope_subcomm_type              interlaced or contiguous subcomm layout
   -pc_telescope_reduction_factor          comm-size reduction factor (must be <= comm size)
   -pc_telescope_ignore_dm                 ignore any DM attached to the PC
   -pc_telescope_ignore_kspcomputeoperators  ignore the method used to compute A
   -pc_telescope_use_coarse_dm             take the subcomm from the coarse DM
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  /* only push the subcomm type through the setter when the option was given */
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,NULL);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_use_coarse_dm","Define sub-communicator from the coarse DM","PCTelescopeSetUseCoarseDM",sred->use_coarse_dm,&sred->use_coarse_dm,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
764 
/* PC implementation specific APIs */
766 
767 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
768 {
769   PC_Telescope red = (PC_Telescope)pc->data;
770   PetscFunctionBegin;
771   if (ksp) *ksp = red->ksp;
772   PetscFunctionReturn(0);
773 }
774 
775 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
776 {
777   PC_Telescope red = (PC_Telescope)pc->data;
778   PetscFunctionBegin;
779   if (subcommtype) *subcommtype = red->subcommtype;
780   PetscFunctionReturn(0);
781 }
782 
783 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
784 {
785   PC_Telescope     red = (PC_Telescope)pc->data;
786 
787   PetscFunctionBegin;
788   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
789   red->subcommtype = subcommtype;
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
794 {
795   PC_Telescope red = (PC_Telescope)pc->data;
796   PetscFunctionBegin;
797   if (fact) *fact = red->redfactor;
798   PetscFunctionReturn(0);
799 }
800 
801 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
802 {
803   PC_Telescope     red = (PC_Telescope)pc->data;
804   PetscMPIInt      size;
805   PetscErrorCode   ierr;
806 
807   PetscFunctionBegin;
808   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
809   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
810   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
811   red->redfactor = fact;
812   PetscFunctionReturn(0);
813 }
814 
815 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
816 {
817   PC_Telescope red = (PC_Telescope)pc->data;
818   PetscFunctionBegin;
819   if (v) *v = red->ignore_dm;
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
824 {
825   PC_Telescope red = (PC_Telescope)pc->data;
826   PetscFunctionBegin;
827   red->ignore_dm = v;
828   PetscFunctionReturn(0);
829 }
830 
831 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc,PetscBool *v)
832 {
833   PC_Telescope red = (PC_Telescope)pc->data;
834   PetscFunctionBegin;
835   if (v) *v = red->use_coarse_dm;
836   PetscFunctionReturn(0);
837 }
838 
839 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc,PetscBool v)
840 {
841   PC_Telescope red = (PC_Telescope)pc->data;
842   PetscFunctionBegin;
843   red->use_coarse_dm = v;
844   PetscFunctionReturn(0);
845 }
846 
847 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
848 {
849   PC_Telescope red = (PC_Telescope)pc->data;
850   PetscFunctionBegin;
851   if (v) *v = red->ignore_kspcomputeoperators;
852   PetscFunctionReturn(0);
853 }
854 
855 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
856 {
857   PC_Telescope red = (PC_Telescope)pc->data;
858   PetscFunctionBegin;
859   red->ignore_kspcomputeoperators = v;
860   PetscFunctionReturn(0);
861 }
862 
863 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
864 {
865   PC_Telescope red = (PC_Telescope)pc->data;
866   PetscFunctionBegin;
867   *dm = private_PCTelescopeGetSubDM(red);
868   PetscFunctionReturn(0);
869 }
870 
871 /*@
872  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
873 
874  Not Collective
875 
876  Input Parameter:
877 .  pc - the preconditioner context
878 
879  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
881 
882  Level: advanced
883 
884 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
892 
893 /*@
894  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by.
895 
896  Not Collective
897 
898  Input Parameter:
899 .  pc - the preconditioner context
900 
901  Output Parameter:
902 .  fact - the reduction factor
903 
904  Level: advanced
905 
906 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
914 
915 /*@
 PCTelescopeSetReductionFactor - Sets the factor by which the size of the communicator will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
925 
926  Level: advanced
927 
928 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
936 
937 /*@
938  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
939 
940  Not Collective
941 
942  Input Parameter:
943 .  pc - the preconditioner context
944 
945  Output Parameter:
946 .  v - the flag
947 
948  Level: advanced
949 
950 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
958 
959 /*@
960  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
961 
962  Not Collective
963 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
969 
970  Level: advanced
971 
972 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
980 
981 /*@
982  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse DM attached to DM associated with the PC will be used.
983 
984  Not Collective
985 
986  Input Parameter:
987 .  pc - the preconditioner context
988 
989  Output Parameter:
990 .  v - the flag
991 
992  Level: advanced
993 
994 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetUseCoarseDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1002 
1003 /*@
1004  PCTelescopeSetUseCoarseDM - Set a flag to query the DM attached to the PC if it also has a coarse DM
1005 
1006  Not Collective
1007 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_FALSE to ignore any coarse DM
1013 
1014  Notes:
1015  When you have specified to use a coarse DM, the communicator used to create the sub-KSP within PCTelescope
1016  will be that of the coarse DM. Hence the flags -pc_telescope_reduction_factor and
1017  -pc_telescope_subcomm_type will no longer have any meaning.
1018  It is required that the communicator associated with the parent (fine) and the coarse DM are of different sizes.
 An error will occur if the size of the communicator associated with the coarse DM
1020  is the same as that of the parent DM.
1021  Furthermore, it is required that the communicator on the coarse DM is a sub-communicator of the parent.
1022  This will be checked at the time the preconditioner is setup and an error will occur if
1023  the coarse DM does not define a sub-communicator of that used by the parent DM.
1024 
1025  The particular Telescope setup invoked when using a coarse DM is agnostic with respect to the type of
1026  the DM used (e.g. it supports DMSHELL, DMPLEX, etc).
1027 
1028  Support is currently only provided for the case when you are using KSPSetComputeOperators()
1029 
1030  The user is required to compose a function with the parent DM to facilitate the transfer of fields (Vec) between the different decompositions defined by the fine and coarse DMs.
1031  In the user code, this is achieved via
1032 .vb
1033    {
1034      DM dm_fine;
1035      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1036    }
1037 .ve
1038  The signature of the user provided field scatter method is
1039 .vb
1040    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1041 .ve
1042  The user must provide support for both mode = SCATTER_FORWARD and mode = SCATTER_REVERSE.
1043  SCATTER_FORWARD implies the direction of transfer is from the parent (fine) DM to the coarse DM.
1044 
1045  Optionally, the user may also compose a function with the parent DM to facilitate the transfer
1046  of state variables between the fine and coarse DMs.
1047  In the context of a finite element discretization, an example state variable might be
1048  values associated with quadrature points within each element.
1049  A user provided state scatter method is composed via
1050 .vb
1051    {
1052      DM dm_fine;
1053      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1054    }
1055 .ve
1056  The signature of the user provided state scatter method is
1057 .vb
1058    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1059 .ve
1060  SCATTER_FORWARD implies the direction of transfer is from the fine DM to the coarse DM.
1061  The user is only required to support mode = SCATTER_FORWARD.
1062  No assumption is made about the data type of the state variables.
1063  These must be managed by the user and must be accessible from the DM.
1064 
1065  Care must be taken in defining the user context passed to KSPSetComputeOperators() which is to be
1066  associated with the sub-KSP residing within PCTelescope.
1067  In general, PCTelescope assumes that the context on the fine and coarse DM used with
1068  KSPSetComputeOperators() should be "similar" in type or origin.
1069  Specifically the following rules are used to infer what context on the sub-KSP should be.
1070 
1071  First the contexts from the KSP and the fine and coarse DMs are retrieved.
1072  Note that the special case of a DMSHELL context is queried.
1073 
1074 .vb
1075    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1076    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1077    DMShellGetContext(dm_fine,&dmfine_shellctx);
1078 
1079    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1080    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1081 .ve
1082 
1083  The following rules are then enforced:
1084 
1085  1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
1086  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,NULL);
1087 
1088  2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
1089  check that dmcoarse_appctx is also non-NULL. If this is true, then:
1090  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);
1091 
1092  3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
1093  check that dmcoarse_shellctx is also non-NULL. If this is true, then:
1094  KSPSetComputeOperators(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);
1095 
1096  If neither of the above three tests passed, then PCTelescope cannot safely determine what
1097  context should be provided to KSPSetComputeOperators() for use with the sub-KSP.
1098  In this case, an additional mechanism is provided via a composed function which will return
1099  the actual context to be used. To use this feature you must compose the "getter" function
1100  with the coarse DM, e.g.
1101 .vb
1102    {
1103      DM dm_coarse;
1104      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1105    }
1106 .ve
1107  The signature of the user provided method is
1108 .vb
1109    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1110 .ve
1111 
1112  Level: advanced
1113 
1114 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetUseCoarseDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1122 
1123 /*@
1124  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
1125 
1126  Not Collective
1127 
1128  Input Parameter:
1129 .  pc - the preconditioner context
1130 
1131  Output Parameter:
1132 .  v - the flag
1133 
1134  Level: advanced
1135 
1136 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1144 
1145 /*@
1146  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
1147 
1148  Not Collective
1149 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
1155 
1156  Level: advanced
1157 
1158 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1166 
1167 /*@
1168  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
1169 
1170  Not Collective
1171 
1172  Input Parameter:
1173 .  pc - the preconditioner context
1174 
1175  Output Parameter:
1176 .  subdm - The re-partitioned DM
1177 
1178  Level: advanced
1179 
1180 @*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1188 
1189 /*@
1190  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1191 
1192  Logically Collective
1193 
1194  Input Parameter:
1195 +  pc - the preconditioner context
1196 -  subcommtype - the subcommunicator type (see PetscSubcommType)
1197 
1198  Level: advanced
1199 
1200 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
1201 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not a PCTELESCOPE */
  ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1209 
1210 /*@
1211  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
1212 
1213  Not Collective
1214 
1215  Input Parameter:
1216 .  pc - the preconditioner context
1217 
1218  Output Parameter:
1219 .  subcommtype - the subcommunicator type (see PetscSubcommType)
1220 
1221  Level: advanced
1222 
1223 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
1224 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on the PC; PetscUseMethod errors if pc is not a PCTELESCOPE */
  ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1232 
1233 /* -------------------------------------------------------------------------------------*/
1234 /*MC
1235    PCTELESCOPE - Runs a KSP solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.
1236 
1237    Options Database:
1238 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1239 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored.
1240 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. see PetscSubcomm for more information.
1241 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether KSPSetComputeOperators should be used on the sub-KSP.
1242 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse DM should be used to define the sub-communicator.
1243 
1244    Level: advanced
1245 
1246    Notes:
1247    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
1248    creates a child sub-communicator (c') containing fewer MPI ranks than the original parent preconditioner (PC).
1249    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
1250    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
1251    This means there will be MPI ranks which will be idle during the application of this preconditioner.
1252    Additionally, in comparison with PCREDUNDANT, PCTELESCOPE can utilize an attached DM.
1253 
1254    The default type of the sub KSP (the KSP defined on c') is PREONLY.
1255 
1256    There are three setup mechanisms for PCTelescope. Features support by each type are described below.
1257    In the following, we will refer to the operators B and B', these are the Bmat provided to the KSP on the
1258    communicators c and c' respectively.
1259 
1260    [1] Default setup
1261    The sub-communicator c' is created via PetscSubcommCreate().
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1264    No support is provided for KSPSetComputeOperators().
1265    Currently there is no support for the flag -pc_use_amat.
1266 
1267    [2] DM aware setup
1268    If a DM is attached to the PC, it is re-partitioned on the sub-communicator c'.
1269    c' is created via PetscSubcommCreate().
1270    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
1271    Currently only support for re-partitioning a DMDA is provided.
1272    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1274    Support is provided for KSPSetComputeOperators(). The user provided function and context is propagated to the sub KSP.
1275    This is fragile since the user must ensure that their user context is valid for use on c'.
1276    Currently there is no support for the flag -pc_use_amat.
1277 
1278    [3] Coarse DM setup
1279    If a DM (dmfine) is attached to the PC, dmfine is queried for a "coarse" DM (call this dmcoarse) via DMGetCoarseDM().
1280    PCTELESCOPE will interpret the coarse DM as being defined on a sub-communicator of c.
1281    The communicator associated with dmcoarse will define the c' to be used within PCTELESCOPE.
1282    PCTELESCOPE will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1283    The intention of this setup type is that PCTELESCOPE will use an existing (e.g. user defined) communicator hierarchy, say as would be
1284    available with using multi-grid on unstructured meshes.
1285    This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
1286    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to MatNullSpaceSetFunction()).
1288    There is no general method to permute field orderings, hence only KSPSetComputeOperators() is supported.
1289    The user must use PetscObjectComposeFunction() with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for KSPSetComputeOperators() on the sub KSP is attempted by querying the DM contexts associated with dmfine and dmcoarse. Alternatively, the user may use PetscObjectComposeFunction() with dmcoarse to define a method which will return the appropriate user context for KSPSetComputeOperators().
1291    Currently there is no support for the flag -pc_use_amat.
1292    This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);
1293    Further information about the user-provided methods required by this setup type are described here PCTelescopeSetUseCoarseDM().
1294 
1295    Developer Notes:
1296    During PCSetup, the B operator is scattered onto c'.
1297    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1298    Then, KSPSolve() is executed on the c' communicator.
1299 
1300    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
1301    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
1302 
1303    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1304    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1305    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1306 
1307    The telescoping preconditioner can re-partition an attached DM if it is a DMDA (2D or 3D -
1308    support for 1D DMDAs is not provided). If a DMDA is found, a topologically equivalent DMDA is created on c'
1309    and this new DM is attached the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
1311    Alternatively, user-provided re-partitioned DMs can be used via -pc_telescope_use_coarse_dm.
1312 
1313    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.
1314 
1315    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1316    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
1317    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
1318 
1319    Limitations/improvements include the following.
1320    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
1321    A unified mechanism to query for user contexts as required by KSPSetComputeOperators() and MatNullSpaceSetFunction().
1322 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
1327 
1328    Mapping of vectors (default setup mode) is performed in the following way.
1329    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1330    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1331    We perform the scatter to the sub-communicator in the following way.
1332    [1] Given a vector x defined on communicator c
1333 
1334 .vb
1335    rank(c)  local values of x
1336    ------- ----------------------------------------
1337         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1338         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1339         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1340         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1341 .ve
1342 
1343    scatter into xtmp defined also on comm c, so that we have the following values
1344 
1345 .vb
1346    rank(c)  local values of xtmp
1347    ------- ----------------------------------------------------------------------------
1348         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1349         1   [ ]
1350         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1351         3   [ ]
1352 .ve
1353 
1354    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1355 
1356    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1357    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1358 
1359 .vb
1360    rank(c')  local values of xred
1361    -------- ----------------------------------------------------------------------------
1362          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1363          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1364 .ve
1365 
1366   Contributed by Dave May
1367 
1368   Reference:
1369   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1370 
1371 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1372 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the implementation context and set the defaults:
     interlaced subcomm, no reduction (factor 1), DM and KSPSetComputeOperators() honored.
     Members not assigned below rely on PetscNewLog() zero-initializing the struct. */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->psubcomm       = NULL;
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm        = MPI_COMM_NULL;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm  = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Hook up the PC interface; applytranspose is not implemented */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default setup callbacks; replaced during PCSetUp when a DM / coarse DM drives the setup */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Publish the type-specific API so the PCTelescopeGet/Set wrappers can dispatch to it */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetUseCoarseDM_C",PCTelescopeGetUseCoarseDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetUseCoarseDM_C",PCTelescopeSetUseCoarseDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1417