xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision 94ef8dde638caef1d0cd84a7dc8a2db65fcda8b6)
1 
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* Citation registered once (guarded by 'cited') on first use of PCApply_Telescope;
   see PetscCitationsRegister. */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";
28 
29 /*
30  PCTelescopeSetUp_default()
31  PCTelescopeMatCreate_default()
32 
33  default
34 
35  // scatter in
36  x(comm) -> xtmp(comm)
37 
38  xred(subcomm) <- xtmp
39  yred(subcomm)
40 
41  yred(subcomm) --> xtmp
42 
43  // scatter out
44  xtmp(comm) -> y(comm)
45 */
46 
47 PetscBool isActiveRank(PetscSubcomm scomm)
48 {
49   if (scomm->color == 0) { return PETSC_TRUE; }
50   else { return PETSC_FALSE; }
51 }
52 
53 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
54 {
55   DM subdm = NULL;
56 
57   if (!isActiveRank(sred->psubcomm)) { subdm = NULL; }
58   else {
59     switch (sred->sr_type) {
60     case TELESCOPE_DEFAULT: subdm = NULL;
61       break;
62     case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
63       break;
64     case TELESCOPE_DMPLEX:  subdm = NULL;
65       break;
66     }
67   }
68   return(subdm);
69 }
70 
/*
 PCTelescopeSetUp_default - Creates the work objects for the default implementation:
 xred/yred (solution-sized vectors on the sub-communicator, NULL on inactive ranks),
 xtmp (a vector on the parent communicator whose local layout matches xred), and the
 scatter moving entries of the parent-communicator vector into xtmp.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscErrorCode ierr;
  PetscInt       m,M,bs,st,ed;
  Vec            x,xred,yred,xtmp;
  Mat            B;
  MPI_Comm       comm,subcomm;
  VecScatter     scatter;
  IS             isin;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: setup (default)\n");CHKERRQ(ierr);
  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* Global size, block size, and a template vector come from the preconditioner operator. */
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&M,NULL);CHKERRQ(ierr);
  ierr = MatGetBlockSize(B,&bs);CHKERRQ(ierr);
  ierr = MatCreateVecs(B,&x,NULL);CHKERRQ(ierr);

  /* xred: same global size M, but distributed over the (smaller) sub-communicator. */
  xred = NULL;
  m    = 0;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecCreate(subcomm,&xred);CHKERRQ(ierr);
    ierr = VecSetSizes(xred,PETSC_DECIDE,M);CHKERRQ(ierr);
    ierr = VecSetBlockSize(xred,bs);CHKERRQ(ierr);
    ierr = VecSetFromOptions(xred);CHKERRQ(ierr);
    ierr = VecGetLocalSize(xred,&m);CHKERRQ(ierr);
  }

  /* yred: companion vector for the redundant solve. */
  yred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecDuplicate(xred,&yred);CHKERRQ(ierr);
  }

  /* xtmp lives on the parent comm but with local size m (0 on inactive ranks),
     so its local entries line up with those of xred on the active ranks. */
  ierr = VecCreate(comm,&xtmp);CHKERRQ(ierr);
  ierr = VecSetSizes(xtmp,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(xtmp,bs);CHKERRQ(ierr);
  ierr = VecSetType(xtmp,((PetscObject)x)->type_name);CHKERRQ(ierr);

  /* Index set of entries this rank pulls from x: the ownership range of xred on
     active ranks, an empty stride elsewhere. */
  if (isActiveRank(sred->psubcomm)) {
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,(ed-st),st,1,&isin);CHKERRQ(ierr);
  } else {
    ierr = VecGetOwnershipRange(x,&st,&ed);CHKERRQ(ierr);
    ierr = ISCreateStride(comm,0,st,1,&isin);CHKERRQ(ierr);
  }
  ierr = ISSetBlockSize(isin,bs);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,isin,xtmp,NULL,&scatter);CHKERRQ(ierr);

  /* Stash work objects on the telescope context; x was only a template. */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
/*
 PCTelescopeMatCreate_default - Builds (or updates) the redundant operator on the
 sub-communicator: each rank extracts its rows of B (all columns) as a sequential
 matrix, and the active ranks concatenate those pieces into a parallel matrix on
 the sub-communicator. Inactive ranks receive *A = NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode ierr;
  MPI_Comm       comm,subcomm;
  Mat            Bred,B;
  PetscInt       nr,nc;
  IS             isrow,iscol;
  Mat            Blocal,*_Blocal;

  PetscFunctionBegin;
  ierr = PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
  ierr = MatGetSize(B,&nr,&nc);CHKERRQ(ierr);
  /* Row selection built during setup (empty on inactive ranks); columns: everything. */
  isrow = sred->isin;
  ierr = ISCreateStride(comm,nc,0,1,&iscol);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);CHKERRQ(ierr);
  Blocal = *_Blocal;
  /* Keep the sequential sub-matrix, free only the array MatCreateSubMatrices allocated. */
  ierr = PetscFree(_Blocal);CHKERRQ(ierr);
  Bred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    PetscInt mm;

    /* On reuse, feed the previously created redundant matrix back in. */
    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }

    ierr = MatGetSize(Blocal,&mm,NULL);CHKERRQ(ierr);
    ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);CHKERRQ(ierr);
  }
  *A = Bred;
  ierr = ISDestroy(&iscol);CHKERRQ(ierr);
  ierr = MatDestroy(&Blocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
165 
/*
 PCTelescopeSubNullSpaceCreate_Telescope - Clones a (near) nullspace from the parent
 communicator onto the sub-communicator. Each nullspace basis vector is scattered
 into xtmp and its local entries copied into a duplicate of xred; active ranks then
 assemble the new MatNullSpace. Custom remove callbacks cannot be propagated.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode   ierr;
  PetscBool        has_const;
  const Vec        *vecs;
  Vec              *sub_vecs = NULL;
  PetscInt         i,k,n = 0;
  MPI_Comm         subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  ierr = MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);CHKERRQ(ierr);

  /* Active ranks allocate matching vectors on the sub-communicator. */
  if (isActiveRank(sred->psubcomm)) {
    if (n) {
      ierr = VecDuplicateVecs(sred->xred,n,&sub_vecs);CHKERRQ(ierr);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp (collective on the parent comm, so all ranks scatter) */
    ierr = VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    if (sub_vecs) {
      /* copy vector entries into xred; local layouts of xtmp and sub_vecs[k] agree */
      ierr = VecGetArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
      if (sub_vecs[k]) {
        ierr = VecGetOwnershipRange(sub_vecs[k],&st,&ed);CHKERRQ(ierr);
        ierr = VecGetArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        ierr = VecRestoreArray(sub_vecs[k],&LA_sub_vec);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(sred->xtmp,&x_array);CHKERRQ(ierr);
    }
  }

  if (isActiveRank(sred->psubcomm)) {
    /* create new (near) nullspace for redundant object */
    ierr = MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);CHKERRQ(ierr);
    ierr = VecDestroyVecs(n,&sub_vecs);CHKERRQ(ierr);
    /* remove callbacks/contexts cannot be transferred to the sub-communicator */
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(0);
}
218 
219 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
220 {
221   PetscErrorCode   ierr;
222   Mat              B;
223 
224   PetscFunctionBegin;
225   ierr = PCGetOperators(pc,NULL,&B);CHKERRQ(ierr);
226 
227   /* Propagate the nullspace if it exists */
228   {
229     MatNullSpace nullspace,sub_nullspace;
230     ierr = MatGetNullSpace(B,&nullspace);CHKERRQ(ierr);
231     if (nullspace) {
232       ierr = PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");CHKERRQ(ierr);
233       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nullspace,&sub_nullspace);CHKERRQ(ierr);
234       if (isActiveRank(sred->psubcomm)) {
235         ierr = MatSetNullSpace(sub_mat,sub_nullspace);CHKERRQ(ierr);
236         ierr = MatNullSpaceDestroy(&sub_nullspace);CHKERRQ(ierr);
237       }
238     }
239   }
240 
241   /* Propagate the near nullspace if it exists */
242   {
243     MatNullSpace nearnullspace,sub_nearnullspace;
244     ierr = MatGetNearNullSpace(B,&nearnullspace);CHKERRQ(ierr);
245     if (nearnullspace) {
246       ierr = PetscInfo(pc,"PCTelescope: generating near nullspace (default)\n");CHKERRQ(ierr);
247       ierr = PCTelescopeSubNullSpaceCreate_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);CHKERRQ(ierr);
248       if (isActiveRank(sred->psubcomm)) {
249         ierr = MatSetNearNullSpace(sub_mat,sub_nearnullspace);CHKERRQ(ierr);
250         ierr = MatNullSpaceDestroy(&sub_nearnullspace);CHKERRQ(ierr);
251       }
252     }
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
258 {
259   PC_Telescope     sred = (PC_Telescope)pc->data;
260   PetscErrorCode   ierr;
261   PetscBool        iascii,isstring;
262   PetscViewer      subviewer;
263 
264   PetscFunctionBegin;
265   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
266   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
267   if (iascii) {
268     if (!sred->psubcomm) {
269       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: preconditioner not yet setup\n");CHKERRQ(ierr);
270     } else {
271       MPI_Comm    comm,subcomm;
272       PetscMPIInt comm_size,subcomm_size;
273       DM          dm,subdm;
274 
275       ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
276       subdm = private_PCTelescopeGetSubDM(sred);
277       comm = PetscSubcommParent(sred->psubcomm);
278       subcomm = PetscSubcommChild(sred->psubcomm);
279       ierr = MPI_Comm_size(comm,&comm_size);CHKERRQ(ierr);
280       ierr = MPI_Comm_size(subcomm,&subcomm_size);CHKERRQ(ierr);
281 
282       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: parent comm size reduction factor = %D\n",sred->redfactor);CHKERRQ(ierr);
283       ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: comm_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);CHKERRQ(ierr);
284       switch (sred->subcommtype) {
285         case PETSC_SUBCOMM_INTERLACED :
286           ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: subcomm type: interlaced\n",sred->subcommtype);CHKERRQ(ierr);
287           break;
288         case PETSC_SUBCOMM_CONTIGUOUS :
289           ierr = PetscViewerASCIIPrintf(viewer,"  Telescope: subcomm type: contiguous\n",sred->subcommtype);CHKERRQ(ierr);
290           break;
291         default :
292           SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"General subcomm type not supported by PCTelescope");
293       }
294       ierr = PetscViewerGetSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
295       if (isActiveRank(sred->psubcomm)) {
296         ierr = PetscViewerASCIIPushTab(subviewer);CHKERRQ(ierr);
297 
298         if (dm && sred->ignore_dm) {
299           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: ignoring DM\n");CHKERRQ(ierr);
300         }
301         if (sred->ignore_kspcomputeoperators) {
302           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: ignoring KSPComputeOperators\n");CHKERRQ(ierr);
303         }
304         switch (sred->sr_type) {
305         case TELESCOPE_DEFAULT:
306           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: using default setup\n");CHKERRQ(ierr);
307           break;
308         case TELESCOPE_DMDA:
309           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: DMDA detected\n");CHKERRQ(ierr);
310           ierr = DMView_DMDAShort(subdm,subviewer);CHKERRQ(ierr);
311           break;
312         case TELESCOPE_DMPLEX:
313           ierr = PetscViewerASCIIPrintf(subviewer,"  Telescope: DMPLEX detected\n");CHKERRQ(ierr);
314           break;
315         }
316 
317         ierr = KSPView(sred->ksp,subviewer);CHKERRQ(ierr);
318         ierr = PetscViewerASCIIPopTab(subviewer);CHKERRQ(ierr);
319       }
320       ierr = PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);CHKERRQ(ierr);
321     }
322   }
323   PetscFunctionReturn(0);
324 }
325 
326 static PetscErrorCode PCSetUp_Telescope(PC pc)
327 {
328   PC_Telescope      sred = (PC_Telescope)pc->data;
329   PetscErrorCode    ierr;
330   MPI_Comm          comm,subcomm=0;
331   PCTelescopeType   sr_type;
332 
333   PetscFunctionBegin;
334   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
335 
336   /* subcomm definition */
337   if (!pc->setupcalled) {
338     if (!sred->psubcomm) {
339       ierr = PetscSubcommCreate(comm,&sred->psubcomm);CHKERRQ(ierr);
340       ierr = PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);CHKERRQ(ierr);
341       ierr = PetscSubcommSetType(sred->psubcomm,sred->subcommtype);CHKERRQ(ierr);
342       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
343     }
344   }
345   subcomm = PetscSubcommChild(sred->psubcomm);
346 
347   /* internal KSP */
348   if (!pc->setupcalled) {
349     const char *prefix;
350 
351     if (isActiveRank(sred->psubcomm)) {
352       ierr = KSPCreate(subcomm,&sred->ksp);CHKERRQ(ierr);
353       ierr = KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);CHKERRQ(ierr);
354       ierr = PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
355       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);CHKERRQ(ierr);
356       ierr = KSPSetType(sred->ksp,KSPPREONLY);CHKERRQ(ierr);
357       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
358       ierr = KSPSetOptionsPrefix(sred->ksp,prefix);CHKERRQ(ierr);
359       ierr = KSPAppendOptionsPrefix(sred->ksp,"telescope_");CHKERRQ(ierr);
360     }
361   }
362   /* Determine type of setup/update */
363   if (!pc->setupcalled) {
364     PetscBool has_dm,same;
365     DM        dm;
366 
367     sr_type = TELESCOPE_DEFAULT;
368     has_dm = PETSC_FALSE;
369     ierr = PCGetDM(pc,&dm);CHKERRQ(ierr);
370     if (dm) { has_dm = PETSC_TRUE; }
371     if (has_dm) {
372       /* check for dmda */
373       ierr = PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);CHKERRQ(ierr);
374       if (same) {
375         ierr = PetscInfo(pc,"PCTelescope: found DMDA\n");CHKERRQ(ierr);
376         sr_type = TELESCOPE_DMDA;
377       }
378       /* check for dmplex */
379       ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);CHKERRQ(ierr);
380       if (same) {
381         PetscInfo(pc,"PCTelescope: found DMPLEX\n");
382         sr_type = TELESCOPE_DMPLEX;
383       }
384     }
385 
386     if (sred->ignore_dm) {
387       PetscInfo(pc,"PCTelescope: ignore DM\n");
388       sr_type = TELESCOPE_DEFAULT;
389     }
390     sred->sr_type = sr_type;
391   } else {
392     sr_type = sred->sr_type;
393   }
394 
395   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
396   switch (sr_type) {
397   case TELESCOPE_DEFAULT:
398     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
399     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
400     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
401     sred->pctelescope_reset_type              = NULL;
402     break;
403   case TELESCOPE_DMDA:
404     pc->ops->apply                            = PCApply_Telescope_dmda;
405     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
406     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
407     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
408     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
409     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
410     break;
411   case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
412     break;
413   default: SETERRQ(comm,PETSC_ERR_SUP,"Only support for repartitioning DMDA is provided");
414     break;
415   }
416 
417   /* setup */
418   if (sred->pctelescope_setup_type) {
419     ierr = sred->pctelescope_setup_type(pc,sred);CHKERRQ(ierr);
420   }
421   /* update */
422   if (!pc->setupcalled) {
423     if (sred->pctelescope_matcreate_type) {
424       ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);CHKERRQ(ierr);
425     }
426     if (sred->pctelescope_matnullspacecreate_type) {
427       ierr = sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);CHKERRQ(ierr);
428     }
429   } else {
430     if (sred->pctelescope_matcreate_type) {
431       ierr = sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);CHKERRQ(ierr);
432     }
433   }
434 
435   /* common - no construction */
436   if (isActiveRank(sred->psubcomm)) {
437     ierr = KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);CHKERRQ(ierr);
438     if (pc->setfromoptionscalled && !pc->setupcalled){
439       ierr = KSPSetFromOptions(sred->ksp);CHKERRQ(ierr);
440     }
441   }
442   PetscFunctionReturn(0);
443 }
444 
/*
 PCApply_Telescope - Applies the telescoped preconditioner: scatter x into xtmp,
 copy the local entries into xred, solve redundantly with the inner KSP on the
 active ranks, copy yred back into xtmp, and reverse-scatter into y.
*/
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  PetscErrorCode    ierr;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister(citation,&cited);CHKERRQ(ierr);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  ierr = VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* copy vector entries into xred (local layouts of xtmp and xred agree by setup) */
  ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  if (xred) {
    PetscScalar *LA_xred;
    ierr = VecGetOwnershipRange(xred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArray(xred,&LA_xred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    ierr = VecRestoreArray(xred,&LA_xred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
  /* solve on the sub-communicator only */
  if (isActiveRank(sred->psubcomm)) {
    ierr = KSPSolve(sred->ksp,xred,yred);CHKERRQ(ierr);
  }
  /* return vector: copy yred back into xtmp, then reverse-scatter to y */
  ierr = VecGetArray(xtmp,&array);CHKERRQ(ierr);
  if (yred) {
    const PetscScalar *LA_yred;
    ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
    ierr = VecGetArrayRead(yred,&LA_yred);CHKERRQ(ierr);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    ierr = VecRestoreArrayRead(yred,&LA_yred);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(xtmp,&array);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
499 
500 static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
501 {
502   PC_Telescope      sred = (PC_Telescope)pc->data;
503   PetscErrorCode    ierr;
504   Vec               xtmp,yred;
505   PetscInt          i,st,ed;
506   VecScatter        scatter;
507   const PetscScalar *x_array;
508   PetscBool         default_init_guess_value;
509 
510   PetscFunctionBegin;
511   xtmp    = sred->xtmp;
512   scatter = sred->scatter;
513   yred    = sred->yred;
514 
515   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
516   *reason = (PCRichardsonConvergedReason)0;
517 
518   if (!zeroguess) {
519     ierr = PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");CHKERRQ(ierr);
520     /* pull in vector y->xtmp */
521     ierr = VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
522     ierr = VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
523 
524     /* copy vector entries into xred */
525     ierr = VecGetArrayRead(xtmp,&x_array);CHKERRQ(ierr);
526     if (yred) {
527       PetscScalar *LA_yred;
528       ierr = VecGetOwnershipRange(yred,&st,&ed);CHKERRQ(ierr);
529       ierr = VecGetArray(yred,&LA_yred);CHKERRQ(ierr);
530       for (i=0; i<ed-st; i++) {
531         LA_yred[i] = x_array[i];
532       }
533       ierr = VecRestoreArray(yred,&LA_yred);CHKERRQ(ierr);
534     }
535     ierr = VecRestoreArrayRead(xtmp,&x_array);CHKERRQ(ierr);
536   }
537 
538   if (isActiveRank(sred->psubcomm)) {
539     ierr = KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);CHKERRQ(ierr);
540     if (!zeroguess) ierr = KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);CHKERRQ(ierr);
541   }
542 
543   ierr = PCApply_Telescope(pc,x,y);CHKERRQ(ierr);
544 
545   if (isActiveRank(sred->psubcomm)) {
546     ierr = KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);CHKERRQ(ierr);
547   }
548 
549   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
550   *outits = 1;
551   PetscFunctionReturn(0);
552 }
553 
554 static PetscErrorCode PCReset_Telescope(PC pc)
555 {
556   PC_Telescope   sred = (PC_Telescope)pc->data;
557   PetscErrorCode ierr;
558 
559   ierr = ISDestroy(&sred->isin);CHKERRQ(ierr);
560   ierr = VecScatterDestroy(&sred->scatter);CHKERRQ(ierr);
561   ierr = VecDestroy(&sred->xred);CHKERRQ(ierr);
562   ierr = VecDestroy(&sred->yred);CHKERRQ(ierr);
563   ierr = VecDestroy(&sred->xtmp);CHKERRQ(ierr);
564   ierr = MatDestroy(&sred->Bred);CHKERRQ(ierr);
565   ierr = KSPReset(sred->ksp);CHKERRQ(ierr);
566   if (sred->pctelescope_reset_type) {
567     ierr = sred->pctelescope_reset_type(pc);CHKERRQ(ierr);
568   }
569   PetscFunctionReturn(0);
570 }
571 
572 static PetscErrorCode PCDestroy_Telescope(PC pc)
573 {
574   PC_Telescope     sred = (PC_Telescope)pc->data;
575   PetscErrorCode   ierr;
576 
577   PetscFunctionBegin;
578   ierr = PCReset_Telescope(pc);CHKERRQ(ierr);
579   ierr = KSPDestroy(&sred->ksp);CHKERRQ(ierr);
580   ierr = PetscSubcommDestroy(&sred->psubcomm);CHKERRQ(ierr);
581   ierr = PetscFree(sred->dm_ctx);CHKERRQ(ierr);
582   ierr = PetscFree(pc->data);CHKERRQ(ierr);
583   PetscFunctionReturn(0);
584 }
585 
/*
 PCSetFromOptions_Telescope - Processes the -pc_telescope_* options:
 subcomm type, reduction factor (validated against the comm size),
 ignore-DM and ignore-KSPComputeOperators flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  PetscErrorCode   ierr;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Telescope options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_telescope_subcomm_type","Subcomm type (interlaced or contiguous)","PCTelescopeSetSubcommType",PetscSubcommTypes,(PetscEnum)sred->subcommtype,(PetscEnum*)&subcommtype,&flg);CHKERRQ(ierr);
  /* only route through the setter when the option was actually given */
  if (flg) {
    ierr = PCTelescopeSetSubcommType(pc,subcommtype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);CHKERRQ(ierr);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  ierr = PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
610 
/* PC implementation-specific APIs */
612 
613 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
614 {
615   PC_Telescope red = (PC_Telescope)pc->data;
616   PetscFunctionBegin;
617   if (ksp) *ksp = red->ksp;
618   PetscFunctionReturn(0);
619 }
620 
621 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc,PetscSubcommType *subcommtype)
622 {
623   PC_Telescope red = (PC_Telescope)pc->data;
624   PetscFunctionBegin;
625   if (subcommtype) *subcommtype = red->subcommtype;
626   PetscFunctionReturn(0);
627 }
628 
629 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc,PetscSubcommType subcommtype)
630 {
631   PC_Telescope     red = (PC_Telescope)pc->data;
632 
633   PetscFunctionBegin;
634   if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"You cannot change the subcommunicator type for PCTelescope after it has been set up.");
635   red->subcommtype = subcommtype;
636   PetscFunctionReturn(0);
637 }
638 
639 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
640 {
641   PC_Telescope red = (PC_Telescope)pc->data;
642   PetscFunctionBegin;
643   if (fact) *fact = red->redfactor;
644   PetscFunctionReturn(0);
645 }
646 
647 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
648 {
649   PC_Telescope     red = (PC_Telescope)pc->data;
650   PetscMPIInt      size;
651   PetscErrorCode   ierr;
652 
653   PetscFunctionBegin;
654   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
655   if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
656   if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
657   red->redfactor = fact;
658   PetscFunctionReturn(0);
659 }
660 
661 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
662 {
663   PC_Telescope red = (PC_Telescope)pc->data;
664   PetscFunctionBegin;
665   if (v) *v = red->ignore_dm;
666   PetscFunctionReturn(0);
667 }
668 
669 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
670 {
671   PC_Telescope red = (PC_Telescope)pc->data;
672   PetscFunctionBegin;
673   red->ignore_dm = v;
674   PetscFunctionReturn(0);
675 }
676 
677 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
678 {
679   PC_Telescope red = (PC_Telescope)pc->data;
680   PetscFunctionBegin;
681   if (v) *v = red->ignore_kspcomputeoperators;
682   PetscFunctionReturn(0);
683 }
684 
685 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
686 {
687   PC_Telescope red = (PC_Telescope)pc->data;
688   PetscFunctionBegin;
689   red->ignore_kspcomputeoperators = v;
690   PetscFunctionReturn(0);
691 }
692 
693 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
694 {
695   PC_Telescope red = (PC_Telescope)pc->data;
696   PetscFunctionBegin;
697   *dm = private_PCTelescopeGetSubDM(red);
698   PetscFunctionReturn(0);
699 }
700 
701 /*@
702  PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.
703 
704  Not Collective
705 
706  Input Parameter:
707 .  pc - the preconditioner context
708 
709  Output Parameter:
.  subksp - the KSP defined on the smaller set of processes
711 
712  Level: advanced
713 
.keywords: PC, telescoping solve
715 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if pc does not provide PCTelescopeGetKSP_C (i.e. is not PCTELESCOPE) */
  ierr = PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
723 
724 /*@
725  PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced by.
726 
727  Not Collective
728 
729  Input Parameter:
730 .  pc - the preconditioner context
731 
732  Output Parameter:
733 .  fact - the reduction factor
734 
735  Level: advanced
736 
737 .keywords: PC, telescoping solve
738 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
746 
747 /*@
748  PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes has been reduced by.
749 
750  Not Collective
751 
 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor
757 
758  Level: advanced
759 
760 .keywords: PC, telescoping solve
761 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently ignored when the PC type does not implement it */
  ierr = PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
769 
770 /*@
771  PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.
772 
773  Not Collective
774 
775  Input Parameter:
776 .  pc - the preconditioner context
777 
778  Output Parameter:
779 .  v - the flag
780 
781  Level: advanced
782 
783 .keywords: PC, telescoping solve
784 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
792 
793 /*@
794  PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.
795 
796  Not Collective
797 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore any DM
803 
804  Level: advanced
805 
806 .keywords: PC, telescoping solve
807 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscTryMethod: silently ignored when the PC type does not implement it */
  ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
815 
816 /*@
817  PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.
818 
819  Not Collective
820 
821  Input Parameter:
822 .  pc - the preconditioner context
823 
824  Output Parameter:
825 .  v - the flag
826 
827  Level: advanced
828 
829 .keywords: PC, telescoping solve
830 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  /* PetscUseMethod: errors if the PC type does not implement this query */
  ierr = PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
838 
839 /*@
840  PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.
841 
842  Not Collective
843 
 Input Parameters:
+  pc - the preconditioner context
-  v - Use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc
849 
850  Level: advanced
851 
852 .keywords: PC, telescoping solve
853 @*/
854 PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
855 {
856   PetscErrorCode ierr;
857   PetscFunctionBegin;
858   ierr = PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 /*@
863  PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.
864 
865  Not Collective
866 
867  Input Parameter:
868 .  pc - the preconditioner context
869 
870  Output Parameter:
871 .  subdm - The re-partitioned DM
872 
873  Level: advanced
874 
875 .keywords: PC, telescoping solve
876 @*/
877 PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
878 {
879   PetscErrorCode ierr;
880   PetscFunctionBegin;
881   ierr = PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 /*@
886  PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
887 
888  Logically Collective
889 
890  Input Parameter:
891 +  pc - the preconditioner context
892 -  subcommtype - the subcommunicator type (see PetscSubcommType)
893 
894  Level: advanced
895 
896 .keywords: PC, telescoping solve
897 
898 .seealso: PetscSubcommType, PetscSubcomm, PCTELESCOPE
899 @*/
900 PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
901 {
902   PetscErrorCode ierr;
903   PetscFunctionBegin;
904   ierr = PetscTryMethod(pc,"PCTelescopeSetSubcommType_C",(PC,PetscSubcommType),(pc,subcommtype));CHKERRQ(ierr);
905   PetscFunctionReturn(0);
906 }
907 
908 /*@
909  PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous)
910 
911  Not Collective
912 
913  Input Parameter:
914 .  pc - the preconditioner context
915 
916  Output Parameter:
917 .  subcommtype - the subcommunicator type (see PetscSubcommType)
918 
919  Level: advanced
920 
921 .keywords: PC, telescoping solve
922 
923 .seealso: PetscSubcomm, PetscSubcommType, PCTELESCOPE
924 @*/
925 PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
926 {
927   PetscErrorCode ierr;
928   PetscFunctionBegin;
929   ierr = PetscUseMethod(pc,"PCTelescopeGetSubcommType_C",(PC,PetscSubcommType*),(pc,subcommtype));CHKERRQ(ierr);
930   PetscFunctionReturn(0);
931 }
932 
933 /* -------------------------------------------------------------------------------------*/
934 /*MC
935    PCTELESCOPE - Runs a KSP solver on a sub-group of processors. MPI processes not in the sub-communicator are idle during the solve.
936 
937    Options Database:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI processes and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm  - flag to indicate whether an attached DM should be ignored
-  -pc_telescope_subcomm_type <interlaced,contiguous> - how to define the reduced communicator. see PetscSubcomm for more.
941 
942    Level: advanced
943 
944    Notes:
945    The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
946    sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
947    This means there will be MPI processes which will be idle during the application of this preconditioner.
948 
949    The default KSP is PREONLY. If a DM is attached to the PC, it is re-partitioned on the sub-communicator.
950    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
951    Currently only support for re-partitioning a DMDA is provided.
952    Any nullspace attached to the original Bmat operator is extracted, re-partitioned and set on the repartitioned Bmat operator.
953    KSPSetComputeOperators() is not propagated to the sub KSP.
954    Currently there is no support for the flag -pc_use_amat
955 
956    Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
957    creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner (PC).
958 
959   Developer Notes:
960    During PCSetup, the B operator is scattered onto c'.
961    Within PCApply, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
962    Then, KSPSolve() is executed on the c' communicator.
963 
964    The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
965    creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
966 
967    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
968    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
969    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
970 
   The telescoping preconditioner is aware of an attached DM. In the event that the DM is of type DMDA (2D or 3D -
   support for 1D DMDAs is not provided), a new DMDA is created on c' (e.g. it is re-partitioned), and this new DM
   is attached to the sub KSPSolve(). The design of telescope is such that it should be possible to extend support
   for re-partitioning other DM types (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
975 
976    By default, B' is defined by simply fusing rows from different MPI processes
977 
978    When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
979    into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatCreateSubMatrices(), (iii) fusing the
980    locally (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat()
981 
982    Limitations/improvements include the following.
983    VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage.
984 
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. Possibly it might be more efficient to use MatPermute(). We opted to use P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.
989 
990    Mapping of vectors is performed in the following way.
991    Suppose the parent comm size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
992    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
993    We perform the scatter to the sub-comm in the following way.
994    [1] Given a vector x defined on comm c
995 
996    rank(c) : _________ 0 ______  ________ 1 _______  ________ 2 _____________ ___________ 3 __________
997          x : [0, 1, 2, 3, 4, 5] [6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17] [18, 19, 20, 21, 22, 23]
998 
999    scatter to xtmp defined also on comm c so that we have the following values
1000 
1001    rank(c) : ___________________ 0 ________________  _1_  ______________________ 2 _______________________  __3_
1002       xtmp : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [  ] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23] [  ]
1003 
1004    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1005 
1006 
1007    [2] Copy the value from rank 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1008    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1009 
1010     rank(c') : ___________________ 0 _______________  ______________________ 1 _____________________
1011       xred : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
1012 
1013 
1014   Contributed by Dave May
1015 
1016   Reference:
1017   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1018 
1019 .seealso:  PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
1020 M*/
/* Constructor for PCTELESCOPE: allocates the context, installs default settings,
   fills the PC ops table and registers the PCTelescope{Get,Set}XXX_C dispatch targets. */
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  PetscErrorCode       ierr;
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the private context and set the defaults:
     interlaced subcomm, no reduction (factor 1), honor any attached DM
     and any KSPComputeOperators callback. */
  ierr = PetscNewLog(pc,&sred);CHKERRQ(ierr);
  sred->subcommtype    = PETSC_SUBCOMM_INTERLACED;
  sred->redfactor      = 1;
  sred->ignore_dm      = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  pc->data             = (void*)sred;

  /* Standard PC function table; applytranspose is intentionally unsupported */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default (no-DM) setup/matrix/nullspace strategies; PCSetUp_Telescope swaps
     these for the DMDA variants when a suitable DM is attached. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Compose the type-specific implementations so the public
     PCTelescopeGetXXX()/PCTelescopeSetXXX() wrappers can dispatch to them. */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetSubcommType_C",PCTelescopeGetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetSubcommType_C",PCTelescopeSetSubcommType_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1060