xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision f13dfd9ea68e0ddeee984e65c377a1819eab8a8a)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h>  /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
8 static PetscBool  cited      = PETSC_FALSE;
9 static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
10                                "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
11                                "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
12                                "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
13                                "  series    = {PASC '16},\n"
14                                "  isbn      = {978-1-4503-4126-4},\n"
15                                "  location  = {Lausanne, Switzerland},\n"
16                                "  pages     = {5:1--5:12},\n"
17                                "  articleno = {5},\n"
18                                "  numpages  = {12},\n"
19                                "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
20                                "  doi       = {10.1145/2929908.2929913},\n"
21                                "  acmid     = {2929913},\n"
22                                "  publisher = {ACM},\n"
23                                "  address   = {New York, NY, USA},\n"
24                                "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
25                                "  year      = {2016}\n"
26                                "}\n";
27 
28 /*
29  default setup mode
30 
31  [1a] scatter to (FORWARD)
32  x(comm) -> xtmp(comm)
33  [1b] local copy (to) ranks with color = 0
34  xred(subcomm) <- xtmp
35 
36  [2] solve on sub KSP to obtain yred(subcomm)
37 
38  [3a] local copy (from) ranks with color = 0
39  yred(subcomm) --> xtmp
 40  [3b] scatter from (REVERSE)
41  xtmp(comm) -> y(comm)
42 */
43 
/*
  Collective[comm_f]

  Decides whether comm_c can serve as a sub-communicator of comm_f. The verdict is
  reduced with MPI_MIN over comm_f, so every rank of comm_f receives the same *isvalid.

  Notes
   * Using comm_f = MPI_COMM_NULL will result in an error
   * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
   * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
static PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
{
  PetscInt     valid = 1; /* local vote; any rank can veto via the MPI_MIN reduction below */
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");

  /* group_c is only created (and later freed) on ranks where comm_c is non-NULL */
  PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));

  PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f, &ranks_f));
  PetscCall(PetscMalloc1(size_c, &ranks_c));
  /* seed every translated slot with MPI_UNDEFINED; the translate call below only
     overwrites the first size_c entries, the remainder stay undefined by design */
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  /* all size_f slots undefined => not a single rank of comm_c maps into comm_f */
  if (count == size_f) valid = 0;

  /* collective verdict: valid only if valid on every rank of comm_f */
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
  PetscFunctionReturn(PETSC_SUCCESS);
}
104 
105 static DM private_PCTelescopeGetSubDM(PC_Telescope sred)
106 {
107   DM subdm = NULL;
108 
109   if (!PCTelescope_isActiveRank(sred)) {
110     subdm = NULL;
111   } else {
112     switch (sred->sr_type) {
113     case TELESCOPE_DEFAULT:
114       subdm = NULL;
115       break;
116     case TELESCOPE_DMDA:
117       subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
118       break;
119     case TELESCOPE_DMPLEX:
120       subdm = NULL;
121       break;
122     case TELESCOPE_COARSEDM:
123       if (sred->ksp) PetscCallAbort(PETSC_COMM_SELF, KSPGetDM(sred->ksp, &subdm));
124       break;
125     }
126   }
127   return subdm;
128 }
129 
/*
  Default setup: creates the work vectors and the scatter used to move data between
  the parent communicator and the sub-communicator (see the "default setup mode"
  comment at the top of this file).

  On active ranks: xred/yred are created on the subcomm; xtmp is a parent-comm vector
  whose local size matches xred's local size, so the local copy in PCApply_Telescope
  is a straight array copy. On inactive ranks xred/yred stay NULL and xtmp has local
  size 0.
*/
static PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
{
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &M, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetVecType(B, &vectype));

  /* xred: right-hand-side work vector on the subcomm; m is its local size (0 on inactive ranks) */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm, &xred));
    PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
    PetscCall(VecSetBlockSize(xred, bs));
    PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred, &m));
  }

  /* yred: solution work vector, same layout as xred */
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));

  /* xtmp: parent-comm staging vector; local size m mirrors xred's layout */
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));

  /* isin: the global indices of x this rank receives into xtmp (empty on inactive ranks) */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));

  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  /* stash everything on the PC context; x was only needed as a layout template */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
189 
/*
  Default operator creation/update: gathers the rows of B selected by sred->isin into a
  sequential matrix on every rank, then concatenates those pieces into the redundant
  operator Bred on the sub-communicator. With reuse = MAT_REUSE_MATRIX the existing
  *A is updated in place; on inactive ranks *A is set to NULL.
*/
static PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
{
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &nr, &nc));
  /* rows: the IS built during setup; columns: all of them (identity) */
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B, NULL, &bs));
  PetscCall(ISSetBlockSize(iscol, bs));
  /* the same column IS is used on all ranks, which enables an optimized gather */
  PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal)); /* frees only the array of Mat pointers, not Blocal itself */
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    PetscCall(MatGetSize(Blocal, &mm, NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(PETSC_SUCCESS);
}
227 
/*
  Builds a (near) nullspace on the sub-communicator mirroring the given parent-comm
  nullspace. Each basis vector is scattered into sred->xtmp and its local entries are
  copied into a subcomm vector duplicated from sred->xred.

  Collective on the parent comm: the scatter inside the k-loop involves every rank,
  so inactive ranks (sub_vecs == NULL) must still execute the loop.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
        PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
        /* xtmp's local layout matches the subcomm vector's, so a direct copy suffices */
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
    PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
275 
276 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
277 {
278   Mat B;
279 
280   PetscFunctionBegin;
281   PetscCall(PCGetOperators(pc, NULL, &B));
282   /* Propagate the nullspace if it exists */
283   {
284     MatNullSpace nullspace, sub_nullspace;
285     PetscCall(MatGetNullSpace(B, &nullspace));
286     if (nullspace) {
287       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
288       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
289       if (PCTelescope_isActiveRank(sred)) {
290         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
291         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
292       }
293     }
294   }
295   /* Propagate the near nullspace if it exists */
296   {
297     MatNullSpace nearnullspace, sub_nearnullspace;
298     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
299     if (nearnullspace) {
300       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
301       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
302       if (PCTelescope_isActiveRank(sred)) {
303         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
304         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
305       }
306     }
307   }
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
/*
  Viewer for PCTELESCOPE (ASCII only; the isstring query is made but no string output
  is produced here). Prints the subcomm configuration on the parent viewer, then uses
  a sub-viewer on the sub-communicator to print DM information and the inner KSP.
  Note: Get/RestoreSubViewer are called on every rank; only active ranks print to it.
*/
static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii, isstring;
  PetscViewer  subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  if (iascii) {
    {
      MPI_Comm    comm, subcomm;
      PetscMPIInt comm_size, subcomm_size;
      DM          dm = NULL, subdm = NULL;

      PetscCall(PCGetDM(pc, &dm));
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        /* subcomm came from a PetscSubcomm reduction: report factor, sizes, and type */
        comm    = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        PetscCallMPI(MPI_Comm_size(comm, &comm_size));
        PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED:
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        case PETSC_SUBCOMM_CONTIGUOUS:
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        default:
          SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
        }
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        /* user/coarse-DM supplied subcomm; inactive ranks use PETSC_COMM_SELF so the
           collective GetSubViewer call below has a valid communicator everywhere */
        PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }

      PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(PetscViewerASCIIPushTab(subviewer));

        if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
        if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n"));
          break;
        case TELESCOPE_DMDA:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
          PetscCall(DMView_DA_Short(subdm, subviewer));
          break;
        case TELESCOPE_DMPLEX:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n"));
          break;
        case TELESCOPE_COARSEDM:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n"));
          break;
        }

        /* one-line summaries of the parent and sub DM objects (type/name/prefix);
           tabs are suspended so the fields print on a single line */
        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
        }

        PetscCall(KSPView(sred->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
414 
415 static PetscErrorCode PCSetUp_Telescope(PC pc)
416 {
417   PC_Telescope    sred = (PC_Telescope)pc->data;
418   MPI_Comm        comm, subcomm = 0;
419   PCTelescopeType sr_type;
420 
421   PetscFunctionBegin;
422   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
423 
424   /* Determine type of setup/update */
425   if (!pc->setupcalled) {
426     PetscBool has_dm, same;
427     DM        dm;
428 
429     sr_type = TELESCOPE_DEFAULT;
430     has_dm  = PETSC_FALSE;
431     PetscCall(PCGetDM(pc, &dm));
432     if (dm) has_dm = PETSC_TRUE;
433     if (has_dm) {
434       /* check for dmda */
435       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
436       if (same) {
437         PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
438         sr_type = TELESCOPE_DMDA;
439       }
440       /* check for dmplex */
441       PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
442       if (same) {
443         PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
444         sr_type = TELESCOPE_DMPLEX;
445       }
446 
447       if (sred->use_coarse_dm) {
448         PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
449         sr_type = TELESCOPE_COARSEDM;
450       }
451 
452       if (sred->ignore_dm) {
453         PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
454         sr_type = TELESCOPE_DEFAULT;
455       }
456     }
457     sred->sr_type = sr_type;
458   } else {
459     sr_type = sred->sr_type;
460   }
461 
462   /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
463   switch (sr_type) {
464   case TELESCOPE_DEFAULT:
465     sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
466     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
467     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
468     sred->pctelescope_reset_type              = NULL;
469     break;
470   case TELESCOPE_DMDA:
471     pc->ops->apply                            = PCApply_Telescope_dmda;
472     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
473     sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
474     sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
475     sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
476     sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
477     break;
478   case TELESCOPE_DMPLEX:
479     SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
480   case TELESCOPE_COARSEDM:
481     pc->ops->apply                            = PCApply_Telescope_CoarseDM;
482     pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
483     sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
484     sred->pctelescope_matcreate_type          = NULL;
485     sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
486     sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
487     break;
488   default:
489     SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
490   }
491 
492   /* subcomm definition */
493   if (!pc->setupcalled) {
494     if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
495       if (!sred->psubcomm) {
496         PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
497         PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
498         PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
499         sred->subcomm = PetscSubcommChild(sred->psubcomm);
500       }
501     } else { /* query PC for DM, check communicators */
502       DM          dm, dm_coarse_partition          = NULL;
503       MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
504       PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
505       PetscBool   isvalidsubcomm = PETSC_TRUE;
506 
507       PetscCall(PCGetDM(pc, &dm));
508       comm_fine = PetscObjectComm((PetscObject)dm);
509       PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
510       if (dm_coarse_partition) cnt = 1;
511       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
512       PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");
513 
514       PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
515       if (dm_coarse_partition) {
516         comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
517         PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
518       }
519 
520       cs[0] = csize_fine;
521       cs[1] = csize_coarse_partition;
522       PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
523       PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");
524 
525       PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
526       PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
527       sred->subcomm = comm_coarse_partition;
528     }
529   }
530   subcomm = sred->subcomm;
531 
532   /* internal KSP */
533   if (!pc->setupcalled) {
534     const char *prefix;
535 
536     if (PCTelescope_isActiveRank(sred)) {
537       PetscCall(KSPCreate(subcomm, &sred->ksp));
538       PetscCall(KSPSetNestLevel(sred->ksp, pc->kspnestlevel));
539       PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
540       PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
541       PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
542       PetscCall(PCGetOptionsPrefix(pc, &prefix));
543       PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
544       PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
545     }
546   }
547 
548   /* setup */
549   if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
550   /* update */
551   if (!pc->setupcalled) {
552     if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
553     if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
554   } else {
555     if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
556   }
557 
558   /* common - no construction */
559   if (PCTelescope_isActiveRank(sred)) {
560     PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
561     if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
562   }
563   PetscFunctionReturn(PETSC_SUCCESS);
564 }
565 
/*
  Default apply: y = M^{-1} x via the redundant solve on the sub-communicator.
  Steps mirror the "default setup mode" comment at the top of the file:
  scatter x -> xtmp, copy xtmp -> xred locally, KSPSolve on active ranks,
  copy yred -> xtmp locally, reverse-scatter xtmp -> y.
  On inactive ranks xred/yred are NULL (see PCTelescopeSetUp_default), so the
  copy loops are skipped there while the collective scatters still participate.
*/
static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xred) {
    PetscScalar *LA_xred;
    /* xtmp's local size equals xred's local size by construction, so this is a direct copy */
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(VecGetArray(xred, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xred, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp, xred, yred));
    PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
  }
  /* return vector */
  PetscCall(VecGetArray(xtmp, &array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred, &st, &ed));
    PetscCall(VecGetArrayRead(yred, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yred, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  /* push the redundant solution back out to the parent-comm layout of y */
  PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
616 
/*
  Richardson application for the default setup: performs exactly one preconditioned
  iteration (its > 1 is rejected). When the initial guess is nonzero, the current y
  is scattered into the redundant vector yred so the inner KSP can use it as the
  starting guess; the KSP's initial-guess-nonzero flag is saved, temporarily forced
  to PETSC_TRUE, and restored after the apply.
*/
static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));

    /* copy vector entries into xred */
    PetscCall(VecGetArrayRead(xtmp, &x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred, &st, &ed));
      PetscCall(VecGetArray(yred, &LA_yred));
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      PetscCall(VecRestoreArray(yred, &LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  }

  /* save the inner KSP's flag so it can be restored after the apply */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}
665 
/*
  Reset: destroys the work objects built during setup (IS, scatter, vectors, reduced
  operator), resets the inner KSP (kept alive for reuse), then invokes the
  type-specific reset callback if one was installed.
*/
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(ISDestroy(&sred->isin));
  PetscCall(VecScatterDestroy(&sred->scatter));
  PetscCall(VecDestroy(&sred->xred));
  PetscCall(VecDestroy(&sred->yred));
  PetscCall(VecDestroy(&sred->xtmp));
  PetscCall(MatDestroy(&sred->Bred));
  PetscCall(KSPReset(sred->ksp));
  if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
681 
/*
  Destroy: resets first, then destroys the inner KSP and the PetscSubcomm, frees the
  type-specific DM context, detaches every composed query/set function, and finally
  frees the PC_Telescope context itself.
*/
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_Telescope(pc));
  PetscCall(KSPDestroy(&sred->ksp));
  PetscCall(PetscSubcommDestroy(&sred->psubcomm));
  PetscCall(PetscFree(sred->dm_ctx));
  /* composing NULL removes the functions registered at creation */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
706 
/*
  Options processing for PCTELESCOPE: subcomm type, reduction factor (validated
  against the parent comm size), and the ignore-DM / ignore-KSPComputeOperators /
  use-coarse-DM flags.
*/
static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  /* route through the setter so its state checks apply, but only when the option was given */
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
  PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
729 
/* PC implementation-specific APIs */
731 
732 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
733 {
734   PC_Telescope red = (PC_Telescope)pc->data;
735 
736   PetscFunctionBegin;
737   if (ksp) *ksp = red->ksp;
738   PetscFunctionReturn(PETSC_SUCCESS);
739 }
740 
741 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
742 {
743   PC_Telescope red = (PC_Telescope)pc->data;
744 
745   PetscFunctionBegin;
746   if (subcommtype) *subcommtype = red->subcommtype;
747   PetscFunctionReturn(PETSC_SUCCESS);
748 }
749 
750 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
751 {
752   PC_Telescope red = (PC_Telescope)pc->data;
753 
754   PetscFunctionBegin;
755   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
756   red->subcommtype = subcommtype;
757   PetscFunctionReturn(PETSC_SUCCESS);
758 }
759 
760 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
761 {
762   PC_Telescope red = (PC_Telescope)pc->data;
763 
764   PetscFunctionBegin;
765   if (fact) *fact = red->redfactor;
766   PetscFunctionReturn(PETSC_SUCCESS);
767 }
768 
769 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
770 {
771   PC_Telescope red = (PC_Telescope)pc->data;
772   PetscMPIInt  size;
773 
774   PetscFunctionBegin;
775   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
776   PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
777   PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
778   red->redfactor = fact;
779   PetscFunctionReturn(PETSC_SUCCESS);
780 }
781 
782 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
783 {
784   PC_Telescope red = (PC_Telescope)pc->data;
785 
786   PetscFunctionBegin;
787   if (v) *v = red->ignore_dm;
788   PetscFunctionReturn(PETSC_SUCCESS);
789 }
790 
791 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
792 {
793   PC_Telescope red = (PC_Telescope)pc->data;
794 
795   PetscFunctionBegin;
796   red->ignore_dm = v;
797   PetscFunctionReturn(PETSC_SUCCESS);
798 }
799 
800 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
801 {
802   PC_Telescope red = (PC_Telescope)pc->data;
803 
804   PetscFunctionBegin;
805   if (v) *v = red->use_coarse_dm;
806   PetscFunctionReturn(PETSC_SUCCESS);
807 }
808 
809 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
810 {
811   PC_Telescope red = (PC_Telescope)pc->data;
812 
813   PetscFunctionBegin;
814   red->use_coarse_dm = v;
815   PetscFunctionReturn(PETSC_SUCCESS);
816 }
817 
818 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
819 {
820   PC_Telescope red = (PC_Telescope)pc->data;
821 
822   PetscFunctionBegin;
823   if (v) *v = red->ignore_kspcomputeoperators;
824   PetscFunctionReturn(PETSC_SUCCESS);
825 }
826 
827 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
828 {
829   PC_Telescope red = (PC_Telescope)pc->data;
830 
831   PetscFunctionBegin;
832   red->ignore_kspcomputeoperators = v;
833   PetscFunctionReturn(PETSC_SUCCESS);
834 }
835 
836 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
837 {
838   PC_Telescope red = (PC_Telescope)pc->data;
839 
840   PetscFunctionBegin;
841   *dm = private_PCTelescopeGetSubDM(red);
842   PetscFunctionReturn(PETSC_SUCCESS);
843 }
844 
845 /*@
846   PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.
847 
848   Not Collective
849 
850   Input Parameter:
851 . pc - the preconditioner context
852 
853   Output Parameter:
854 . subksp - the `KSP` defined on the smaller set of processes
855 
856   Level: advanced
857 
858 .seealso: [](ch_ksp), `PC`, `KSP`, `PCTELESCOPE`
859 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
866 
867 /*@
868   PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI processes has been reduced by that was set by
869   `PCTelescopeSetReductionFactor()`
870 
871   Not Collective
872 
873   Input Parameter:
874 . pc - the preconditioner context
875 
876   Output Parameter:
877 . fact - the reduction factor
878 
879   Level: advanced
880 
881 .seealso: [](ch_ksp), `PC`, `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
882 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}
889 
890 /*@
  PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI processes will be reduced.
892 
893   Not Collective
894 
  Input Parameters:
+ pc   - the preconditioner context
- fact - the reduction factor
900 
901   Level: advanced
902 
903 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
904 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
{
  PetscFunctionBegin;
  /* PetscTryMethod() is a no-op when pc is not PCTELESCOPE, so setters are safe on any PC type */
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}
911 
912 /*@
913   PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used in constructing the `PC` on the
914   reduced number of MPI processes
915 
916   Not Collective
917 
918   Input Parameter:
919 . pc - the preconditioner context
920 
921   Output Parameter:
922 . v - the flag
923 
924   Level: advanced
925 
926 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
927 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
934 
935 /*@
936   PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC` when constructing the `PC` on the
937   reduced number of MPI processes
938 
939   Not Collective
940 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore any `DM`
946 
947   Level: advanced
948 
949 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
950 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod() is a no-op when pc is not PCTELESCOPE, so setters are safe on any PC type */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
957 
958 /*@
959   PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to `DM` associated with the `PC` will be used in constructing
960   the `PC` on the reduced number of MPI processes
961 
962   Not Collective
963 
964   Input Parameter:
965 . pc - the preconditioner context
966 
967   Output Parameter:
968 . v - the flag
969 
970   Level: advanced
971 
972 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
973 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
980 
981 /*@
982   PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM` and utilize that `DM`
983   in constructing the `PC` on the reduced number of MPI processes
984 
985   Not Collective
986 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_FALSE` to ignore any coarse `DM`
992 
993   Level: advanced
994 
995   Notes:
996   When you have specified to use a coarse `DM`, the communicator used to create the sub-`KSP` within `PCTELESCOPE`
997   will be that of the coarse `DM`. Hence the flags `-pc_telescope_reduction_factor` and
998   `-pc_telescope_subcomm_type` will no longer have any meaning.
999 
1000   It is required that the communicator associated with the parent (fine) and the coarse `DM` are of different sizes.
  An error will occur if the size of the communicator associated with the coarse `DM`
1002   is the same as that of the parent `DM`.
1003   Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
1004   This will be checked at the time the preconditioner is setup and an error will occur if
1005   the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.
1006 
1007   The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
1008   the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).
1009 
1010   Support is currently only provided for the case when you are using `KSPSetComputeOperators()`
1011 
1012   The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`)
1013   between the different decompositions defined by the fine and coarse `DM`s.
1014   In the user code, this is achieved via
1015 .vb
1016    {
1017      DM dm_fine;
1018      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1019    }
1020 .ve
1021   The signature of the user provided field scatter method is
1022 .vb
1023    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1024 .ve
1025   The user must provide support for both mode `SCATTER_FORWARD` and mode `SCATTER_REVERSE`.
1026   `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
1027 
1028   Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
1029   of state variables between the fine and coarse `DM`s.
1030   In the context of a finite element discretization, an example state variable might be
1031   values associated with quadrature points within each element.
1032   A user provided state scatter method is composed via
1033 .vb
1034    {
1035      DM dm_fine;
1036      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1037    }
1038 .ve
1039   The signature of the user provided state scatter method is
1040 .vb
1041    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1042 .ve
1043   `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
1044   The user is only required to support mode = `SCATTER_FORWARD`.
1045   No assumption is made about the data type of the state variables.
1046   These must be managed by the user and must be accessible from the `DM`.
1047 
1048   Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
1049   associated with the sub-`KSP` residing within `PCTELESCOPE`.
1050   In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
1051   `KSPSetComputeOperators()` should be "similar" in type or origin.
1052   Specifically the following rules are used to infer what context on the sub-`KSP` should be.
1053 
1054   First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
1055   Note that the special case of a `DMSHELL` context is queried.
1056 
1057 .vb
1058    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1059    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1060    DMShellGetContext(dm_fine,&dmfine_shellctx);
1061 
1062    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1063    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1064 .ve
1065 
1066   The following rules are then enforced\:
1067 
1068   1. If `dmfine_kspctx` = `NULL`, then we provide a `NULL` pointer as the context for the sub-`KSP`\:
1069   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`NULL`);
1070 
1071   2. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_appctx`,
1072 
1073   check that `dmcoarse_appctx` is also non-`NULL`. If this is true, then\:
1074   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_appctx`);
1075 
1076   3. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_shellctx`,
1077 
1078   check that `dmcoarse_shellctx` is also non-`NULL`. If this is true, then\:
1079   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_shellctx`);
1080 
1081   If neither of the above three tests passed, then `PCTELESCOPE` cannot safely determine what
1082   context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
1083   In this case, an additional mechanism is provided via a composed function which will return
1084   the actual context to be used. To use this feature you must compose the "getter" function
1085   with the coarse `DM`, e.g.
1086 .vb
1087    {
1088      DM dm_coarse;
1089      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1090    }
1091 .ve
1092   The signature of the user provided method is
1093 .vb
1094    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1095 .ve
1096 
1097 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
1098 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod() is a no-op when pc is not PCTELESCOPE, so setters are safe on any PC type */
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1105 
1106 /*@
1107   PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used to construct
1108   the matrix on the reduced number of MPI processes
1109 
1110   Not Collective
1111 
1112   Input Parameter:
1113 . pc - the preconditioner context
1114 
1115   Output Parameter:
1116 . v - the flag
1117 
1118   Level: advanced
1119 
1120 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
1121 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1128 
1129 /*@
1130   PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to have `PCTELESCOPE` ignore the function provided to `KSPComputeOperators()` in
  constructing the matrix on the reduced number of MPI processes
1132 
1133   Not Collective
1134 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore the function (if defined) set via `KSPSetComputeOperators()` on `pc`
1140 
1141   Level: advanced
1142 
1143 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1144 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod() is a no-op when pc is not PCTELESCOPE, so setters are safe on any PC type */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1151 
1152 /*@
1153   PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.
1154 
1155   Not Collective
1156 
1157   Input Parameter:
1158 . pc - the preconditioner context
1159 
1160   Output Parameter:
1161 . subdm - The re-partitioned `DM`
1162 
1163   Level: advanced
1164 
1165 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1166 @*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1173 
1174 /*@
1175   PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1176 
1177   Logically Collective
1178 
1179   Input Parameters:
1180 + pc          - the preconditioner context
1181 - subcommtype - the subcommunicator type (see `PetscSubcommType`)
1182 
1183   Level: advanced
1184 
1185 .seealso: [](ch_ksp), `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`, `PCTelescopeGetSubcommType()`
1186 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscFunctionBegin;
  /* PetscTryMethod() is a no-op when pc is not PCTELESCOPE, so setters are safe on any PC type */
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1193 
1194 /*@
1195   PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous) set with `PCTelescopeSetSubcommType()`
1196 
1197   Not Collective
1198 
1199   Input Parameter:
1200 . pc - the preconditioner context
1201 
1202   Output Parameter:
1203 . subcommtype - the subcommunicator type (see `PetscSubcommType`)
1204 
1205   Level: advanced
1206 
1207 .seealso: [](ch_ksp), `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`, `PCTelescopeSetSubcommType()`
1208 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscFunctionBegin;
  /* PetscUseMethod() errors if the implementation is not composed, i.e. if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1215 
1216 /*MC
1217    PCTELESCOPE - Runs a `KSP` solver on a sub-communicator {cite}`maysananruppknepleysmith2016`. MPI processes not in the sub-communicator are idle during the solve.
1218 
1219    Options Database Keys:
1220 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1221 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached `DM` should be ignored in constructing the new `PC`
1222 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI processes on the sub-communicator. see `PetscSubcomm` for more information.
1223 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
1224 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
1225 
1226    Level: advanced
1227 
1228    Notes:
1229    Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
1230    creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner `PC`.
1231    The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
1232    sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
1233    This means there will be MPI processes which will be idle during the application of this preconditioner.
1234    Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM` to construct `DM` dependent preconditioner, such as `PCMG`
1235 
1236    The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.
1237 
1238    There are three setup mechanisms for `PCTELESCOPE`. Features support by each type are described below.
1239    In the following, we will refer to the operators B and B', these are the Bmat provided to the `KSP` on the
1240    communicators c and c' respectively.
1241 
1242    [1] Default setup
1243    The sub-communicator c' is created via `PetscSubcommCreate()`.
1244    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1246    No support is provided for `KSPSetComputeOperators()`.
1247    Currently there is no support for the flag `-pc_use_amat`.
1248 
1249    [2] `DM` aware setup
1250    If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
1251    c' is created via `PetscSubcommCreate()`.
1252    Both the Bmat operator and the right-hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
1253    Currently only support for re-partitioning a `DMDA` is provided.
1254    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1256    Support is provided for `KSPSetComputeOperators()`. The user provided function and context is propagated to the sub `KSP`.
1257    This is fragile since the user must ensure that their user context is valid for use on c'.
1258    Currently there is no support for the flag `-pc_use_amat`.
1259 
1260    [3] Coarse `DM` setup
1261    If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
1262    `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
1263    The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
1264    `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1265    The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, say as would be
1266    available with using multi-grid on unstructured meshes.
1267    This setup will not use the command line options `-pc_telescope_reduction_factor` or `-pc_telescope_subcomm_type`.
1268    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support to define nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1270    There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
1271    The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1272    Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
1273    Currently there is no support for the flag `-pc_use_amat`.
1274    This setup can be invoked by the option `-pc_telescope_use_coarse_dm` or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
1275    Further information about the user-provided methods required by this setup type are described here `PCTelescopeSetUseCoarseDM()`.
1276 
1277    Developer Notes:
1278    During `PCSetup()`, the B operator is scattered onto c'.
1279    Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1280    Then, `KSPSolve()` is executed on the c' communicator.
1281 
1282    The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
1283    creation routine by default (this can be changed with `-pc_telescope_subcomm_type`). We run the sub `KSP` on only
1284    the ranks within the communicator which have a color equal to zero.
1285 
1286    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1287    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1288    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1289 
1290    The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
1291    support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
1292    and this new `DM` is attached the sub `KSP`. The design of telescope is such that it should be possible to extend support
1293    for re-partitioning other to `DM`'s (e.g. `DMPLEX`). The user can supply a flag to ignore attached DMs.
1294    Alternatively, user-provided re-partitioned `DM`s can be used via `-pc_telescope_use_coarse_dm`.
1295 
1296    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI processes common to c and c'.
1297 
1298    When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1299    into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
1300    locally (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`
1301 
1302    Limitations/improvements include the following.
1303    `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
1304    A unified mechanism to query for user contexts as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()`.
1305 
1306    The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
1307    and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
1308    `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1309    consistent manner.
1310 
1311    Mapping of vectors (default setup mode) is performed in the following way.
1312    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1313    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1314    We perform the scatter to the sub-communicator in the following way.
1315    [1] Given a vector x defined on communicator c
1316 
1317 .vb
1318    rank(c)  local values of x
1319    ------- ----------------------------------------
1320         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1321         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1322         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1323         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1324 .ve
1325 
1326    scatter into xtmp defined also on comm c, so that we have the following values
1327 
1328 .vb
1329    rank(c)  local values of xtmp
1330    ------- ----------------------------------------------------------------------------
1331         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1332         1   [ ]
1333         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1334         3   [ ]
1335 .ve
1336 
1337    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1338 
1339    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1340    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1341 
1342 .vb
1343    rank(c')  local values of xred
1344    -------- ----------------------------------------------------------------------------
1345          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1346          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1347 .ve
1348 
1349   Contributed by:
1350   Dave May
1351 
1352 .seealso: [](ch_ksp), `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1353 M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  struct _PC_Telescope *sred;

  PetscFunctionBegin;
  /* Allocate the private context and set conservative defaults: no subcomm built yet,
     interlaced rank selection, and a reduction factor of 1 (i.e. no reduction). */
  PetscCall(PetscNew(&sred));
  sred->psubcomm                   = NULL;
  sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm                    = MPI_COMM_NULL;
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm              = PETSC_FALSE;
  pc->data                         = (void *)sred;

  /* Wire up the PC virtual methods; transpose application is not supported (NULL). */
  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  /* Default setup pipeline; PCSetUp_Telescope may swap these for the DM-aware or
     coarse-DM variants depending on what is attached to the PC. */
  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  /* Register the type-specific implementations behind the public API entry points;
     these are removed again (composed with NULL) in PCDestroy_Telescope(). */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1397