xref: /petsc/src/ksp/pc/impls/telescope/telescope.c (revision a4e35b1925eceef64945ea472b84f2bf06a67b5e)
1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h>  /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7 
/* One-shot bibliography entry registered via PetscCitationsRegister() in PCApply_Telescope() */
static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
                               "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               "  series    = {PASC '16},\n"
                               "  isbn      = {978-1-4503-4126-4},\n"
                               "  location  = {Lausanne, Switzerland},\n"
                               "  pages     = {5:1--5:12},\n"
                               "  articleno = {5},\n"
                               "  numpages  = {12},\n"
                               "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               "  doi       = {10.1145/2929908.2929913},\n"
                               "  acmid     = {2929913},\n"
                               "  publisher = {ACM},\n"
                               "  address   = {New York, NY, USA},\n"
                               "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               "  year      = {2016}\n"
                               "}\n";
27 
28 /*
29  default setup mode
30 
31  [1a] scatter to (FORWARD)
32  x(comm) -> xtmp(comm)
33  [1b] local copy (to) ranks with color = 0
34  xred(subcomm) <- xtmp
35 
36  [2] solve on sub KSP to obtain yred(subcomm)
37 
38  [3a] local copy (from) ranks with color = 0
39  yred(subcomm) --> xtmp
 [3b] scatter from (REVERSE)
41  xtmp(comm) -> y(comm)
42 */
43 
44 /*
45   Collective[comm_f]
46   Notes
47    * Using comm_f = MPI_COMM_NULL will result in an error
48    * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL the subcomm is not valid.
49    * If any non NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
50 */
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
{
  PetscInt     valid = 1; /* local verdict; combined with MPI_MIN below so any single rank can veto */
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  PetscFunctionBegin;
  PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");

  /* group_c is only created — and later freed — on ranks where comm_c is non-NULL */
  PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));

  PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscCall(PetscMalloc1(size_f, &ranks_f));
  PetscCall(PetscMalloc1(size_c, &ranks_c));
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    /* NOTE(review): writes size_c entries into ranks_f (capacity size_f); assumes
       size_c <= size_f, which holds for any genuine sub-communicator of comm_f — confirm */
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  /* every entry still MPI_UNDEFINED => no rank of comm_c maps into comm_f */
  if (count == size_f) valid = 0;

  /* collective verdict: subcomm is valid only if valid on every rank of comm_f */
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscCall(PetscFree(ranks_f));
  PetscCall(PetscFree(ranks_c));
  PetscCallMPI(MPI_Group_free(&group_f));
  if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
  PetscFunctionReturn(PETSC_SUCCESS);
}
104 
105 DM private_PCTelescopeGetSubDM(PC_Telescope sred)
106 {
107   DM subdm = NULL;
108 
109   if (!PCTelescope_isActiveRank(sred)) {
110     subdm = NULL;
111   } else {
112     switch (sred->sr_type) {
113     case TELESCOPE_DEFAULT:
114       subdm = NULL;
115       break;
116     case TELESCOPE_DMDA:
117       subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
118       break;
119     case TELESCOPE_DMPLEX:
120       subdm = NULL;
121       break;
122     case TELESCOPE_COARSEDM:
123       if (sred->ksp) PetscCallAbort(PETSC_COMM_SELF, KSPGetDM(sred->ksp, &subdm));
124       break;
125     }
126   }
127   return subdm;
128 }
129 
/*
 Builds the machinery for the default setup mode described at the top of the file:
 the subcomm solution/rhs vectors (xred, yred), a comm-wide staging vector (xtmp)
 whose local sizes match xred on active ranks (and are 0 elsewhere), and the
 scatter that moves entries between the original vector layout and xtmp.
*/
PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
{
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &M, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetVecType(B, &vectype));

  /* xred: solution vector on the subcomm; m stays 0 on inactive ranks */
  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecCreate(subcomm, &xred));
    PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
    PetscCall(VecSetBlockSize(xred, bs));
    PetscCall(VecSetType(xred, vectype)); /* Use the preconditioner matrix's vectype by default */
    PetscCall(VecSetFromOptions(xred));
    PetscCall(VecGetLocalSize(xred, &m));
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));

  /* xtmp: comm-wide staging vector whose local size mirrors xred's (0 on inactive ranks) */
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));

  /* index set selecting the global entries this rank receives (empty on inactive ranks) */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, (ed - st), st, 1, &isin));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));

  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  /* stash everything on the PC context; x was only needed to define the scatter */
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
189 
/*
 Creates (reuse == MAT_INITIAL_MATRIX) or updates (MAT_REUSE_MATRIX) the redundant
 operator *A on the sub-communicator: extract this rank's rows of B into a sequential
 matrix, then concatenate the per-rank pieces across the subcomm. Inactive ranks get *A = NULL.
*/
PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
{
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatGetSize(B, &nr, &nc));
  /* rows: ranges computed in PCTelescopeSetUp_default(); columns: all of them */
  isrow = sred->isin;
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
  PetscCall(ISSetIdentity(iscol));
  PetscCall(MatGetBlockSizes(B, NULL, &bs));
  PetscCall(ISSetBlockSize(iscol, bs));
  PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
  PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
  /* keep the single extracted matrix; free only the array that held it */
  Blocal = *_Blocal;
  PetscCall(PetscFree(_Blocal));
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    PetscCall(MatGetSize(Blocal, &mm, NULL));
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
  }
  *A = Bred;
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&Blocal));
  PetscFunctionReturn(PETSC_SUCCESS);
}
227 
/*
 Maps the basis vectors of a (near) nullspace of the fine operator onto vectors on
 the sub-communicator (using the same scatter as PCApply_Telescope()) and builds the
 corresponding nullspace for the redundant operator. *sub_nullspace is only assigned
 on active ranks; callers must guard accordingly.
*/
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  PetscFunctionBegin;
  subcomm = PetscSubcommChild(sred->psubcomm);
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries; the scatters are collective, so all ranks must execute the loop */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
    if (sub_vecs) {
      /* copy vector entries into xred */
      PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
      if (sub_vecs[k]) {
        PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
        PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
      }
      PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
    PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
275 
276 static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
277 {
278   Mat B;
279 
280   PetscFunctionBegin;
281   PetscCall(PCGetOperators(pc, NULL, &B));
282   /* Propagate the nullspace if it exists */
283   {
284     MatNullSpace nullspace, sub_nullspace;
285     PetscCall(MatGetNullSpace(B, &nullspace));
286     if (nullspace) {
287       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
288       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
289       if (PCTelescope_isActiveRank(sred)) {
290         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
291         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
292       }
293     }
294   }
295   /* Propagate the near nullspace if it exists */
296   {
297     MatNullSpace nearnullspace, sub_nearnullspace;
298     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
299     if (nearnullspace) {
300       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
301       PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
302       if (PCTelescope_isActiveRank(sred)) {
303         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
304         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
305       }
306     }
307   }
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
/* Viewer for PCTelescope: prints the subcomm configuration on the parent viewer,
   then the setup type, DM info and inner KSP on a sub-viewer restricted to active ranks. */
static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii, isstring;
  PetscViewer  subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  /* only ASCII viewers are handled; isstring is computed but currently unused */
  if (iascii) {
    {
      MPI_Comm    comm, subcomm;
      PetscMPIInt comm_size, subcomm_size;
      DM          dm = NULL, subdm = NULL;

      PetscCall(PCGetDM(pc, &dm));
      subdm = private_PCTelescopeGetSubDM(sred);

      /* psubcomm is set when PCTelescope created the subcomm itself (DEFAULT/DMDA modes) */
      if (sred->psubcomm) {
        comm    = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        PetscCallMPI(MPI_Comm_size(comm, &comm_size));
        PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
        PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size));
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED:
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        case PETSC_SUBCOMM_CONTIGUOUS:
          /* NOTE(review): format differs from the INTERLACED branch (missing ':') — likely a typo; confirm before changing output */
          PetscCall(PetscViewerASCIIPrintf(viewer, "petsc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype]));
          break;
        default:
          SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
        }
        PetscCall(PetscViewerASCIIPopTab(viewer));
      } else {
        /* user-provided subcomm (coarse-DM mode); inactive ranks view on PETSC_COMM_SELF
           so the sub-viewer below is well defined everywhere */
        PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;

        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
        PetscCall(PetscViewerASCIIPopTab(viewer));
      }

      PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(PetscViewerASCIIPushTab(subviewer));

        if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
        if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n"));
          break;
        case TELESCOPE_DMDA:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
          PetscCall(DMView_DA_Short(subdm, subviewer));
          break;
        case TELESCOPE_DMPLEX:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n"));
          break;
        case TELESCOPE_COARSEDM:
          PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n"));
          break;
        }

        /* one-line summaries of the parent and sub DM (type/name/prefix), tabs suppressed
           so the fields print on a single line */
        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
          if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
          if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
          if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
          PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
          PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
        } else {
          PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
        }

        PetscCall(KSPView(sred->ksp, subviewer));
        PetscCall(PetscViewerASCIIPopTab(subviewer));
      }
      PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
414 
/*
 Main setup for PCTelescope. On first setup: chooses the repartitioning strategy
 (default / DMDA / DMPLEX / coarse DM) from any attached DM and user flags, installs
 the matching function pointers, builds or validates the sub-communicator, and creates
 the inner KSP on active ranks. On subsequent setups only the redundant operator is updated.
*/
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm, subcomm = 0;
  PCTelescopeType sr_type;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm, same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PetscCall(PCGetDM(pc, &dm));
    if (dm) has_dm = PETSC_TRUE;
    if (has_dm) {
      /* check for dmda */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
      if (same) {
        PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
        sr_type = TELESCOPE_DMPLEX;
      }

      /* user flags override the auto-detected DM type; ignore_dm wins over use_coarse_dm */
      if (sred->use_coarse_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      /* PCTelescope owns the subcomm: split comm by the reduction factor */
      if (!sred->psubcomm) {
        PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
        PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
        PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm, dm_coarse_partition          = NULL;
      MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
      PetscBool   isvalidsubcomm;

      PetscCall(PCGetDM(pc, &dm));
      comm_fine = PetscObjectComm((PetscObject)dm);
      PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
      /* at least one rank must carry a coarse DM */
      if (dm_coarse_partition) cnt = 1;
      PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
      PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");

      PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
      }

      /* the coarse comm must be strictly smaller than the fine comm */
      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      PetscCallMPI(MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
      PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
      PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      PetscCall(KSPCreate(subcomm, &sred->ksp));
      PetscCall(KSPSetNestLevel(sred->ksp, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
      PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
    if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
  } else {
    if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
    if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
565 
/*
 Applies the default-mode preconditioner following the scheme in the file-header comment:
 [1a] scatter x -> xtmp, [1b] copy xtmp -> xred, [2] solve on the subcomm KSP,
 [3a] copy yred -> xtmp, [3b] reverse-scatter xtmp -> y.
*/
static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred; xred is NULL on inactive ranks, whose xtmp is empty */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xred) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(VecGetArray(xred, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xred, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSolve(sred->ksp, xred, yred));
    PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
  }
  /* return vector: copy the subcomm solution back and reverse-scatter into y */
  PetscCall(VecGetArray(xtmp, &array));
  if (yred) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yred, &st, &ed));
    PetscCall(VecGetArrayRead(yred, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yred, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
616 
/*
 Single-iteration Richardson application. For a non-zero initial guess the current y is
 scattered into yred (the inner KSP's initial guess), the inner KSP is temporarily switched
 to non-zero initial guess mode, and PCApply_Telescope() performs the actual solve.
 Only its == 1 is supported.
*/
static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value;

  PetscFunctionBegin;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
    /* pull in vector y->xtmp */
    PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));

    /* copy vector entries into xred; yred is NULL on inactive ranks */
    PetscCall(VecGetArrayRead(xtmp, &x_array));
    if (yred) {
      PetscScalar *LA_yred;
      PetscCall(VecGetOwnershipRange(yred, &st, &ed));
      PetscCall(VecGetArray(yred, &LA_yred));
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      PetscCall(VecRestoreArray(yred, &LA_yred));
    }
    PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  }

  /* remember the inner KSP's initial-guess setting so it can be restored after the apply */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}
665 
666 static PetscErrorCode PCReset_Telescope(PC pc)
667 {
668   PC_Telescope sred = (PC_Telescope)pc->data;
669 
670   PetscFunctionBegin;
671   PetscCall(ISDestroy(&sred->isin));
672   PetscCall(VecScatterDestroy(&sred->scatter));
673   PetscCall(VecDestroy(&sred->xred));
674   PetscCall(VecDestroy(&sred->yred));
675   PetscCall(VecDestroy(&sred->xtmp));
676   PetscCall(MatDestroy(&sred->Bred));
677   PetscCall(KSPReset(sred->ksp));
678   if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
679   PetscFunctionReturn(PETSC_SUCCESS);
680 }
681 
682 static PetscErrorCode PCDestroy_Telescope(PC pc)
683 {
684   PC_Telescope sred = (PC_Telescope)pc->data;
685 
686   PetscFunctionBegin;
687   PetscCall(PCReset_Telescope(pc));
688   PetscCall(KSPDestroy(&sred->ksp));
689   PetscCall(PetscSubcommDestroy(&sred->psubcomm));
690   PetscCall(PetscFree(sred->dm_ctx));
691   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
692   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
693   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
694   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
695   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
696   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
697   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
698   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
699   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
700   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
701   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
702   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
703   PetscCall(PetscFree(pc->data));
704   PetscFunctionReturn(PETSC_SUCCESS);
705 }
706 
/* Processes the -pc_telescope_* command-line options and validates the reduction factor. */
static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  /* route through the setter so its state checks (e.g. not-yet-setup) apply */
  PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
  if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
  PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
  PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
  PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
729 
/* PC implementation-specific APIs */
731 
732 static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
733 {
734   PC_Telescope red = (PC_Telescope)pc->data;
735   PetscFunctionBegin;
736   if (ksp) *ksp = red->ksp;
737   PetscFunctionReturn(PETSC_SUCCESS);
738 }
739 
740 static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
741 {
742   PC_Telescope red = (PC_Telescope)pc->data;
743   PetscFunctionBegin;
744   if (subcommtype) *subcommtype = red->subcommtype;
745   PetscFunctionReturn(PETSC_SUCCESS);
746 }
747 
748 static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
749 {
750   PC_Telescope red = (PC_Telescope)pc->data;
751 
752   PetscFunctionBegin;
753   PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
754   red->subcommtype = subcommtype;
755   PetscFunctionReturn(PETSC_SUCCESS);
756 }
757 
758 static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
759 {
760   PC_Telescope red = (PC_Telescope)pc->data;
761   PetscFunctionBegin;
762   if (fact) *fact = red->redfactor;
763   PetscFunctionReturn(PETSC_SUCCESS);
764 }
765 
766 static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
767 {
768   PC_Telescope red = (PC_Telescope)pc->data;
769   PetscMPIInt  size;
770 
771   PetscFunctionBegin;
772   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
773   PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
774   PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
775   red->redfactor = fact;
776   PetscFunctionReturn(PETSC_SUCCESS);
777 }
778 
779 static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
780 {
781   PC_Telescope red = (PC_Telescope)pc->data;
782   PetscFunctionBegin;
783   if (v) *v = red->ignore_dm;
784   PetscFunctionReturn(PETSC_SUCCESS);
785 }
786 
787 static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
788 {
789   PC_Telescope red = (PC_Telescope)pc->data;
790   PetscFunctionBegin;
791   red->ignore_dm = v;
792   PetscFunctionReturn(PETSC_SUCCESS);
793 }
794 
795 static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
796 {
797   PC_Telescope red = (PC_Telescope)pc->data;
798   PetscFunctionBegin;
799   if (v) *v = red->use_coarse_dm;
800   PetscFunctionReturn(PETSC_SUCCESS);
801 }
802 
803 static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
804 {
805   PC_Telescope red = (PC_Telescope)pc->data;
806   PetscFunctionBegin;
807   red->use_coarse_dm = v;
808   PetscFunctionReturn(PETSC_SUCCESS);
809 }
810 
811 static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
812 {
813   PC_Telescope red = (PC_Telescope)pc->data;
814   PetscFunctionBegin;
815   if (v) *v = red->ignore_kspcomputeoperators;
816   PetscFunctionReturn(PETSC_SUCCESS);
817 }
818 
819 static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
820 {
821   PC_Telescope red = (PC_Telescope)pc->data;
822   PetscFunctionBegin;
823   red->ignore_kspcomputeoperators = v;
824   PetscFunctionReturn(PETSC_SUCCESS);
825 }
826 
827 static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
828 {
829   PC_Telescope red = (PC_Telescope)pc->data;
830   PetscFunctionBegin;
831   *dm = private_PCTelescopeGetSubDM(red);
832   PetscFunctionReturn(PETSC_SUCCESS);
833 }
834 
835 /*@
836   PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.
837 
838   Not Collective
839 
840   Input Parameter:
841 . pc - the preconditioner context
842 
843   Output Parameter:
844 . subksp - the `KSP` defined on the smaller set of processes
845 
846   Level: advanced
847 
848 .seealso: `PC`, `KSP`, `PCTELESCOPE`
849 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
{
  PetscFunctionBegin;
  /* Dispatch to the implementation composed on pc; PetscUseMethod errors if
     "PCTelescopeGetKSP_C" has not been composed (i.e. pc is not PCTELESCOPE) */
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
856 
857 /*@
  PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI processes has been reduced, as set by
  `PCTelescopeSetReductionFactor()`
860 
861   Not Collective
862 
863   Input Parameter:
864 . pc - the preconditioner context
865 
866   Output Parameter:
867 . fact - the reduction factor
868 
869   Level: advanced
870 
871 .seealso: `PC`, `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
872 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}
879 
880 /*@
  PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI processes will be reduced.
882 
883   Not Collective
884 
  Input Parameters:
+ pc   - the preconditioner context
- fact - the reduction factor
890 
891   Level: advanced
892 
893 .seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
894 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
{
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not PCTELESCOPE */
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  PetscFunctionReturn(PETSC_SUCCESS);
}
901 
902 /*@
903   PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used in constructing the `PC` on the
904   reduced number of MPI processes
905 
906   Not Collective
907 
908   Input Parameter:
909 . pc - the preconditioner context
910 
911   Output Parameter:
912 . v - the flag
913 
914   Level: advanced
915 
916 .seealso: `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
917 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
924 
925 /*@
926   PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC` when constructing the `PC` on the
927   reduced number of MPI processes
928 
929   Not Collective
930 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore any `DM`
936 
937   Level: advanced
938 
939 .seealso: `DM`, `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
940 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not PCTELESCOPE */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
947 
948 /*@
949   PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to `DM` associated with the `PC` will be used in constructing
950   the `PC` on the reduced number of MPI processes
951 
952   Not Collective
953 
954   Input Parameter:
955 . pc - the preconditioner context
956 
957   Output Parameter:
958 . v - the flag
959 
960   Level: advanced
961 
962 .seealso: `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
963 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
970 
971 /*@
972   PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM` and utilize that `DM`
973   in constructing the `PC` on the reduced number of MPI processes
974 
975   Not Collective
976 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_FALSE` to ignore any coarse `DM`
982 
983   Level: advanced
984 
985   Notes:
986   When you have specified to use a coarse `DM`, the communicator used to create the sub-`KSP` within `PCTELESCOPE`
987   will be that of the coarse `DM`. Hence the flags `-pc_telescope_reduction_factor` and
988   `-pc_telescope_subcomm_type` will no longer have any meaning.
989 
990   It is required that the communicator associated with the parent (fine) and the coarse `DM` are of different sizes.
  An error will occur if the size of the communicator associated with the coarse `DM`
992   is the same as that of the parent `DM`.
993   Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
994   This will be checked at the time the preconditioner is setup and an error will occur if
995   the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.
996 
997   The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
998   the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).
999 
1000   Support is currently only provided for the case when you are using `KSPSetComputeOperators()`
1001 
1002   The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`)
1003   between the different decompositions defined by the fine and coarse `DM`s.
1004   In the user code, this is achieved via
1005 .vb
1006    {
1007      DM dm_fine;
1008      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1009    }
1010 .ve
1011   The signature of the user provided field scatter method is
1012 .vb
1013    PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1014 .ve
1015   The user must provide support for both mode `SCATTER_FORWARD` and mode `SCATTER_REVERSE`.
1016   `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
1017 
1018   Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
1019   of state variables between the fine and coarse `DM`s.
1020   In the context of a finite element discretization, an example state variable might be
1021   values associated with quadrature points within each element.
1022   A user provided state scatter method is composed via
1023 .vb
1024    {
1025      DM dm_fine;
1026      PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1027    }
1028 .ve
1029   The signature of the user provided state scatter method is
1030 .vb
1031    PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1032 .ve
1033   `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
1034   The user is only required to support mode = `SCATTER_FORWARD`.
1035   No assumption is made about the data type of the state variables.
1036   These must be managed by the user and must be accessible from the `DM`.
1037 
1038   Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
1039   associated with the sub-`KSP` residing within `PCTELESCOPE`.
1040   In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
1041   `KSPSetComputeOperators()` should be "similar" in type or origin.
1042   Specifically the following rules are used to infer what context on the sub-`KSP` should be.
1043 
1044   First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
1045   Note that the special case of a `DMSHELL` context is queried.
1046 
1047 .vb
1048    DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1049    DMGetApplicationContext(dm_fine,&dmfine_appctx);
1050    DMShellGetContext(dm_fine,&dmfine_shellctx);
1051 
1052    DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1053    DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1054 .ve
1055 
1056   The following rules are then enforced\:
1057 
1058   1. If `dmfine_kspctx` = `NULL`, then we provide a `NULL` pointer as the context for the sub-`KSP`\:
1059   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`NULL`);
1060 
1061   2. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_appctx`,
1062 
1063   check that `dmcoarse_appctx` is also non-`NULL`. If this is true, then\:
1064   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_appctx`);
1065 
1066   3. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_shellctx`,
1067 
1068   check that `dmcoarse_shellctx` is also non-`NULL`. If this is true, then\:
1069   `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_shellctx`);
1070 
1071   If neither of the above three tests passed, then `PCTELESCOPE` cannot safely determine what
1072   context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
1073   In this case, an additional mechanism is provided via a composed function which will return
1074   the actual context to be used. To use this feature you must compose the "getter" function
1075   with the coarse `DM`, e.g.
1076 .vb
1077    {
1078      DM dm_coarse;
1079      PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1080    }
1081 .ve
1082   The signature of the user provided method is
1083 .vb
1084    PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1085 .ve
1086 
1087 .seealso: `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
1088 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not PCTELESCOPE */
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1095 
1096 /*@
1097   PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used to construct
1098   the matrix on the reduced number of MPI processes
1099 
1100   Not Collective
1101 
1102   Input Parameter:
1103 . pc - the preconditioner context
1104 
1105   Output Parameter:
1106 . v - the flag
1107 
1108   Level: advanced
1109 
1110 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
1111 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1118 
1119 /*@
1120   PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to have `PCTELESCOPE` ignore the function provided to `KSPComputeOperators()` in
  constructing the matrix on the reduced number of MPI processes
1122 
1123   Not Collective
1124 
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore the function (if defined) set via `KSPSetComputeOperators()` on `pc`
1130 
1131   Level: advanced
1132 
1133 .seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1134 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
{
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not PCTELESCOPE */
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1141 
1142 /*@
1143   PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.
1144 
1145   Not Collective
1146 
1147   Input Parameter:
1148 . pc - the preconditioner context
1149 
1150   Output Parameter:
1151 . subdm - The re-partitioned `DM`
1152 
1153   Level: advanced
1154 
1155 .seealso: `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1156 @*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1163 
1164 /*@
1165   PCTelescopeSetSubcommType - set subcommunicator type (interlaced or contiguous)
1166 
1167   Logically Collective
1168 
1169   Input Parameters:
1170 + pc          - the preconditioner context
1171 - subcommtype - the subcommunicator type (see `PetscSubcommType`)
1172 
1173   Level: advanced
1174 
1175 .seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`, `PCTelescopeGetSubcommType()`
1176 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscFunctionBegin;
  /* PetscTryMethod: silently a no-op when pc is not PCTELESCOPE */
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1183 
1184 /*@
1185   PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous) set with `PCTelescopeSetSubcommType()`
1186 
1187   Not Collective
1188 
1189   Input Parameter:
1190 . pc - the preconditioner context
1191 
1192   Output Parameter:
1193 . subcommtype - the subcommunicator type (see `PetscSubcommType`)
1194 
1195   Level: advanced
1196 
1197 .seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`, `PCTelescopeSetSubcommType()`
1198 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscFunctionBegin;
  /* Dispatch to the composed implementation; errors if pc is not PCTELESCOPE */
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1205 
1206 /*MC
1207    PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI processes not in the sub-communicator are idle during the solve.
1208 
1209    Options Database Keys:
1210 +  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1211 .  -pc_telescope_ignore_dm  - flag to indicate whether an attached `DM` should be ignored in constructing the new `PC`
1212 .  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI processes on the sub-communicator. see `PetscSubcomm` for more information.
1213 .  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
1214 -  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
1215 
1216    Level: advanced
1217 
1218    Notes:
1219    Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
1220    creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner `PC`.
1221    The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
1222    sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
1223    This means there will be MPI processes which will be idle during the application of this preconditioner.
1224    Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM` to construct `DM` dependent preconditioner, such as `PCMG`
1225 
1226    The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.
1227 
1228    There are three setup mechanisms for `PCTELESCOPE`. Features support by each type are described below.
1229    In the following, we will refer to the operators B and B', these are the Bmat provided to the `KSP` on the
1230    communicators c and c' respectively.
1231 
1232    [1] Default setup
1233    The sub-communicator c' is created via `PetscSubcommCreate()`.
1234    Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1236    No support is provided for `KSPSetComputeOperators()`.
1237    Currently there is no support for the flag `-pc_use_amat`.
1238 
1239    [2] `DM` aware setup
1240    If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
1241    c' is created via `PetscSubcommCreate()`.
1242    Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
1243    Currently only support for re-partitioning a `DMDA` is provided.
1244    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1246    Support is provided for `KSPSetComputeOperators()`. The user provided function and context is propagated to the sub `KSP`.
1247    This is fragile since the user must ensure that their user context is valid for use on c'.
1248    Currently there is no support for the flag `-pc_use_amat`.
1249 
1250    [3] Coarse `DM` setup
1251    If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
1252    `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
1253    The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
1254    `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
1255    The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, say as would be
1256    available with using multi-grid on unstructured meshes.
1257    This setup will not use the command line options `-pc_telescope_reduction_factor` or `-pc_telescope_subcomm_type`.
1258    Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
1260    There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
1261    The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1262    Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
1263    Currently there is no support for the flag `-pc_use_amat`.
1264    This setup can be invoked by the option `-pc_telescope_use_coarse_dm` or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
1265    Further information about the user-provided methods required by this setup type are described here `PCTelescopeSetUseCoarseDM()`.
1266 
1267    Developer Notes:
1268    During `PCSetup()`, the B operator is scattered onto c'.
1269    Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1270    Then, `KSPSolve()` is executed on the c' communicator.
1271 
1272    The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
1273    creation routine by default (this can be changed with `-pc_telescope_subcomm_type`). We run the sub `KSP` on only
1274    the ranks within the communicator which have a color equal to zero.
1275 
1276    The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1277    In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1278    a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1279 
1280    The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
1281    support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
1282    and this new `DM` is attached the sub `KSP`. The design of telescope is such that it should be possible to extend support
   for re-partitioning other `DM` types (e.g. `DMPLEX`). The user can supply a flag to ignore attached `DM`s.
1284    Alternatively, user-provided re-partitioned `DM`s can be used via `-pc_telescope_use_coarse_dm`.
1285 
1286    With the default setup mode, B' is defined by fusing rows (in order) associated with MPI processes common to c and c'.
1287 
1288    When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1289    into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
1290    locally (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`
1291 
1292    Limitations/improvements include the following.
1293    `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
1294    A unified mechanism to query for user contexts as required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()`.
1295 
1296    The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
1297    and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
1298    `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1299    consistent manner.
1300 
1301    Mapping of vectors (default setup mode) is performed in the following way.
1302    Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1303    Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1304    We perform the scatter to the sub-communicator in the following way.
1305    [1] Given a vector x defined on communicator c
1306 
1307 .vb
1308    rank(c)  local values of x
1309    ------- ----------------------------------------
1310         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
1311         1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1312         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1313         3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1314 .ve
1315 
1316    scatter into xtmp defined also on comm c, so that we have the following values
1317 
1318 .vb
1319    rank(c)  local values of xtmp
1320    ------- ----------------------------------------------------------------------------
1321         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1322         1   [ ]
1323         2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1324         3   [ ]
1325 .ve
1326 
1327    The entries on rank 1 and 3 (ranks which do not have a color = 0 in c') have no values
1328 
1329    [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1330    Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1331 
1332 .vb
1333    rank(c')  local values of xred
1334    -------- ----------------------------------------------------------------------------
1335          0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
1336          1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1337 .ve
1338 
1339   Contributed by Dave May
1340 
1341   Reference:
1342   Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913
1343 
1344 .seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1345 M*/
1346 PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1347 {
1348   struct _PC_Telescope *sred;
1349 
1350   PetscFunctionBegin;
1351   PetscCall(PetscNew(&sred));
1352   sred->psubcomm                   = NULL;
1353   sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
1354   sred->subcomm                    = MPI_COMM_NULL;
1355   sred->redfactor                  = 1;
1356   sred->ignore_dm                  = PETSC_FALSE;
1357   sred->ignore_kspcomputeoperators = PETSC_FALSE;
1358   sred->use_coarse_dm              = PETSC_FALSE;
1359   pc->data                         = (void *)sred;
1360 
1361   pc->ops->apply           = PCApply_Telescope;
1362   pc->ops->applytranspose  = NULL;
1363   pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1364   pc->ops->setup           = PCSetUp_Telescope;
1365   pc->ops->destroy         = PCDestroy_Telescope;
1366   pc->ops->reset           = PCReset_Telescope;
1367   pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
1368   pc->ops->view            = PCView_Telescope;
1369 
1370   sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
1371   sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
1372   sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1373   sred->pctelescope_reset_type              = NULL;
1374 
1375   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
1376   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
1377   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
1378   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
1379   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
1380   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
1381   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
1382   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1383   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1384   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
1385   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
1386   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
1387   PetscFunctionReturn(PETSC_SUCCESS);
1388 }
1389