1 #include <petsc/private/petscimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/pcimpl.h>
4 #include <petscksp.h> /*I "petscksp.h" I*/
5 #include <petscdm.h> /*I "petscdm.h" I*/
6 #include "../src/ksp/pc/impls/telescope/telescope.h"
7
8 static PetscBool cited = PETSC_FALSE;
9 static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
10 " title = {Extreme-Scale Multigrid Components within PETSc},\n"
11 " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
12 " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
13 " series = {PASC '16},\n"
14 " isbn = {978-1-4503-4126-4},\n"
15 " location = {Lausanne, Switzerland},\n"
16 " pages = {5:1--5:12},\n"
17 " articleno = {5},\n"
18 " numpages = {12},\n"
19 " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
20 " doi = {10.1145/2929908.2929913},\n"
21 " acmid = {2929913},\n"
22 " publisher = {ACM},\n"
23 " address = {New York, NY, USA},\n"
24 " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
25 " year = {2016}\n"
26 "}\n";
27
/*
  default setup mode

  [1a] scatter to (FORWARD)
       x(comm) -> xtmp(comm)
  [1b] local copy (to) ranks with color = 0
       xred(subcomm) <- xtmp

  [2] solve on sub KSP to obtain yred(subcomm)

  [3a] local copy (from) ranks with color = 0
       yred(subcomm) --> xtmp
  [3b] scatter from (REVERSE)
       xtmp(comm) -> y(comm)
*/
43
/*
  Collective on comm_f

  Notes:
  * Using comm_f = MPI_COMM_NULL will result in an error.
  * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are MPI_COMM_NULL, the subcomm is not valid.
  * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
static PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
52 {
53 PetscInt valid = 1;
54 MPI_Group group_f, group_c;
55 PetscMPIInt count, k, size_f = 0, size_c = 0, size_c_sum = 0;
56 PetscMPIInt *ranks_f, *ranks_c;
57
58 PetscFunctionBegin;
59 PetscCheck(comm_f != MPI_COMM_NULL, PETSC_COMM_SELF, PETSC_ERR_SUP, "comm_f cannot be MPI_COMM_NULL");
60
61 PetscCallMPI(MPI_Comm_group(comm_f, &group_f));
62 if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_group(comm_c, &group_c));
63
64 PetscCallMPI(MPI_Comm_size(comm_f, &size_f));
65 if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Comm_size(comm_c, &size_c));
66
67 /* check not all comm_c's are NULL */
68 size_c_sum = size_c;
69 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f));
70 if (size_c_sum == 0) valid = 0;
71
72 /* check we can map at least 1 rank in comm_c to comm_f */
73 PetscCall(PetscMalloc1(size_f, &ranks_f));
74 PetscCall(PetscMalloc1(size_c, &ranks_c));
75 for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
76 for (k = 0; k < size_c; k++) ranks_c[k] = k;
77
78 /*
79 MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
80 I do not want the code to terminate immediately if this occurs, rather I want to throw
81 the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
82 that comm_c is not a valid sub-communicator.
83 Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
84 */
85 count = 0;
86 if (comm_c != MPI_COMM_NULL) {
87 (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
88 for (k = 0; k < size_f; k++) {
89 if (ranks_f[k] == MPI_UNDEFINED) count++;
90 }
91 }
92 if (count == size_f) valid = 0;
93
94 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f));
95 if (valid == 1) *isvalid = PETSC_TRUE;
96 else *isvalid = PETSC_FALSE;
97
98 PetscCall(PetscFree(ranks_f));
99 PetscCall(PetscFree(ranks_c));
100 PetscCallMPI(MPI_Group_free(&group_f));
101 if (comm_c != MPI_COMM_NULL) PetscCallMPI(MPI_Group_free(&group_c));
102 PetscFunctionReturn(PETSC_SUCCESS);
103 }
104
static DM private_PCTelescopeGetSubDM(PC_Telescope sred)
106 {
107 DM subdm = NULL;
108
109 if (!PCTelescope_isActiveRank(sred)) {
110 subdm = NULL;
111 } else {
112 switch (sred->sr_type) {
113 case TELESCOPE_DEFAULT:
114 subdm = NULL;
115 break;
116 case TELESCOPE_DMDA:
117 subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
118 break;
119 case TELESCOPE_DMPLEX:
120 subdm = NULL;
121 break;
122 case TELESCOPE_COARSEDM:
123 if (sred->ksp) PetscCallAbort(PETSC_COMM_SELF, KSPGetDM(sred->ksp, &subdm));
124 break;
125 }
126 }
127 return subdm;
128 }
129
static PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
131 {
132 PetscInt m, M, bs, st, ed;
133 Vec x, xred, yred, xtmp;
134 Mat B;
135 MPI_Comm comm, subcomm;
136 VecScatter scatter;
137 IS isin;
138 VecType vectype;
139
140 PetscFunctionBegin;
141 PetscCall(PetscInfo(pc, "PCTelescope: setup (default)\n"));
142 comm = PetscSubcommParent(sred->psubcomm);
143 subcomm = PetscSubcommChild(sred->psubcomm);
144
145 PetscCall(PCGetOperators(pc, NULL, &B));
146 PetscCall(MatGetSize(B, &M, NULL));
147 PetscCall(MatGetBlockSize(B, &bs));
148 PetscCall(MatCreateVecs(B, &x, NULL));
149 PetscCall(MatGetVecType(B, &vectype)); /* Use the vectype of the matrix used to construct the preconditioner by default */
150
151 xred = NULL;
152 m = 0;
153 if (PCTelescope_isActiveRank(sred)) {
154 PetscCall(VecCreate(subcomm, &xred));
155 PetscCall(VecSetSizes(xred, PETSC_DECIDE, M));
156 PetscCall(VecSetBlockSize(xred, bs));
157 PetscCall(VecSetType(xred, vectype));
158 PetscCall(VecSetFromOptions(xred));
159 PetscCall(VecGetLocalSize(xred, &m));
160 }
161
162 yred = NULL;
163 if (PCTelescope_isActiveRank(sred)) PetscCall(VecDuplicate(xred, &yred));
164
165 PetscCall(VecCreate(comm, &xtmp));
166 PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
167 PetscCall(VecSetBlockSize(xtmp, bs));
168 PetscCall(VecSetType(xtmp, vectype));
169
170 if (PCTelescope_isActiveRank(sred)) {
171 PetscCall(VecGetOwnershipRange(xred, &st, &ed));
172 PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
173 } else {
174 PetscCall(VecGetOwnershipRange(x, &st, &ed));
175 PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
176 }
177 PetscCall(ISSetBlockSize(isin, bs));
178
179 PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
180
181 sred->isin = isin;
182 sred->scatter = scatter;
183 sred->xred = xred;
184 sred->yred = yred;
185 sred->xtmp = xtmp;
186 PetscCall(VecDestroy(&x));
187 PetscFunctionReturn(PETSC_SUCCESS);
188 }
189
static PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
191 {
192 MPI_Comm comm, subcomm;
193 Mat Bred, B;
194 PetscInt nr, nc, bs;
195 IS isrow, iscol;
196 Mat Blocal, *_Blocal;
197
198 PetscFunctionBegin;
199 PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n"));
200 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
201 subcomm = PetscSubcommChild(sred->psubcomm);
202 PetscCall(PCGetOperators(pc, NULL, &B));
203 PetscCall(MatGetSize(B, &nr, &nc));
204 isrow = sred->isin;
205 PetscCall(ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol));
206 PetscCall(ISSetIdentity(iscol));
207 PetscCall(MatGetBlockSizes(B, NULL, &bs));
208 PetscCall(ISSetBlockSize(iscol, bs));
209 PetscCall(MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
210 PetscCall(MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
211 Blocal = *_Blocal;
212 PetscCall(PetscFree(_Blocal));
213 Bred = NULL;
214 if (PCTelescope_isActiveRank(sred)) {
215 PetscInt mm;
216
217 if (reuse != MAT_INITIAL_MATRIX) Bred = *A;
218
219 PetscCall(MatGetSize(Blocal, &mm, NULL));
220 PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
221 PetscCall(MatPropagateSymmetryOptions(B, Bred));
222 }
223 *A = Bred;
224 PetscCall(ISDestroy(&iscol));
225 PetscCall(MatDestroy(&Blocal));
226 PetscFunctionReturn(PETSC_SUCCESS);
227 }
228
static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
230 {
231 PetscBool has_const;
232 const Vec *vecs;
233 Vec *sub_vecs = NULL;
234 PetscInt i, k, n = 0;
235 MPI_Comm subcomm;
236
237 PetscFunctionBegin;
238 subcomm = PetscSubcommChild(sred->psubcomm);
239 PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));
240
241 if (PCTelescope_isActiveRank(sred)) {
242 if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
243 }
244
245 /* copy entries */
246 for (k = 0; k < n; k++) {
247 const PetscScalar *x_array;
248 PetscScalar *LA_sub_vec;
249 PetscInt st, ed;
250
251 /* pull in vector x->xtmp */
252 PetscCall(VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
253 PetscCall(VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
254 if (sub_vecs) {
255 /* copy vector entries into xred */
256 PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
257 if (sub_vecs[k]) {
258 PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
259 PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
260 for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
261 PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
262 }
263 PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
264 }
265 }
266
267 if (PCTelescope_isActiveRank(sred)) {
268 /* create new (near) nullspace for redundant object */
269 PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
270 PetscCall(VecDestroyVecs(n, &sub_vecs));
271 PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
272 PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
273 }
274 PetscFunctionReturn(PETSC_SUCCESS);
275 }
276
static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
278 {
279 Mat B;
280
281 PetscFunctionBegin;
282 PetscCall(PCGetOperators(pc, NULL, &B));
283 /* Propagate the nullspace if it exists */
284 {
285 MatNullSpace nullspace, sub_nullspace;
286 PetscCall(MatGetNullSpace(B, &nullspace));
287 if (nullspace) {
288 PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (default)\n"));
289 PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace));
290 if (PCTelescope_isActiveRank(sred)) {
291 PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
292 PetscCall(MatNullSpaceDestroy(&sub_nullspace));
293 }
294 }
295 }
296 /* Propagate the near nullspace if it exists */
297 {
298 MatNullSpace nearnullspace, sub_nearnullspace;
299 PetscCall(MatGetNearNullSpace(B, &nearnullspace));
300 if (nearnullspace) {
301 PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n"));
302 PetscCall(PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
303 if (PCTelescope_isActiveRank(sred)) {
304 PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
305 PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
306 }
307 }
308 }
309 PetscFunctionReturn(PETSC_SUCCESS);
310 }
311
static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
313 {
314 PC_Telescope sred = (PC_Telescope)pc->data;
315 PetscBool isascii, isstring;
316 PetscViewer subviewer;
317
318 PetscFunctionBegin;
319 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
320 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
321 if (isascii) {
322 {
323 MPI_Comm comm, subcomm;
324 PetscMPIInt comm_size, subcomm_size;
325 DM dm = NULL, subdm = NULL;
326
327 PetscCall(PCGetDM(pc, &dm));
328 subdm = private_PCTelescopeGetSubDM(sred);
329
330 if (sred->psubcomm) {
331 comm = PetscSubcommParent(sred->psubcomm);
332 subcomm = PetscSubcommChild(sred->psubcomm);
333 PetscCallMPI(MPI_Comm_size(comm, &comm_size));
334 PetscCallMPI(MPI_Comm_size(subcomm, &subcomm_size));
335
336 PetscCall(PetscViewerASCIIPushTab(viewer));
337 PetscCall(PetscViewerASCIIPrintf(viewer, "PETSc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor));
338 PetscCall(PetscViewerASCIIPrintf(viewer, "PETSc subcomm: parent_size = %d , subcomm_size = %d\n", comm_size, subcomm_size));
339 switch (sred->subcommtype) {
340 case PETSC_SUBCOMM_INTERLACED:
341 PetscCall(PetscViewerASCIIPrintf(viewer, "PETSc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]));
342 break;
343 case PETSC_SUBCOMM_CONTIGUOUS:
344 PetscCall(PetscViewerASCIIPrintf(viewer, "PETSc subcomm type = %s\n", PetscSubcommTypes[sred->subcommtype]));
345 break;
346 default:
347 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
348 }
349 PetscCall(PetscViewerASCIIPopTab(viewer));
350 } else {
351 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
352 subcomm = sred->subcomm;
353 if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;
354
355 PetscCall(PetscViewerASCIIPushTab(viewer));
356 PetscCall(PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n"));
357 PetscCall(PetscViewerASCIIPopTab(viewer));
358 }
359
360 PetscCall(PetscViewerGetSubViewer(viewer, subcomm, &subviewer));
361 if (PCTelescope_isActiveRank(sred)) {
362 PetscCall(PetscViewerASCIIPushTab(subviewer));
363
364 if (dm && sred->ignore_dm) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring DM\n"));
365 if (sred->ignore_kspcomputeoperators) PetscCall(PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n"));
366 switch (sred->sr_type) {
367 case TELESCOPE_DEFAULT:
368 PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: default\n"));
369 break;
370 case TELESCOPE_DMDA:
371 PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n"));
372 PetscCall(DMView_DA_Short(subdm, subviewer));
373 break;
374 case TELESCOPE_DMPLEX:
375 PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n"));
376 break;
377 case TELESCOPE_COARSEDM:
378 PetscCall(PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n"));
379 break;
380 }
381
382 if (dm) {
383 PetscObject obj = (PetscObject)dm;
384 PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object:"));
385 PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
386 if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
387 if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
388 if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
389 PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
390 PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
391 } else {
392 PetscCall(PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n"));
393 }
394 if (subdm) {
395 PetscObject obj = (PetscObject)subdm;
396 PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object:"));
397 PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE));
398 if (obj->type_name) PetscCall(PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name));
399 if (obj->name) PetscCall(PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name));
400 if (obj->prefix) PetscCall(PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix));
401 PetscCall(PetscViewerASCIIPrintf(subviewer, "\n"));
402 PetscCall(PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE));
403 } else {
404 PetscCall(PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n"));
405 }
406
407 PetscCall(KSPView(sred->ksp, subviewer));
408 PetscCall(PetscViewerASCIIPopTab(subviewer));
409 }
410 PetscCall(PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer));
411 }
412 }
413 PetscFunctionReturn(PETSC_SUCCESS);
414 }
415
static PetscErrorCode PCSetUp_Telescope(PC pc)
417 {
418 PC_Telescope sred = (PC_Telescope)pc->data;
419 MPI_Comm comm, subcomm = 0;
420 PCTelescopeType sr_type;
421
422 PetscFunctionBegin;
423 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
424
425 /* Determine type of setup/update */
426 if (!pc->setupcalled) {
427 PetscBool has_dm, same;
428 DM dm;
429
430 sr_type = TELESCOPE_DEFAULT;
431 has_dm = PETSC_FALSE;
432 PetscCall(PCGetDM(pc, &dm));
433 if (dm) has_dm = PETSC_TRUE;
434 if (has_dm) {
435 /* check for dmda */
436 PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &same));
437 if (same) {
438 PetscCall(PetscInfo(pc, "PCTelescope: found DMDA\n"));
439 sr_type = TELESCOPE_DMDA;
440 }
441 /* check for dmplex */
442 PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same));
443 if (same) {
444 PetscCall(PetscInfo(pc, "PCTelescope: found DMPLEX\n"));
445 sr_type = TELESCOPE_DMPLEX;
446 }
447
448 if (sred->use_coarse_dm) {
449 PetscCall(PetscInfo(pc, "PCTelescope: using coarse DM\n"));
450 sr_type = TELESCOPE_COARSEDM;
451 }
452
453 if (sred->ignore_dm) {
454 PetscCall(PetscInfo(pc, "PCTelescope: ignoring DM\n"));
455 sr_type = TELESCOPE_DEFAULT;
456 }
457 }
458 sred->sr_type = sr_type;
459 } else {
460 sr_type = sred->sr_type;
461 }
462
463 /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
464 switch (sr_type) {
465 case TELESCOPE_DEFAULT:
466 sred->pctelescope_setup_type = PCTelescopeSetUp_default;
467 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_default;
468 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
469 sred->pctelescope_reset_type = NULL;
470 break;
471 case TELESCOPE_DMDA:
472 pc->ops->apply = PCApply_Telescope_dmda;
473 pc->ops->applyrichardson = PCApplyRichardson_Telescope_dmda;
474 sred->pctelescope_setup_type = PCTelescopeSetUp_dmda;
475 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_dmda;
476 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
477 sred->pctelescope_reset_type = PCReset_Telescope_dmda;
478 break;
479 case TELESCOPE_DMPLEX:
480 SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
481 case TELESCOPE_COARSEDM:
482 pc->ops->apply = PCApply_Telescope_CoarseDM;
483 pc->ops->applyrichardson = PCApplyRichardson_Telescope_CoarseDM;
484 sred->pctelescope_setup_type = PCTelescopeSetUp_CoarseDM;
485 sred->pctelescope_matcreate_type = NULL;
486 sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
487 sred->pctelescope_reset_type = PCReset_Telescope_CoarseDM;
488 break;
489 default:
490 SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
491 }
492
493 /* subcomm definition */
494 if (!pc->setupcalled) {
495 if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
496 if (!sred->psubcomm) {
497 PetscCall(PetscSubcommCreate(comm, &sred->psubcomm));
498 PetscCall(PetscSubcommSetNumber(sred->psubcomm, sred->redfactor));
499 PetscCall(PetscSubcommSetType(sred->psubcomm, sred->subcommtype));
500 sred->subcomm = PetscSubcommChild(sred->psubcomm);
501 }
502 } else { /* query PC for DM, check communicators */
503 DM dm, dm_coarse_partition = NULL;
504 MPI_Comm comm_fine, comm_coarse_partition = MPI_COMM_NULL;
505 PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
506 PetscBool isvalidsubcomm = PETSC_TRUE;
507
508 PetscCall(PCGetDM(pc, &dm));
509 comm_fine = PetscObjectComm((PetscObject)dm);
510 PetscCall(DMGetCoarseDM(dm, &dm_coarse_partition));
511 if (dm_coarse_partition) cnt = 1;
512 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine));
513 PetscCheck(cnt != 0, comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");
514
515 PetscCallMPI(MPI_Comm_size(comm_fine, &csize_fine));
516 if (dm_coarse_partition) {
517 comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
518 PetscCallMPI(MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition));
519 }
520
521 cs[0] = csize_fine;
522 cs[1] = csize_coarse_partition;
523 PetscCallMPI(MPIU_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine));
524 PetscCheck(csg[0] != csg[1], comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");
525
526 PetscCall(PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm));
527 PetscCheck(isvalidsubcomm, comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a sub-communicator of parentDM->comm");
528 sred->subcomm = comm_coarse_partition;
529 }
530 }
531 subcomm = sred->subcomm;
532
533 /* internal KSP */
534 if (!pc->setupcalled) {
535 const char *prefix;
536
537 if (PCTelescope_isActiveRank(sred)) {
538 PetscCall(KSPCreate(subcomm, &sred->ksp));
539 PetscCall(KSPSetNestLevel(sred->ksp, pc->kspnestlevel));
540 PetscCall(KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure));
541 PetscCall(PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1));
542 PetscCall(KSPSetType(sred->ksp, KSPPREONLY));
543 PetscCall(PCGetOptionsPrefix(pc, &prefix));
544 PetscCall(KSPSetOptionsPrefix(sred->ksp, prefix));
545 PetscCall(KSPAppendOptionsPrefix(sred->ksp, "telescope_"));
546 }
547 }
548
549 /* setup */
550 if (!pc->setupcalled && sred->pctelescope_setup_type) PetscCall(sred->pctelescope_setup_type(pc, sred));
551 /* update */
552 if (!pc->setupcalled) {
553 if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred));
554 if (sred->pctelescope_matnullspacecreate_type) PetscCall(sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred));
555 } else {
556 if (sred->pctelescope_matcreate_type) PetscCall(sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred));
557 }
558
559 /* common - no construction */
560 if (PCTelescope_isActiveRank(sred)) {
561 PetscCall(KSPSetOperators(sred->ksp, sred->Bred, sred->Bred));
562 if (pc->setfromoptionscalled && !pc->setupcalled) PetscCall(KSPSetFromOptions(sred->ksp));
563 }
564 PetscFunctionReturn(PETSC_SUCCESS);
565 }
566
static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
568 {
569 PC_Telescope sred = (PC_Telescope)pc->data;
570 Vec xtmp, xred, yred;
571 PetscInt i, st, ed;
572 VecScatter scatter;
573 PetscScalar *array;
574 const PetscScalar *x_array;
575
576 PetscFunctionBegin;
577 PetscCall(PetscCitationsRegister(citation, &cited));
578
579 xtmp = sred->xtmp;
580 scatter = sred->scatter;
581 xred = sred->xred;
582 yred = sred->yred;
583
584 /* pull in vector x->xtmp */
585 PetscCall(VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
586 PetscCall(VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD));
587
588 /* copy vector entries into xred */
589 PetscCall(VecGetArrayRead(xtmp, &x_array));
590 if (xred) {
591 PetscScalar *LA_xred;
592 PetscCall(VecGetOwnershipRange(xred, &st, &ed));
593 PetscCall(VecGetArray(xred, &LA_xred));
594 for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
595 PetscCall(VecRestoreArray(xred, &LA_xred));
596 }
597 PetscCall(VecRestoreArrayRead(xtmp, &x_array));
598 /* solve */
599 if (PCTelescope_isActiveRank(sred)) {
600 PetscCall(KSPSolve(sred->ksp, xred, yred));
601 PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
602 }
603 /* return vector */
604 PetscCall(VecGetArray(xtmp, &array));
605 if (yred) {
606 const PetscScalar *LA_yred;
607 PetscCall(VecGetOwnershipRange(yred, &st, &ed));
608 PetscCall(VecGetArrayRead(yred, &LA_yred));
609 for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
610 PetscCall(VecRestoreArrayRead(yred, &LA_yred));
611 }
612 PetscCall(VecRestoreArray(xtmp, &array));
613 PetscCall(VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
614 PetscCall(VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE));
615 PetscFunctionReturn(PETSC_SUCCESS);
616 }
617
static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
619 {
620 PC_Telescope sred = (PC_Telescope)pc->data;
621 Vec xtmp, yred;
622 PetscInt i, st, ed;
623 VecScatter scatter;
624 const PetscScalar *x_array;
625 PetscBool default_init_guess_value;
626
627 PetscFunctionBegin;
628 xtmp = sred->xtmp;
629 scatter = sred->scatter;
630 yred = sred->yred;
631
632 PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
633 *reason = (PCRichardsonConvergedReason)0;
634
635 if (!zeroguess) {
636 PetscCall(PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n"));
637 /* pull in vector y->xtmp */
638 PetscCall(VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
639 PetscCall(VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD));
640
641 /* copy vector entries into xred */
642 PetscCall(VecGetArrayRead(xtmp, &x_array));
643 if (yred) {
644 PetscScalar *LA_yred;
645 PetscCall(VecGetOwnershipRange(yred, &st, &ed));
646 PetscCall(VecGetArray(yred, &LA_yred));
647 for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
648 PetscCall(VecRestoreArray(yred, &LA_yred));
649 }
650 PetscCall(VecRestoreArrayRead(xtmp, &x_array));
651 }
652
653 if (PCTelescope_isActiveRank(sred)) {
654 PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
655 if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
656 }
657
658 PetscCall(PCApply_Telescope(pc, x, y));
659
660 if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));
661
662 if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
663 *outits = 1;
664 PetscFunctionReturn(PETSC_SUCCESS);
665 }
666
static PetscErrorCode PCReset_Telescope(PC pc)
668 {
669 PC_Telescope sred = (PC_Telescope)pc->data;
670
671 PetscFunctionBegin;
672 PetscCall(ISDestroy(&sred->isin));
673 PetscCall(VecScatterDestroy(&sred->scatter));
674 PetscCall(VecDestroy(&sred->xred));
675 PetscCall(VecDestroy(&sred->yred));
676 PetscCall(VecDestroy(&sred->xtmp));
677 PetscCall(MatDestroy(&sred->Bred));
678 PetscCall(KSPReset(sred->ksp));
679 if (sred->pctelescope_reset_type) PetscCall(sred->pctelescope_reset_type(pc));
680 PetscFunctionReturn(PETSC_SUCCESS);
681 }
682
static PetscErrorCode PCDestroy_Telescope(PC pc)
684 {
685 PC_Telescope sred = (PC_Telescope)pc->data;
686
687 PetscFunctionBegin;
688 PetscCall(PCReset_Telescope(pc));
689 PetscCall(KSPDestroy(&sred->ksp));
690 PetscCall(PetscSubcommDestroy(&sred->psubcomm));
691 PetscCall(PetscFree(sred->dm_ctx));
692 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL));
693 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL));
694 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL));
695 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL));
696 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL));
697 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL));
698 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL));
699 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL));
700 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL));
701 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL));
702 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL));
703 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL));
704 PetscCall(PetscFree(pc->data));
705 PetscFunctionReturn(PETSC_SUCCESS);
706 }
707
static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems PetscOptionsObject)
709 {
710 PC_Telescope sred = (PC_Telescope)pc->data;
711 MPI_Comm comm;
712 PetscMPIInt size;
713 PetscBool flg;
714 PetscSubcommType subcommtype;
715
716 PetscFunctionBegin;
717 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
718 PetscCallMPI(MPI_Comm_size(comm, &size));
719 PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
720 PetscCall(PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg));
721 if (flg) PetscCall(PCTelescopeSetSubcommType(pc, subcommtype));
722 PetscCall(PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL));
723 PetscCheck(sred->redfactor <= size, comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor <= comm size");
724 PetscCall(PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL));
725 PetscCall(PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL));
726 PetscCall(PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL));
727 PetscOptionsHeadEnd();
728 PetscFunctionReturn(PETSC_SUCCESS);
729 }
730
731 /* PC implementation specific API's */
732
static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
734 {
735 PC_Telescope red = (PC_Telescope)pc->data;
736
737 PetscFunctionBegin;
738 if (ksp) *ksp = red->ksp;
739 PetscFunctionReturn(PETSC_SUCCESS);
740 }
741
static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
743 {
744 PC_Telescope red = (PC_Telescope)pc->data;
745
746 PetscFunctionBegin;
747 if (subcommtype) *subcommtype = red->subcommtype;
748 PetscFunctionReturn(PETSC_SUCCESS);
749 }
750
static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
752 {
753 PC_Telescope red = (PC_Telescope)pc->data;
754
755 PetscFunctionBegin;
756 PetscCheck(!pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up.");
757 red->subcommtype = subcommtype;
758 PetscFunctionReturn(PETSC_SUCCESS);
759 }
760
static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
762 {
763 PC_Telescope red = (PC_Telescope)pc->data;
764
765 PetscFunctionBegin;
766 if (fact) *fact = red->redfactor;
767 PetscFunctionReturn(PETSC_SUCCESS);
768 }
769
static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
771 {
772 PC_Telescope red = (PC_Telescope)pc->data;
773 PetscMPIInt size;
774
775 PetscFunctionBegin;
776 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
777 PetscCheck(fact > 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
778 PetscCheck(fact <= size, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm.size", fact);
779 red->redfactor = fact;
780 PetscFunctionReturn(PETSC_SUCCESS);
781 }
782
static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
784 {
785 PC_Telescope red = (PC_Telescope)pc->data;
786
787 PetscFunctionBegin;
788 if (v) *v = red->ignore_dm;
789 PetscFunctionReturn(PETSC_SUCCESS);
790 }
791
static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
793 {
794 PC_Telescope red = (PC_Telescope)pc->data;
795
796 PetscFunctionBegin;
797 red->ignore_dm = v;
798 PetscFunctionReturn(PETSC_SUCCESS);
799 }
800
static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
802 {
803 PC_Telescope red = (PC_Telescope)pc->data;
804
805 PetscFunctionBegin;
806 if (v) *v = red->use_coarse_dm;
807 PetscFunctionReturn(PETSC_SUCCESS);
808 }
809
static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
811 {
812 PC_Telescope red = (PC_Telescope)pc->data;
813
814 PetscFunctionBegin;
815 red->use_coarse_dm = v;
816 PetscFunctionReturn(PETSC_SUCCESS);
817 }
818
static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
820 {
821 PC_Telescope red = (PC_Telescope)pc->data;
822
823 PetscFunctionBegin;
824 if (v) *v = red->ignore_kspcomputeoperators;
825 PetscFunctionReturn(PETSC_SUCCESS);
826 }
827
static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
829 {
830 PC_Telescope red = (PC_Telescope)pc->data;
831
832 PetscFunctionBegin;
833 red->ignore_kspcomputeoperators = v;
834 PetscFunctionReturn(PETSC_SUCCESS);
835 }
836
static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
838 {
839 PC_Telescope red = (PC_Telescope)pc->data;
840
841 PetscFunctionBegin;
842 *dm = private_PCTelescopeGetSubDM(red);
843 PetscFunctionReturn(PETSC_SUCCESS);
844 }
845
846 /*@
847 PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.
848
849 Not Collective
850
851 Input Parameter:
852 . pc - the preconditioner context
853
854 Output Parameter:
855 . subksp - the `KSP` defined on the smaller set of processes
856
857 Level: advanced
858
859 .seealso: [](ch_ksp), `PC`, `KSP`, `PCTELESCOPE`
860 @*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
862 {
863 PetscFunctionBegin;
864 PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
865 PetscFunctionReturn(PETSC_SUCCESS);
866 }
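
/*
  Editorial example (not part of the original source): a minimal sketch showing how an
  application might retrieve the sub-KSP after the preconditioner has been set up and
  change its type. The outer "ksp" object is assumed to already exist. Note that the
  sub-KSP is created only on the MPI processes belonging to the sub-communicator, so the
  returned pointer must be checked for NULL on the idle processes.

    PC  pc;
    KSP sub_ksp = NULL;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCTELESCOPE));
    PetscCall(PCSetUp(pc));
    PetscCall(PCTelescopeGetKSP(pc, &sub_ksp));
    if (sub_ksp) { // non-NULL only on the active ranks of the sub-communicator
      PetscCall(KSPSetType(sub_ksp, KSPGMRES));
    }
*/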
867
868 /*@
  PCTelescopeGetReductionFactor - Gets the factor, set with `PCTelescopeSetReductionFactor()`, by which the original number of
  MPI processes has been reduced
871
872 Not Collective
873
874 Input Parameter:
875 . pc - the preconditioner context
876
877 Output Parameter:
878 . fact - the reduction factor
879
880 Level: advanced
881
882 .seealso: [](ch_ksp), `PC`, `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
883 @*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
885 {
886 PetscFunctionBegin;
887 PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
888 PetscFunctionReturn(PETSC_SUCCESS);
889 }
890
891 /*@
  PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI processes will be reduced when
  constructing the sub-communicator to be used with `PCTELESCOPE`.

  Not Collective

  Input Parameters:
+ pc   - the preconditioner context
- fact - the reduction factor
902
903 Level: advanced
904
905 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
906 @*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
908 {
909 PetscFunctionBegin;
910 PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
911 PetscFunctionReturn(PETSC_SUCCESS);
912 }
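
/*
  Editorial example: the programmatic equivalent of
  "-pc_type telescope -pc_telescope_reduction_factor 4 -pc_telescope_subcomm_type contiguous".
  With 64 MPI processes a reduction factor of 4 produces a sub-communicator of 16 processes
  on which the inner KSP is run. The outer "ksp" object is assumed to exist; this is a sketch,
  not code taken from the PETSc test suite.

    PC pc;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCTELESCOPE));
    PetscCall(PCTelescopeSetReductionFactor(pc, 4));
    PetscCall(PCTelescopeSetSubcommType(pc, PETSC_SUBCOMM_CONTIGUOUS));
*/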
913
914 /*@
915 PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used in constructing the `PC` on the
916 reduced number of MPI processes
917
918 Not Collective
919
920 Input Parameter:
921 . pc - the preconditioner context
922
923 Output Parameter:
924 . v - the flag
925
926 Level: advanced
927
928 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
929 @*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
931 {
932 PetscFunctionBegin;
933 PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
934 PetscFunctionReturn(PETSC_SUCCESS);
935 }
936
937 /*@
938 PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC` when constructing the `PC` on the
939 reduced number of MPI processes
940
941 Not Collective
942
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore any `DM`
948
949 Level: advanced
950
951 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
952 @*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
954 {
955 PetscFunctionBegin;
956 PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
957 PetscFunctionReturn(PETSC_SUCCESS);
958 }
959
960 /*@
  PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to the `DM` associated with the `PC` will be used in constructing
  the `PC` on the reduced number of MPI processes
963
964 Not Collective
965
966 Input Parameter:
967 . pc - the preconditioner context
968
969 Output Parameter:
970 . v - the flag
971
972 Level: advanced
973
974 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
975 @*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
977 {
978 PetscFunctionBegin;
979 PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
980 PetscFunctionReturn(PETSC_SUCCESS);
981 }
982
983 /*@
984 PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM` and utilize that `DM`
985 in constructing the `PC` on the reduced number of MPI processes
986
987 Not Collective
988
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_FALSE` to ignore any coarse `DM`
994
995 Level: advanced
996
997 Notes:
998 When you have specified to use a coarse `DM`, the communicator used to create the sub-`KSP` within `PCTELESCOPE`
999 will be that of the coarse `DM`. Hence the flags `-pc_telescope_reduction_factor` and
1000 `-pc_telescope_subcomm_type` will not be used.
1001
  It is required that the communicators associated with the parent (fine) and the coarse `DM` are of different sizes.
  An error will occur if the size of the communicator associated with the coarse `DM` is the same as that of the parent `DM`.
1004 Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
1005 This will be checked at the time the preconditioner is setup and an error will occur if
1006 the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.
1007
1008 The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
1009 the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).
1010
1011 Support is currently only provided for the case when you are using `KSPSetComputeOperators()`
1012
1013 The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`)
1014 between the different decompositions defined by the fine and coarse `DM`s.
1015 In the user code, this is achieved via
1016 .vb
1017 {
1018 DM dm_fine;
1019 PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
1020 }
1021 .ve
1022 The signature of the user provided field scatter method is
1023 .vb
1024 PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
1025 .ve
1026 The user must provide support for both mode `SCATTER_FORWARD` and mode `SCATTER_REVERSE`.
1027 `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
1028
1029 Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
1030 of state variables between the fine and coarse `DM`s.
1031 In the context of a finite element discretization, an example state variable might be
1032 values associated with quadrature points within each element.
1033 A user provided state scatter method is composed via
1034 .vb
1035 {
1036 DM dm_fine;
1037 PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
1038 }
1039 .ve
1040 The signature of the user provided state scatter method is
1041 .vb
1042 PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
1043 .ve
1044 `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
1045 The user is only required to support mode = `SCATTER_FORWARD`.
1046 No assumption is made about the data type of the state variables.
1047 These must be managed by the user and must be accessible from the `DM`.
1048
1049 Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
1050 associated with the sub-`KSP` residing within `PCTELESCOPE`.
1051 In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
1052 `KSPSetComputeOperators()` should be "similar" in type or origin.
1053 Specifically the following rules are used to infer what context on the sub-`KSP` should be.
1054
1055 First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
1056 Note that the special case of a `DMSHELL` context is queried.
1057
1058 .vb
1059 DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
1060 DMGetApplicationContext(dm_fine,&dmfine_appctx);
1061 DMShellGetContext(dm_fine,&dmfine_shellctx);
1062
1063 DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
1064 DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
1065 .ve
1066
1067 The following rules are then enforced\:
1068
1069 1. If `dmfine_kspctx` = `NULL`, then we provide a `NULL` pointer as the context for the sub-`KSP`\:
1070 `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`NULL`);
1071
1072 2. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_appctx`,
1073
1074 check that `dmcoarse_appctx` is also non-`NULL`. If this is true, then\:
1075 `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_appctx`);
1076
1077 3. If `dmfine_kspctx` != `NULL` and `dmfine_kspctx` == `dmfine_shellctx`,
1078
1079 check that `dmcoarse_shellctx` is also non-`NULL`. If this is true, then\:
1080 `KSPSetComputeOperators`(`sub_ksp`,`dmfine_kspfunc`,`dmcoarse_shellctx`);
1081
1082 If neither of the above three tests passed, then `PCTELESCOPE` cannot safely determine what
1083 context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
1084 In this case, an additional mechanism is provided via a composed function which will return
1085 the actual context to be used. To use this feature you must compose the "getter" function
1086 with the coarse `DM`, e.g.
1087 .vb
1088 {
1089 DM dm_coarse;
1090 PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
1091 }
1092 .ve
1093 The signature of the user provided method is
1094 .vb
1095 PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
1096 .ve
1097
1098 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
1099 @*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
1101 {
1102 PetscFunctionBegin;
1103 PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
1104 PetscFunctionReturn(PETSC_SUCCESS);
1105 }
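
/*
  Editorial example: a skeleton of the user-side coupling described in the notes above.
  The callback name "MyFieldScatter" and the DM/Vec variables are illustrative; the string
  key "PCTelescopeFieldScatter" and the callback signature are those documented above. The
  callback body is entirely application specific and only hinted at here.

    static PetscErrorCode MyFieldScatter(DM dm_fine, Vec x_fine, ScatterMode mode, DM dm_coarse, Vec x_coarse)
    {
      PetscFunctionBegin;
      if (mode == SCATTER_FORWARD) {
        // transfer data from the fine decomposition (x_fine) into x_coarse
      } else if (mode == SCATTER_REVERSE) {
        // transfer data from the coarse decomposition (x_coarse) back into x_fine
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    // in the application setup code, compose the callback with the fine DM and enable the coarse DM path
    PetscCall(PetscObjectComposeFunction((PetscObject)dm_fine, "PCTelescopeFieldScatter", MyFieldScatter));
    PetscCall(PCTelescopeSetUseCoarseDM(pc, PETSC_TRUE));
*/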
1106
1107 /*@
1108 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used to construct
1109 the matrix on the reduced number of MPI processes
1110
1111 Not Collective
1112
1113 Input Parameter:
1114 . pc - the preconditioner context
1115
1116 Output Parameter:
1117 . v - the flag
1118
1119 Level: advanced
1120
1121 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
1122 @*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
1124 {
1125 PetscFunctionBegin;
1126 PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
1127 PetscFunctionReturn(PETSC_SUCCESS);
1128 }
1129
1130 /*@
1131 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to have `PCTELESCOPE` ignore the function provided to `KSPComputeOperators()` in
  constructing the matrix on the reduced number of MPI processes
1133
1134 Not Collective
1135
  Input Parameters:
+ pc - the preconditioner context
- v  - Use `PETSC_TRUE` to ignore the function (if defined) set via `KSPSetComputeOperators()` on `pc`
1141
1142 Level: advanced
1143
1144 .seealso: [](ch_ksp), `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1145 @*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
1147 {
1148 PetscFunctionBegin;
1149 PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
1150 PetscFunctionReturn(PETSC_SUCCESS);
1151 }
1152
1153 /*@
1154 PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.
1155
1156 Not Collective
1157
1158 Input Parameter:
1159 . pc - the preconditioner context
1160
1161 Output Parameter:
1162 . subdm - The re-partitioned `DM`
1163
1164 Level: advanced
1165
1166 .seealso: [](ch_ksp), `DM`, `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
1167 @*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
1169 {
1170 PetscFunctionBegin;
1171 PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
1172 PetscFunctionReturn(PETSC_SUCCESS);
1173 }
1174
1175 /*@
  PCTelescopeSetSubcommType - Sets the subcommunicator type `PetscSubcommType` (interlaced or contiguous) to be used when
  the subcommunicator is generated from the given `PC`
1178
1179 Logically Collective
1180
1181 Input Parameters:
1182 + pc - the preconditioner context
1183 - subcommtype - the subcommunicator type (see `PetscSubcommType`)
1184
1185 Level: advanced
1186
1187 .seealso: [](ch_ksp), `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`, `PCTelescopeGetSubcommType()`
1188 @*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
1190 {
1191 PetscFunctionBegin;
1192 PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
1193 PetscFunctionReturn(PETSC_SUCCESS);
1194 }
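
/*
  Editorial illustration of the two supported PetscSubcommType values for a parent
  communicator of size 4 with -pc_telescope_reduction_factor 2. The interlaced layout is
  taken from the developer notes in the PCTELESCOPE manual page below; the contiguous
  layout reflects the usual PetscSubcomm behaviour and should be read as indicative only.

    PETSC_SUBCOMM_INTERLACED : ranks 0 and 2 obtain color = 0 (active), ranks 1 and 3 are idle
    PETSC_SUBCOMM_CONTIGUOUS : ranks 0 and 1 obtain color = 0 (active), ranks 2 and 3 are idle
*/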
1195
1196 /*@
1197 PCTelescopeGetSubcommType - Get the subcommunicator type `PetscSubcommType` (interlaced or contiguous) set with `PCTelescopeSetSubcommType()`
1198
1199 Not Collective
1200
1201 Input Parameter:
1202 . pc - the preconditioner context
1203
1204 Output Parameter:
1205 . subcommtype - the subcommunicator type (see `PetscSubcommType`)
1206
1207 Level: advanced
1208
1209 .seealso: [](ch_ksp), `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`, `PCTelescopeSetSubcommType()`
1210 @*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
1212 {
1213 PetscFunctionBegin;
1214 PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
1215 PetscFunctionReturn(PETSC_SUCCESS);
1216 }
1217
1218 /*MC
1219 PCTELESCOPE - Runs a `KSP` solver on a sub-communicator {cite}`maysananruppknepleysmith2016` of the communicator used by the original `KSP`.
  MPI processes not in the sub-communicator are idle during the solve. Usually used to solve the smaller, coarser grid problems in multigrid
  (`PCMG`) that cannot be solved efficiently on the entire communicator.
1222
1223 Options Database Keys:
1224 + -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
1225 . -pc_telescope_ignore_dm - flag to indicate whether an attached `DM` should be ignored in constructing the new `PC`
1226 . -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI processes on the sub-communicator. see `PetscSubcomm` for more information.
1227 . -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
1228 - -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.
1229
1230 Level: advanced
1231
1232 Notes:
1233 Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
1234 creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner `PC`.
1235 The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
1236 sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
1237 This means there will be MPI processes which will be idle during the application of this preconditioner.
  Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM` to construct a `DM`-dependent preconditioner, such as `PCMG`.
1239
  The default `KSPType` of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.
1241
  There are three setup mechanisms for `PCTELESCOPE`. The features supported by each type are described below.
1243 In the following, we will refer to the operators B and B', these are the `Bmat` provided to the `KSP` on the
1244 communicators c and c' respectively.
1245
1246 [1] Default setup
1247 The sub-communicator c' is created via `PetscSubcommCreate()`.
1248 Any explicitly defined nullspace and near nullspace vectors attached to B with `MatSetNullSpace()` and `MatSetNearNullSpace()` are transferred to B'.
  Currently there is no support for nullspaces provided with `MatNullSpaceSetFunction()`.
1250 No support is provided for `KSPSetComputeOperators()`.
1251 Currently there is no support for the flag `-pc_use_amat`.
1252
1253 [2] `DM` aware setup
1254 The sub-communicator c' is created via `PetscSubcommCreate()`.
1255 If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
1256 Both the `Bmat` operator and the right-hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
1257 Currently only support for re-partitioning a `DMDA` is provided.
1258 Any explicitly defined nullspace or near nullspace vectors attached to the original B with `MatSetNullSpace()`
1259 and `MatSetNearNullSpace()` are extracted, re-partitioned and set on B'
1260 (currently there is no support for nullspaces provided with `MatNullSpaceSetFunction()`).
1261 Support is provided for `KSPSetComputeOperators()`. The user provided function and context is propagated to the sub `KSP`.
1262 This is fragile since the user must ensure that their user context is valid for use on c'.
1263 Currently there is no support for the flag `-pc_use_amat`.
1264
1265 [3] Coarse `DM` setup
1266 If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
1267 `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
1268 The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
1269 `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
  The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, such as would be
  available when using multigrid on unstructured meshes.
1272 This setup will not use the command line options `-pc_telescope_reduction_factor` or `-pc_telescope_subcomm_type`.
  Any explicitly defined nullspace or near nullspace vectors attached to B are extracted, scattered into the correct ordering consistent
1274 with dmcoarse and set on B'
1275 (currently there is no support for nullspaces provided with `MatNullSpaceSetFunction()`).
1276 There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
1277 The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
1278 Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with
1279 dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
1280 Currently there is no support for the flag `-pc_use_amat`.
1281 This setup can be invoked by the option `-pc_telescope_use_coarse_dm` or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`);
  Further information about the user-provided methods required by this setup type is given in `PCTelescopeSetUseCoarseDM()`.
1283
1284 Developer Notes:
1285 During `PCSetUp()`, the B operator is scattered onto c'.
1286 Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
1287 Then, `KSPSolve()` is executed on the c' communicator.
1288
1289 The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
1290 creation routine by default (this can be changed with `-pc_telescope_subcomm_type`). We run the sub `KSP` on only
1291 the ranks within the communicator which have a color equal to zero.
1292
1293 The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
1294 In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
1295 a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c')
1296
1297 The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
1298 support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
  and this new `DM` is attached to the sub `KSP`. The design of telescope is such that it should be possible to extend support
  for re-partitioning other `DM`s (e.g. `DMPLEX`). The user can supply a flag to ignore attached DMs.
1301 Alternatively, user-provided re-partitioned `DM`s can be used via `-pc_telescope_use_coarse_dm`.
1302
1303 With the default setup mode, B' is defined by fusing rows (in order) associated with MPI processes common to c and c'.
1304
1305 When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
1306 into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
  local (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`
1308
1309 Limitations/improvements include the following.
1310 `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
  A unified mechanism to query for the user contexts required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()` is also needed.
1312
1313 The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
1314 and performing P^T.A.P. Possibly it might be more efficient to use `MatPermute()`. We opted to use P^T.A.P as it appears
1315 `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
1316 consistent manner.
1317
1318 Mapping of vectors (default setup mode) is performed in the following way.
1319 Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
1320 Using the interlaced creation routine, the ranks in c with color = 0 will be rank 0 and 2.
1321 We perform the scatter to the sub-communicator in the following way.
1322 [1] Given a vector x defined on communicator c
1323
1324 .vb
1325 rank(c) local values of x
1326 ------- ----------------------------------------
1327 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0 ]
1328 1 [ 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ]
1329 2 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
1330 3 [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1331 .ve
1332
1333 scatter into xtmp defined also on comm c, so that we have the following values
1334
1335 .vb
1336 rank(c) local values of xtmp
1337 ------- ----------------------------------------------------------------------------
1338 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ]
1339 1 [ ]
1340 2 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1341 3 [ ]
1342 .ve
1343
  The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.
1345
1346 [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
1347 Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.
1348
1349 .vb
1350 rank(c') local values of xred
1351 -------- ----------------------------------------------------------------------------
1352 0 [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0 ]
1353 1 [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
1354 .ve
1355
1356 Contributed by:
1357 Dave May
1358
1359 .seealso: [](ch_ksp), `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
1360 M*/
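
/*
  Editorial example: a command line exercising the options listed above. The executable name
  and the inner solver choices are illustrative only. Options for the inner KSP/PC are
  prefixed with "telescope_" (the prefix appended in PCSetUp_Telescope() above), so the
  sub-solver on the reduced communicator is configured with -telescope_* options.

    mpiexec -n 64 ./app -ksp_type fgmres \
        -pc_type telescope \
        -pc_telescope_reduction_factor 4 \
        -pc_telescope_subcomm_type contiguous \
        -telescope_ksp_type preonly \
        -telescope_pc_type mg
*/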
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
1362 {
1363 struct _PC_Telescope *sred;
1364
1365 PetscFunctionBegin;
1366 PetscCall(PetscNew(&sred));
1367 sred->psubcomm = NULL;
1368 sred->subcommtype = PETSC_SUBCOMM_INTERLACED;
1369 sred->subcomm = MPI_COMM_NULL;
1370 sred->redfactor = 1;
1371 sred->ignore_dm = PETSC_FALSE;
1372 sred->ignore_kspcomputeoperators = PETSC_FALSE;
1373 sred->use_coarse_dm = PETSC_FALSE;
1374 pc->data = (void *)sred;
1375
1376 pc->ops->apply = PCApply_Telescope;
1377 pc->ops->applytranspose = NULL;
1378 pc->ops->applyrichardson = PCApplyRichardson_Telescope;
1379 pc->ops->setup = PCSetUp_Telescope;
1380 pc->ops->destroy = PCDestroy_Telescope;
1381 pc->ops->reset = PCReset_Telescope;
1382 pc->ops->setfromoptions = PCSetFromOptions_Telescope;
1383 pc->ops->view = PCView_Telescope;
1384
1385 sred->pctelescope_setup_type = PCTelescopeSetUp_default;
1386 sred->pctelescope_matcreate_type = PCTelescopeMatCreate_default;
1387 sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
1388 sred->pctelescope_reset_type = NULL;
1389
1390 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope));
1391 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope));
1392 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope));
1393 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope));
1394 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope));
1395 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope));
1396 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope));
1397 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope));
1398 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope));
1399 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope));
1400 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope));
1401 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope));
1402 PetscFunctionReturn(PETSC_SUCCESS);
1403 }
1404