/* xref: /petsc/src/mat/graphops/coarsen/impls/misk/misk.c (revision f13dfd9ea68e0ddeee984e65c377a1819eab8a8a) */
#include <petsc/private/matimpl.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscsf.h>

#define MIS_NOT_DONE       -2
#define MIS_DELETED        -1
#define MIS_REMOVED        -3
#define MIS_IS_SELECTED(s) (s >= 0)
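/* Vertex-state encoding (editor's note): a vertex starts as MIS_NOT_DONE, becomes MIS_DELETED
   when it or a ghost neighbor is selected, and any value >= 0 means selected (the value caches
   the selection order used to number the coarse grid). MIS_REMOVED is defined for completeness;
   removed singletons are actually tracked in the separate lid_removed[] array below. */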

/* edge struct for a priority queue (not referenced in this file) */
typedef struct edge_tag {
  PetscReal weight;
  PetscInt  lid0, gid1, cpid1;
} Edge;

static PetscErrorCode PetscCoarsenDataView_private(PetscCoarsenData *agg_lists, PetscViewer viewer)
{
  PetscCDIntNd *pos, *pos2;

  PetscFunctionBegin;
  for (PetscInt kk = 0; kk < agg_lists->size; kk++) {
    PetscCall(PetscCDGetHeadPos(agg_lists, kk, &pos));
    if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected local %d: ", (int)kk));
    while (pos) {
      PetscInt gid1;
      PetscCall(PetscCDIntNdGetID(pos, &gid1));
      PetscCall(PetscCDGetNextPos(agg_lists, kk, &pos));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %d ", (int)gid1));
    }
    if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatCoarsenApply_MISK_private - parallel distance-k maximal independent set (MIS-k) coarsening

  Input Parameters:
   . perm - permutation used to order the first MIS sweep
   . misk - the distance k, that is, the number of MIS iterations
   . Gmat - global matrix of the graph (numerical values are not used)

  Output Parameter:
   . a_locals_llist - array of lists of local nodes rooted at local selected node
*/
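/* Outline (editor's summary): each of the k rounds runs a synchronous parallel MIS on the
   current graph, collecting each selected vertex and its deleted neighbors into an aggregate.
   A binary prolongator P_i (one column per aggregate) is built from the aggregates, and the
   graph for the next round is the projection P_i^T G P_i, so successive rounds act on
   progressively coarser graphs, approximating an MIS on a distance-k neighborhood of the
   original graph. The composed product P_0 P_1 ... P_{k-1} is transposed at the end to obtain
   the restriction whose rows define the final aggregates. */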
static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, Mat Gmat, PetscCoarsenData **a_locals_llist)
{
  PetscBool   isMPI;
  MPI_Comm    comm;
  PetscMPIInt rank, size;
  Mat         cMat, Prols[5], Rtot;
  PetscScalar one = 1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(perm, IS_CLASSID, 1);
  PetscValidHeaderSpecific(Gmat, MAT_CLASSID, 3);
  PetscAssertPointer(a_locals_llist, 4);
  PetscCheck(misk < 5 && misk > 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "too many/few levels: %d", (int)misk);
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPI));
  PetscCall(PetscObjectGetComm((PetscObject)Gmat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscInfo(Gmat, "misk %d\n", (int)misk));
  /* make a copy of the graph, this gets destroyed in iterates */
  if (misk > 1) PetscCall(MatDuplicate(Gmat, MAT_COPY_VALUES, &cMat));
  else cMat = Gmat;
  for (PetscInt iterIdx = 0; iterIdx < misk; iterIdx++) {
    Mat_SeqAIJ       *matA, *matB = NULL;
    Mat_MPIAIJ       *mpimat = NULL;
    const PetscInt   *perm_ix;
    const PetscInt    nloc_inner = cMat->rmap->n;
    PetscCoarsenData *agg_lists;
    PetscInt         *cpcol_gid = NULL, *cpcol_state, *lid_cprowID, *lid_state, *lid_parent_gid = NULL;
    PetscInt          num_fine_ghosts, kk, n, ix, j, *idx, *ai, Iend, my0, nremoved, gid, cpid, lidj, sgid, t1, t2, slid, nDone, nselected = 0, state;
    PetscBool        *lid_removed, isOK;
    PetscLayout       layout;
    PetscSF           sf;

    if (isMPI) {
      mpimat = (Mat_MPIAIJ *)cMat->data;
      matA   = (Mat_SeqAIJ *)mpimat->A->data;
      matB   = (Mat_SeqAIJ *)mpimat->B->data;
      /* force compressed storage of B */
      PetscCall(MatCheckCompressedRow(mpimat->B, matB->nonzerorowcnt, &matB->compressedrow, matB->i, cMat->rmap->n, -1.0));
    } else {
      PetscBool isAIJ;
      PetscCall(PetscObjectBaseTypeCompare((PetscObject)cMat, MATSEQAIJ, &isAIJ));
      PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix.");
      matA = (Mat_SeqAIJ *)cMat->data;
    }
    PetscCall(MatGetOwnershipRange(cMat, &my0, &Iend));
    if (mpimat) {
      PetscInt *lid_gid;
      PetscCall(PetscMalloc1(nloc_inner, &lid_gid)); /* explicit array needed */
      for (kk = 0, gid = my0; kk < nloc_inner; kk++, gid++) lid_gid[kk] = gid;
      PetscCall(VecGetLocalSize(mpimat->lvec, &num_fine_ghosts));
      PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_gid));
      PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_state));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)cMat), &sf));
      PetscCall(MatGetLayouts(cMat, &layout, NULL));
      PetscCall(PetscSFSetGraphLayout(sf, layout, num_fine_ghosts, NULL, PETSC_COPY_VALUES, mpimat->garray));
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
      for (kk = 0; kk < num_fine_ghosts; kk++) cpcol_state[kk] = MIS_NOT_DONE;
      PetscCall(PetscFree(lid_gid));
    } else num_fine_ghosts = 0;
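    /* Ghost setup (editor's note): the star forest maps each ghost column of the off-diagonal
       block B to its owning rank, so PetscSFBcast pushes owner data (global ids above, MIS
       states below) into the local cpcol_* arrays; cpcol_state mirrors the owners' lid_state
       for every ghost vertex. */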

    PetscCall(PetscMalloc1(nloc_inner, &lid_cprowID));
    PetscCall(PetscMalloc1(nloc_inner, &lid_removed)); /* explicit array needed */
    PetscCall(PetscMalloc1(nloc_inner, &lid_parent_gid));
    PetscCall(PetscMalloc1(nloc_inner, &lid_state));

    /* the data structure */
    PetscCall(PetscCDCreate(nloc_inner, &agg_lists));
    /* need an inverse map - locals */
    for (kk = 0; kk < nloc_inner; kk++) {
      lid_cprowID[kk]    = -1;
      lid_removed[kk]    = PETSC_FALSE;
      lid_parent_gid[kk] = -1;
      lid_state[kk]      = MIS_NOT_DONE;
    }
    /* set index into compressed row 'lid_cprowID' */
    if (matB) {
      for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
        const PetscInt lid = matB->compressedrow.rindex[ix];
        if (lid >= 0) lid_cprowID[lid] = ix;
      }
    }
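    /* lid_cprowID (editor's note): the inverse of compressedrow.rindex, giving O(1) access from
       a local row to its compressed row in B; -1 means the row has no off-processor neighbors. */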
    /* MIS */
    nremoved = nDone = 0;
    if (!iterIdx) PetscCall(ISGetIndices(perm, &perm_ix)); // use permutation on first MIS
    else perm_ix = NULL;
    while (PETSC_TRUE) { /* asynchronous not implemented; loop exits via break below */
      /* check all vertices */
      for (kk = 0; kk < nloc_inner; kk++) {
        const PetscInt lid = perm_ix ? perm_ix[kk] : kk;
        state              = lid_state[lid];
        if (iterIdx == 0 && lid_removed[lid]) continue;
        if (state == MIS_NOT_DONE) {
          /* parallel test, delete if selected ghost */
          isOK = PETSC_TRUE;
          /* parallel test */
          if ((ix = lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
            ai  = matB->compressedrow.i;
            n   = ai[ix + 1] - ai[ix];
            idx = matB->j + ai[ix];
            for (j = 0; j < n; j++) {
              cpid = idx[j]; /* compressed row ID in B mat */
              gid  = cpcol_gid[cpid];
              if (cpcol_state[cpid] == MIS_NOT_DONE && gid >= Iend) { /* or pe>rank */
                isOK = PETSC_FALSE;                                   /* can not delete */
                break;
              }
            }
          }
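          /* Tie-break (editor's note): a vertex is decided only if it has no undecided ghost
             neighbor in a higher ownership range (gid >= Iend); those neighbors get priority and
             this rank waits for their broadcast states, so adjacent ranks never select
             conflicting vertices in the same sweep. */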
          if (isOK) { /* select or remove this vertex if it is a true singleton like a BC */
            nDone++;
            /* check for singleton */
            ai = matA->i;
            n  = ai[lid + 1] - ai[lid];
            if (n < 2) {
              /* if I have any ghost adj then not a singleton */
              ix = lid_cprowID[lid];
              if (ix == -1 || !(matB->compressedrow.i[ix + 1] - matB->compressedrow.i[ix])) {
                if (iterIdx == 0) {
                  lid_removed[lid] = PETSC_TRUE;
                  nremoved++; // let it get selected
                }
                // PetscCall(PetscCDAppendID(agg_lists, lid, lid + my0));
                // lid_state[lid] = nselected; // >= 0  is selected, cache for ordering coarse grid
                /* this vertex is technically in the MIS and could be selected, but we skip it */
                continue; /* one local adj (me) and no ghost - singleton */
              }
            }
            /* SELECTED state encoded with global index */
            lid_state[lid] = nselected; // >= 0  is selected, cache for ordering coarse grid
            nselected++;
            PetscCall(PetscCDAppendID(agg_lists, lid, lid + my0));
            /* delete local adj */
            idx = matA->j + ai[lid];
            for (j = 0; j < n; j++) {
              lidj = idx[j];
              if (lid_state[lidj] == MIS_NOT_DONE) {
                nDone++;
                PetscCall(PetscCDAppendID(agg_lists, lid, lidj + my0));
                lid_state[lidj] = MIS_DELETED; /* delete this */
              }
            }
          } /* selected */
        } /* not done vertex */
      } /* vertex loop */

      /* update ghost states and count todos */
      if (mpimat) {
        /* scatter states, check for done */
        PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
        ai = matB->compressedrow.i;
        for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
          const PetscInt lidj = matB->compressedrow.rindex[ix]; /* local boundary node */
          state               = lid_state[lidj];
          if (state == MIS_NOT_DONE) {
            /* look at ghosts */
            n   = ai[ix + 1] - ai[ix];
            idx = matB->j + ai[ix];
            for (j = 0; j < n; j++) {
              cpid = idx[j];                            /* compressed row ID in B mat */
              if (MIS_IS_SELECTED(cpcol_state[cpid])) { /* lid is now deleted by ghost */
                nDone++;
                lid_state[lidj]      = MIS_DELETED; /* delete this */
                sgid                 = cpcol_gid[cpid];
                lid_parent_gid[lidj] = sgid; /* keep track of proc that I belong to */
                break;
              }
            }
          }
        }
        /* all done? */
        t1 = nloc_inner - nDone;
        PetscCall(MPIU_Allreduce(&t1, &t2, 1, MPIU_INT, MPI_SUM, comm)); /* synchronous version */
        if (!t2) break;
      } else break; /* no mpi - all done */
    } /* outer parallel MIS loop */
    if (!iterIdx) PetscCall(ISRestoreIndices(perm, &perm_ix));
    PetscCall(PetscInfo(Gmat, "\t removed %" PetscInt_FMT " of %" PetscInt_FMT " vertices.  %" PetscInt_FMT " selected.\n", nremoved, nloc_inner, nselected));

    /* tell adj who my lid_parent_gid vertices belong to - fill in agg_lists selected ghost lists */
    if (matB) {
      PetscInt *cpcol_sel_gid, *icpcol_gid;
      /* need to copy this to free buffer -- should do this globally */
      PetscCall(PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid));
      PetscCall(PetscMalloc1(num_fine_ghosts, &icpcol_gid));
      for (cpid = 0; cpid < num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid];
      /* get proc of deleted ghost */
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
      for (cpid = 0; cpid < num_fine_ghosts; cpid++) {
        sgid = cpcol_sel_gid[cpid];
        gid  = icpcol_gid[cpid];
        if (sgid >= my0 && sgid < Iend) { /* I own this deleted */
          slid = sgid - my0;
          PetscCall(PetscCDAppendID(agg_lists, slid, gid));
        }
      }
      // done - cleanup
      PetscCall(PetscFree(icpcol_gid));
      PetscCall(PetscFree(cpcol_sel_gid));
      PetscCall(PetscSFDestroy(&sf));
      PetscCall(PetscFree(cpcol_gid));
      PetscCall(PetscFree(cpcol_state));
    }
    PetscCall(PetscFree(lid_cprowID));
    PetscCall(PetscFree(lid_removed));
    PetscCall(PetscFree(lid_parent_gid));
    PetscCall(PetscFree(lid_state));

    /* MIS done - make projection matrix - P */
    MatType jtype;
    PetscCall(MatGetType(Gmat, &jtype));
    PetscCall(MatCreate(comm, &Prols[iterIdx]));
    PetscCall(MatSetType(Prols[iterIdx], jtype));
    PetscCall(MatSetSizes(Prols[iterIdx], nloc_inner, nselected, PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSeqAIJSetPreallocation(Prols[iterIdx], 1, NULL));
    PetscCall(MatMPIAIJSetPreallocation(Prols[iterIdx], 1, NULL, 1, NULL));
    {
      PetscCDIntNd *pos, *pos2;
      PetscInt      colIndex, Iend, fgid;
      PetscCall(MatGetOwnershipRangeColumn(Prols[iterIdx], &colIndex, &Iend));
      // TODO - order with permutation in lid_selected (reversed)
      for (PetscInt lid = 0; lid < agg_lists->size; lid++) {
        PetscCall(PetscCDGetHeadPos(agg_lists, lid, &pos));
        pos2 = pos;
        while (pos) {
          PetscCall(PetscCDIntNdGetID(pos, &fgid));
          PetscCall(PetscCDGetNextPos(agg_lists, lid, &pos));
          PetscCall(MatSetValues(Prols[iterIdx], 1, &fgid, 1, &colIndex, &one, INSERT_VALUES));
        }
        if (pos2) colIndex++;
      }
      PetscCheck(Iend == colIndex, PETSC_COMM_SELF, PETSC_ERR_SUP, "Iend != colIndex: %d %d", (int)Iend, (int)colIndex);
    }
    PetscCall(MatAssemblyBegin(Prols[iterIdx], MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(Prols[iterIdx], MAT_FINAL_ASSEMBLY));
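    /* Prols[iterIdx] (editor's note): a binary tentative prolongator with exactly one unit entry
       per fine row, mapping each fine vertex to the aggregate (column) that owns it; aggregates
       are numbered consecutively within each rank's column ownership range, hence the check that
       colIndex lands exactly on Iend. */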
    /* project to make new graph for next MIS, skip if last */
    if (iterIdx < misk - 1) {
      Mat new_mat;
      PetscCall(MatPtAP(cMat, Prols[iterIdx], MAT_INITIAL_MATRIX, PETSC_DEFAULT, &new_mat));
      PetscCall(MatDestroy(&cMat));
      cMat = new_mat; // next iter
    } else if (cMat != Gmat) PetscCall(MatDestroy(&cMat));
    // cleanup
    PetscCall(PetscCDDestroy(agg_lists));
  } /* MIS-k iteration */
  /* make total prolongator Rtot = P_0 * P_1 * ... */
  Rtot = Prols[misk - 1]; // compose P then transpose to get R
  for (PetscInt iterIdx = misk - 1; iterIdx > 0; iterIdx--) {
    Mat P;
    PetscCall(MatMatMult(Prols[iterIdx - 1], Rtot, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &P));
    PetscCall(MatDestroy(&Prols[iterIdx - 1]));
    PetscCall(MatDestroy(&Rtot));
    Rtot = P;
  }
  PetscCall(MatTranspose(Rtot, MAT_INPLACE_MATRIX, &Rtot)); // R now
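  /* After the transpose (editor's note): row i of Rtot lists, by global id, the fine-grid
     vertices belonging to coarse aggregate i, which is exactly the list structure rebuilt
     below via MatGetRow. */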
  PetscCall(MatViewFromOptions(Rtot, NULL, "-misk_aggregation_view"));
  /* make aggregates with Rtot - could use Rtot directly in theory but have to go through the aggregate list data structure */
  {
    PetscInt          Istart, Iend, ncols, NN, MM, jj = 0, max_osz = 0;
    const PetscInt    nloc = Gmat->rmap->n;
    PetscCoarsenData *agg_lists;
    Mat               mat;
    PetscCall(PetscCDCreate(nloc, &agg_lists));
    *a_locals_llist = agg_lists; // return
    PetscCall(MatGetOwnershipRange(Rtot, &Istart, &Iend));
    for (PetscInt grow = Istart, lid = 0; grow < Iend; grow++, lid++) {
      const PetscInt *idx;
      PetscCall(MatGetRow(Rtot, grow, &ncols, &idx, NULL));
      for (PetscInt jc = 0; jc < ncols; jc++) {
        const PetscInt gcol = idx[jc];
        PetscCall(PetscCDAppendID(agg_lists, lid, gcol)); // local row, global column
      }
      PetscCall(MatRestoreRow(Rtot, grow, &ncols, &idx, NULL));
    }
    PetscCall(MatDestroy(&Rtot));

    /* make fake matrix, get largest nnz */
    for (PetscInt lid = 0; lid < nloc; lid++) {
      PetscCall(PetscCDCountAt(agg_lists, lid, &jj));
      if (jj > max_osz) max_osz = jj;
    }
    PetscCall(MatGetSize(Gmat, &MM, &NN));
    if (max_osz > MM - nloc) max_osz = MM - nloc;
    PetscCall(MatGetOwnershipRange(Gmat, &Istart, NULL));
    /* matrix of ghost adj for square graph */
    PetscCall(MatCreateAIJ(comm, nloc, nloc, PETSC_DETERMINE, PETSC_DETERMINE, 0, NULL, max_osz, NULL, &mat));
    for (PetscInt lid = 0, gidi = Istart; lid < nloc; lid++, gidi++) {
      PetscCDIntNd *pos;
      PetscCall(PetscCDGetHeadPos(agg_lists, lid, &pos));
      while (pos) {
        PetscInt gidj;
        PetscCall(PetscCDIntNdGetID(pos, &gidj));
        PetscCall(PetscCDGetNextPos(agg_lists, lid, &pos));
        if (gidj < Istart || gidj >= Istart + nloc) PetscCall(MatSetValues(mat, 1, &gidi, 1, &gidj, &one, ADD_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
    PetscCall(PetscCDSetMat(agg_lists, mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Distance-k MIS. k is in 'subctx'
*/
static PetscErrorCode MatCoarsenApply_MISK(MatCoarsen coarse)
{
  Mat      mat = coarse->graph;
  PetscInt k;

  PetscFunctionBegin;
  PetscCall(MatCoarsenMISKGetDistance(coarse, &k));
  PetscCheck(k > 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "too few levels: %d", (int)k);
  if (!coarse->perm) {
    IS       perm;
    PetscInt n, m;

    PetscCall(MatGetLocalSize(mat, &m, &n));
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), m, 0, 1, &perm));
    PetscCall(MatCoarsenApply_MISK_private(perm, (PetscInt)k, mat, &coarse->agg_lists));
    PetscCall(ISDestroy(&perm));
  } else {
    PetscCall(MatCoarsenApply_MISK_private(coarse->perm, (PetscInt)k, mat, &coarse->agg_lists));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCoarsenView_MISK(MatCoarsen coarse, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] MISK aggregator\n", rank));
    if (!rank) PetscCall(PetscCoarsenDataView_private(coarse->agg_lists, viewer));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCoarsenSetFromOptions_MISK(MatCoarsen coarse, PetscOptionItems *PetscOptionsObject)
{
  PetscInt  k = 1;
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "MatCoarsen-MISk options");
  PetscCall(PetscOptionsInt("-mat_coarsen_misk_distance", "k distance for MIS", "", k, &k, &flg));
  if (flg) coarse->subctx = (void *)(size_t)k;
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATCOARSENMISK - A simple greedy coarsener that aggregates using a distance-k maximal independent set (MIS-k)

   Level: beginner

   Options Database Key:
.   -mat_coarsen_misk_distance <k> - distance for the MIS

.seealso: `MatCoarsen`, `MatCoarsenMISKSetDistance()`, `MatCoarsenApply()`, `MatCoarsenSetType()`, `MatCoarsenType`, `MatCoarsenCreate()`
M*/
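/* Usage sketch (editor's example, not part of the library): create a coarsener, select this
   type, set the distance, and apply it to a symmetric adjacency graph 'G' (a Mat):

     MatCoarsen crs;
     PetscCall(MatCoarsenCreate(PETSC_COMM_WORLD, &crs));
     PetscCall(MatCoarsenSetType(crs, MATCOARSENMISK));
     PetscCall(MatCoarsenMISKSetDistance(crs, 2));
     PetscCall(MatCoarsenSetAdjacency(crs, G));
     PetscCall(MatCoarsenApply(crs));
     PetscCall(MatCoarsenDestroy(&crs));
*/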

PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MISK(MatCoarsen coarse)
{
  PetscFunctionBegin;
  coarse->ops->apply          = MatCoarsenApply_MISK;
  coarse->ops->view           = MatCoarsenView_MISK;
  coarse->subctx              = (void *)(size_t)1;
  coarse->ops->setfromoptions = MatCoarsenSetFromOptions_MISK;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatCoarsenMISKSetDistance - set the distance k used by the MIS-k coarsener

  Collective

  Input Parameters:
+ crs - the coarsen
- k   - the distance

  Options Database Key:
. -mat_coarsen_misk_distance <k> - distance for the MIS

  Level: advanced

.seealso: `MATCOARSENMISK`, `MatCoarsen`, `MatCoarsenSetFromOptions()`, `MatCoarsenSetType()`, `MatCoarsenRegister()`, `MatCoarsenCreate()`,
          `MatCoarsenDestroy()`, `MatCoarsenSetAdjacency()`, `MatCoarsenMISKGetDistance()`,
          `MatCoarsenGetData()`
@*/
PetscErrorCode MatCoarsenMISKSetDistance(MatCoarsen crs, PetscInt k)
{
  PetscFunctionBegin;
  crs->subctx = (void *)(size_t)k;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatCoarsenMISKGetDistance - get the distance k used by the MIS-k coarsener

  Not Collective

  Input Parameter:
. crs - the coarsen

  Output Parameter:
. k - the distance

  Level: advanced

.seealso: `MATCOARSENMISK`, `MatCoarsen`, `MatCoarsenSetFromOptions()`, `MatCoarsenSetType()`,
          `MatCoarsenRegister()`, `MatCoarsenCreate()`, `MatCoarsenDestroy()`,
          `MatCoarsenSetAdjacency()`, `MatCoarsenGetData()`
@*/
PetscErrorCode MatCoarsenMISKGetDistance(MatCoarsen crs, PetscInt *k)
{
  PetscFunctionBegin;
  *k = (PetscInt)(size_t)crs->subctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
477