xref: /petsc/src/ksp/pc/impls/gamg/agg.c (revision ac84dfd5778759083efa0c46d3820bac8a11500e)
1 /*
2  GAMG geometric-algebric multigrid PC - Mark Adams 2011
3  */
4 
5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6 #include <petscblaslapack.h>
7 #include <petscdm.h>
8 #include <petsc/private/kspimpl.h>
9 
10 typedef struct {
11   PetscInt   nsmooths;                     // number of smoothing steps to construct prolongation
12   PetscInt   aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk)
13   PetscInt   aggressive_mis_k;             // the k in MIS-k
14   PetscBool  use_aggressive_square_graph;
15   PetscBool  use_minimum_degree_ordering;
16   PetscBool  use_low_mem_filter;
17   PetscBool  graph_symmetrize;
18   MatCoarsen crs;
19 } PC_GAMG_AGG;
20 
21 /*@
22   PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical) used to construct the prolongation operator
23 
24   Logically Collective
25 
26   Input Parameters:
27 + pc - the preconditioner context
28 - n  - the number of smooths
29 
30   Options Database Key:
31 . -pc_gamg_agg_nsmooths <nsmooth, default=1> - the flag
32 
33   Level: intermediate
34 
35   Note:
36   This is a different concept from the number smoothing steps used during the linear solution process which
37   can be set with `-mg_levels_ksp_max_it`
38 
39   Developer Note:
40   This should be named `PCGAMGAGGSetNSmooths()`.
41 
42 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCMG`, `PCGAMG`
43 @*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGSetNSmooths_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}
52 
53 static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n)
54 {
55   PC_MG       *mg          = (PC_MG *)pc->data;
56   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
57   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
58 
59   PetscFunctionBegin;
60   pc_gamg_agg->nsmooths = n;
61   PetscFunctionReturn(PETSC_SUCCESS);
62 }
63 
64 /*@
65   PCGAMGSetAggressiveLevels -  Use aggressive coarsening on first n levels
66 
67   Logically Collective
68 
69   Input Parameters:
70 + pc - the preconditioner context
71 - n  - 0, 1 or more
72 
73   Options Database Key:
74 . -pc_gamg_aggressive_coarsening <n,default = 1> - the flag
75 
76   Level: intermediate
77 
78   Note:
79   By default, aggressive coarsening squares the matrix (computes $ A^T A$) before coarsening. Calling `PCGAMGSetAggressiveSquareGraph()` with a value of `PETSC_FALSE` changes the aggressive coarsening strategy to use MIS-k, see `PCGAMGMISkSetAggressive()`.
80 
81 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()`
82 @*/
PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGSetAggressiveLevels_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}
91 
92 /*@
93   PCGAMGMISkSetAggressive - Number (k) distance in MIS coarsening (>2 is 'aggressive')
94 
95   Logically Collective
96 
97   Input Parameters:
98 + pc - the preconditioner context
99 - n  - 1 or more (default = 2)
100 
101   Options Database Key:
102 . -pc_gamg_aggressive_mis_k <n,default=2> - the flag
103 
104   Level: intermediate
105 
106 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()`
107 @*/
PetscErrorCode PCGAMGMISkSetAggressive(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGMISkSetAggressive_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}
116 
117 /*@
118   PCGAMGSetAggressiveSquareGraph - Use graph square, $A^T A$, for aggressive coarsening. Coarsening is slower than the alternative (MIS-2), which is faster and uses less memory
119 
120   Logically Collective
121 
122   Input Parameters:
123 + pc - the preconditioner context
124 - b  - default true
125 
126   Options Database Key:
127 . -pc_gamg_aggressive_square_graph <bool,default=true> - the flag
128 
129   Level: intermediate
130 
131   Notes:
132   If `b` is `PETSC_FALSE` then MIS-k is used for aggressive coarsening, see `PCGAMGMISkSetAggressive()`
133 
134   Squaring the matrix to perform the aggressive coarsening is slower and requires more memory than using MIS-k, but may result in a better preconditioner
135   that converges faster.
136 
137 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGMISkSetMinDegreeOrdering()`, `PCGAMGSetLowMemoryFilter()`
138 @*/
PetscErrorCode PCGAMGSetAggressiveSquareGraph(PC pc, PetscBool b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, b, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGSetAggressiveSquareGraph_C", (PC, PetscBool), (pc, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
147 
148 /*@
149   PCGAMGMISkSetMinDegreeOrdering - Use minimum degree ordering in greedy MIS algorithm
150 
151   Logically Collective
152 
153   Input Parameters:
154 + pc - the preconditioner context
155 - b  - default false
156 
157   Options Database Key:
158 . -pc_gamg_mis_k_minimum_degree_ordering <bool,default=false> - the flag
159 
160   Level: intermediate
161 
162 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGSetLowMemoryFilter()`
163 @*/
PetscErrorCode PCGAMGMISkSetMinDegreeOrdering(PC pc, PetscBool b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, b, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGMISkSetMinDegreeOrdering_C", (PC, PetscBool), (pc, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
172 
173 /*@
174   PCGAMGSetLowMemoryFilter - Use low memory graph/matrix filter
175 
176   Logically Collective
177 
178   Input Parameters:
179 + pc - the preconditioner context
180 - b  - default false
181 
182   Options Database Key:
183 . -pc_gamg_low_memory_threshold_filter <bool,default=false> - the flag
184 
185   Level: intermediate
186 
187 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`,
188   `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`
189 @*/
PetscErrorCode PCGAMGSetLowMemoryFilter(PC pc, PetscBool b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, b, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGSetLowMemoryFilter_C", (PC, PetscBool), (pc, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
198 
199 /*@
200   PCGAMGSetGraphSymmetrize - Symmetrize graph used for coarsening. Defaults to true, but if matrix has symmetric attribute, then not needed since the graph is already known to be symmetric
201 
202   Logically Collective
203 
204   Input Parameters:
205 + pc - the preconditioner context
206 - b  - default true
207 
208   Options Database Key:
209 . -pc_gamg_graph_symmetrize <bool,default=true> - the flag
210 
211   Level: intermediate
212 
213 .seealso: [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), `PCGAMG`, `PCGAMGSetThreshold()`, `PCGAMGSetAggressiveLevels()`, `MatCreateGraph()`,
214   `PCGAMGMISkSetAggressive()`, `PCGAMGSetAggressiveSquareGraph()`, `PCGAMGMISkSetMinDegreeOrdering()`
215 @*/
PetscErrorCode PCGAMGSetGraphSymmetrize(PC pc, PetscBool b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, b, 2);
  /* dispatch to the implementation registered by the AGG subtype; silently a no-op for other GAMG subtypes */
  PetscTryMethod(pc, "PCGAMGSetGraphSymmetrize_C", (PC, PetscBool), (pc, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}
224 
225 static PetscErrorCode PCGAMGSetAggressiveLevels_AGG(PC pc, PetscInt n)
226 {
227   PC_MG       *mg          = (PC_MG *)pc->data;
228   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
229   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
230 
231   PetscFunctionBegin;
232   pc_gamg_agg->aggressive_coarsening_levels = n;
233   PetscFunctionReturn(PETSC_SUCCESS);
234 }
235 
236 static PetscErrorCode PCGAMGMISkSetAggressive_AGG(PC pc, PetscInt n)
237 {
238   PC_MG       *mg          = (PC_MG *)pc->data;
239   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
240   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
241 
242   PetscFunctionBegin;
243   pc_gamg_agg->aggressive_mis_k = n;
244   PetscFunctionReturn(PETSC_SUCCESS);
245 }
246 
247 static PetscErrorCode PCGAMGSetAggressiveSquareGraph_AGG(PC pc, PetscBool b)
248 {
249   PC_MG       *mg          = (PC_MG *)pc->data;
250   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
251   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
252 
253   PetscFunctionBegin;
254   pc_gamg_agg->use_aggressive_square_graph = b;
255   PetscFunctionReturn(PETSC_SUCCESS);
256 }
257 
258 static PetscErrorCode PCGAMGSetLowMemoryFilter_AGG(PC pc, PetscBool b)
259 {
260   PC_MG       *mg          = (PC_MG *)pc->data;
261   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
262   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
263 
264   PetscFunctionBegin;
265   pc_gamg_agg->use_low_mem_filter = b;
266   PetscFunctionReturn(PETSC_SUCCESS);
267 }
268 
269 static PetscErrorCode PCGAMGSetGraphSymmetrize_AGG(PC pc, PetscBool b)
270 {
271   PC_MG       *mg          = (PC_MG *)pc->data;
272   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
273   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
274 
275   PetscFunctionBegin;
276   pc_gamg_agg->graph_symmetrize = b;
277   PetscFunctionReturn(PETSC_SUCCESS);
278 }
279 
280 static PetscErrorCode PCGAMGMISkSetMinDegreeOrdering_AGG(PC pc, PetscBool b)
281 {
282   PC_MG       *mg          = (PC_MG *)pc->data;
283   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
284   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
285 
286   PetscFunctionBegin;
287   pc_gamg_agg->use_minimum_degree_ordering = b;
288   PetscFunctionReturn(PETSC_SUCCESS);
289 }
290 
291 static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems PetscOptionsObject)
292 {
293   PC_MG       *mg          = (PC_MG *)pc->data;
294   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
295   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
296   PetscBool    n_aggressive_flg, old_sq_provided = PETSC_FALSE, new_sq_provided = PETSC_FALSE, new_sqr_graph = pc_gamg_agg->use_aggressive_square_graph;
297   PetscInt     nsq_graph_old = 0;
298 
299   PetscFunctionBegin;
300   PetscOptionsHeadBegin(PetscOptionsObject, "GAMG-AGG options");
301   PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "number of smoothing steps to construct prolongation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL));
302   // aggressive coarsening logic with deprecated -pc_gamg_square_graph
303   PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &n_aggressive_flg));
304   if (!n_aggressive_flg)
305     PetscCall(PetscOptionsInt("-pc_gamg_square_graph", "Number of aggressive coarsening (MIS-2) levels from finest (deprecated alias for -pc_gamg_aggressive_coarsening)", "PCGAMGSetAggressiveLevels", nsq_graph_old, &nsq_graph_old, &old_sq_provided));
306   PetscCall(PetscOptionsBool("-pc_gamg_aggressive_square_graph", "Use square graph $ (A^T A)$ for aggressive coarsening, if false, MIS-k (k=2) is used, see PCGAMGMISkSetAggressive()", "PCGAMGSetAggressiveSquareGraph", new_sqr_graph, &pc_gamg_agg->use_aggressive_square_graph, &new_sq_provided));
307   if (!new_sq_provided && old_sq_provided) {
308     pc_gamg_agg->aggressive_coarsening_levels = nsq_graph_old; // could be zero
309     pc_gamg_agg->use_aggressive_square_graph  = PETSC_TRUE;
310   }
311   if (new_sq_provided && old_sq_provided)
312     PetscCall(PetscInfo(pc, "Warning: both -pc_gamg_square_graph and -pc_gamg_aggressive_coarsening are used. -pc_gamg_square_graph is deprecated, Number of aggressive levels is %" PetscInt_FMT "\n", pc_gamg_agg->aggressive_coarsening_levels));
313   PetscCall(PetscOptionsBool("-pc_gamg_mis_k_minimum_degree_ordering", "Use minimum degree ordering for greedy MIS", "PCGAMGMISkSetMinDegreeOrdering", pc_gamg_agg->use_minimum_degree_ordering, &pc_gamg_agg->use_minimum_degree_ordering, NULL));
314   PetscCall(PetscOptionsBool("-pc_gamg_low_memory_threshold_filter", "Use the (built-in) low memory graph/matrix filter", "PCGAMGSetLowMemoryFilter", pc_gamg_agg->use_low_mem_filter, &pc_gamg_agg->use_low_mem_filter, NULL));
315   PetscCall(PetscOptionsInt("-pc_gamg_aggressive_mis_k", "Number of levels of multigrid to use.", "PCGAMGMISkSetAggressive", pc_gamg_agg->aggressive_mis_k, &pc_gamg_agg->aggressive_mis_k, NULL));
316   PetscCall(PetscOptionsBool("-pc_gamg_graph_symmetrize", "Symmetrize graph for coarsening", "PCGAMGSetGraphSymmetrize", pc_gamg_agg->graph_symmetrize, &pc_gamg_agg->graph_symmetrize, NULL));
317   PetscOptionsHeadEnd();
318   PetscFunctionReturn(PETSC_SUCCESS);
319 }
320 
321 static PetscErrorCode PCDestroy_GAMG_AGG(PC pc)
322 {
323   PC_MG       *mg          = (PC_MG *)pc->data;
324   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
325   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
326 
327   PetscFunctionBegin;
328   PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs));
329   PetscCall(PetscFree(pc_gamg->subctx));
330   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", NULL));
331   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", NULL));
332   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGMISkSetAggressive_C", NULL));
333   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGMISkSetMinDegreeOrdering_C", NULL));
334   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetLowMemoryFilter_C", NULL));
335   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveSquareGraph_C", NULL));
336   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetGraphSymmetrize_C", NULL));
337   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
338   PetscFunctionReturn(PETSC_SUCCESS);
339 }
340 
341 /*
342    PCSetCoordinates_AGG
343 
344    Collective
345 
346    Input Parameter:
347    . pc - the preconditioner context
348    . ndm - dimension of data (used for dof/vertex for Stokes)
349    . a_nloc - number of vertices local
350    . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
351 */
352 
static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords)
{
  PC_MG   *mg      = (PC_MG *)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
  PetscInt arrsz, kk, ii, jj, nloc, ndatarows, ndf;
  Mat      mat = pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  nloc = a_nloc;

  /* SA: null space vectors */
  PetscCall(MatGetBlockSize(mat, &ndf));               /* this does not work for Stokes */
  if (coords && ndf == 1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if (coords) {
    /* vector problem with coordinates: build rigid-body modes (3 in 2D, 6 in 3D) */
    PetscCheck(ndm <= ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "degrees of motion %" PetscInt_FMT " > block size %" PetscInt_FMT, ndm, ndf);
    pc_gamg->data_cell_cols = (ndm == 2 ? 3 : 6); /* displacement elasticity */
    if (ndm != ndf) PetscCheck(pc_gamg->data_cell_cols == ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Don't know how to create null space for ndm=%" PetscInt_FMT ", ndf=%" PetscInt_FMT ".  Use MatSetNearNullSpace().", ndm, ndf);
  } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  PetscCheck(pc_gamg->data_cell_cols > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "pc_gamg->data_cell_cols %" PetscInt_FMT " <= 0", pc_gamg->data_cell_cols);
  /* total storage: one (data_cell_rows x data_cell_cols) dense block per local node */
  arrsz = nloc * pc_gamg->data_cell_rows * pc_gamg->data_cell_cols;

  /* (re)allocate only if the size changed; +1 guards against zero-size allocation */
  if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
    PetscCall(PetscFree(pc_gamg->data));
    PetscCall(PetscMalloc1(arrsz + 1, &pc_gamg->data));
  }
  /* copy data in - column-oriented */
  for (kk = 0; kk < nloc; kk++) {
    const PetscInt M    = nloc * pc_gamg->data_cell_rows; /* stride into data */
    PetscReal     *data = &pc_gamg->data[kk * ndatarows]; /* start of cell */

    if (pc_gamg->data_cell_cols == 1) *data = 1.0;
    else {
      /* translational modes: identity block, one column per dof */
      for (ii = 0; ii < ndatarows; ii++) {
        for (jj = 0; jj < ndatarows; jj++) {
          if (ii == jj) data[ii * M + jj] = 1.0;
          else data[ii * M + jj] = 0.0;
        }
      }

      /* rotational modes */
      if (coords) {
        if (ndm == 2) {
          /* single in-plane rotation: (-y, x) */
          data += 2 * M;
          data[0] = -coords[2 * kk + 1];
          data[1] = coords[2 * kk];
        } else {
          /* three rotations about x, y, z axes; columns 3..5 of the 6-column block */
          data += 3 * M;
          data[0]         = 0.0;
          data[M + 0]     = coords[3 * kk + 2];
          data[2 * M + 0] = -coords[3 * kk + 1];
          data[1]         = -coords[3 * kk + 2];
          data[M + 1]     = 0.0;
          data[2 * M + 1] = coords[3 * kk];
          data[2]         = coords[3 * kk + 1];
          data[M + 2]     = -coords[3 * kk];
          data[2 * M + 2] = 0.0;
        }
      }
    }
  }
  pc_gamg->data_sz = arrsz;
  PetscFunctionReturn(PETSC_SUCCESS);
}
420 
421 /*
422    PCSetData_AGG - called if data is not set with PCSetCoordinates.
423       Looks in Mat for near null space.
424       Does not work for Stokes
425 
426   Input Parameter:
427    . pc -
428    . a_A - matrix to get (near) null space out of.
429 */
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A)
{
  PC_MG       *mg      = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
  MatNullSpace mnull;

  PetscFunctionBegin;
  /* first preference: a near null space attached to the matrix itself */
  PetscCall(MatGetNearNullSpace(a_A, &mnull));
  if (!mnull) {
    DM dm;

    /* fall back to a DM (from the PC, then the matrix) and query its first field */
    PetscCall(PCGetDM(pc, &dm));
    if (!dm) PetscCall(MatGetDM(a_A, &dm));
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      PetscCall(DMGetNumFields(dm, &Nf));
      if (Nf) {
        PetscCall(DMGetField(dm, 0, NULL, &deformation));
        if (deformation) {
          /* "nearnullspace" takes precedence over "nullspace" on the field object */
          PetscCall(PetscObjectQuery(deformation, "nearnullspace", (PetscObject *)&mnull));
          if (!mnull) PetscCall(PetscObjectQuery(deformation, "nullspace", (PetscObject *)&mnull));
        }
      }
    }
  }

  if (!mnull) {
    /* no null space anywhere: default to constant vectors via PCSetCoordinates_AGG with NULL coords */
    PetscInt bs, NN, MM;

    PetscCall(MatGetBlockSize(a_A, &bs));
    PetscCall(MatGetLocalSize(a_A, &MM, &NN));
    PetscCheck(MM % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, MM, bs);
    PetscCall(PCSetCoordinates_AGG(pc, bs, MM / bs, NULL));
  } else {
    /* copy the null-space vectors (real part only) into pc_gamg->data, column-oriented;
       a constant vector of ones is prepended when the null space has the constant */
    PetscReal         *nullvec;
    PetscBool          has_const;
    PetscInt           i, j, mlocal, nvec, bs;
    const Vec         *vecs;
    const PetscScalar *v;

    PetscCall(MatGetLocalSize(a_A, &mlocal, NULL));
    PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetLocalSize(vecs[i], &j));
      PetscCheck(j == mlocal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Attached null space vector size %" PetscInt_FMT " != matrix size %" PetscInt_FMT, j, mlocal);
    }
    pc_gamg->data_sz = (nvec + !!has_const) * mlocal;
    PetscCall(PetscMalloc1((nvec + !!has_const) * mlocal, &nullvec));
    if (has_const)
      for (i = 0; i < mlocal; i++) nullvec[i] = 1.0;
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetArrayRead(vecs[i], &v));
      for (j = 0; j < mlocal; j++) nullvec[(i + !!has_const) * mlocal + j] = PetscRealPart(v[j]);
      PetscCall(VecRestoreArrayRead(vecs[i], &v));
    }
    /* ownership of nullvec transfers to pc_gamg->data (freed elsewhere) */
    pc_gamg->data           = nullvec;
    pc_gamg->data_cell_cols = (nvec + !!has_const);
    PetscCall(MatGetBlockSize(a_A, &bs));
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
494 
495 /*
496   formProl0 - collect null space data for each aggregate, do QR, put R in coarse grid data and Q in P_0
497 
498   Input Parameter:
499    . agg_llists - list of arrays with aggregates -- list from selected vertices of aggregate unselected vertices
500    . bs - row block size
501    . nSAvec - column bs of new P
502    . my0crs - global index of start of locals
503    . data_stride - bs*(nloc nodes + ghost nodes) [data_stride][nSAvec]
504    . data_in[data_stride*nSAvec] - local data on fine grid
505    . flid_fgid[data_stride/bs] - make local to global IDs, includes ghosts in 'locals_llist'
506 
507   Output Parameter:
508    . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
509    . a_Prol - prolongation operator
510 */
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, PetscInt nSAvec, PetscInt my0crs, PetscInt data_stride, PetscReal data_in[], const PetscInt flid_fgid[], PetscReal **a_data_out, Mat a_Prol)
{
  PetscInt        Istart, my0, Iend, nloc, clid, flid = 0, aggID, kk, jj, ii, mm, nSelected, minsz, nghosts, out_data_stride;
  MPI_Comm        comm;
  PetscReal      *out_data;
  PetscCDIntNd   *pos;
  PCGAMGHashTable fgid_flid;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)a_Prol, &comm));
  PetscCall(MatGetOwnershipRange(a_Prol, &Istart, &Iend));
  /* work in node (block) units: nloc local nodes, my0 first local node, Iend one past last */
  nloc = (Iend - Istart) / bs;
  my0  = Istart / bs;
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, Iend, Istart, bs);
  Iend /= bs;
  nghosts = data_stride / bs - nloc;

  /* hash table maps a ghost node's global ID to its local index (nloc..nloc+nghosts-1) */
  PetscCall(PCGAMGHashTableCreate(2 * nghosts + 1, &fgid_flid));
  for (kk = 0; kk < nghosts; kk++) PetscCall(PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc + kk], nloc + kk));

  /* count selected -- same as number of cols of P */
  for (nSelected = mm = 0; mm < nloc; mm++) {
    PetscBool ise;

    PetscCall(PetscCDIsEmptyAt(agg_llists, mm, &ise));
    if (!ise) nSelected++;
  }
  /* sanity: the column ownership of a_Prol must agree with the aggregate count */
  PetscCall(MatGetOwnershipRangeColumn(a_Prol, &ii, &jj));
  PetscCheck((ii / nSAvec) == my0crs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "ii %" PetscInt_FMT " /nSAvec %" PetscInt_FMT "  != my0crs %" PetscInt_FMT, ii, nSAvec, my0crs);
  PetscCheck(nSelected == (jj - ii) / nSAvec, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nSelected %" PetscInt_FMT " != (jj %" PetscInt_FMT " - ii %" PetscInt_FMT ")/nSAvec %" PetscInt_FMT, nSelected, jj, ii, nSAvec);

  /* aloc space for coarse point data (output) */
  out_data_stride = nSelected * nSAvec;

  /* initialize to PETSC_MAX_REAL so the write-once check below can detect double writes */
  PetscCall(PetscMalloc1(out_data_stride * nSAvec, &out_data));
  for (ii = 0; ii < out_data_stride * nSAvec; ii++) out_data[ii] = PETSC_MAX_REAL;
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCall(PetscCDCountAt(agg_llists, mm, &jj));
    if (jj > 0) {
      /* node mm roots an aggregate of jj nodes; clid is its local coarse index */
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs */
      PetscBLASInt   asz, M, N, INFO;
      PetscBLASInt   Mdata, LDA, LWORK;
      PetscScalar   *qqc, *qqr, *TAU, *WORK;
      PetscInt      *fids;
      PetscReal     *data;

      /* QR dimensions: M = rows (aggregate size * bs), N = cols (nSAvec);
         Mdata pads M up to at least N so LAPACK accepts a "short" aggregate */
      PetscCall(PetscBLASIntCast(jj, &asz));
      PetscCall(PetscBLASIntCast(asz * bs, &M));
      PetscCall(PetscBLASIntCast(nSAvec, &N));
      PetscCall(PetscBLASIntCast(M + ((N - M > 0) ? N - M : 0), &Mdata));
      PetscCall(PetscBLASIntCast(Mdata, &LDA));
      PetscCall(PetscBLASIntCast(N * bs, &LWORK));
      /* count agg */
      if (asz < minsz) minsz = asz;

      /* get block */
      PetscCall(PetscMalloc5(Mdata * N, &qqc, M * N, &qqr, N, &TAU, LWORK, &WORK, M, &fids));

      /* gather this aggregate's rows of the fine-grid null-space data into qqc (column-major) */
      aggID = 0;
      PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos));
      while (pos) {
        PetscInt gid1;

        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos));

        /* resolve global node ID to a local index: owned range directly, ghosts via the hash table */
        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0;
        else {
          PetscCall(PCGAMGHashTableFind(&fgid_flid, gid1, &flid));
          PetscCheck(flid >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column-oriented */
        data = &data_in[flid * bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d = data[jj * data_stride + ii];

            qqc[jj * Mdata + aggID * bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk = 0; kk < bs; kk++) fids[aggID * bs + kk] = flid_fgid[flid] * bs + kk;
        aggID++;
      }

      /* pad with zeros */
      /* NOTE(review): the kk++ in this loop header looks vestigial -- kk is not used in the loop body */
      for (ii = asz * bs; ii < Mdata; ii++) {
        for (jj = 0; jj < N; jj++, kk++) qqc[jj * Mdata + ii] = .0;
      }

      /* QR */
      PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
      PetscCallBLAS("LAPACKgeqrf", LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCall(PetscFPTrapPop());
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xGEQRF error");
      /* get R - column-oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid * nSAvec];

        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            /* write-once check against the PETSC_MAX_REAL sentinel set at allocation */
            PetscCheck(data[jj * out_data_stride + ii] == PETSC_MAX_REAL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "data[jj*out_data_stride + ii] != %e", (double)PETSC_MAX_REAL);
            if (ii <= jj) data[jj * out_data_stride + ii] = PetscRealPart(qqc[jj * Mdata + ii]);
            else data[jj * out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row-oriented */
      PetscCallBLAS("LAPACKorgqr", LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xORGQR error arg %" PetscBLASInt_FMT, -INFO);

      /* transpose Q from LAPACK's column-major qqc into row-major qqr for MatSetValues */
      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) qqr[N * ii + jj] = qqc[jj * Mdata + ii];
      }

      /* add diagonal block of P0 */
      for (kk = 0; kk < N; kk++) { cids[kk] = N * cgid + kk; /* global col IDs in P0 */ }
      PetscCall(MatSetValues(a_Prol, M, fids, N, cids, qqr, INSERT_VALUES));
      PetscCall(PetscFree5(qqc, qqr, TAU, WORK, fids));
      clid++;
    } /* coarse agg */
  } /* for all fine nodes */
  PetscCall(MatAssemblyBegin(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(PCGAMGHashTableDestroy(&fgid_flid));
  PetscFunctionReturn(PETSC_SUCCESS);
}
644 
645 static PetscErrorCode PCView_GAMG_AGG(PC pc, PetscViewer viewer)
646 {
647   PC_MG       *mg          = (PC_MG *)pc->data;
648   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
649   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
650 
651   PetscFunctionBegin;
652   PetscCall(PetscViewerASCIIPrintf(viewer, "      AGG specific options\n"));
653   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number of levels of aggressive coarsening %" PetscInt_FMT "\n", pc_gamg_agg->aggressive_coarsening_levels));
654   if (pc_gamg_agg->aggressive_coarsening_levels > 0) {
655     PetscCall(PetscViewerASCIIPrintf(viewer, "        %s aggressive coarsening\n", !pc_gamg_agg->use_aggressive_square_graph ? "MIS-k" : "Square graph"));
656     if (!pc_gamg_agg->use_aggressive_square_graph) PetscCall(PetscViewerASCIIPrintf(viewer, "        MIS-%" PetscInt_FMT " coarsening on aggressive levels\n", pc_gamg_agg->aggressive_mis_k));
657   }
658   PetscCall(PetscViewerASCIIPushTab(viewer));
659   PetscCall(PetscViewerASCIIPushTab(viewer));
660   PetscCall(PetscViewerASCIIPushTab(viewer));
661   PetscCall(PetscViewerASCIIPushTab(viewer));
662   if (pc_gamg_agg->crs) PetscCall(MatCoarsenView(pc_gamg_agg->crs, viewer));
663   else PetscCall(PetscViewerASCIIPrintf(viewer, "Coarsening algorithm not yet selected\n"));
664   PetscCall(PetscViewerASCIIPopTab(viewer));
665   PetscCall(PetscViewerASCIIPopTab(viewer));
666   PetscCall(PetscViewerASCIIPopTab(viewer));
667   PetscCall(PetscViewerASCIIPopTab(viewer));
668   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number smoothing steps to construct prolongation %" PetscInt_FMT "\n", pc_gamg_agg->nsmooths));
669   PetscFunctionReturn(PETSC_SUCCESS);
670 }
671 
672 static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat)
673 {
674   PC_MG          *mg          = (PC_MG *)pc->data;
675   PC_GAMG        *pc_gamg     = (PC_GAMG *)mg->innerctx;
676   PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
677   const PetscReal vfilter     = pc_gamg->threshold[pc_gamg->current_level];
678   PetscBool       ishem, ismis;
679   const char     *prefix;
680   MatInfo         info0, info1;
681   PetscInt        bs;
682 
683   PetscFunctionBegin;
684   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
685   /* Note: depending on the algorithm that will be used for computing the coarse grid points this should pass PETSC_TRUE or PETSC_FALSE as the first argument */
686   /* MATCOARSENHEM requires numerical weights for edges so ensure they are computed */
687   PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs));
688   PetscCall(MatCoarsenCreate(PetscObjectComm((PetscObject)pc), &pc_gamg_agg->crs));
689   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
690   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg_agg->crs, prefix));
691   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc_gamg_agg->crs, "pc_gamg_"));
692   PetscCall(MatCoarsenSetFromOptions(pc_gamg_agg->crs));
693   PetscCall(MatGetBlockSize(Amat, &bs));
694   // check for valid indices wrt bs
695   for (int ii = 0; ii < pc_gamg_agg->crs->strength_index_size; ii++) {
696     PetscCheck(pc_gamg_agg->crs->strength_index[ii] >= 0 && pc_gamg_agg->crs->strength_index[ii] < bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Indices (%" PetscInt_FMT ") must be non-negative and < block size (%" PetscInt_FMT "), NB, can not use -mat_coarsen_strength_index with -mat_coarsen_strength_index",
697                pc_gamg_agg->crs->strength_index[ii], bs);
698   }
699   PetscCall(PetscObjectTypeCompare((PetscObject)pc_gamg_agg->crs, MATCOARSENHEM, &ishem));
700   if (ishem) {
701     if (pc_gamg_agg->aggressive_coarsening_levels) PetscCall(PetscInfo(pc, "HEM and aggressive coarsening ignored: HEM using %" PetscInt_FMT " iterations\n", pc_gamg_agg->crs->max_it));
702     pc_gamg_agg->aggressive_coarsening_levels = 0;                                         // aggressive and HEM does not make sense
703     PetscCall(MatCoarsenSetMaximumIterations(pc_gamg_agg->crs, pc_gamg_agg->crs->max_it)); // for code coverage
704     PetscCall(MatCoarsenSetThreshold(pc_gamg_agg->crs, vfilter));                          // for code coverage
705   } else {
706     PetscCall(PetscObjectTypeCompare((PetscObject)pc_gamg_agg->crs, MATCOARSENMIS, &ismis));
707     if (ismis && pc_gamg_agg->aggressive_coarsening_levels && !pc_gamg_agg->use_aggressive_square_graph) {
708       PetscCall(PetscInfo(pc, "MIS and aggressive coarsening and no square graph: force square graph\n"));
709       pc_gamg_agg->use_aggressive_square_graph = PETSC_TRUE;
710     }
711   }
712   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
713   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
714   PetscCall(MatGetInfo(Amat, MAT_LOCAL, &info0)); /* global reduction */
715 
716   if (ishem || pc_gamg_agg->use_low_mem_filter) {
717     PetscCall(MatCreateGraph(Amat, pc_gamg_agg->graph_symmetrize, (vfilter >= 0 || ishem) ? PETSC_TRUE : PETSC_FALSE, vfilter, pc_gamg_agg->crs->strength_index_size, pc_gamg_agg->crs->strength_index, a_Gmat));
718   } else {
719     // make scalar graph, symmetrize if not known to be symmetric, scale, but do not filter (expensive)
720     PetscCall(MatCreateGraph(Amat, pc_gamg_agg->graph_symmetrize, PETSC_TRUE, -1, pc_gamg_agg->crs->strength_index_size, pc_gamg_agg->crs->strength_index, a_Gmat));
721     if (vfilter >= 0) {
722       PetscInt           Istart, Iend, ncols, nnz0, nnz1, NN, MM, nloc;
723       Mat                tGmat, Gmat = *a_Gmat;
724       MPI_Comm           comm;
725       const PetscScalar *vals;
726       const PetscInt    *idx;
727       PetscInt          *d_nnz, *o_nnz, kk, *garray = NULL, *AJ, maxcols = 0;
728       MatScalar         *AA; // this is checked in graph
729       PetscBool          isseqaij;
730       Mat                a, b, c;
731       MatType            jtype;
732 
733       PetscCall(PetscObjectGetComm((PetscObject)Gmat, &comm));
734       PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATSEQAIJ, &isseqaij));
735       PetscCall(MatGetType(Gmat, &jtype));
736       PetscCall(MatCreate(comm, &tGmat));
737       PetscCall(MatSetType(tGmat, jtype));
738 
739       /* TODO GPU: this can be called when filter = 0 -> Probably provide MatAIJThresholdCompress that compresses the entries below a threshold?
740         Also, if the matrix is symmetric, can we skip this
741         operation? It can be very expensive on large matrices. */
742 
743       // global sizes
744       PetscCall(MatGetSize(Gmat, &MM, &NN));
745       PetscCall(MatGetOwnershipRange(Gmat, &Istart, &Iend));
746       nloc = Iend - Istart;
747       PetscCall(PetscMalloc2(nloc, &d_nnz, nloc, &o_nnz));
748       if (isseqaij) {
749         a = Gmat;
750         b = NULL;
751       } else {
752         Mat_MPIAIJ *d = (Mat_MPIAIJ *)Gmat->data;
753 
754         a      = d->A;
755         b      = d->B;
756         garray = d->garray;
757       }
758       /* Determine upper bound on non-zeros needed in new filtered matrix */
759       for (PetscInt row = 0; row < nloc; row++) {
760         PetscCall(MatGetRow(a, row, &ncols, NULL, NULL));
761         d_nnz[row] = ncols;
762         if (ncols > maxcols) maxcols = ncols;
763         PetscCall(MatRestoreRow(a, row, &ncols, NULL, NULL));
764       }
765       if (b) {
766         for (PetscInt row = 0; row < nloc; row++) {
767           PetscCall(MatGetRow(b, row, &ncols, NULL, NULL));
768           o_nnz[row] = ncols;
769           if (ncols > maxcols) maxcols = ncols;
770           PetscCall(MatRestoreRow(b, row, &ncols, NULL, NULL));
771         }
772       }
773       PetscCall(MatSetSizes(tGmat, nloc, nloc, MM, MM));
774       PetscCall(MatSetBlockSizes(tGmat, 1, 1));
775       PetscCall(MatSeqAIJSetPreallocation(tGmat, 0, d_nnz));
776       PetscCall(MatMPIAIJSetPreallocation(tGmat, 0, d_nnz, 0, o_nnz));
777       PetscCall(MatSetOption(tGmat, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
778       PetscCall(PetscFree2(d_nnz, o_nnz));
779       PetscCall(PetscMalloc2(maxcols, &AA, maxcols, &AJ));
780       nnz0 = nnz1 = 0;
781       for (c = a, kk = 0; c && kk < 2; c = b, kk++) {
782         for (PetscInt row = 0, grow = Istart, ncol_row, jj; row < nloc; row++, grow++) {
783           PetscCall(MatGetRow(c, row, &ncols, &idx, &vals));
784           for (ncol_row = jj = 0; jj < ncols; jj++, nnz0++) {
785             PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
786             if (PetscRealPart(sv) > vfilter) {
787               PetscInt cid = idx[jj] + Istart; //diag
788 
789               nnz1++;
790               if (c != a) cid = garray[idx[jj]];
791               AA[ncol_row] = vals[jj];
792               AJ[ncol_row] = cid;
793               ncol_row++;
794             }
795           }
796           PetscCall(MatRestoreRow(c, row, &ncols, &idx, &vals));
797           PetscCall(MatSetValues(tGmat, 1, &grow, ncol_row, AJ, AA, INSERT_VALUES));
798         }
799       }
800       PetscCall(PetscFree2(AA, AJ));
801       PetscCall(MatAssemblyBegin(tGmat, MAT_FINAL_ASSEMBLY));
802       PetscCall(MatAssemblyEnd(tGmat, MAT_FINAL_ASSEMBLY));
803       PetscCall(MatPropagateSymmetryOptions(Gmat, tGmat)); /* Normal Mat options are not relevant ? */
804       PetscCall(PetscInfo(pc, "\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%" PetscInt_FMT ", max row size %" PetscInt_FMT "\n", (!nnz0) ? 1. : 100. * (double)nnz1 / (double)nnz0, (double)vfilter, (!nloc) ? 1. : (double)nnz0 / (double)nloc, MM, maxcols));
805       PetscCall(MatViewFromOptions(tGmat, NULL, "-mat_filter_graph_view"));
806       PetscCall(MatDestroy(&Gmat));
807       *a_Gmat = tGmat;
808     }
809   }
810 
811   PetscCall(MatGetInfo(*a_Gmat, MAT_LOCAL, &info1)); /* global reduction */
812   if (info0.nz_used > 0) PetscCall(PetscInfo(pc, "Filtering left %g %% edges in graph (%e %e)\n", 100.0 * info1.nz_used * (double)(bs * bs) / info0.nz_used, info0.nz_used, info1.nz_used));
813   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
814   PetscFunctionReturn(PETSC_SUCCESS);
815 }
816 
/* Per-vertex aggregation state: one of the negative sentinels below, or (when selected)
   a non-negative value -- see lid_state[] usage in fixAggregatesWithSquare() where a
   selected vertex stores its global id */
typedef PetscInt    NState;
static const NState NOT_DONE = -2; // vertex not yet processed
static const NState DELETED  = -1; // vertex deleted; its aggregate root is elsewhere (possibly a ghost owner)
static const NState REMOVED  = -3; // vertex removed from consideration
/* selected = any state that is not one of the negative sentinels */
#define IS_SELECTED(s) (s != DELETED && s != NOT_DONE && s != REMOVED)
822 
/*
   fixAggregatesWithSquare - greedy grab of locally connected vertices using G1 (the
     unsquared graph), to repair aggregates built on the squared graph -- AIJ specific -- TODO
     - AGG-MG specific: clears singletons out of 'selected_2'

   Input Parameters:
   . Gmat_2 - global matrix of squared graph (data not defined)
   . Gmat_1 - base graph to grab with
   Input/Output Parameter:
   . aggs_2 - linked list of aggs (with gids)
*/
static PetscErrorCode fixAggregatesWithSquare(PC pc, Mat Gmat_2, Mat Gmat_1, PetscCoarsenData *aggs_2)
{
  PetscBool      isMPI;
  Mat_SeqAIJ    *matA_1, *matB_1 = NULL;
  MPI_Comm       comm;
  PetscInt       lid, *ii, *idx, ix, Iend, my0, kk, n, j;
  Mat_MPIAIJ    *mpimat_2 = NULL, *mpimat_1 = NULL;
  const PetscInt nloc = Gmat_2->rmap->n;
  PetscScalar   *cpcol_1_state, *cpcol_2_state, *cpcol_2_par_orig, *lid_parent_gid;
  PetscInt      *lid_cprowID_1 = NULL;
  NState        *lid_state;
  Vec            ghost_par_orig2;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Gmat_2, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(MatGetOwnershipRange(Gmat_1, &my0, &Iend));

  /* get submatrices */
  PetscCall(PetscStrbeginswith(((PetscObject)Gmat_1)->type_name, MATMPIAIJ, &isMPI));
  PetscCall(PetscInfo(pc, "isMPI = %s\n", isMPI ? "yes" : "no"));
  PetscCall(PetscMalloc3(nloc, &lid_state, nloc, &lid_parent_gid, nloc, &lid_cprowID_1));
  for (lid = 0; lid < nloc; lid++) lid_cprowID_1[lid] = -1; /* -1: local row has no off-diagonal (ghost) entries */
  if (isMPI) {
    /* grab matrix objects */
    mpimat_2 = (Mat_MPIAIJ *)Gmat_2->data;
    mpimat_1 = (Mat_MPIAIJ *)Gmat_1->data;
    matA_1   = (Mat_SeqAIJ *)mpimat_1->A->data;
    matB_1   = (Mat_SeqAIJ *)mpimat_1->B->data;

    /* force compressed row storage for B matrix in AuxMat */
    PetscCall(MatCheckCompressedRow(mpimat_1->B, matB_1->nonzerorowcnt, &matB_1->compressedrow, matB_1->i, Gmat_1->rmap->n, -1.0));
    /* build map: local row id -> compressed-row index in B (rows that touch ghosts) */
    for (ix = 0; ix < matB_1->compressedrow.nrows; ix++) {
      PetscInt lid = matB_1->compressedrow.rindex[ix];

      PetscCheck(lid <= nloc && lid >= -1, PETSC_COMM_SELF, PETSC_ERR_USER, "lid %" PetscInt_FMT " out of range. nloc = %" PetscInt_FMT, lid, nloc);
      if (lid != -1) lid_cprowID_1[lid] = ix;
    }
  } else {
    PetscBool isAIJ;

    PetscCall(PetscStrbeginswith(((PetscObject)Gmat_1)->type_name, MATSEQAIJ, &isAIJ));
    PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix.");
    matA_1 = (Mat_SeqAIJ *)Gmat_1->data;
  }
  if (nloc > 0) { PetscCheck(!matB_1 || matB_1->compressedrow.use, PETSC_COMM_SELF, PETSC_ERR_PLIB, "matB_1 && !matB_1->compressedrow.use: PETSc bug???"); }
  /* get state of locals and selected gid for deleted */
  for (lid = 0; lid < nloc; lid++) {
    lid_parent_gid[lid] = -1.0; /* -1: no parent assigned yet */
    lid_state[lid]      = DELETED;
  }

  /* set lid_state: a vertex heading a non-empty aggregate list is selected and stores its own gid */
  for (lid = 0; lid < nloc; lid++) {
    PetscCDIntNd *pos;

    PetscCall(PetscCDGetHeadPos(aggs_2, lid, &pos));
    if (pos) {
      PetscInt gid1;

      PetscCall(PetscCDIntNdGetID(pos, &gid1));
      PetscCheck(gid1 == lid + my0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "gid1 %" PetscInt_FMT " != lid %" PetscInt_FMT " + my0 %" PetscInt_FMT, gid1, lid, my0);
      lid_state[lid] = gid1;
    }
  }

  /* map local to selected local, DELETED means a ghost owns it */
  for (lid = 0; lid < nloc; lid++) {
    NState state = lid_state[lid];

    if (IS_SELECTED(state)) {
      PetscCDIntNd *pos;

      PetscCall(PetscCDGetHeadPos(aggs_2, lid, &pos));
      while (pos) {
        PetscInt gid1;

        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(aggs_2, lid, &pos));
        if (gid1 >= my0 && gid1 < Iend) lid_parent_gid[gid1 - my0] = (PetscScalar)(lid + my0);
      }
    }
  }
  /* get 'cpcol_1/2_state' & cpcol_2_par_orig - uses mpimat_1/2->lvec for temp space */
  if (isMPI) {
    Vec tempVec;

    /* get 'cpcol_1_state' */
    PetscCall(MatCreateVecs(Gmat_1, &tempVec, NULL));
    for (kk = 0, j = my0; kk < nloc; kk++, j++) {
      PetscScalar v = (PetscScalar)lid_state[kk];

      PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES));
    }
    PetscCall(VecAssemblyBegin(tempVec));
    PetscCall(VecAssemblyEnd(tempVec));
    PetscCall(VecScatterBegin(mpimat_1->Mvctx, tempVec, mpimat_1->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mpimat_1->Mvctx, tempVec, mpimat_1->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(mpimat_1->lvec, &cpcol_1_state));
    /* get 'cpcol_2_state' */
    PetscCall(VecScatterBegin(mpimat_2->Mvctx, tempVec, mpimat_2->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mpimat_2->Mvctx, tempVec, mpimat_2->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(mpimat_2->lvec, &cpcol_2_state));
    /* get 'cpcol_2_par_orig' */
    for (kk = 0, j = my0; kk < nloc; kk++, j++) {
      PetscScalar v = lid_parent_gid[kk];

      PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES));
    }
    PetscCall(VecAssemblyBegin(tempVec));
    PetscCall(VecAssemblyEnd(tempVec));
    PetscCall(VecDuplicate(mpimat_2->lvec, &ghost_par_orig2));
    PetscCall(VecScatterBegin(mpimat_2->Mvctx, tempVec, ghost_par_orig2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mpimat_2->Mvctx, tempVec, ghost_par_orig2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(ghost_par_orig2, &cpcol_2_par_orig));

    PetscCall(VecDestroy(&tempVec));
  } /* ismpi */
  /* main pass: selected vertices steal DELETED neighbors in G1; DELETED vertices may be stolen by ghosts */
  for (lid = 0; lid < nloc; lid++) {
    NState state = lid_state[lid];

    if (IS_SELECTED(state)) {
      /* steal locals */
      ii  = matA_1->i;
      n   = ii[lid + 1] - ii[lid];
      idx = matA_1->j + ii[lid];
      for (j = 0; j < n; j++) {
        PetscInt lidj   = idx[j], sgid;
        NState   statej = lid_state[lidj];

        if (statej == DELETED && (sgid = (PetscInt)PetscRealPart(lid_parent_gid[lidj])) != lid + my0) { /* steal local */
          lid_parent_gid[lidj] = (PetscScalar)(lid + my0);                                              /* send this if sgid is not local */
          if (sgid >= my0 && sgid < Iend) {                                                             /* I'm stealing this local from a local sgid */
            PetscInt      hav = 0, slid = sgid - my0, gidj = lidj + my0;
            PetscCDIntNd *pos, *last = NULL;

            /* looking for local from local so id_llist_2 works */
            PetscCall(PetscCDGetHeadPos(aggs_2, slid, &pos));
            while (pos) {
              PetscInt gid;

              PetscCall(PetscCDIntNdGetID(pos, &gid));
              if (gid == gidj) {
                PetscCheck(last, PETSC_COMM_SELF, PETSC_ERR_PLIB, "last cannot be null");
                PetscCall(PetscCDRemoveNextNode(aggs_2, slid, last));
                PetscCall(PetscCDAppendNode(aggs_2, lid, pos));
                hav = 1;
                break;
              } else last = pos;
              PetscCall(PetscCDGetNextPos(aggs_2, slid, &pos));
            }
            if (hav != 1) {
              PetscCheck(hav, PETSC_COMM_SELF, PETSC_ERR_PLIB, "failed to find adj in 'selected' lists - structurally unsymmetric matrix");
              SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "found node %" PetscInt_FMT " times???", hav);
            }
          } else { /* I'm stealing this local, owned by a ghost */
            PetscCheck(sgid == -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Mat has an un-symmetric graph. Use '-%spc_gamg_sym_graph true' to symmetrize the graph or '-%spc_gamg_threshold -1' if the matrix is structurally symmetric.",
                       ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "");
            PetscCall(PetscCDAppendID(aggs_2, lid, lidj + my0));
          }
        }
      } /* local neighbors */
    } else if (state == DELETED /* && lid_cprowID_1 */) {
      PetscInt sgidold = (PetscInt)PetscRealPart(lid_parent_gid[lid]);

      /* see if I have a selected ghost neighbor that will steal me */
      if ((ix = lid_cprowID_1[lid]) != -1) {
        ii  = matB_1->compressedrow.i;
        n   = ii[ix + 1] - ii[ix];
        idx = matB_1->j + ii[ix];
        for (j = 0; j < n; j++) {
          PetscInt cpid   = idx[j];
          NState   statej = (NState)PetscRealPart(cpcol_1_state[cpid]);

          if (IS_SELECTED(statej) && sgidold != statej) { /* ghost will steal this, remove from my list */
            lid_parent_gid[lid] = (PetscScalar)statej;    /* send who selected */
            if (sgidold >= my0 && sgidold < Iend) {       /* this was mine */
              PetscInt      hav = 0, oldslidj = sgidold - my0;
              PetscCDIntNd *pos, *last        = NULL;

              /* remove from 'oldslidj' list */
              PetscCall(PetscCDGetHeadPos(aggs_2, oldslidj, &pos));
              while (pos) {
                PetscInt gid;

                PetscCall(PetscCDIntNdGetID(pos, &gid));
                if (lid + my0 == gid) {
                  /* id_llist_2[lastid] = id_llist_2[flid];   /\* remove lid from oldslidj list *\/ */
                  PetscCheck(last, PETSC_COMM_SELF, PETSC_ERR_PLIB, "last cannot be null");
                  PetscCall(PetscCDRemoveNextNode(aggs_2, oldslidj, last));
                  /* ghost (PetscScalar)statej will add this later */
                  hav = 1;
                  break;
                } else last = pos;
                PetscCall(PetscCDGetNextPos(aggs_2, oldslidj, &pos));
              }
              if (hav != 1) {
                PetscCheck(hav, PETSC_COMM_SELF, PETSC_ERR_PLIB, "failed to find (hav=%" PetscInt_FMT ") adj in 'selected' lists - structurally unsymmetric matrix", hav);
                SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "found node %" PetscInt_FMT " times???", hav);
              }
            } else {
              /* TODO: ghosts remove this later */
            }
          }
        }
      }
    } /* selected/deleted */
  } /* node loop */

  /* reconcile the local changes with the ghost data on other ranks */
  if (isMPI) {
    PetscScalar    *cpcol_2_parent, *cpcol_2_gid;
    Vec             tempVec, ghostgids2, ghostparents2;
    PetscInt        cpid, nghost_2;
    PCGAMGHashTable gid_cpid;

    PetscCall(VecGetSize(mpimat_2->lvec, &nghost_2));
    PetscCall(MatCreateVecs(Gmat_2, &tempVec, NULL));

    /* get 'cpcol_2_parent' */
    for (kk = 0, j = my0; kk < nloc; kk++, j++) { PetscCall(VecSetValues(tempVec, 1, &j, &lid_parent_gid[kk], INSERT_VALUES)); }
    PetscCall(VecAssemblyBegin(tempVec));
    PetscCall(VecAssemblyEnd(tempVec));
    PetscCall(VecDuplicate(mpimat_2->lvec, &ghostparents2));
    PetscCall(VecScatterBegin(mpimat_2->Mvctx, tempVec, ghostparents2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mpimat_2->Mvctx, tempVec, ghostparents2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(ghostparents2, &cpcol_2_parent));

    /* get 'cpcol_2_gid' */
    for (kk = 0, j = my0; kk < nloc; kk++, j++) {
      PetscScalar v = (PetscScalar)j;

      PetscCall(VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES));
    }
    PetscCall(VecAssemblyBegin(tempVec));
    PetscCall(VecAssemblyEnd(tempVec));
    PetscCall(VecDuplicate(mpimat_2->lvec, &ghostgids2));
    PetscCall(VecScatterBegin(mpimat_2->Mvctx, tempVec, ghostgids2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mpimat_2->Mvctx, tempVec, ghostgids2, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(ghostgids2, &cpcol_2_gid));
    PetscCall(VecDestroy(&tempVec));

    /* look for deleted ghosts and add to table */
    PetscCall(PCGAMGHashTableCreate(2 * nghost_2 + 1, &gid_cpid));
    for (cpid = 0; cpid < nghost_2; cpid++) {
      NState state = (NState)PetscRealPart(cpcol_2_state[cpid]);

      if (state == DELETED) {
        PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
        PetscInt sgid_old = (PetscInt)PetscRealPart(cpcol_2_par_orig[cpid]);

        if (sgid_old == -1 && sgid_new != -1) {
          PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);

          PetscCall(PCGAMGHashTableAdd(&gid_cpid, gid, cpid));
        }
      }
    }

    /* look for deleted ghosts and see if they moved - remove it */
    for (lid = 0; lid < nloc; lid++) {
      NState state = lid_state[lid];

      if (IS_SELECTED(state)) {
        PetscCDIntNd *pos, *last = NULL;

        /* look for deleted ghosts and see if they moved */
        PetscCall(PetscCDGetHeadPos(aggs_2, lid, &pos));
        while (pos) {
          PetscInt gid;

          PetscCall(PetscCDIntNdGetID(pos, &gid));
          if (gid < my0 || gid >= Iend) {
            PetscCall(PCGAMGHashTableFind(&gid_cpid, gid, &cpid));
            if (cpid != -1) {
              /* a moved ghost - */
              /* id_llist_2[lastid] = id_llist_2[flid];    /\* remove 'flid' from list *\/ */
              PetscCall(PetscCDRemoveNextNode(aggs_2, lid, last));
            } else last = pos;
          } else last = pos;

          PetscCall(PetscCDGetNextPos(aggs_2, lid, &pos));
        } /* loop over list of deleted */
      } /* selected */
    }
    PetscCall(PCGAMGHashTableDestroy(&gid_cpid));

    /* look at ghosts, see if they changed - and add them to my lists if their new parent is mine */
    for (cpid = 0; cpid < nghost_2; cpid++) {
      PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);

      if (sgid_new >= my0 && sgid_new < Iend) { /* this is mine */
        PetscInt      gid      = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
        PetscInt      slid_new = sgid_new - my0, hav = 0;
        PetscCDIntNd *pos;

        /* search for this gid to see if I have it */
        PetscCall(PetscCDGetHeadPos(aggs_2, slid_new, &pos));
        while (pos) {
          PetscInt gidj;

          PetscCall(PetscCDIntNdGetID(pos, &gidj));
          PetscCall(PetscCDGetNextPos(aggs_2, slid_new, &pos));

          if (gidj == gid) {
            hav = 1;
            break;
          }
        }
        if (hav != 1) {
          /* insert 'flidj' into head of llist */
          PetscCall(PetscCDAppendID(aggs_2, slid_new, gid));
        }
      }
    }
    PetscCall(VecRestoreArray(mpimat_1->lvec, &cpcol_1_state));
    PetscCall(VecRestoreArray(mpimat_2->lvec, &cpcol_2_state));
    PetscCall(VecRestoreArray(ghostparents2, &cpcol_2_parent));
    PetscCall(VecRestoreArray(ghostgids2, &cpcol_2_gid));
    PetscCall(VecDestroy(&ghostgids2));
    PetscCall(VecDestroy(&ghostparents2));
    PetscCall(VecDestroy(&ghost_par_orig2));
  }
  PetscCall(PetscFree3(lid_state, lid_parent_gid, lid_cprowID_1));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1159 
/*
   PCGAMGCoarsen_AGG - supports squaring the graph (deprecated) and new graph for
     communication of QR data used with HEM and MISk coarsening

  Input Parameter:
   . a_pc - this

  Input/Output Parameter:
   . a_Gmat1 - graph to coarsen (in), graph off processor edges for QR gather scatter (out)

  Output Parameter:
   . agg_lists - list of aggregates

*/
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData **agg_lists)
{
  PC_MG       *mg          = (PC_MG *)a_pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  Mat          Gmat2, Gmat1 = *a_Gmat1; /* aggressive graph */
  IS           perm;
  PetscInt     Istart, Iend, Ii, nloc, bs, nn;
  PetscInt    *permute, *degree;
  PetscBool   *bIndexSet; /* marks rows already swapped by the random shuffle below */
  PetscReal    hashfact;
  PetscInt     iSwapIndex;
  PetscRandom  random;
  MPI_Comm     comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Gmat1, &comm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscCall(MatGetLocalSize(Gmat1, &nn, NULL));
  PetscCall(MatGetBlockSize(Gmat1, &bs));
  PetscCheck(bs == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bs %" PetscInt_FMT " must be 1", bs);
  nloc = nn / bs;
  /* get MIS aggs - randomize */
  PetscCall(PetscMalloc2(nloc, &permute, nloc, &degree));
  PetscCall(PetscCalloc1(nloc, &bIndexSet));
  for (Ii = 0; Ii < nloc; Ii++) permute[Ii] = Ii;
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &random));
  PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend));
  /* record row degrees (used for optional minimum-degree ordering below) */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscInt nc;

    PetscCall(MatGetRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
    degree[Ii] = nc;
    PetscCall(MatRestoreRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
  }
  /* random pairwise swaps of permute[] (and degree[] in lockstep) to randomize the greedy ordering */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscCall(PetscRandomGetValueReal(random, &hashfact));
    iSwapIndex = (PetscInt)(hashfact * nloc) % nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp = permute[iSwapIndex];

      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      iTemp                 = degree[iSwapIndex];
      degree[iSwapIndex]    = degree[Ii];
      degree[Ii]            = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  // apply minimum degree ordering -- NEW
  if (pc_gamg_agg->use_minimum_degree_ordering) { PetscCall(PetscSortIntWithArray(nloc, degree, permute)); }
  PetscCall(PetscFree(bIndexSet));
  PetscCall(PetscRandomDestroy(&random));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));
  // square graph
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels && pc_gamg_agg->use_aggressive_square_graph) {
    PetscCall(PCGAMGSquareGraph_GAMG(a_pc, Gmat1, &Gmat2));
  } else Gmat2 = Gmat1;
  // switch to old MIS-1 for square graph
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels) {
    if (!pc_gamg_agg->use_aggressive_square_graph) PetscCall(MatCoarsenMISKSetDistance(pc_gamg_agg->crs, pc_gamg_agg->aggressive_mis_k)); // hardwire to MIS-2
    else PetscCall(MatCoarsenSetType(pc_gamg_agg->crs, MATCOARSENMIS));                                                                   // old MIS -- side effect
  } else if (pc_gamg_agg->use_aggressive_square_graph && pc_gamg_agg->aggressive_coarsening_levels > 0) {                                 // we reset the MIS
    const char *prefix;

    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)a_pc, &prefix));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg_agg->crs, prefix));
    PetscCall(MatCoarsenSetFromOptions(pc_gamg_agg->crs)); // get the default back on non-aggressive levels when square graph switched to old MIS
  }
  PetscCall(MatCoarsenSetAdjacency(pc_gamg_agg->crs, Gmat2));
  PetscCall(MatCoarsenSetStrictAggs(pc_gamg_agg->crs, PETSC_TRUE));
  PetscCall(MatCoarsenSetGreedyOrdering(pc_gamg_agg->crs, perm));
  PetscCall(MatCoarsenApply(pc_gamg_agg->crs));
  PetscCall(MatCoarsenGetData(pc_gamg_agg->crs, agg_lists)); /* output */

  PetscCall(ISDestroy(&perm));
  PetscCall(PetscFree2(permute, degree));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));

  if (Gmat2 != Gmat1) { // square graph, we need ghosts for selected
    PetscCoarsenData *llist = *agg_lists;

    /* repair aggregates using the unsquared graph, then hand the squared graph back to the caller */
    PetscCall(fixAggregatesWithSquare(a_pc, Gmat2, Gmat1, *agg_lists));
    PetscCall(MatDestroy(&Gmat1));
    *a_Gmat1 = Gmat2;                          /* output */
    PetscCall(PetscCDSetMat(llist, *a_Gmat1)); /* Need a graph with ghosts here */
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1265 
1266 /*
1267  PCGAMGConstructProlongator_AGG
1268 
1269  Input Parameter:
1270  . pc - this
1271  . Amat - matrix on this fine level
1272  . Graph - used to get ghost data for nodes in
1273  . agg_lists - list of aggregates
1274  Output Parameter:
1275  . a_P_out - prolongation operator to the next level
1276  */
1277 static PetscErrorCode PCGAMGConstructProlongator_AGG(PC pc, Mat Amat, PetscCoarsenData *agg_lists, Mat *a_P_out)
1278 {
1279   PC_MG         *mg      = (PC_MG *)pc->data;
1280   PC_GAMG       *pc_gamg = (PC_GAMG *)mg->innerctx;
1281   const PetscInt col_bs  = pc_gamg->data_cell_cols;
1282   PetscInt       Istart, Iend, nloc, ii, jj, kk, my0, nLocalSelected, bs;
1283   Mat            Gmat, Prol;
1284   PetscMPIInt    size;
1285   MPI_Comm       comm;
1286   PetscReal     *data_w_ghost;
1287   PetscInt       myCrs0, nbnodes = 0, *flid_fgid;
1288   MatType        mtype;
1289 
1290   PetscFunctionBegin;
1291   PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
1292   PetscCheck(col_bs >= 1, comm, PETSC_ERR_PLIB, "Column bs cannot be less than 1");
1293   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
1294   PetscCallMPI(MPI_Comm_size(comm, &size));
1295   PetscCall(MatGetOwnershipRange(Amat, &Istart, &Iend));
1296   PetscCall(MatGetBlockSize(Amat, &bs));
1297   nloc = (Iend - Istart) / bs;
1298   my0  = Istart / bs;
1299   PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT, Iend, Istart, bs);
1300   PetscCall(PetscCDGetMat(agg_lists, &Gmat)); // get auxiliary matrix for ghost edges for size > 1
1301 
1302   /* get 'nLocalSelected' */
1303   for (ii = 0, nLocalSelected = 0; ii < nloc; ii++) {
1304     PetscBool ise;
1305 
1306     /* filter out singletons 0 or 1? */
1307     PetscCall(PetscCDIsEmptyAt(agg_lists, ii, &ise));
1308     if (!ise) nLocalSelected++;
1309   }
1310 
1311   /* create prolongator, create P matrix */
1312   PetscCall(MatGetType(Amat, &mtype));
1313   PetscCall(MatCreate(comm, &Prol));
1314   PetscCall(MatSetSizes(Prol, nloc * bs, nLocalSelected * col_bs, PETSC_DETERMINE, PETSC_DETERMINE));
1315   PetscCall(MatSetBlockSizes(Prol, bs, col_bs)); // should this be before MatSetSizes?
1316   PetscCall(MatSetType(Prol, mtype));
1317 #if PetscDefined(HAVE_DEVICE)
1318   PetscBool flg;
1319   PetscCall(MatBoundToCPU(Amat, &flg));
1320   PetscCall(MatBindToCPU(Prol, flg));
1321   if (flg) PetscCall(MatSetBindingPropagates(Prol, PETSC_TRUE));
1322 #endif
1323   PetscCall(MatSeqAIJSetPreallocation(Prol, col_bs, NULL));
1324   PetscCall(MatMPIAIJSetPreallocation(Prol, col_bs, NULL, col_bs, NULL));
1325 
1326   /* can get all points "removed" */
1327   PetscCall(MatGetSize(Prol, &kk, &ii));
1328   if (!ii) {
1329     PetscCall(PetscInfo(pc, "%s: No selected points on coarse grid\n", ((PetscObject)pc)->prefix));
1330     PetscCall(MatDestroy(&Prol));
1331     *a_P_out = NULL; /* out */
1332     PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
1333     PetscFunctionReturn(PETSC_SUCCESS);
1334   }
1335   PetscCall(PetscInfo(pc, "%s: New grid %" PetscInt_FMT " nodes\n", ((PetscObject)pc)->prefix, ii / col_bs));
1336   PetscCall(MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk));
1337 
1338   PetscCheck((kk - myCrs0) % col_bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT " -myCrs0 %" PetscInt_FMT ") not divisible by col_bs %" PetscInt_FMT, kk, myCrs0, col_bs);
1339   myCrs0 = myCrs0 / col_bs;
1340   PetscCheck((kk / col_bs - myCrs0) == nLocalSelected, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT "/col_bs %" PetscInt_FMT " - myCrs0 %" PetscInt_FMT ") != nLocalSelected %" PetscInt_FMT ")", kk, col_bs, myCrs0, nLocalSelected);
1341 
1342   /* create global vector of data in 'data_w_ghost' */
1343   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
1344   if (size > 1) { /* get ghost null space data */
1345     PetscReal *tmp_gdata, *tmp_ldata, *tp2;
1346 
1347     PetscCall(PetscMalloc1(nloc, &tmp_ldata));
1348     for (jj = 0; jj < col_bs; jj++) {
1349       for (kk = 0; kk < bs; kk++) {
1350         PetscInt         ii, stride;
1351         const PetscReal *tp = PetscSafePointerPlusOffset(pc_gamg->data, jj * bs * nloc + kk);
1352 
1353         for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;
1354 
1355         PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata));
1356 
1357         if (!jj && !kk) { /* now I know how many total nodes - allocate TODO: move below and do in one 'col_bs' call */
1358           PetscCall(PetscMalloc1(stride * bs * col_bs, &data_w_ghost));
1359           nbnodes = bs * stride;
1360         }
1361         tp2 = PetscSafePointerPlusOffset(data_w_ghost, jj * bs * stride + kk);
1362         for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
1363         PetscCall(PetscFree(tmp_gdata));
1364       }
1365     }
1366     PetscCall(PetscFree(tmp_ldata));
1367   } else {
1368     nbnodes      = bs * nloc;
1369     data_w_ghost = pc_gamg->data;
1370   }
1371 
1372   /* get 'flid_fgid' TODO - move up to get 'stride' and do get null space data above in one step (jj loop) */
1373   if (size > 1) {
1374     PetscReal *fid_glid_loc, *fiddata;
1375     PetscInt   stride;
1376 
1377     PetscCall(PetscMalloc1(nloc, &fid_glid_loc));
1378     for (kk = 0; kk < nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0 + kk);
1379     PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata));
1380     PetscCall(PetscMalloc1(stride, &flid_fgid)); /* copy real data to in */
1381     for (kk = 0; kk < stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
1382     PetscCall(PetscFree(fiddata));
1383 
1384     PetscCheck(stride == nbnodes / bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "stride %" PetscInt_FMT " != nbnodes %" PetscInt_FMT "/bs %" PetscInt_FMT, stride, nbnodes, bs);
1385     PetscCall(PetscFree(fid_glid_loc));
1386   } else {
1387     PetscCall(PetscMalloc1(nloc, &flid_fgid));
1388     for (kk = 0; kk < nloc; kk++) flid_fgid[kk] = my0 + kk;
1389   }
1390   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
1391   /* get P0 */
1392   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
1393   {
1394     PetscReal *data_out = NULL;
1395 
1396     PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes, data_w_ghost, flid_fgid, &data_out, Prol));
1397     PetscCall(PetscFree(pc_gamg->data));
1398 
1399     pc_gamg->data           = data_out;
1400     pc_gamg->data_cell_rows = col_bs;
1401     pc_gamg->data_sz        = col_bs * col_bs * nLocalSelected;
1402   }
1403   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
1404   if (size > 1) PetscCall(PetscFree(data_w_ghost));
1405   PetscCall(PetscFree(flid_fgid));
1406 
1407   *a_P_out = Prol; /* out */
1408   PetscCall(MatViewFromOptions(Prol, NULL, "-pc_gamg_agg_view_initial_prolongation"));
1409 
1410   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
1411   PetscFunctionReturn(PETSC_SUCCESS);
1412 }
1413 
1414 /*
1415    PCGAMGOptimizeProlongator_AGG - given the initial prolongator optimizes it by smoothed aggregation pc_gamg_agg->nsmooths times
1416 
1417   Input Parameter:
1418    . pc - this
1419    . Amat - matrix on this fine level
1420  In/Output Parameter:
1421    . a_P - prolongation operator to the next level
1422 */
static PetscErrorCode PCGAMGOptimizeProlongator_AGG(PC pc, Mat Amat, Mat *a_P)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  PetscInt     jj;
  Mat          Prol = *a_P;     /* current prolongator; replaced in place by each smoothing pass */
  MPI_Comm     comm;
  KSP          eksp;            /* throwaway KSP used only for the eigen/singular-value estimate */
  Vec          bb, xx;
  PC           epc;
  PetscReal    alpha, emax, emin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));

  /* compute maximum singular value of operator to be used in smoother */
  if (0 < pc_gamg_agg->nsmooths) {
    /* get eigen estimates */
    if (pc_gamg->emax > 0) {
      /* user supplied an estimate; skip the Krylov-based estimation entirely */
      emin = pc_gamg->emin;
      emax = pc_gamg->emax;
    } else {
      const char *prefix;

      /* estimate extreme singular values of the Jacobi-preconditioned operator by
         running a few Krylov iterations on a deterministic "noisy" right-hand side */
      PetscCall(MatCreateVecs(Amat, &bb, NULL));
      PetscCall(MatCreateVecs(Amat, &xx, NULL));
      PetscCall(KSPSetNoisy_Private(Amat, bb));

      PetscCall(KSPCreate(comm, &eksp));
      PetscCall(KSPSetNestLevel(eksp, pc->kspnestlevel));
      /* inherit the PC's prefix so the estimator is controllable via -<prefix>pc_gamg_esteig_... */
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(eksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(eksp, "pc_gamg_esteig_"));
      {
        PetscBool isset, sflg;

        /* for a known-SPD operator CG gives better extreme-value estimates than the default */
        PetscCall(MatIsSPDKnown(Amat, &isset, &sflg));
        if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG));
      }
      PetscCall(KSPSetErrorIfNotConverged(eksp, pc->erroriffailure));
      /* fixed iteration count, no convergence test: norms are not needed for the estimate */
      PetscCall(KSPSetNormType(eksp, KSP_NORM_NONE));

      PetscCall(KSPSetInitialGuessNonzero(eksp, PETSC_FALSE));
      PetscCall(KSPSetOperators(eksp, Amat, Amat));

      PetscCall(KSPGetPC(eksp, &epc));
      PetscCall(PCSetType(epc, PCJACOBI)); /* smoother in smoothed agg. */

      PetscCall(KSPSetTolerances(eksp, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, 10)); // 10 is safer, but 5 is often fine, can override with -pc_gamg_esteig_ksp_max_it -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.2

      /* options may override any of the above; must come before the solve */
      PetscCall(KSPSetFromOptions(eksp));
      PetscCall(KSPSetComputeSingularValues(eksp, PETSC_TRUE));
      PetscCall(KSPSolve(eksp, bb, xx));
      PetscCall(KSPCheckSolve(eksp, pc, xx));

      PetscCall(KSPComputeExtremeSingularValues(eksp, &emax, &emin));
      PetscCall(PetscInfo(pc, "%s: Smooth P0: max eigen=%e min=%e PC=%s\n", ((PetscObject)pc)->prefix, (double)emax, (double)emin, PCJACOBI));
      PetscCall(VecDestroy(&xx));
      PetscCall(VecDestroy(&bb));
      PetscCall(KSPDestroy(&eksp));
    }
    if (pc_gamg->use_sa_esteig) {
      /* cache the estimated spectrum of D^{-1}A so this level's smoother can reuse it */
      mg->min_eigen_DinvA[pc_gamg->current_level] = emin;
      mg->max_eigen_DinvA[pc_gamg->current_level] = emax;
      PetscCall(PetscInfo(pc, "%s: Smooth P0: level %" PetscInt_FMT ", cache spectra %g %g\n", ((PetscObject)pc)->prefix, pc_gamg->current_level, (double)emin, (double)emax));
    } else {
      /* zero signals "no cached spectrum" for this level */
      mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
      mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
    }
  } else {
    /* no prolongator smoothing requested: nothing to estimate or cache */
    mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
    mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
  }

  /* smooth P0 */
  if (pc_gamg_agg->nsmooths > 0) {
    Vec diag;

    /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */
    PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero");

    PetscCall(MatCreateVecs(Amat, &diag, NULL));
    PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */
    PetscCall(VecReciprocal(diag));

    for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
      Mat tMat;

      PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));
      /*
        Smooth aggregation on the prolongator

        P_{i} := (I - 1.4/emax D^{-1}A) P_i\{i-1}
      */
      PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
      PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_CURRENT, &tMat));
      PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
      /* detach the MatProduct bookkeeping so tMat can be modified as an ordinary matrix */
      PetscCall(MatProductClear(tMat));
      PetscCall(MatDiagonalScale(tMat, diag, NULL)); /* tMat = D^{-1} A P */

      /* TODO: Document the 1.4 and don't hardwire it in this routine */
      alpha = -1.4 / emax;
      /* Prol's nonzero pattern is a subset of tMat's (tMat = A*Prol), hence SUBSET_NONZERO_PATTERN */
      PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN));
      PetscCall(MatDestroy(&Prol));
      Prol = tMat;
      PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));
    }
    PetscCall(VecDestroy(&diag));
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));
  PetscCall(MatViewFromOptions(Prol, NULL, "-pc_gamg_agg_view_prolongation"));
  *a_P = Prol;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1539 
1540 /*MC
1541   PCGAMGAGG - Smooth aggregation, {cite}`vanek1996algebraic`, {cite}`vanek2001convergence`, variant of PETSc's algebraic multigrid (`PCGAMG`) preconditioner
1542 
1543   Options Database Keys:
1544 + -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation to construct prolongation
1545 . -pc_gamg_aggressive_coarsening <n,default=1> - number of aggressive coarsening (MIS-2) levels from finest.
1546 . -pc_gamg_aggressive_square_graph <bool,default=true> - Use square graph (A'A), alternative is MIS-k (k=2), for aggressive coarsening
1547 . -pc_gamg_mis_k_minimum_degree_ordering <bool,default=false> - Use minimum degree ordering in greedy MIS algorithm
. -pc_gamg_asm_hem_aggs <n,default=0> - Number of HEM aggregation steps for ASM smoother
1549 - -pc_gamg_aggressive_mis_k <n,default=2> - Number (k) distance in MIS coarsening (>2 is 'aggressive')
1550 
1551   Level: intermediate
1552 
1553   Notes:
1554   To obtain good performance for `PCGAMG` for vector valued problems you must
1555   call `MatSetBlockSize()` to indicate the number of degrees of freedom per grid point.
1556   Call `MatSetNearNullSpace()` (or `PCSetCoordinates()` if solving the equations of elasticity) to indicate the near null space of the operator
1557 
1558   The many options for `PCMG` and `PCGAMG` such as controlling the smoothers on each level etc. also work for `PCGAMGAGG`
1559 
1560 .seealso: `PCGAMG`, [the Users Manual section on PCGAMG](sec_amg), [the Users Manual section on PCMG](sec_mg), [](ch_ksp), `PCCreate()`, `PCSetType()`,
1561           `MatSetBlockSize()`, `PCMGType`, `PCSetCoordinates()`, `MatSetNearNullSpace()`, `PCGAMGSetType()`,
1562           `PCGAMGAGG`, `PCGAMGGEO`, `PCGAMGCLASSICAL`, `PCGAMGSetProcEqLim()`, `PCGAMGSetCoarseEqLim()`, `PCGAMGSetRepartition()`, `PCGAMGRegister()`,
1563           `PCGAMGSetReuseInterpolation()`, `PCGAMGASMSetUseAggs()`, `PCGAMGSetParallelCoarseGridSolve()`, `PCGAMGSetNlevels()`, `PCGAMGSetThreshold()`,
1564           `PCGAMGGetType()`, `PCGAMGSetUseSAEstEig()`
1565 M*/
1566 PetscErrorCode PCCreateGAMG_AGG(PC pc)
1567 {
1568   PC_MG       *mg      = (PC_MG *)pc->data;
1569   PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
1570   PC_GAMG_AGG *pc_gamg_agg;
1571 
1572   PetscFunctionBegin;
1573   /* create sub context for SA */
1574   PetscCall(PetscNew(&pc_gamg_agg));
1575   pc_gamg->subctx = pc_gamg_agg;
1576 
1577   pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
1578   pc_gamg->ops->destroy        = PCDestroy_GAMG_AGG;
1579   /* reset does not do anything; setup not virtual */
1580 
1581   /* set internal function pointers */
1582   pc_gamg->ops->creategraph       = PCGAMGCreateGraph_AGG;
1583   pc_gamg->ops->coarsen           = PCGAMGCoarsen_AGG;
1584   pc_gamg->ops->prolongator       = PCGAMGConstructProlongator_AGG;
1585   pc_gamg->ops->optprolongator    = PCGAMGOptimizeProlongator_AGG;
1586   pc_gamg->ops->createdefaultdata = PCSetData_AGG;
1587   pc_gamg->ops->view              = PCView_GAMG_AGG;
1588 
1589   pc_gamg_agg->nsmooths                     = 1;
1590   pc_gamg_agg->aggressive_coarsening_levels = 1;
1591   pc_gamg_agg->use_aggressive_square_graph  = PETSC_TRUE;
1592   pc_gamg_agg->use_minimum_degree_ordering  = PETSC_FALSE;
1593   pc_gamg_agg->use_low_mem_filter           = PETSC_FALSE;
1594   pc_gamg_agg->aggressive_mis_k             = 2;
1595   pc_gamg_agg->graph_symmetrize             = PETSC_TRUE;
1596 
1597   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", PCGAMGSetNSmooths_AGG));
1598   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", PCGAMGSetAggressiveLevels_AGG));
1599   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveSquareGraph_C", PCGAMGSetAggressiveSquareGraph_AGG));
1600   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGMISkSetMinDegreeOrdering_C", PCGAMGMISkSetMinDegreeOrdering_AGG));
1601   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetLowMemoryFilter_C", PCGAMGSetLowMemoryFilter_AGG));
1602   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGMISkSetAggressive_C", PCGAMGMISkSetAggressive_AGG));
1603   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetGraphSymmetrize_C", PCGAMGSetGraphSymmetrize_AGG));
1604   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_AGG));
1605   PetscFunctionReturn(PETSC_SUCCESS);
1606 }
1607