xref: /petsc/src/ksp/pc/impls/gamg/agg.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 /*
2  GAMG geometric-algebric multigrid PC - Mark Adams 2011
3  */
4 
5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6 #include <petscblaslapack.h>
7 #include <petscdm.h>
8 #include <petsc/private/kspimpl.h>
9 
/* Private context for the GAMG-AGG (smoothed aggregation) coarsening type */
typedef struct {
  PetscInt  nsmooths;                     // number of prolongator smoothing steps for smoothed aggregation (1 is typical)
  PetscBool symmetrize_graph;            // symmetrize the graph before computing the aggregation (some coarsening algorithms require a symmetric graph)
  PetscInt  aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk)
} PC_GAMG_AGG;
15 
/*@
   PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical)

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of smoothing steps

   Options Database Key:
.  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation

   Level: intermediate

.seealso: `PCGAMGSetSymmetrizeGraph()`, `PCGAMGSetAggressiveLevels()`
@*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2); /* n must be identical on every rank of the PC's communicator */
  /* dispatch to PCGAMGSetNSmooths_AGG() when this PC composed the method (i.e. is GAMG-AGG); silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetNSmooths_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}
38 
39 static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n) {
40   PC_MG       *mg          = (PC_MG *)pc->data;
41   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
42   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
43 
44   PetscFunctionBegin;
45   pc_gamg_agg->nsmooths = n;
46   PetscFunctionReturn(0);
47 }
48 
49 /*@
50    PCGAMGSetSymmetrizeGraph - Symmetrize the graph before computing the aggregation. Some algorithms require the graph be symmetric
51 
52    Logically Collective on PC
53 
54    Input Parameters:
55 +  pc - the preconditioner context
56 -  n - PETSC_TRUE or PETSC_FALSE
57 
58    Options Database Key:
59 .  -pc_gamg_symmetrize_graph <true,default=false> - symmetrize the graph before computing the aggregation
60 
61    Level: intermediate
62 
63 .seealso: `PCGAMGSetAggressiveLevels()`
64 @*/
PetscErrorCode PCGAMGSetSymmetrizeGraph(PC pc, PetscBool n) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, n, 2); /* flag must be identical on every rank of the PC's communicator */
  /* dispatch to PCGAMGSetSymmetrizeGraph_AGG() when this PC composed the method; silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetSymmetrizeGraph_C", (PC, PetscBool), (pc, n));
  PetscFunctionReturn(0);
}
72 
73 static PetscErrorCode PCGAMGSetSymmetrizeGraph_AGG(PC pc, PetscBool n) {
74   PC_MG       *mg          = (PC_MG *)pc->data;
75   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
76   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
77 
78   PetscFunctionBegin;
79   pc_gamg_agg->symmetrize_graph = n;
80   PetscFunctionReturn(0);
81 }
82 
83 /*@
84    PCGAMGSetAggressiveLevels -  Aggressive coarsening on first n levels
85 
86    Logically Collective on PC
87 
88    Input Parameters:
89 +  pc - the preconditioner context
90 -  n - 0, 1 or more
91 
92    Options Database Key:
93 .  -pc_gamg_aggressive_coarsening <n,default = 1> - Number of levels to square the graph on before aggregating it
94 
95    Level: intermediate
96 
97 .seealso: `PCGAMGSetSymmetrizeGraph()`, `PCGAMGSetThreshold()`
98 @*/
PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2); /* n must be identical on every rank of the PC's communicator */
  /* dispatch to PCGAMGSetAggressiveLevels_AGG() when this PC composed the method; silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetAggressiveLevels_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}
106 
107 static PetscErrorCode PCGAMGSetAggressiveLevels_AGG(PC pc, PetscInt n) {
108   PC_MG       *mg          = (PC_MG *)pc->data;
109   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
110   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
111 
112   PetscFunctionBegin;
113   pc_gamg_agg->aggressive_coarsening_levels = n;
114   PetscFunctionReturn(0);
115 }
116 
/* Process the AGG-specific options database entries; called from the generic GAMG PCSetFromOptions. */
static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "GAMG-AGG options");
  {
    PetscBool flg;
    PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "smoothing steps for smoothed aggregation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL));
    PetscCall(PetscOptionsBool("-pc_gamg_symmetrize_graph", "Set for asymmetric matrices", "PCGAMGSetSymmetrizeGraph", pc_gamg_agg->symmetrize_graph, &pc_gamg_agg->symmetrize_graph, NULL));
    /* NOTE(review): resetting the default here overwrites any value previously set via PCGAMGSetAggressiveLevels() -- confirm that is intended */
    pc_gamg_agg->aggressive_coarsening_levels = 1;
    /* read the deprecated alias -pc_gamg_square_graph first, so the current option name wins when both are supplied */
    PetscCall(
      PetscOptionsInt("-pc_gamg_square_graph", "Number of aggressive coarsening (MIS-2) levels from finest (alias for -pc_gamg_aggressive_coarsening, deprecated)", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
    if (!flg) {
      /* deprecated alias not given: read the current option normally */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, NULL));
    } else {
      /* alias was given: the current option, if also present, overrides it; warn the user about the duplication */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
      if (flg) PetscCall(PetscInfo(pc, "Warning: both -pc_gamg_square_graph and -pc_gamg_aggressive_coarsening are used. -pc_gamg_square_graph is deprecated, Number of aggressive levels is %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
141 
142 /* -------------------------------------------------------------------------- */
143 static PetscErrorCode PCDestroy_GAMG_AGG(PC pc) {
144   PC_MG   *mg      = (PC_MG *)pc->data;
145   PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
146 
147   PetscFunctionBegin;
148   PetscCall(PetscFree(pc_gamg->subctx));
149   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", NULL));
150   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetSymmetrizeGraph_C", NULL));
151   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", NULL));
152   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
153   PetscFunctionReturn(0);
154 }
155 
156 /* -------------------------------------------------------------------------- */
157 /*
158    PCSetCoordinates_AGG
159      - collective
160 
161    Input Parameter:
162    . pc - the preconditioner context
   . ndm - dimension of data (used for dof/vertex for Stokes)
164    . a_nloc - number of vertices local
165    . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
166 */
167 
/* Build the (near) null-space data used by smoothed aggregation from vertex coordinates:
   constant vectors for scalar problems, translational + rotational rigid-body modes for elasticity. */
static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords) {
  PC_MG   *mg      = (PC_MG *)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
  PetscInt arrsz, kk, ii, jj, nloc, ndatarows, ndf;
  Mat      mat = pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); /* NOTE(review): argument index 1 refers to pc, not to pmat -- cosmetic, affects only the error message */
  nloc = a_nloc;

  /* SA: null space vectors */
  PetscCall(MatGetBlockSize(mat, &ndf));               /* this does not work for Stokes */
  if (coords && ndf == 1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if (coords) {
    /* vector problem with coordinates: rigid-body modes, 3 in 2D and 6 in 3D */
    PetscCheck(ndm <= ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "degrees of motion %" PetscInt_FMT " > block size %" PetscInt_FMT, ndm, ndf);
    pc_gamg->data_cell_cols = (ndm == 2 ? 3 : 6); /* displacement elasticity */
    if (ndm != ndf) { PetscCheck(pc_gamg->data_cell_cols == ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Don't know how to create null space for ndm=%" PetscInt_FMT ", ndf=%" PetscInt_FMT ".  Use MatSetNearNullSpace().", ndm, ndf); }
  } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  PetscCheck(pc_gamg->data_cell_cols > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "pc_gamg->data_cell_cols %" PetscInt_FMT " <= 0", pc_gamg->data_cell_cols);
  arrsz = nloc * pc_gamg->data_cell_rows * pc_gamg->data_cell_cols;

  /* (re)allocate only when the size changed; +1 guards against a zero-length allocation */
  if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
    PetscCall(PetscFree(pc_gamg->data));
    PetscCall(PetscMalloc1(arrsz + 1, &pc_gamg->data));
  }
  /* copy data in - column oriented */
  for (kk = 0; kk < nloc; kk++) {
    const PetscInt M    = nloc * pc_gamg->data_cell_rows; /* stride into data */
    PetscReal     *data = &pc_gamg->data[kk * ndatarows]; /* start of cell */
    if (pc_gamg->data_cell_cols == 1) *data = 1.0;        /* scalar case: constant vector */
    else {
      /* translational modes: identity block per vertex */
      for (ii = 0; ii < ndatarows; ii++) {
        for (jj = 0; jj < ndatarows; jj++) {
          if (ii == jj) data[ii * M + jj] = 1.0;
          else data[ii * M + jj] = 0.0;
        }
      }

      /* rotational modes */
      if (coords) {
        if (ndm == 2) {
          /* one in-plane rotation: (-y, x) */
          data += 2 * M;
          data[0] = -coords[2 * kk + 1];
          data[1] = coords[2 * kk];
        } else {
          /* three rotations about x, y, z: columns of the skew-symmetric cross-product blocks */
          data += 3 * M;
          data[0]         = 0.0;
          data[M + 0]     = coords[3 * kk + 2];
          data[2 * M + 0] = -coords[3 * kk + 1];
          data[1]         = -coords[3 * kk + 2];
          data[M + 1]     = 0.0;
          data[2 * M + 1] = coords[3 * kk];
          data[2]         = coords[3 * kk + 1];
          data[M + 2]     = -coords[3 * kk];
          data[2 * M + 2] = 0.0;
        }
      }
    }
  }
  pc_gamg->data_sz = arrsz;
  PetscFunctionReturn(0);
}
233 
234 /* -------------------------------------------------------------------------- */
235 /*
236    PCSetData_AGG - called if data is not set with PCSetCoordinates.
237       Looks in Mat for near null space.
238       Does not work for Stokes
239 
240   Input Parameter:
241    . pc -
242    . a_A - matrix to get (near) null space out of.
243 */
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A) {
  PC_MG       *mg      = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
  MatNullSpace mnull;

  PetscFunctionBegin;
  /* 1) prefer a near null space attached directly to the matrix */
  PetscCall(MatGetNearNullSpace(a_A, &mnull));
  if (!mnull) {
    /* 2) otherwise look for one attached to the first field of an associated DM */
    DM dm;
    PetscCall(PCGetDM(pc, &dm));
    if (!dm) { PetscCall(MatGetDM(a_A, &dm)); }
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      PetscCall(DMGetNumFields(dm, &Nf));
      if (Nf) {
        PetscCall(DMGetField(dm, 0, NULL, &deformation));
        PetscCall(PetscObjectQuery((PetscObject)deformation, "nearnullspace", (PetscObject *)&mnull));
        if (!mnull) { PetscCall(PetscObjectQuery((PetscObject)deformation, "nullspace", (PetscObject *)&mnull)); }
      }
    }
  }

  if (!mnull) {
    /* 3) no null space anywhere: fall back to constant vectors (one per dof of the block) */
    PetscInt bs, NN, MM;
    PetscCall(MatGetBlockSize(a_A, &bs));
    PetscCall(MatGetLocalSize(a_A, &MM, &NN));
    PetscCheck(MM % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, MM, bs);
    PetscCall(PCSetCoordinates_AGG(pc, bs, MM / bs, NULL)); /* NULL coords -> constant null-space vectors */
  } else {
    /* copy the null-space vectors into pc_gamg->data, column oriented; a constant vector (all ones)
       is prepended when the null space has the has_const flag set */
    PetscReal         *nullvec;
    PetscBool          has_const;
    PetscInt           i, j, mlocal, nvec, bs;
    const Vec         *vecs;
    const PetscScalar *v;

    PetscCall(MatGetLocalSize(a_A, &mlocal, NULL));
    PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
    pc_gamg->data_sz = (nvec + !!has_const) * mlocal;
    PetscCall(PetscMalloc1((nvec + !!has_const) * mlocal, &nullvec));
    if (has_const)
      for (i = 0; i < mlocal; i++) nullvec[i] = 1.0;
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetArrayRead(vecs[i], &v));
      /* only the real part is kept: the SA data array is PetscReal */
      for (j = 0; j < mlocal; j++) nullvec[(i + !!has_const) * mlocal + j] = PetscRealPart(v[j]);
      PetscCall(VecRestoreArrayRead(vecs[i], &v));
    }
    pc_gamg->data           = nullvec; /* ownership transfers to pc_gamg; freed in the prolongator build */
    pc_gamg->data_cell_cols = (nvec + !!has_const);
    PetscCall(MatGetBlockSize(a_A, &bs));
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}
299 
300 /* -------------------------------------------------------------------------- */
301 /*
302   formProl0 - collect null space data for each aggregate, do QR, put R in coarse grid data and Q in P_0
303 
304   Input Parameter:
305    . agg_llists - list of arrays with aggregates -- list from selected vertices of aggregate unselected vertices
306    . bs - row block size
307    . nSAvec - column bs of new P
308    . my0crs - global index of start of locals
309    . data_stride - bs*(nloc nodes + ghost nodes) [data_stride][nSAvec]
310    . data_in[data_stride*nSAvec] - local data on fine grid
   . flid_fgid[data_stride/bs] - map of local to global IDs, includes ghosts in 'locals_llist'
312 
313   Output Parameter:
314    . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
315    . a_Prol - prolongation operator
316 
317 */
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, PetscInt nSAvec, PetscInt my0crs, PetscInt data_stride, PetscReal data_in[], const PetscInt flid_fgid[], PetscReal **a_data_out, Mat a_Prol) {
  PetscInt        Istart, my0, Iend, nloc, clid, flid = 0, aggID, kk, jj, ii, mm, nSelected, minsz, nghosts, out_data_stride;
  MPI_Comm        comm;
  PetscReal      *out_data;
  PetscCDIntNd   *pos;
  PCGAMGHashTable fgid_flid; /* maps ghost global IDs -> local IDs */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)a_Prol, &comm));
  PetscCall(MatGetOwnershipRange(a_Prol, &Istart, &Iend));
  nloc = (Iend - Istart) / bs; /* number of local nodes (rows are node-blocked by bs) */
  my0  = Istart / bs;          /* first local node ID */
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, Iend, Istart, bs);
  Iend /= bs;
  nghosts = data_stride / bs - nloc;

  /* hash table over ghost nodes only; local nodes are resolved by range check below */
  PetscCall(PCGAMGHashTableCreate(2 * nghosts + 1, &fgid_flid));
  for (kk = 0; kk < nghosts; kk++) { PetscCall(PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc + kk], nloc + kk)); }

  /* count selected -- same as number of cols of P */
  for (nSelected = mm = 0; mm < nloc; mm++) {
    PetscBool ise;
    PetscCall(PetscCDEmptyAt(agg_llists, mm, &ise));
    if (!ise) nSelected++;
  }
  /* sanity-check that the coarse column layout of P matches the aggregate count */
  PetscCall(MatGetOwnershipRangeColumn(a_Prol, &ii, &jj));
  PetscCheck((ii / nSAvec) == my0crs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "ii %" PetscInt_FMT " /nSAvec %" PetscInt_FMT "  != my0crs %" PetscInt_FMT, ii, nSAvec, my0crs);
  PetscCheck(nSelected == (jj - ii) / nSAvec, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nSelected %" PetscInt_FMT " != (jj %" PetscInt_FMT " - ii %" PetscInt_FMT ")/nSAvec %" PetscInt_FMT, nSelected, jj, ii, nSAvec);

  /* aloc space for coarse point data (output) */
  out_data_stride = nSelected * nSAvec;

  PetscCall(PetscMalloc1(out_data_stride * nSAvec, &out_data));
  /* sentinel value: each slot must still be PETSC_MAX_REAL when R is written (checked below) */
  for (ii = 0; ii < out_data_stride * nSAvec; ii++) out_data[ii] = PETSC_MAX_REAL;
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCall(PetscCDSizeAt(agg_llists, mm, &jj));
    if (jj > 0) { /* node mm is the selected root of an aggregate of jj nodes */
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs -- NOTE(review): no check that N <= 100; confirm nSAvec is bounded elsewhere */
      PetscBLASInt   asz = jj, M = asz * bs, N = nSAvec, INFO;
      /* pad the QR system to at least N rows so LAPACK can factor skinny aggregates */
      PetscBLASInt   Mdata = M + ((N - M > 0) ? N - M : 0), LDA = Mdata, LWORK = N * bs;
      PetscScalar   *qqc, *qqr, *TAU, *WORK;
      PetscInt      *fids;
      PetscReal     *data;

      /* count agg */
      if (asz < minsz) minsz = asz;

      /* get block */
      PetscCall(PetscMalloc5(Mdata * N, &qqc, M * N, &qqr, N, &TAU, LWORK, &WORK, M, &fids));

      aggID = 0;
      /* walk the aggregate's member list, gathering each member's null-space rows into qqc */
      PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos));
      while (pos) {
        PetscInt gid1;
        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos));

        /* resolve global node ID to a local index: in-range -> local, else ghost via hash table */
        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0;
        else {
          PetscCall(PCGAMGHashTableFind(&fgid_flid, gid1, &flid));
          PetscCheck(flid >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid * bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d                       = data[jj * data_stride + ii];
            qqc[jj * Mdata + aggID * bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk = 0; kk < bs; kk++) fids[aggID * bs + kk] = flid_fgid[flid] * bs + kk;
        aggID++;
      }

      /* pad with zeros */
      for (ii = asz * bs; ii < Mdata; ii++) {
        /* NOTE(review): the kk++ in this loop header appears to be vestigial; kk is reinitialized before its next use */
        for (jj = 0; jj < N; jj++, kk++) { qqc[jj * Mdata + ii] = .0; }
      }

      /* QR */
      PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); /* LAPACK may generate benign FP exceptions */
      PetscCallBLAS("LAPACKgeqrf", LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCall(PetscFPTrapPop());
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xGEQRF error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid * nSAvec];
        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            /* sentinel check: each coarse-data slot is written exactly once */
            PetscCheck(data[jj * out_data_stride + ii] == PETSC_MAX_REAL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "data[jj*out_data_stride + ii] != %e", (double)PETSC_MAX_REAL);
            if (ii <= jj) data[jj * out_data_stride + ii] = PetscRealPart(qqc[jj * Mdata + ii]); /* upper triangle of R */
            else data[jj * out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      PetscCallBLAS("LAPACKorgqr", LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xORGQR error arg %" PetscBLASInt_FMT, -INFO);

      /* transpose column-major Q into row-major qqr for MatSetValues */
      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) { qqr[N * ii + jj] = qqc[jj * Mdata + ii]; }
      }

      /* add diagonal block of P0 */
      for (kk = 0; kk < N; kk++) { cids[kk] = N * cgid + kk; /* global col IDs in P0 */ }
      PetscCall(MatSetValues(a_Prol, M, fids, N, cids, qqr, INSERT_VALUES));
      PetscCall(PetscFree5(qqc, qqr, TAU, WORK, fids));
      clid++;
    } /* coarse agg */
  }   /* for all fine nodes */
  PetscCall(MatAssemblyBegin(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(PCGAMGHashTableDestroy(&fgid_flid));
  PetscFunctionReturn(0);
}
440 
441 static PetscErrorCode PCView_GAMG_AGG(PC pc, PetscViewer viewer) {
442   PC_MG       *mg          = (PC_MG *)pc->data;
443   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
444   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
445 
446   PetscFunctionBegin;
447   PetscCall(PetscViewerASCIIPrintf(viewer, "      AGG specific options\n"));
448   PetscCall(PetscViewerASCIIPrintf(viewer, "        Symmetric graph %s\n", pc_gamg_agg->symmetrize_graph ? "true" : "false"));
449   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number of levels to square graph %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
450   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number smoothing steps %d\n", (int)pc_gamg_agg->nsmooths));
451   PetscFunctionReturn(0);
452 }
453 
454 /* -------------------------------------------------------------------------- */
455 /*
456    PCGAMGGraph_AGG
457 
458   Input Parameter:
459    . pc - this
460    . Amat - matrix on this fine level
461 
462   Output Parameter:
463    . a_Gmat -
464 */
465 static PetscErrorCode PCGAMGGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat) {
466   PC_MG                    *mg          = (PC_MG *)pc->data;
467   PC_GAMG                  *pc_gamg     = (PC_GAMG *)mg->innerctx;
468   const PetscReal           vfilter     = pc_gamg->threshold[pc_gamg->current_level];
469   PC_GAMG_AGG              *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
470   Mat                       Gmat, F = NULL;
471   MPI_Comm                  comm;
472   PetscBool /* set,flg , */ symm;
473 
474   PetscFunctionBegin;
475   PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
476 
477   /* PetscCall(MatIsSymmetricKnown(Amat, &set, &flg)); || !(set && flg) -- this causes lot of symm calls */
478   symm = (PetscBool)(pc_gamg_agg->symmetrize_graph); /* && !pc_gamg_agg->square_graph; */
479 
480   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
481   PetscCall(MatCreateGraph(Amat, symm, PETSC_TRUE, &Gmat));
482   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
483 
484   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_FILTER], 0, 0, 0, 0));
485   PetscCall(MatFilter(Gmat, vfilter, &F));
486   if (F) {
487     PetscCall(MatDestroy(&Gmat));
488     Gmat = F;
489   }
490   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_FILTER], 0, 0, 0, 0));
491 
492   *a_Gmat = Gmat;
493 
494   PetscFunctionReturn(0);
495 }
496 
497 /* -------------------------------------------------------------------------- */
498 /*
499    PCGAMGCoarsen_AGG - supports squaring the graph (deprecated) and new graph for
500      communication of QR data used with HEM and MISk coarsening
501 
502   Input Parameter:
503    . a_pc - this
504 
505   Input/Output Parameter:
506    . a_Gmat1 - graph to coarsen (in), graph off processor edges for QR gather scatter (out)
507 
508   Output Parameter:
509    . agg_lists - list of aggregates
510 
511 */
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData **agg_lists) {
  PC_MG       *mg          = (PC_MG *)a_pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  Mat          mat, Gmat1 = *a_Gmat1; /* aggressive graph */
  IS           perm;
  PetscInt     Istart, Iend, Ii, nloc, bs, nn;
  PetscInt    *permute, *degree;
  PetscBool   *bIndexSet;
  MatCoarsen   crs;
  MPI_Comm     comm;
  PetscReal    hashfact;
  PetscInt     iSwapIndex;
  PetscRandom  random;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)Gmat1, &comm));
  PetscCall(MatGetLocalSize(Gmat1, &nn, NULL));
  PetscCall(MatGetBlockSize(Gmat1, &bs));
  PetscCheck(bs == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bs %" PetscInt_FMT " must be 1", bs); /* graph matrices are always scalar */
  nloc = nn / bs;

  /* get MIS aggs - randomize */
  PetscCall(PetscMalloc2(nloc, &permute, nloc, &degree));
  PetscCall(PetscCalloc1(nloc, &bIndexSet));
  for (Ii = 0; Ii < nloc; Ii++) permute[Ii] = Ii;
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &random));
  PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend));
  /* record each local vertex's degree (row length) for the ordering below */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscInt nc;
    PetscCall(MatGetRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
    degree[Ii] = nc;
    PetscCall(MatRestoreRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
  }
  /* random pairwise swaps to break mesh-ordering bias; NOTE(review): each target index is
     swapped at most once, so this is not an unbiased Fisher-Yates shuffle -- presumably
     "random enough" for tie-breaking, confirm if exact uniformity ever matters */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscCall(PetscRandomGetValueReal(random, &hashfact));
    iSwapIndex = (PetscInt)(hashfact * nloc) % nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp        = permute[iSwapIndex];
      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      iTemp                 = degree[iSwapIndex];
      degree[iSwapIndex]    = degree[Ii];
      degree[Ii]            = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  // create minimum degree ordering
  PetscCall(PetscSortIntWithArray(nloc, degree, permute));

  PetscCall(PetscFree(bIndexSet));
  PetscCall(PetscRandomDestroy(&random));
  /* PETSC_USE_POINTER: the IS borrows 'permute', which must outlive 'perm' (it does; freed below) */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));
  PetscCall(MatCoarsenCreate(comm, &crs));
  PetscCall(MatCoarsenSetFromOptions(crs));
  PetscCall(MatCoarsenSetGreedyOrdering(crs, perm));
  PetscCall(MatCoarsenSetAdjacency(crs, Gmat1));
  PetscCall(MatCoarsenSetStrictAggs(crs, PETSC_TRUE));
  /* aggressive coarsening (MIS-2) on the first 'aggressive_coarsening_levels' levels, MIS-1 after */
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels) PetscCall(MatCoarsenMISKSetDistance(crs, 2)); // hardwire to MIS-2
  else PetscCall(MatCoarsenMISKSetDistance(crs, 1));                                                                    // MIS
  PetscCall(MatCoarsenApply(crs));
  PetscCall(MatCoarsenViewFromOptions(crs, NULL, "-mat_coarsen_view"));
  PetscCall(MatCoarsenGetData(crs, agg_lists)); /* output */
  PetscCall(MatCoarsenDestroy(&crs));

  PetscCall(ISDestroy(&perm));
  PetscCall(PetscFree2(permute, degree));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));

  {
    PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    PetscCall(PetscCDGetMat(llist, &mat));
    if (mat) {
      PetscCall(MatDestroy(&Gmat1));
      *a_Gmat1 = mat; /* output */
    }
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}
595 
596 /* -------------------------------------------------------------------------- */
597 /*
598  PCGAMGProlongator_AGG
599 
600  Input Parameter:
601  . pc - this
602  . Amat - matrix on this fine level
603  . Graph - used to get ghost data for nodes in
604  . agg_lists - list of aggregates
605  Output Parameter:
606  . a_P_out - prolongation operator to the next level
607  */
static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, Mat Gmat, PetscCoarsenData *agg_lists, Mat *a_P_out) {
  PC_MG         *mg      = (PC_MG *)pc->data;
  PC_GAMG       *pc_gamg = (PC_GAMG *)mg->innerctx;
  const PetscInt col_bs  = pc_gamg->data_cell_cols; /* number of null-space vectors = coarse block size */
  PetscInt       Istart, Iend, nloc, ii, jj, kk, my0, nLocalSelected, bs;
  Mat            Prol;
  PetscMPIInt    size;
  MPI_Comm       comm;
  PetscReal     *data_w_ghost;
  PetscInt       myCrs0, nbnodes = 0, *flid_fgid;
  MatType        mtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCheck(col_bs >= 1, comm, PETSC_ERR_PLIB, "Column bs cannot be less than 1");
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(MatGetOwnershipRange(Amat, &Istart, &Iend));
  PetscCall(MatGetBlockSize(Amat, &bs));
  nloc = (Iend - Istart) / bs; /* number of local nodes */
  my0  = Istart / bs;          /* first local node ID */
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT, Iend, Istart, bs);

  /* get 'nLocalSelected' */
  for (ii = 0, nLocalSelected = 0; ii < nloc; ii++) {
    PetscBool ise;
    /* filter out singletons 0 or 1? */
    PetscCall(PetscCDEmptyAt(agg_lists, ii, &ise));
    if (!ise) nLocalSelected++; /* each non-empty list is one aggregate = one coarse node */
  }

  /* create prolongator, create P matrix */
  PetscCall(MatGetType(Amat, &mtype));
  PetscCall(MatCreate(comm, &Prol));
  PetscCall(MatSetSizes(Prol, nloc * bs, nLocalSelected * col_bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSizes(Prol, bs, col_bs));
  PetscCall(MatSetType(Prol, mtype));
  /* each fine node contributes to exactly one aggregate, so col_bs nonzeros per row suffice */
  PetscCall(MatSeqAIJSetPreallocation(Prol, col_bs, NULL));
  PetscCall(MatMPIAIJSetPreallocation(Prol, col_bs, NULL, col_bs, NULL));

  /* can get all points "removed" */
  PetscCall(MatGetSize(Prol, &kk, &ii));
  if (!ii) {
    /* zero global columns: coarsening removed everything, signal caller with NULL */
    PetscCall(PetscInfo(pc, "%s: No selected points on coarse grid\n", ((PetscObject)pc)->prefix));
    PetscCall(MatDestroy(&Prol));
    *a_P_out = NULL; /* out */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscInfo(pc, "%s: New grid %" PetscInt_FMT " nodes\n", ((PetscObject)pc)->prefix, ii / col_bs));
  PetscCall(MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk));

  PetscCheck((kk - myCrs0) % col_bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT " -myCrs0 %" PetscInt_FMT ") not divisible by col_bs %" PetscInt_FMT, kk, myCrs0, col_bs);
  myCrs0 = myCrs0 / col_bs; /* first local coarse node ID */
  PetscCheck((kk / col_bs - myCrs0) == nLocalSelected, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT "/col_bs %" PetscInt_FMT " - myCrs0 %" PetscInt_FMT ") != nLocalSelected %" PetscInt_FMT ")", kk, col_bs, myCrs0, nLocalSelected);

  /* create global vector of data in 'data_w_ghost' */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  if (size > 1) { /* get ghost null space data */
    PetscReal *tmp_gdata, *tmp_ldata, *tp2;
    PetscCall(PetscMalloc1(nloc, &tmp_ldata));
    /* gather ghost values one (null-space column, dof) pair at a time */
    for (jj = 0; jj < col_bs; jj++) {
      for (kk = 0; kk < bs; kk++) {
        PetscInt         ii, stride;
        const PetscReal *tp = pc_gamg->data + jj * bs * nloc + kk;
        /* strided extraction: pick dof kk of every node for null-space column jj */
        for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;

        PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata));

        if (!jj && !kk) { /* now I know how many total nodes - allocate TODO: move below and do in one 'col_bs' call */
          PetscCall(PetscMalloc1(stride * bs * col_bs, &data_w_ghost));
          nbnodes = bs * stride;
        }
        /* scatter back into the same strided layout, now including ghost rows */
        tp2 = data_w_ghost + jj * bs * stride + kk;
        for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
        PetscCall(PetscFree(tmp_gdata));
      }
    }
    PetscCall(PetscFree(tmp_ldata));
  } else {
    /* serial: no ghosts, use the null-space data in place */
    nbnodes      = bs * nloc;
    data_w_ghost = (PetscReal *)pc_gamg->data;
  }

  /* get 'flid_fgid' TODO - move up to get 'stride' and do get null space data above in one step (jj loop) */
  if (size > 1) {
    PetscReal *fid_glid_loc, *fiddata;
    PetscInt   stride;

    /* global IDs are communicated as reals because the ghost-exchange helper works on PetscReal */
    PetscCall(PetscMalloc1(nloc, &fid_glid_loc));
    for (kk = 0; kk < nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0 + kk);
    PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata));
    PetscCall(PetscMalloc1(stride, &flid_fgid)); /* copy real data to in */
    for (kk = 0; kk < stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
    PetscCall(PetscFree(fiddata));

    PetscCheck(stride == nbnodes / bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "stride %" PetscInt_FMT " != nbnodes %" PetscInt_FMT "/bs %" PetscInt_FMT, stride, nbnodes, bs);
    PetscCall(PetscFree(fid_glid_loc));
  } else {
    /* serial: local ID -> global ID is just an offset */
    PetscCall(PetscMalloc1(nloc, &flid_fgid));
    for (kk = 0; kk < nloc; kk++) flid_fgid[kk] = my0 + kk;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  /* get P0 */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  {
    PetscReal *data_out = NULL;
    /* QR per aggregate: fills Prol with Q and returns R blocks as the coarse-level null-space data */
    PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes, data_w_ghost, flid_fgid, &data_out, Prol));
    PetscCall(PetscFree(pc_gamg->data)); /* replace fine-level data with the coarse-level data */

    pc_gamg->data           = data_out;
    pc_gamg->data_cell_rows = col_bs;
    pc_gamg->data_sz        = col_bs * col_bs * nLocalSelected;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  if (size > 1) { PetscCall(PetscFree(data_w_ghost)); } /* serial case aliased pc_gamg->data, already freed */
  PetscCall(PetscFree(flid_fgid));

  *a_P_out = Prol; /* out */

  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}
731 
732 /* -------------------------------------------------------------------------- */
733 /*
734    PCGAMGOptProlongator_AGG
735 
736   Input Parameter:
737    . pc - this
738    . Amat - matrix on this fine level
739  In/Output Parameter:
740    . a_P - prolongation operator to the next level
741 */
static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P) {
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  PetscInt     jj;                 /* smoothing-step loop index */
  Mat          Prol = *a_P;        /* current prolongator; replaced by each smoothing step */
  MPI_Comm     comm;
  KSP          eksp;               /* throwaway KSP used only for the eigenvalue estimate */
  Vec          bb, xx;             /* noisy RHS / work vector for the estimate solve */
  PC           epc;
  PetscReal    alpha, emax, emin;  /* emax/emin: extreme singular-value estimates of D^{-1}A */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));

  /* compute maximum singular value of operator to be used in smoother */
  if (0 < pc_gamg_agg->nsmooths) {
    /* get eigen estimates */
    if (pc_gamg->emax > 0) {
      /* user supplied the spectral bounds; skip the estimation solve */
      emin = pc_gamg->emin;
      emax = pc_gamg->emax;
    } else {
      const char *prefix;

      /* estimate extreme singular values of D^{-1}A by running a few
         iterations of a Krylov solver on a noisy right-hand side */
      PetscCall(MatCreateVecs(Amat, &bb, NULL));
      PetscCall(MatCreateVecs(Amat, &xx, NULL));
      PetscCall(KSPSetNoisy_Private(bb)); /* random-ish RHS so the Krylov space samples the spectrum */

      PetscCall(KSPCreate(comm, &eksp));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(eksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(eksp, "pc_gamg_esteig_")); /* user can tune via -pc_gamg_esteig_* */
      {
        PetscBool isset, sflg;
        /* CG gives better eigenvalue estimates than GMRES when the matrix is known SPD */
        PetscCall(MatIsSPDKnown(Amat, &isset, &sflg));
        if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG));
      }
      PetscCall(KSPSetErrorIfNotConverged(eksp, pc->erroriffailure));
      PetscCall(KSPSetNormType(eksp, KSP_NORM_NONE)); /* no convergence test needed; fixed iteration count below */

      PetscCall(KSPSetInitialGuessNonzero(eksp, PETSC_FALSE));
      PetscCall(KSPSetOperators(eksp, Amat, Amat));

      PetscCall(KSPGetPC(eksp, &epc));
      PetscCall(PCSetType(epc, PCJACOBI)); /* smoother in smoothed agg. */

      PetscCall(KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10)); // 10 is safer, but 5 is often fine, can override with -pc_gamg_esteig_ksp_max_it -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.2

      PetscCall(KSPSetFromOptions(eksp));
      PetscCall(KSPSetComputeSingularValues(eksp, PETSC_TRUE));
      PetscCall(KSPSolve(eksp, bb, xx));
      PetscCall(KSPCheckSolve(eksp, pc, xx));

      PetscCall(KSPComputeExtremeSingularValues(eksp, &emax, &emin));
      PetscCall(PetscInfo(pc, "%s: Smooth P0: max eigen=%e min=%e PC=%s\n", ((PetscObject)pc)->prefix, (double)emax, (double)emin, PCJACOBI));
      PetscCall(VecDestroy(&xx));
      PetscCall(VecDestroy(&bb));
      PetscCall(KSPDestroy(&eksp));
    }
    if (pc_gamg->use_sa_esteig) {
      /* cache the estimates so the level smoother (e.g. Chebyshev) can reuse them */
      mg->min_eigen_DinvA[pc_gamg->current_level] = emin;
      mg->max_eigen_DinvA[pc_gamg->current_level] = emax;
      PetscCall(PetscInfo(pc, "%s: Smooth P0: level %" PetscInt_FMT ", cache spectra %g %g\n", ((PetscObject)pc)->prefix, pc_gamg->current_level, (double)emin, (double)emax));
    } else {
      mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
      mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
    }
  } else {
    /* no smoothing requested: nothing to cache */
    mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
    mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
  }

  /* smooth P0 */
  for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
    Mat tMat;
    Vec diag;

    PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));

    /* smooth P1 := (I - omega/lam D^{-1}A)P0 */
    PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat));
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatProductClear(tMat)); /* drop the product symbolic data; tMat is modified in place below */
    PetscCall(MatCreateVecs(Amat, &diag, NULL));
    PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */
    PetscCall(VecReciprocal(diag));
    PetscCall(MatDiagonalScale(tMat, diag, NULL)); /* tMat = D^{-1} A P0 */
    PetscCall(VecDestroy(&diag));

    /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */
    PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero");
    /* TODO: Document the 1.4 and don't hardwire it in this routine */
    alpha = -1.4 / emax; /* damping factor omega/lambda_max with omega = 1.4 */

    /* Prol = Prol + alpha * tMat = (I - (1.4/emax) D^{-1}A) P0 */
    PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN));
    PetscCall(MatDestroy(&Prol));
    Prol = tMat;
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));
  *a_P = Prol;
  PetscFunctionReturn(0);
}
847 
848 /* -------------------------------------------------------------------------- */
849 /*
850    PCCreateGAMG_AGG
851 
852   Input Parameter:
853    . pc -
854 */
855 PetscErrorCode PCCreateGAMG_AGG(PC pc) {
856   PC_MG       *mg      = (PC_MG *)pc->data;
857   PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
858   PC_GAMG_AGG *pc_gamg_agg;
859 
860   PetscFunctionBegin;
861   /* create sub context for SA */
862   PetscCall(PetscNewLog(pc, &pc_gamg_agg));
863   pc_gamg->subctx = pc_gamg_agg;
864 
865   pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
866   pc_gamg->ops->destroy        = PCDestroy_GAMG_AGG;
867   /* reset does not do anything; setup not virtual */
868 
869   /* set internal function pointers */
870   pc_gamg->ops->graph             = PCGAMGGraph_AGG;
871   pc_gamg->ops->coarsen           = PCGAMGCoarsen_AGG;
872   pc_gamg->ops->prolongator       = PCGAMGProlongator_AGG;
873   pc_gamg->ops->optprolongator    = PCGAMGOptProlongator_AGG;
874   pc_gamg->ops->createdefaultdata = PCSetData_AGG;
875   pc_gamg->ops->view              = PCView_GAMG_AGG;
876 
877   pc_gamg_agg->aggressive_coarsening_levels = 0;
878   pc_gamg_agg->symmetrize_graph             = PETSC_FALSE;
879   pc_gamg_agg->nsmooths                     = 1;
880 
881   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", PCGAMGSetNSmooths_AGG));
882   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetSymmetrizeGraph_C", PCGAMGSetSymmetrizeGraph_AGG));
883   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", PCGAMGSetAggressiveLevels_AGG));
884   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_AGG));
885   PetscFunctionReturn(0);
886 }
887