xref: /petsc/src/ksp/pc/impls/gamg/agg.c (revision fdf6c4e30aafdbc795e4f76379caa977fd5cdf5a)
1 /*
2  GAMG geometric-algebric multigrid PC - Mark Adams 2011
3  */
4 
5 #include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
6 #include <petscblaslapack.h>
7 #include <petscdm.h>
8 #include <petsc/private/kspimpl.h>
9 
typedef struct {
  PetscInt  nsmooths;                     // number of prolongator smoothing steps (set via PCGAMGSetNSmooths(), option -pc_gamg_agg_nsmooths)
  PetscBool symmetrize_graph;             // symmetrize the graph before coarsening; some coarsening algorithms require a symmetric graph
  PetscInt  aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk)
} PC_GAMG_AGG;
15 
/*@
   PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical)

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of smoothing steps

   Options Database Key:
.  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation

   Level: intermediate

.seealso: `PCGAMGSetSymmetrizeGraph()`, `PCGAMGSetAggressiveLevels()`
@*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,n,2);
  PetscTryMethod(pc,"PCGAMGSetNSmooths_C",(PC,PetscInt),(pc,n));
  PetscFunctionReturn(0);
}
39 
40 static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n)
41 {
42   PC_MG       *mg          = (PC_MG*)pc->data;
43   PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
44   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
45 
46   PetscFunctionBegin;
47   pc_gamg_agg->nsmooths = n;
48   PetscFunctionReturn(0);
49 }
50 
/*@
   PCGAMGSetSymmetrizeGraph - Symmetrize the graph before computing the aggregation. Some algorithms require the graph be symmetric

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to symmetrize the graph, PETSC_FALSE (default) to leave it as is

   Options Database Key:
.  -pc_gamg_symmetrize_graph <true,default=false> - symmetrize the graph before computing the aggregation

   Level: intermediate

.seealso: `PCGAMGSetAggressiveLevels()`, `PCGAMGSetNSmooths()`
@*/
PetscErrorCode PCGAMGSetSymmetrizeGraph(PC pc, PetscBool n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,n,2);
  PetscTryMethod(pc,"PCGAMGSetSymmetrizeGraph_C",(PC,PetscBool),(pc,n));
  PetscFunctionReturn(0);
}
75 
76 static PetscErrorCode PCGAMGSetSymmetrizeGraph_AGG(PC pc, PetscBool n)
77 {
78   PC_MG       *mg          = (PC_MG*)pc->data;
79   PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
80   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
81 
82   PetscFunctionBegin;
83   pc_gamg_agg->symmetrize_graph = n;
84   PetscFunctionReturn(0);
85 }
86 
/*@
   PCGAMGSetAggressiveLevels -  Aggressive coarsening on first n levels

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - 0, 1 or more

   Options Database Key:
.  -pc_gamg_aggressive_coarsening <n,default = 1> - Number of levels, starting from the finest, on which to use aggressive (MIS-2) coarsening

   Level: intermediate

.seealso: `PCGAMGSetSymmetrizeGraph()`, `PCGAMGSetThreshold()`
@*/
PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,n,2);
  PetscTryMethod(pc,"PCGAMGSetAggressiveLevels_C",(PC,PetscInt),(pc,n));
  PetscFunctionReturn(0);
}
111 
112 static PetscErrorCode PCGAMGSetAggressiveLevels_AGG(PC pc, PetscInt n)
113 {
114   PC_MG       *mg          = (PC_MG*)pc->data;
115   PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
116   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
117 
118   PetscFunctionBegin;
119   pc_gamg_agg->aggressive_coarsening_levels = n;
120   PetscFunctionReturn(0);
121 }
122 
/* process the AGG-specific options; handles the deprecated -pc_gamg_square_graph alias */
static PetscErrorCode PCSetFromOptions_GAMG_AGG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_MG          *mg          = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg     = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject,"GAMG-AGG options");
  {
    PetscBool flg;
    PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths","smoothing steps for smoothed aggregation, usually 1","PCGAMGSetNSmooths",pc_gamg_agg->nsmooths,&pc_gamg_agg->nsmooths,NULL));
    PetscCall(PetscOptionsBool("-pc_gamg_symmetrize_graph","Set for asymmetric matrices","PCGAMGSetSymmetrizeGraph",pc_gamg_agg->symmetrize_graph,&pc_gamg_agg->symmetrize_graph,NULL));
    /* NOTE(review): this unconditionally resets the value here, which discards anything a
       prior PCGAMGSetAggressiveLevels() call stored if PCSetFromOptions() runs afterwards --
       confirm whether the default should instead be set once at PC creation */
    pc_gamg_agg->aggressive_coarsening_levels = 1;
    /* query the deprecated alias first; flg records whether the user supplied it */
    PetscCall(PetscOptionsInt("-pc_gamg_square_graph","Number of aggressive coarsening (MIS-2) levels from finest (alias for -pc_gamg_aggressive_coarsening, deprecated)","PCGAMGSetAggressiveLevels",pc_gamg_agg->aggressive_coarsening_levels,&pc_gamg_agg->aggressive_coarsening_levels,&flg));
    if (!flg) {
      /* alias not given: the new option (if any) simply sets the value */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening","Number of aggressive coarsening (MIS-2) levels from finest","PCGAMGSetAggressiveLevels",pc_gamg_agg->aggressive_coarsening_levels,&pc_gamg_agg->aggressive_coarsening_levels,NULL));
    } else {
      /* alias given: the new option still wins if both are present, but warn about the clash */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening","Number of aggressive coarsening (MIS-2) levels from finest","PCGAMGSetAggressiveLevels",pc_gamg_agg->aggressive_coarsening_levels,&pc_gamg_agg->aggressive_coarsening_levels,&flg));
      if (flg) PetscCall(PetscInfo(pc,"Warning: both -pc_gamg_square_graph and -pc_gamg_aggressive_coarsening are used. -pc_gamg_square_graph is deprecated, Number of aggressive levels is %d\n",(int)pc_gamg_agg->aggressive_coarsening_levels));
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
147 
148 /* -------------------------------------------------------------------------- */
149 static PetscErrorCode PCDestroy_GAMG_AGG(PC pc)
150 {
151   PC_MG          *mg          = (PC_MG*)pc->data;
152   PC_GAMG        *pc_gamg     = (PC_GAMG*)mg->innerctx;
153 
154   PetscFunctionBegin;
155   PetscCall(PetscFree(pc_gamg->subctx));
156   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNSmooths_C",NULL));
157   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetSymmetrizeGraph_C",NULL));
158   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetAggressiveLevels_C",NULL));
159   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",NULL));
160   PetscFunctionReturn(0);
161 }
162 
163 /* -------------------------------------------------------------------------- */
/*
   PCSetCoordinates_AGG - build the (rigid body) near-null-space data from nodal coordinates
     - collective

   Input Parameters:
   . pc - the preconditioner context
   . ndm - dimension of data (used for dof/vertex for Stokes)
   . a_nloc - number of vertices local
   . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
*/

static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt       arrsz,kk,ii,jj,nloc,ndatarows,ndf;
  Mat            mat = pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  nloc = a_nloc;

  /* SA: null space vectors */
  PetscCall(MatGetBlockSize(mat, &ndf)); /* this does not work for Stokes */
  if (coords && ndf==1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if (coords) {
    /* elasticity: 3 rigid body modes in 2D, 6 in 3D */
    PetscCheck(ndm <= ndf,PETSC_COMM_SELF,PETSC_ERR_PLIB,"degrees of motion %" PetscInt_FMT " > block size %" PetscInt_FMT,ndm,ndf);
    pc_gamg->data_cell_cols = (ndm==2 ? 3 : 6); /* displacement elasticity */
    if (ndm != ndf) {
      PetscCheck(pc_gamg->data_cell_cols == ndf,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Don't know how to create null space for ndm=%" PetscInt_FMT ", ndf=%" PetscInt_FMT ".  Use MatSetNearNullSpace().",ndm,ndf);
    }
  } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  PetscCheck(pc_gamg->data_cell_cols > 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_cell_cols %" PetscInt_FMT " <= 0",pc_gamg->data_cell_cols);
  arrsz = nloc*pc_gamg->data_cell_rows*pc_gamg->data_cell_cols;

  /* (re)allocate the data array only when the required size changed */
  if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
    PetscCall(PetscFree(pc_gamg->data));
    PetscCall(PetscMalloc1(arrsz+1, &pc_gamg->data));
  }
  /* copy data in - column oriented */
  for (kk=0; kk<nloc; kk++) {
    const PetscInt M     = nloc*pc_gamg->data_cell_rows; /* stride into data */
    PetscReal      *data = &pc_gamg->data[kk*ndatarows]; /* start of cell */
    if (pc_gamg->data_cell_cols==1) *data = 1.0; /* single constant vector */
    else {
      /* translational modes: identity block for this node */
      for (ii=0;ii<ndatarows;ii++) {
        for (jj=0;jj<ndatarows;jj++) {
          if (ii==jj)data[ii*M + jj] = 1.0;
          else data[ii*M + jj] = 0.0;
        }
      }

      /* rotational modes */
      if (coords) {
        if (ndm == 2) {
          /* in-plane rotation: column is (-y, x) */
          data   += 2*M;
          data[0] = -coords[2*kk+1];
          data[1] =  coords[2*kk];
        } else {
          /* three rotations: skew-symmetric cross-product matrix of the node coordinates */
          data   += 3*M;
          data[0] = 0.0;             data[M+0] =  coords[3*kk+2]; data[2*M+0] = -coords[3*kk+1];
          data[1] = -coords[3*kk+2]; data[M+1] = 0.0;             data[2*M+1] =  coords[3*kk];
          data[2] =  coords[3*kk+1]; data[M+2] = -coords[3*kk];   data[2*M+2] = 0.0;
        }
      }
    }
  }
  pc_gamg->data_sz = arrsz;
  PetscFunctionReturn(0);
}
237 
238 /* -------------------------------------------------------------------------- */
/*
   PCSetData_AGG - called if data is not set with PCSetCoordinates.
      Looks in Mat for near null space.
      Does not work for Stokes

  Input Parameters:
   . pc - the preconditioner context
   . a_A - matrix to get (near) null space out of.
*/
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  MatNullSpace   mnull;

  PetscFunctionBegin;
  /* first look for a near null space attached directly to the matrix */
  PetscCall(MatGetNearNullSpace(a_A, &mnull));
  if (!mnull) {
    /* fall back to a DM (from the PC, else the matrix) and query its first field */
    DM dm;
    PetscCall(PCGetDM(pc, &dm));
    if (!dm) {
      PetscCall(MatGetDM(a_A, &dm));
    }
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      PetscCall(DMGetNumFields(dm, &Nf));
      if (Nf) {
        PetscCall(DMGetField(dm, 0, NULL, &deformation));
        /* prefer a "nearnullspace" attached to the field, else a "nullspace" */
        PetscCall(PetscObjectQuery((PetscObject)deformation,"nearnullspace",(PetscObject*)&mnull));
        if (!mnull) {
          PetscCall(PetscObjectQuery((PetscObject)deformation,"nullspace",(PetscObject*)&mnull));
        }
      }
    }
  }

  if (!mnull) {
    /* no null space found anywhere: default to constant vectors via PCSetCoordinates_AGG with NULL coords */
    PetscInt bs,NN,MM;
    PetscCall(MatGetBlockSize(a_A, &bs));
    PetscCall(MatGetLocalSize(a_A, &MM, &NN));
    PetscCheck(MM % bs == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT,MM,bs);
    PetscCall(PCSetCoordinates_AGG(pc, bs, MM/bs, NULL));
  } else {
    /* copy the null space vectors (optionally preceded by the constant vector) into
       pc_gamg->data, one vector after another (column oriented) */
    PetscReal         *nullvec;
    PetscBool         has_const;
    PetscInt          i,j,mlocal,nvec,bs;
    const Vec         *vecs;
    const PetscScalar *v;

    PetscCall(MatGetLocalSize(a_A,&mlocal,NULL));
    PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
    pc_gamg->data_sz = (nvec+!!has_const)*mlocal;
    PetscCall(PetscMalloc1((nvec+!!has_const)*mlocal,&nullvec));
    if (has_const) for (i=0; i<mlocal; i++) nullvec[i] = 1.0; /* constant mode first */
    for (i=0; i<nvec; i++) {
      PetscCall(VecGetArrayRead(vecs[i],&v));
      /* real part only: the aggregation data array is PetscReal */
      for (j=0; j<mlocal; j++) nullvec[(i+!!has_const)*mlocal + j] = PetscRealPart(v[j]);
      PetscCall(VecRestoreArrayRead(vecs[i],&v));
    }
    /* NOTE(review): pc_gamg->data is overwritten without freeing any previous allocation --
       confirm callers guarantee it is NULL/empty on entry */
    pc_gamg->data           = nullvec;
    pc_gamg->data_cell_cols = (nvec+!!has_const);
    PetscCall(MatGetBlockSize(a_A, &bs));
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}
307 
308 /* -------------------------------------------------------------------------- */
/*
  formProl0 - collect null space data for each aggregate, do QR, put R in coarse grid data and Q in P_0

  Input Parameters:
   . agg_llists - list of arrays with aggregates -- list from selected vertices of aggregate unselected vertices
   . bs - row block size
   . nSAvec - column bs of new P
   . my0crs - global index of start of locals
   . data_stride - bs*(nloc nodes + ghost nodes) [data_stride][nSAvec]
   . data_in[data_stride*nSAvec] - local data on fine grid
   . flid_fgid[data_stride/bs] - map local to global IDs, includes ghosts in 'locals_llist'

  Output Parameters:
   . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
   . a_Prol - prolongation operator

*/
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists,PetscInt bs,PetscInt nSAvec,PetscInt my0crs,PetscInt data_stride,PetscReal data_in[],const PetscInt flid_fgid[],PetscReal **a_data_out,Mat a_Prol)
{
  PetscInt        Istart,my0,Iend,nloc,clid,flid = 0,aggID,kk,jj,ii,mm,nSelected,minsz,nghosts,out_data_stride;
  MPI_Comm        comm;
  PetscReal       *out_data;
  PetscCDIntNd    *pos;
  PCGAMGHashTable fgid_flid;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)a_Prol,&comm));
  PetscCall(MatGetOwnershipRange(a_Prol, &Istart, &Iend));
  nloc = (Iend-Istart)/bs; my0 = Istart/bs; /* work in node (block) units */
  PetscCheck((Iend-Istart) % bs == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT,Iend,Istart,bs);
  Iend   /= bs;
  nghosts = data_stride/bs - nloc;

  /* hash table mapping ghost global node ids -> local ids */
  PetscCall(PCGAMGHashTableCreate(2*nghosts+1, &fgid_flid));
  for (kk=0; kk<nghosts; kk++) {
    PetscCall(PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc+kk], nloc+kk));
  }

  /* count selected -- same as number of cols of P */
  for (nSelected=mm=0; mm<nloc; mm++) {
    PetscBool ise;
    PetscCall(PetscCDEmptyAt(agg_llists, mm, &ise));
    if (!ise) nSelected++;
  }
  /* sanity check: column ownership of P must agree with the aggregate count */
  PetscCall(MatGetOwnershipRangeColumn(a_Prol, &ii, &jj));
  PetscCheck((ii/nSAvec) == my0crs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %" PetscInt_FMT " /nSAvec %" PetscInt_FMT "  != my0crs %" PetscInt_FMT,ii,nSAvec,my0crs);
  PetscCheck(nSelected == (jj-ii)/nSAvec,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nSelected %" PetscInt_FMT " != (jj %" PetscInt_FMT " - ii %" PetscInt_FMT ")/nSAvec %" PetscInt_FMT,nSelected,jj,ii,nSAvec);

  /* aloc space for coarse point data (output) */
  out_data_stride = nSelected*nSAvec;

  PetscCall(PetscMalloc1(out_data_stride*nSAvec, &out_data));
  for (ii=0;ii<out_data_stride*nSAvec;ii++) out_data[ii]=PETSC_MAX_REAL; /* sentinel used below to detect double writes */
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCall(PetscCDSizeAt(agg_llists, mm, &jj));
    if (jj > 0) { /* 'mm' is a selected (aggregate root) node */
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs */
      PetscBLASInt   asz  =jj,M=asz*bs,N=nSAvec,INFO;
      PetscBLASInt   Mdata=M+((N-M>0) ? N-M : 0),LDA=Mdata,LWORK=N*bs; /* pad rows so Mdata >= N for the QR */
      PetscScalar    *qqc,*qqr,*TAU,*WORK;
      PetscInt       *fids;
      PetscReal      *data;

      /* count agg */
      if (asz<minsz) minsz = asz;

      /* get block */
      PetscCall(PetscMalloc5(Mdata*N, &qqc,M*N, &qqr,N, &TAU,LWORK, &WORK,M, &fids));

      /* gather this aggregate's null-space block into qqc (column oriented) */
      aggID = 0;
      PetscCall(PetscCDGetHeadPos(agg_llists,lid,&pos));
      while (pos) {
        PetscInt gid1;
        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(agg_llists,lid,&pos));

        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0; /* locally owned node */
        else {
          /* ghost node: look up its local index in the hash table */
          PetscCall(PCGAMGHashTableFind(&fgid_flid, gid1, &flid));
          PetscCheck(flid >= 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid*bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d = data[jj*data_stride + ii];
            qqc[jj*Mdata + aggID*bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk=0; kk<bs; kk++) fids[aggID*bs + kk] = flid_fgid[flid]*bs + kk;
        aggID++;
      }

      /* pad with zeros */
      for (ii = asz*bs; ii < Mdata; ii++) {
        for (jj = 0; jj < N; jj++, kk++) {
          qqc[jj*Mdata + ii] = .0;
        }
      }

      /* QR */
      PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
      PetscStackCallBLAS("LAPACKgeqrf",LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCall(PetscFPTrapPop());
      PetscCheck(INFO == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"xGEQRF error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid*nSAvec];
        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            /* sentinel must still be present: each entry is written exactly once */
            PetscCheck(data[jj*out_data_stride + ii] == PETSC_MAX_REAL,PETSC_COMM_SELF,PETSC_ERR_PLIB,"data[jj*out_data_stride + ii] != %e",(double)PETSC_MAX_REAL);
           if (ii <= jj) data[jj*out_data_stride + ii] = PetscRealPart(qqc[jj*Mdata + ii]); /* upper triangle holds R */
           else data[jj*out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      PetscStackCallBLAS("LAPACKorgqr",LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCheck(INFO == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"xORGQR error arg %" PetscBLASInt_FMT,-INFO);

      /* transpose LAPACK's column-oriented Q into row-oriented qqr for MatSetValues */
      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) {
          qqr[N*ii + jj] = qqc[jj*Mdata + ii];
        }
      }

      /* add diagonal block of P0 */
      for (kk=0; kk<N; kk++) {
        cids[kk] = N*cgid + kk; /* global col IDs in P0 */
      }
      PetscCall(MatSetValues(a_Prol,M,fids,N,cids,qqr,INSERT_VALUES));
      PetscCall(PetscFree5(qqc,qqr,TAU,WORK,fids));
      clid++;
    } /* coarse agg */
  } /* for all fine nodes */
  PetscCall(MatAssemblyBegin(a_Prol,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a_Prol,MAT_FINAL_ASSEMBLY));
  PetscCall(PCGAMGHashTableDestroy(&fgid_flid));
  PetscFunctionReturn(0);
}
456 
457 static PetscErrorCode PCView_GAMG_AGG(PC pc,PetscViewer viewer)
458 {
459   PC_MG          *mg      = (PC_MG*)pc->data;
460   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
461   PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
462 
463   PetscFunctionBegin;
464   PetscCall(PetscViewerASCIIPrintf(viewer,"      AGG specific options\n"));
465   PetscCall(PetscViewerASCIIPrintf(viewer,"        Symmetric graph %s\n",pc_gamg_agg->symmetrize_graph ? "true" : "false"));
466   PetscCall(PetscViewerASCIIPrintf(viewer,"        Number of levels to square graph %d\n",(int)pc_gamg_agg->aggressive_coarsening_levels));
467   PetscCall(PetscViewerASCIIPrintf(viewer,"        Number smoothing steps %d\n",(int)pc_gamg_agg->nsmooths));
468   PetscFunctionReturn(0);
469 }
470 
471 /* -------------------------------------------------------------------------- */
472 /*
473    PCGAMGGraph_AGG
474 
475   Input Parameter:
476    . pc - this
477    . Amat - matrix on this fine level
478 
479   Output Parameter:
480    . a_Gmat -
481 */
482 static PetscErrorCode PCGAMGGraph_AGG(PC pc,Mat Amat,Mat *a_Gmat)
483 {
484   PC_MG                     *mg          = (PC_MG*)pc->data;
485   PC_GAMG                   *pc_gamg     = (PC_GAMG*)mg->innerctx;
486   const PetscReal           vfilter      = pc_gamg->threshold[pc_gamg->current_level];
487   PC_GAMG_AGG               *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
488   Mat                       Gmat,F=NULL;
489   MPI_Comm                  comm;
490   PetscBool /* set,flg , */ symm;
491 
492   PetscFunctionBegin;
493   PetscCall(PetscObjectGetComm((PetscObject)Amat,&comm));
494 
495   /* PetscCall(MatIsSymmetricKnown(Amat, &set, &flg)); || !(set && flg) -- this causes lot of symm calls */
496   symm = (PetscBool)(pc_gamg_agg->symmetrize_graph); /* && !pc_gamg_agg->square_graph; */
497 
498   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_GRAPH],0,0,0,0));
499   PetscCall(MatCreateGraph(Amat, symm, PETSC_TRUE, &Gmat));
500   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_GRAPH],0,0,0,0));
501 
502   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_FILTER],0,0,0,0));
503   PetscCall(MatFilter(Gmat, vfilter,&F));
504   if (F) {
505     PetscCall(MatDestroy(&Gmat));
506     Gmat = F;
507   }
508   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_FILTER],0,0,0,0));
509 
510   *a_Gmat = Gmat;
511 
512   PetscFunctionReturn(0);
513 }
514 
515 /* -------------------------------------------------------------------------- */
/*
   PCGAMGCoarsen_AGG - supports squaring the graph (deprecated) and new graph for
     communication of QR data used with HEM and MISk coarsening

  Input Parameter:
   . a_pc - this

  Input/Output Parameter:
   . a_Gmat1 - graph to coarsen (in), graph off processor edges for QR gather scatter (out)

  Output Parameter:
   . agg_lists - list of aggregates

*/
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc,Mat *a_Gmat1, PetscCoarsenData **agg_lists)
{
  PC_MG          *mg          = (PC_MG*)a_pc->data;
  PC_GAMG        *pc_gamg     = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
  Mat            mat, Gmat1 = *a_Gmat1;  /* aggressive graph */
  IS             perm;
  PetscInt       Istart,Iend,Ii,nloc,bs,nn;
  PetscInt       *permute,*degree;
  PetscBool      *bIndexSet;
  MatCoarsen     crs;
  MPI_Comm       comm;
  PetscReal      hashfact;
  PetscInt       iSwapIndex;
  PetscRandom    random;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN],0,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)Gmat1,&comm));
  PetscCall(MatGetLocalSize(Gmat1, &nn, NULL));
  PetscCall(MatGetBlockSize(Gmat1, &bs));
  PetscCheck(bs == 1,PETSC_COMM_SELF,PETSC_ERR_PLIB,"bs %" PetscInt_FMT " must be 1",bs);
  nloc = nn/bs;

  /* get MIS aggs - randomize */
  PetscCall(PetscMalloc2(nloc, &permute,nloc, &degree));
  PetscCall(PetscCalloc1(nloc, &bIndexSet));
  for (Ii = 0; Ii < nloc; Ii++) permute[Ii] = Ii;
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF,&random));
  PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend));
  /* record the degree (number of nonzeros) of each local row */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscInt nc;
    PetscCall(MatGetRow(Gmat1,Istart+Ii,&nc,NULL,NULL));
    degree[Ii] = nc;
    PetscCall(MatRestoreRow(Gmat1,Istart+Ii,&nc,NULL,NULL));
  }
  /* single pass of random swaps (each target index used at most once);
     degree is swapped alongside permute so the two arrays stay paired */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscCall(PetscRandomGetValueReal(random,&hashfact));
    iSwapIndex = (PetscInt) (hashfact*nloc)%nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp = permute[iSwapIndex];
      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      iTemp = degree[iSwapIndex];
      degree[iSwapIndex]   = degree[Ii];
      degree[Ii]           = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  // create minimum degree ordering
  PetscCall(PetscSortIntWithArray(nloc,degree,permute));

  PetscCall(PetscFree(bIndexSet));
  PetscCall(PetscRandomDestroy(&random));
  /* perm borrows 'permute' (PETSC_USE_POINTER): permute must stay alive until ISDestroy() below */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_MIS],0,0,0,0));
  PetscCall(MatCoarsenCreate(comm, &crs));
  PetscCall(MatCoarsenSetFromOptions(crs));
  PetscCall(MatCoarsenSetGreedyOrdering(crs, perm));
  PetscCall(MatCoarsenSetAdjacency(crs, Gmat1));
  PetscCall(MatCoarsenSetStrictAggs(crs, PETSC_TRUE));
  /* aggressive coarsening (MIS-2) on the first 'aggressive_coarsening_levels' levels, plain MIS after */
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels) PetscCall(MatCoarsenMISKSetDistance(crs,2)); // hardwire to MIS-2
  else PetscCall(MatCoarsenMISKSetDistance(crs,1)); // MIS
  PetscCall(MatCoarsenApply(crs));
  PetscCall(MatCoarsenViewFromOptions(crs,NULL,"-mat_coarsen_view"));
  PetscCall(MatCoarsenGetData(crs, agg_lists)); /* output */
  PetscCall(MatCoarsenDestroy(&crs));

  PetscCall(ISDestroy(&perm));
  PetscCall(PetscFree2(permute,degree));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_MIS],0,0,0,0));

  {
    PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    PetscCall(PetscCDGetMat(llist, &mat));
    if (mat) {
      PetscCall(MatDestroy(&Gmat1));
      *a_Gmat1 = mat; /* output */
    }
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN],0,0,0,0));
  PetscFunctionReturn(0);
}
614 
615 /* -------------------------------------------------------------------------- */
/*
 PCGAMGProlongator_AGG - build the tentative prolongator P0 from the aggregates

 Input Parameters:
 . pc - this
 . Amat - matrix on this fine level
 . Gmat - used to get ghost data for nodes in
 . agg_lists - list of aggregates
 Output Parameter:
 . a_P_out - prolongation operator to the next level
 */
static PetscErrorCode PCGAMGProlongator_AGG(PC pc,Mat Amat,Mat Gmat,PetscCoarsenData *agg_lists,Mat *a_P_out)
{
  PC_MG          *mg       = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg  = (PC_GAMG*)mg->innerctx;
  const PetscInt col_bs = pc_gamg->data_cell_cols;
  PetscInt       Istart,Iend,nloc,ii,jj,kk,my0,nLocalSelected,bs;
  Mat            Prol;
  PetscMPIInt    size;
  MPI_Comm       comm;
  PetscReal      *data_w_ghost;
  PetscInt       myCrs0, nbnodes=0, *flid_fgid;
  MatType        mtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat,&comm));
  PetscCheck(col_bs >= 1,comm,PETSC_ERR_PLIB,"Column bs cannot be less than 1");
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROL],0,0,0,0));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(MatGetOwnershipRange(Amat, &Istart, &Iend));
  PetscCall(MatGetBlockSize(Amat, &bs));
  nloc = (Iend-Istart)/bs; my0 = Istart/bs; /* work in node (block) units */
  PetscCheck((Iend-Istart) % bs == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT,Iend,Istart,bs);

  /* get 'nLocalSelected' */
  for (ii=0, nLocalSelected = 0; ii < nloc; ii++) {
    PetscBool ise;
    /* filter out singletons 0 or 1? */
    PetscCall(PetscCDEmptyAt(agg_lists, ii, &ise));
    if (!ise) nLocalSelected++;
  }

  /* create prolongator, create P matrix */
  PetscCall(MatGetType(Amat,&mtype));
  PetscCall(MatCreate(comm, &Prol));
  PetscCall(MatSetSizes(Prol,nloc*bs,nLocalSelected*col_bs,PETSC_DETERMINE,PETSC_DETERMINE));
  PetscCall(MatSetBlockSizes(Prol, bs, col_bs));
  PetscCall(MatSetType(Prol, mtype));
  PetscCall(MatSeqAIJSetPreallocation(Prol,col_bs, NULL));
  PetscCall(MatMPIAIJSetPreallocation(Prol,col_bs, NULL, col_bs, NULL));

  /* can get all points "removed" */
  PetscCall(MatGetSize(Prol, &kk, &ii));
  if (!ii) {
    /* empty coarse grid: return NULL so the caller stops coarsening */
    PetscCall(PetscInfo(pc,"%s: No selected points on coarse grid\n",((PetscObject)pc)->prefix));
    PetscCall(MatDestroy(&Prol));
    *a_P_out = NULL;  /* out */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL],0,0,0,0));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscInfo(pc,"%s: New grid %" PetscInt_FMT " nodes\n",((PetscObject)pc)->prefix,ii/col_bs));
  PetscCall(MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk));

  PetscCheck((kk-myCrs0) % col_bs == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"(kk %" PetscInt_FMT " -myCrs0 %" PetscInt_FMT ") not divisible by col_bs %" PetscInt_FMT,kk,myCrs0,col_bs);
  myCrs0 = myCrs0/col_bs;
  PetscCheck((kk/col_bs-myCrs0) == nLocalSelected,PETSC_COMM_SELF,PETSC_ERR_PLIB,"(kk %" PetscInt_FMT "/col_bs %" PetscInt_FMT " - myCrs0 %" PetscInt_FMT ") != nLocalSelected %" PetscInt_FMT ")",kk,col_bs,myCrs0,nLocalSelected);

  /* create global vector of data in 'data_w_ghost' */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA],0,0,0,0));
  if (size > 1) { /* get ghost null space data */
    PetscReal *tmp_gdata,*tmp_ldata,*tp2;
    PetscCall(PetscMalloc1(nloc, &tmp_ldata));
    /* exchange one (column jj, block-row kk) slice of the null-space data at a time */
    for (jj = 0; jj < col_bs; jj++) {
      for (kk = 0; kk < bs; kk++) {
        PetscInt        ii,stride;
        const PetscReal *tp = pc_gamg->data + jj*bs*nloc + kk;
        for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;

        PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata));

        if (!jj && !kk) { /* now I know how many total nodes - allocate TODO: move below and do in one 'col_bs' call */
          PetscCall(PetscMalloc1(stride*bs*col_bs, &data_w_ghost));
          nbnodes = bs*stride;
        }
        tp2 = data_w_ghost + jj*bs*stride + kk;
        for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
        PetscCall(PetscFree(tmp_gdata));
      }
    }
    PetscCall(PetscFree(tmp_ldata));
  } else {
    /* serial: no ghosts, use the fine-grid data in place (not freed separately below) */
    nbnodes      = bs*nloc;
    data_w_ghost = (PetscReal*)pc_gamg->data;
  }

  /* get 'flid_fgid' TODO - move up to get 'stride' and do get null space data above in one step (jj loop) */
  if (size > 1) {
    PetscReal *fid_glid_loc,*fiddata;
    PetscInt  stride;

    /* global node ids travel through the same (real-valued) ghost exchange */
    PetscCall(PetscMalloc1(nloc, &fid_glid_loc));
    for (kk=0; kk<nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0+kk);
    PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata));
    PetscCall(PetscMalloc1(stride, &flid_fgid)); /* copy real data to in */
    for (kk=0; kk<stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
    PetscCall(PetscFree(fiddata));

    PetscCheck(stride == nbnodes/bs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"stride %" PetscInt_FMT " != nbnodes %" PetscInt_FMT "/bs %" PetscInt_FMT,stride,nbnodes,bs);
    PetscCall(PetscFree(fid_glid_loc));
  } else {
    PetscCall(PetscMalloc1(nloc, &flid_fgid));
    for (kk=0; kk<nloc; kk++) flid_fgid[kk] = my0 + kk;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLA],0,0,0,0));
  /* get P0 */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB],0,0,0,0));
  {
    PetscReal *data_out = NULL;
    PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes,data_w_ghost, flid_fgid, &data_out, Prol));
    /* replace the fine-grid null-space data with the coarse-grid data produced by the QR */
    PetscCall(PetscFree(pc_gamg->data));

    pc_gamg->data           = data_out;
    pc_gamg->data_cell_rows = col_bs;
    pc_gamg->data_sz        = col_bs*col_bs*nLocalSelected;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLB],0,0,0,0));
  if (size > 1) {PetscCall(PetscFree(data_w_ghost));}
  PetscCall(PetscFree(flid_fgid));

  *a_P_out = Prol;  /* out */

  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL],0,0,0,0));
  PetscFunctionReturn(0);
}
750 
751 /* -------------------------------------------------------------------------- */
752 /*
753    PCGAMGOptProlongator_AGG
754 
755   Input Parameter:
756    . pc - this
757    . Amat - matrix on this fine level
758  In/Output Parameter:
759    . a_P - prolongation operator to the next level
760 */
761 static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc,Mat Amat,Mat *a_P)
762 {
763   PC_MG          *mg          = (PC_MG*)pc->data;
764   PC_GAMG        *pc_gamg     = (PC_GAMG*)mg->innerctx;
765   PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
766   PetscInt       jj;
767   Mat            Prol  = *a_P;
768   MPI_Comm       comm;
769   KSP            eksp;
770   Vec            bb, xx;
771   PC             epc;
772   PetscReal      alpha, emax, emin;
773 
774   PetscFunctionBegin;
775   PetscCall(PetscObjectGetComm((PetscObject)Amat,&comm));
776   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPT],0,0,0,0));
777 
778   /* compute maximum singular value of operator to be used in smoother */
779   if (0 < pc_gamg_agg->nsmooths) {
780     /* get eigen estimates */
781     if (pc_gamg->emax > 0) {
782       emin = pc_gamg->emin;
783       emax = pc_gamg->emax;
784     } else {
785       const char *prefix;
786 
787       PetscCall(MatCreateVecs(Amat, &bb, NULL));
788       PetscCall(MatCreateVecs(Amat, &xx, NULL));
789       PetscCall(KSPSetNoisy_Private(bb));
790 
791       PetscCall(KSPCreate(comm,&eksp));
792       PetscCall(PCGetOptionsPrefix(pc,&prefix));
793       PetscCall(KSPSetOptionsPrefix(eksp,prefix));
794       PetscCall(KSPAppendOptionsPrefix(eksp,"pc_gamg_esteig_"));
795       {
796         PetscBool isset,sflg;
797         PetscCall(MatIsSPDKnown(Amat, &isset, &sflg));
798         if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG));
799       }
800       PetscCall(KSPSetErrorIfNotConverged(eksp,pc->erroriffailure));
801       PetscCall(KSPSetNormType(eksp, KSP_NORM_NONE));
802 
803       PetscCall(KSPSetInitialGuessNonzero(eksp, PETSC_FALSE));
804       PetscCall(KSPSetOperators(eksp, Amat, Amat));
805 
806       PetscCall(KSPGetPC(eksp, &epc));
807       PetscCall(PCSetType(epc, PCJACOBI));  /* smoother in smoothed agg. */
808 
809       PetscCall(KSPSetTolerances(eksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT, 10)); // 10 is safer, but 5 is often fine, can override with -pc_gamg_esteig_ksp_max_it -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.2
810 
811       PetscCall(KSPSetFromOptions(eksp));
812       PetscCall(KSPSetComputeSingularValues(eksp,PETSC_TRUE));
813       PetscCall(KSPSolve(eksp, bb, xx));
814       PetscCall(KSPCheckSolve(eksp,pc,xx));
815 
816       PetscCall(KSPComputeExtremeSingularValues(eksp, &emax, &emin));
817       PetscCall(PetscInfo(pc,"%s: Smooth P0: max eigen=%e min=%e PC=%s\n",((PetscObject)pc)->prefix,(double)emax,(double)emin,PCJACOBI));
818       PetscCall(VecDestroy(&xx));
819       PetscCall(VecDestroy(&bb));
820       PetscCall(KSPDestroy(&eksp));
821     }
822     if (pc_gamg->use_sa_esteig) {
823       mg->min_eigen_DinvA[pc_gamg->current_level] = emin;
824       mg->max_eigen_DinvA[pc_gamg->current_level] = emax;
825       PetscCall(PetscInfo(pc,"%s: Smooth P0: level %" PetscInt_FMT ", cache spectra %g %g\n",((PetscObject)pc)->prefix,pc_gamg->current_level,(double)emin,(double)emax));
826     } else {
827       mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
828       mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
829     }
830   } else {
831     mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
832     mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
833   }
834 
835   /* smooth P0 */
836   for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
837     Mat tMat;
838     Vec diag;
839 
840     PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM],0,0,0,0));
841 
842     /* smooth P1 := (I - omega/lam D^{-1}A)P0 */
843     PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2],0,0,0,0));
844     PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat));
845     PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2],0,0,0,0));
846     PetscCall(MatProductClear(tMat));
847     PetscCall(MatCreateVecs(Amat, &diag, NULL));
848     PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */
849     PetscCall(VecReciprocal(diag));
850     PetscCall(MatDiagonalScale(tMat, diag, NULL));
851     PetscCall(VecDestroy(&diag));
852 
853     /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */
854     PetscCheck(emax != 0.0,PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Computed maximum singular value as zero");
855     /* TODO: Document the 1.4 and don't hardwire it in this routine */
856     alpha = -1.4/emax;
857 
858     PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN));
859     PetscCall(MatDestroy(&Prol));
860     Prol = tMat;
861     PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM],0,0,0,0));
862   }
863   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT],0,0,0,0));
864   *a_P = Prol;
865   PetscFunctionReturn(0);
866 }
867 
868 /* -------------------------------------------------------------------------- */
869 /*
870    PCCreateGAMG_AGG
871 
872   Input Parameter:
873    . pc -
874 */
875 PetscErrorCode  PCCreateGAMG_AGG(PC pc)
876 {
877   PC_MG          *mg      = (PC_MG*)pc->data;
878   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
879   PC_GAMG_AGG    *pc_gamg_agg;
880 
881   PetscFunctionBegin;
882   /* create sub context for SA */
883   PetscCall(PetscNewLog(pc,&pc_gamg_agg));
884   pc_gamg->subctx = pc_gamg_agg;
885 
886   pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
887   pc_gamg->ops->destroy        = PCDestroy_GAMG_AGG;
888   /* reset does not do anything; setup not virtual */
889 
890   /* set internal function pointers */
891   pc_gamg->ops->graph             = PCGAMGGraph_AGG;
892   pc_gamg->ops->coarsen           = PCGAMGCoarsen_AGG;
893   pc_gamg->ops->prolongator       = PCGAMGProlongator_AGG;
894   pc_gamg->ops->optprolongator    = PCGAMGOptProlongator_AGG;
895   pc_gamg->ops->createdefaultdata = PCSetData_AGG;
896   pc_gamg->ops->view              = PCView_GAMG_AGG;
897 
898   pc_gamg_agg->aggressive_coarsening_levels = 0;
899   pc_gamg_agg->symmetrize_graph    = PETSC_FALSE;
900   pc_gamg_agg->nsmooths     = 1;
901 
902   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNSmooths_C",PCGAMGSetNSmooths_AGG));
903   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetSymmetrizeGraph_C",PCGAMGSetSymmetrizeGraph_AGG));
904   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetAggressiveLevels_C",PCGAMGSetAggressiveLevels_AGG));
905   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",PCSetCoordinates_AGG));
906   PetscFunctionReturn(0);
907 }
908