xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 430ada49b1d04c4eae1f1fbb355e35e8ebf4cb99)
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
45 /*
46    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
47      of active processors.
48 
49    Input Parameter:
50    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
51           'pc_gamg->data_sz' are changed via repartitioning/reduction.
52    . Amat_fine - matrix on this fine (k) level
53    . cr_bs - coarse block size
54    In/Output Parameter:
55    . a_P_inout - prolongation operator to the next level (k-->k-1)
56    . a_nactive_proc - number of active procs
57    Output Parameter:
58    . a_Amat_crs - coarse matrix that is created (k-1)
59 */
60 
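/*
   Illustrative sketch (not part of this file's build): the Galerkin coarse
   operator formed below is A_coarse = P^T A_fine P, computed in one call with
   MatPtAP(). A caller-side view, assuming a fine matrix Afine and a
   prolongator P already exist:

     Mat Acoarse;
     ierr = MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);CHKERRQ(ierr);
     ierr = MatDestroy(&Acoarse);CHKERRQ(ierr);

   The 2.0 is only a fill estimate used to preallocate the triple product.
*/
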
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt  bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (!new_size) new_size = 1; /* not likely, possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
    /* we know that the grid structure can be reused in MatPtAP */
  } else {
    /* we know that the grid structure can NOT be reused in MatPtAP */
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* defensive fallback; should not be reached since kk=1 always divides size */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction; return early since we are nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create a numbering IS from the 'is_eq_newproc' partitioning IS, giving the new
      global number that each equation maps TO
     */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
     */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
    Vec            src_crd, dest_crd;
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    VecScatter     vecscat;
    PetscScalar    *array;
    IS             isscat;

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
    ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
    /*
     There are 'ndata_rows*ndata_cols' data items per node; one can think of the vectors as having
     a block size of 'node_data_sz'.  Note, ISs are expanded into equation space by 'cr_bs'.
     */
    ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
    ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    for (ii=0,jj=0; ii<ncrs; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
    ierr = PetscFree(tidx);CHKERRQ(ierr);
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
      for (ii=0; ii<ncrs; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreateWithData(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy(&isscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
    ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

    pc_gamg->data_sz = node_data_sz*ncrs_new;
    strideNew        = ncrs_new*ndata_rows;

    ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      for (ii=0; ii<ncrs_new; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
    ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatCreateSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
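/*
   Usage sketch (standard PETSc calls, not code from this file): PCSetUp_GAMG is
   reached through the generic PCSetUp() when a KSP that uses a GAMG
   preconditioner is set up. Assuming a matrix A already exists:

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = KSPSetUp(ksp);CHKERRQ(ierr);    (the hierarchy below is built here)
     ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
*/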
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      is_last = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat          B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* on the 2nd solve the matrix structure can change from repartitioning or process reduction, but we do not know here whether that happened; should be fixed */
          if (pc_gamg->setup_count==2 /* && pc_gamg->repart||reduction */) {
            ierr = PetscInfo2(pc,"new RAP after first solve level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,2.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
            mglevels[level]->A = B;
          } else {
            ierr = PetscInfo2(pc,"RAP after first solve reusing matrix level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?); need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);

  ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL; /* failed to coarsen */

      if (pc_gamg->use_aggs_in_asm && Prol11) { /* guard against a NULL prolongator when coarsening failed */
        PetscInt bs;
        ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
        ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) { /* failed to coarsen */
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
    if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_last was set before the coarsest level was reached");
    if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
    if (level1 == pc_gamg->Nlevels-1) is_last = PETSC_TRUE;
    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is a loop test variable */
    ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for ASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_asm) {
        PetscInt sz;
        IS       *iss;

        sz   = nASMBlocksArr[level];
        iss  = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
        if (!sz) {
          IS       is;
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(iss);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      if (!pc_gamg->use_parallel_coarse_grid_solver) {
        ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
        ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
        ierr = PCSetUp(subpc);CHKERRQ(ierr);
        ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
        if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
        ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
        ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
        ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
        ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
        ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
        /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
         * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
         * view every subdomain as though they were different. */
        ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
      }
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes:
    GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
          that has degrees of freedom

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Notes: For example -pc_gamg_coarse_eq_limit 1000 will stop coarsening once the coarse grid
     has fewer than 1000 unknowns.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
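
/*
   Usage sketch (hedged; assumes 'pc' is already of type PCGAMG): cap the
   coarsest problem at about 1000 equations and aim for about 2000 equations
   per active process during processor reduction:

     ierr = PCGAMGSetCoarseEqLim(pc,1000);CHKERRQ(ierr);
     ierr = PCGAMGSetProcEqLim(pc,2000);CHKERRQ(ierr);

   or equivalently on the command line:
     -pc_gamg_coarse_eq_limit 1000 -pc_gamg_process_eq_limit 2000
*/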

/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes:
    this will generally improve the load balancing of the work on each level

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
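
/*
   Usage sketch (hedged; assumes 'pc' is already of type PCGAMG):

     ierr = PCGAMGSetRepartition(pc,PETSC_TRUE);CHKERRQ(ierr);

   or -pc_gamg_repartition on the command line. The partitioner itself is a
   MatPartitioning object configured under the "pc_gamg_" prefix in
   PCGAMGCreateLevel_GAMG above, so its type can be chosen with e.g.
   -pc_gamg_mat_partitioning_type.
*/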

/*@
   PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes:
    this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
          rebuilding the preconditioner more quickly.

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}
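
/*
   Usage sketch (hedged): for a sequence of solves where the matrix values
   change but the graph/near-kernel does not (e.g. inside a Newton or
   time-stepping loop), reuse the prolongators:

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);

   Subsequent setups then take the "just do Galerkin grids" branch of
   PCSetUp_GAMG above and only recompute the coarse operators with MatPtAP().
*/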

/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}
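
/*
   Usage sketch (hedged): make the level smoothers ASM with the aggregates as
   subdomains; PCSetUp_GAMG above switches the level sub-PC to PCASM
   automatically when this flag is set:

     ierr = PCGAMGASMSetUseAggs(pc,PETSC_TRUE);CHKERRQ(ierr);

   command line: -pc_gamg_asm_use_agg
*/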

/*@
   PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force coarse grid onto one processor

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  v - array of threshold values for the finest levels; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
-  n - number of threshold values provided in v

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes:
    Increasing the threshold decreases the rate of coarsening. Conversely, reducing the threshold increases the rate of coarsening (aggressive coarsening) and thereby reduces the complexity of the coarse grids, but generally results in slower solver convergence rates. Reducing coarse grid complexity also reduces the cost of the Galerkin coarse grid construction considerably.
    Before coarsening or aggregating the graph, GAMG removes small values from the graph with this threshold, thus reducing the coupling in the graph and giving a different (perhaps better) coarser set of points.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGFilterGraph(), PCGAMGSetSquareGraph()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt i;
  PetscFunctionBegin;
  for (i=0; i<n; i++) pc_gamg->threshold[i] = v[i];
  /* fill the remaining levels by scaling the previous value; start at max(n,1) so threshold[-1] is never read and we never write past the array */
  for (i=PetscMax(n,1); i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;
  PetscFunctionReturn(0);
}
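
/*
   Usage sketch (hedged): give explicit thresholds for the two finest levels;
   the remaining levels are filled in by scaling with the threshold scale, as
   in the loop above:

     PetscReal th[2] = {0.01,0.005};
     ierr = PCGAMGSetThreshold(pc,th,2);CHKERRQ(ierr);

   command line: -pc_gamg_threshold 0.01,0.005
*/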

/*@
   PCGAMGSetThresholdScale - Relative threshold reduction at each level

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  scale - the threshold value reduction, usually < 1.0

   Options Database Key:
.  -pc_gamg_threshold_scale <v>

   Level: advanced

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscFunctionBegin;
  pc_gamg->threshold_scale = v;
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGSetType - Set solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGGetType - Get solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols = 0;
    pc_gamg->data_cell_rows = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
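
/*
   Usage sketch (hedged; 'pc' of type PCGAMG): select the aggregation method,
   which is also the default set at the bottom of PCCreate_GAMG below:

     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);

   command line: -pc_gamg_type agg
*/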

/* -------------------------------------------------------------------------- */
/*
   PCMGGetGridComplexity - compute coarse grid complexity of MG hierarchy

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  gc - grid complexity = sum_i(nnz_i) / nnz_0

   Level: advanced
*/
static PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_MG_Levels   **mglevels = mg->levels;
  PetscInt       lev, nnz0 = -1;
  MatInfo        info;
  PetscFunctionBegin;
  if (!mg->nlevels) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MG has no levels");
  for (lev=0, *gc=0; lev<mg->nlevels; lev++) {
    Mat dB;
    ierr = KSPGetOperators(mglevels[lev]->smoothd,NULL,&dB);CHKERRQ(ierr);
    ierr = MatGetInfo(dB,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
    *gc += (PetscReal)info.nz_used;
    if (lev==mg->nlevels-1) nnz0 = info.nz_used;
  }
  if (nnz0) *gc /= (PetscReal)nnz0;
  else *gc = 0;
  PetscFunctionReturn(0);
}
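
/*
   Worked example of the formula above: a 3-level hierarchy with nnz counts
   1000 (finest level), 150, and 20 has grid complexity
   (1000 + 150 + 20) / 1000 = 1.17.
*/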

static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscReal      gc=0;
  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
  for (i=0;i<pc_gamg->current_level;i++) {
    ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  ierr = PCMGGetGridComplexity(pc,&gc);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Complexity:    grid = %g\n",gc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  PetscInt       i,n;
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
    n = PETSC_GAMG_MAXLEVELS;
    ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
    if (!flag) n = 1;
    /* fill unspecified levels by scaling the previous level's threshold; start at max(n,1) so threshold[-1] is never read */
    for (i=PetscMax(n,1); i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
+   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
.   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
.   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
                                        equations on each process that has degrees of freedom
.   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
.   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
-   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified

   Options Database Keys for default Aggregation:
+  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
-  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

   Multigrid options:
+  -pc_mg_cycles <v> - v or w, see PCMGSetCycleType()
.  -pc_mg_distinct_smoothup - configure the up and down (pre and post) smoothers separately, see PCMGSetDistinctSmoothUp()
.  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
-  -pc_mg_levels <levels> - Number of levels of multigrid to use.

  Notes:
    In order to obtain good performance for PCGAMG for vector valued problems you must
       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
       See the Users Manual Chapter 4 for more details

  Level: intermediate

  Concepts: algebraic multigrid

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
1314 
1315 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1316 {
1317   PetscErrorCode ierr; PetscInt i;
1318   PC_GAMG        *pc_gamg;
1319   PC_MG          *mg;
1320 
1321   PetscFunctionBegin;
1322   /* initialize the package and register the GAMG types */
1323   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1324 
1325   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1326   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
1327   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1328 
1329   /* create a supporting struct and attach it to pc */
1330   ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
1331   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
1332   mg           = (PC_MG*)pc->data;
1333   mg->innerctx = pc_gamg;
1334 
1335   ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);
1336 
1337   pc_gamg->setup_count = 0;
1338   /* these should be in subctx but repartitioning needs simple arrays */
1339   pc_gamg->data_sz = 0;
1340   pc_gamg->data    = 0;
1341 
1342   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1343   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1344   pc->ops->setup          = PCSetUp_GAMG;
1345   pc->ops->reset          = PCReset_GAMG;
1346   pc->ops->destroy        = PCDestroy_GAMG;
1347   mg->view                = PCView_GAMG;
1348 
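  /* compose the type-specific methods so that the public PCGAMGSetXXX()/PCGAMGGetType() routines dispatch to this implementation */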
1349   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1350   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1351   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
1352   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
1353   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
1354   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
1355   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1356   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
1357   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1358   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
1359   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
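  /* default parameter values */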
1360   pc_gamg->repart           = PETSC_FALSE;
1361   pc_gamg->reuse_prol       = PETSC_FALSE;
1362   pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
1363   pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
1364   pc_gamg->min_eq_proc      = 50;
1365   pc_gamg->coarse_eq_limit  = 50;
1366   for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
1367   pc_gamg->threshold_scale = 1.;
1368   pc_gamg->Nlevels          = PETSC_GAMG_MAXLEVELS;
1369   pc_gamg->current_level    = 0; /* not strictly needed, but initialized for clarity */
1370   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1371 
1372   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1373   ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 /*@C
1378  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1379     from PCInitializePackage().
1380 
1381  Level: developer
1382 
1383  .keywords: PC, PCGAMG, initialize, package
1384  .seealso: PetscInitialize()
1385 @*/
1386 PetscErrorCode PCGAMGInitializePackage(void)
1387 {
1388   PetscErrorCode ierr;
1389 
1390   PetscFunctionBegin;
1391   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1392   PCGAMGPackageInitialized = PETSC_TRUE;
1393   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1394   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1395   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1396   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1397 
1398   /* general events */
1399   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1400   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1401   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1402   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1403   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1404   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1405   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1406 
1407 #if defined PETSC_GAMG_USE_LOG
1408   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1409   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1410   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1411   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1412   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1413   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1414   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1415   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1416   ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1417   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1418   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1419   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1420   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1421   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1422   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1423   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1424   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1425 
1426   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1427   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1428   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1429   /* create timer stages */
1430 #if defined GAMG_STAGES
1431   {
1432     char     str[32];
1433     PetscInt lidx;
1434     ierr = PetscSNPrintf(str,sizeof(str),"MG Level %D (finest)",(PetscInt)0);CHKERRQ(ierr);
1435     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1436     for (lidx=1; lidx<PETSC_GAMG_MAXLEVELS; lidx++) { /* one stage per level, within the bounds of gamg_stages[] */
1437       ierr = PetscSNPrintf(str,sizeof(str),"MG Level %D",lidx);CHKERRQ(ierr);
1438       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1439     }
1440   }
1441 #endif
1442 #endif
1443   PetscFunctionReturn(0);
1444 }
1445 
1446 /*@C
1447  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1448     called from PetscFinalize() automatically.
1449 
1450  Level: developer
1451 
1452  .keywords: Petsc, destroy, package
1453  .seealso: PetscFinalize()
1454 @*/
1455 PetscErrorCode PCGAMGFinalizePackage(void)
1456 {
1457   PetscErrorCode ierr;
1458 
1459   PetscFunctionBegin;
1460   PCGAMGPackageInitialized = PETSC_FALSE;
1461   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1462   PetscFunctionReturn(0);
1463 }
1464 
1465 /*@C
1466  PCGAMGRegister - Register a PCGAMG implementation.
1467 
1468  Input Parameters:
1469  + type - string that will be used as the name of the GAMG type.
1470  - create - function for creating the GAMG context.
1471 
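 Sample usage (a sketch; MyGAMGCreate stands for a user-provided creation routine and "mygamg" is an arbitrary type name):
.vb
   ierr = PCGAMGRegister("mygamg",MyGAMGCreate);CHKERRQ(ierr);
.ve
 The registered type can then be selected with PCGAMGSetType(pc,"mygamg") or the option -pc_gamg_type mygamg.
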
1472   Level: advanced
1473 
1474  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1475 @*/
1476 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1477 {
1478   PetscErrorCode ierr;
1479 
1480   PetscFunctionBegin;
1481   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1482   ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
1483   PetscFunctionReturn(0);
1484 }
1485 
1486