xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 5a856986583887c326abe5dfd149e8184a29cd80)
1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <petsc/private/matimpl.h>
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGraph_AGG;
14 PetscLogEvent PC_GAMGGraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptProlongator_AGG;
20 #endif
21 
22 /* #define GAMG_STAGES */
23 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
24 static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
25 #endif
26 
27 static PetscFunctionList GAMGList = 0;
28 static PetscBool PCGAMGPackageInitialized;
29 
30 /* ----------------------------------------------------------------------------- */
31 PetscErrorCode PCReset_GAMG(PC pc)
32 {
33   PetscErrorCode ierr;
34   PC_MG          *mg      = (PC_MG*)pc->data;
35   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
36 
37   PetscFunctionBegin;
38   ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
39   pc_gamg->data_sz = 0;
40   ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
41   PetscFunctionReturn(0);
42 }
43 
44 /* -------------------------------------------------------------------------- */
45 /*
46    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
47      of active processors.
48 
49    Input Parameter:
50    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
51           'pc_gamg->data_sz' are changed via repartitioning/reduction.
52    . Amat_fine - matrix on this fine (k) level
53    . cr_bs - coarse block size
54    In/Output Parameter:
55    . a_P_inout - prolongation operator to the next level (k-->k-1)
56    . a_nactive_proc - number of active procs
57    Output Parameter:
58    . a_Amat_crs - coarse matrix that is created (k-1)
59 */
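/*
   For orientation, a minimal sketch (not taken from this source) of the Galerkin triple
   product this routine forms before any repartitioning; 'Afine' and 'P' stand for an
   assembled fine-grid operator and prolongator:

     Mat Acoarse;
     ierr = MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);CHKERRQ(ierr);

   i.e. Acoarse = P^T Afine P, where 2.0 is an estimate of the fill ratio of the result.
*/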
60 
61 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
62 {
63   PetscErrorCode  ierr;
64   PC_MG           *mg         = (PC_MG*)pc->data;
65   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
66   Mat             Cmat,Pold=*a_P_inout;
67   MPI_Comm        comm;
68   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
69   PetscInt        ncrs_eq,ncrs,f_bs;
70 
71   PetscFunctionBegin;
72   ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
73   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
74   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
75   ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
76   ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);
77 
78   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
79   ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
80   if (pc_gamg->data_cell_rows>0) {
81     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
82   } else {
83     PetscInt  bs;
84     ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
85     ncrs = ncrs_eq/bs;
86   }
87 
88   /* compute the number of PEs to make active, 'new_size'; the reduction can target any integer in 1..P */
89   if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
90   else {
91     PetscInt ncrs_eq_glob;
92     ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
93     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
94     if (!new_size) new_size = 1; /* not likely, possible? */
95     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
96   }
97 
98   if (Pcolumnperm) *Pcolumnperm = NULL;
99 
100   if (!pc_gamg->repart && new_size==nactive) {
101     *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
102     /* we know that the grid structure can be reused in MatPtAP */
103   } else {
104     /* we know that the grid structure can NOT be reused in MatPtAP */
105     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
106     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
107 
108     nloc_old = ncrs_eq/cr_bs;
109     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
110 #if defined PETSC_GAMG_USE_LOG
111     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
112 #endif
113     /* make 'is_eq_newproc' */
114     ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
115     if (pc_gamg->repart && new_size!=nactive) {
116       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
117       Mat adj;
118 
119       ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);
120 
121       /* get 'adj' */
122       if (cr_bs == 1) {
123         ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
124       } else {
125         /* make a scalar matrix to partition (no Stokes here) */
126         Mat               tMat;
127         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
128         const PetscScalar *vals;
129         const PetscInt    *idx;
130         PetscInt          *d_nnz, *o_nnz, M, N;
131         static PetscInt   llev = 0;
132         MatType           mtype;
133 
134         ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
135         ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
136         ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
137         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
138           ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
139           d_nnz[jj] = ncols/cr_bs;
140           o_nnz[jj] = ncols/cr_bs;
141           ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
142           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
143           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
144         }
145 
146         ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
147         ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
148         ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
149         ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
150         ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
151         ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
152         ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
153 
154         for (ii = Istart_crs; ii < Iend_crs; ii++) {
155           PetscInt dest_row = ii/cr_bs;
156           ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
157           for (jj = 0; jj < ncols; jj++) {
158             PetscInt    dest_col = idx[jj]/cr_bs;
159             PetscScalar v        = 1.0;
160             ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
161           }
162           ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
163         }
164         ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
165         ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
166 
167         if (llev++ == -1) {
168           PetscViewer viewer; char fname[32];
169           ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
170           ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
171           ierr = MatView(tMat, viewer);CHKERRQ(ierr);
172           ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
173         }
174         ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
175         ierr = MatDestroy(&tMat);CHKERRQ(ierr);
176       } /* create 'adj' */
177 
178       { /* partition: get newproc_idx */
179         char            prefix[256];
180         const char      *pcpre;
181         const PetscInt  *is_idx;
182         MatPartitioning mpart;
183         IS              proc_is;
184         PetscInt        targetPE;
185 
186         ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
187         ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
188         ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
189         ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
190         ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
191         ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
192         ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
193         ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
194         ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
195 
196         /* collect IS info */
197         ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
198         ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
199         targetPE = 1; /* bring to "front" of machine */
200         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
201         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
202           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
203             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
204           }
205         }
206         ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
207         ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
208       }
209       ierr = MatDestroy(&adj);CHKERRQ(ierr);
210 
211       ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
212       ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
213     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
214       PetscInt rfactor,targetPE;
215 
216       /* find factor */
217       if (new_size == 1) rfactor = size; /* easy */
218       else {
219         PetscReal best_fact = 0.;
220         jj = -1;
221         for (kk = 1 ; kk <= size ; kk++) {
222           if (!(size%kk)) { /* a candidate */
223             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
224             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
225             if (fact > best_fact) {
226               best_fact = fact; jj = kk;
227             }
228           }
229         }
230         if (jj != -1) rfactor = jj;
231         else rfactor = 1; /* does this ever happen? (e.g., a prime size) */
232       }
233       new_size = size/rfactor;
234 
235       if (new_size==nactive) {
236         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction; bail out here since we are nested */
237         ierr        = PetscFree(counts);CHKERRQ(ierr);
238         ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
239 #if defined PETSC_GAMG_USE_LOG
240         ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
241 #endif
242         PetscFunctionReturn(0);
243       }
244 
245       ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
246       targetPE = rank/rfactor;
247       ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
248     } /* end simple 'is_eq_newproc' */
249 
250     /*
251      Create an index set from the is_eq_newproc index set to indicate the mapping TO the new (renumbered) layout
252      */
253     ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
254     is_eq_num_prim = is_eq_num;
255     /*
256       Determine how many equations/vertices are assigned to each processor
257      */
258     ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
259     ncrs_eq_new = counts[rank];
260     ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
261     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */
262 
263     ierr = PetscFree(counts);CHKERRQ(ierr);
264 #if defined PETSC_GAMG_USE_LOG
265     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
266 #endif
267     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
268     {
269     Vec            src_crd, dest_crd;
270     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
271     VecScatter     vecscat;
272     PetscScalar    *array;
273     IS isscat;
274 
275     /* move data (for primal equations only) */
276     /* Create a vector to contain the newly ordered element information */
277     ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
278     ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
279     ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
280     /*
281      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
282      a block size of ...).  Note, ISs are expanded into equation space by 'cr_bs'.
283      */
284     ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
285     ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
286     for (ii=0,jj=0; ii<ncrs; ii++) {
287       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
288       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
289     }
290     ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
291     ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
292     ierr = PetscFree(tidx);CHKERRQ(ierr);
293     /*
294      Create a vector to contain the original vertex information for each element
295      */
296     ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
297     for (jj=0; jj<ndata_cols; jj++) {
298       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
299       for (ii=0; ii<ncrs; ii++) {
300         for (kk=0; kk<ndata_rows; kk++) {
301           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
302           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
303           ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
304         }
305       }
306     }
307     ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
308     ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
309     /*
310       Scatter the element vertex information (still in the original vertex ordering)
311       to the correct processor
312     */
313     ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
314     ierr = ISDestroy(&isscat);CHKERRQ(ierr);
315     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
316     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
317     ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
318     ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
319     /*
320       Put the element vertex data into a new allocation of the gdata->ele
321     */
322     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
323     ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);
324 
325     pc_gamg->data_sz = node_data_sz*ncrs_new;
326     strideNew        = ncrs_new*ndata_rows;
327 
328     ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
329     for (jj=0; jj<ndata_cols; jj++) {
330       for (ii=0; ii<ncrs_new; ii++) {
331         for (kk=0; kk<ndata_rows; kk++) {
332           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
333           pc_gamg->data[ix] = PetscRealPart(array[jx]);
334         }
335       }
336     }
337     ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
338     ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
339     }
340     /* move A and P (columns) with new layout */
341 #if defined PETSC_GAMG_USE_LOG
342     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
343 #endif
344 
345     /*
346       Invert for MatCreateSubMatrix
347     */
348     ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
349     ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
350     ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
351     if (is_eq_num != is_eq_num_prim) {
352       ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
353     }
354     if (Pcolumnperm) {
355       ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
356       *Pcolumnperm = new_eq_indices;
357     }
358     ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
359 #if defined PETSC_GAMG_USE_LOG
360     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
361     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
362 #endif
363     /* 'a_Amat_crs' output */
364     {
365       Mat mat;
366       ierr        = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
367       *a_Amat_crs = mat;
368     }
369     ierr = MatDestroy(&Cmat);CHKERRQ(ierr);
370 
371 #if defined PETSC_GAMG_USE_LOG
372     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
373 #endif
374     /* prolongator */
375     {
376       IS       findices;
377       PetscInt Istart,Iend;
378       Mat      Pnew;
379 
380       ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
381 #if defined PETSC_GAMG_USE_LOG
382       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
383 #endif
384       ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
385       ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
386       ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
387       ierr = ISDestroy(&findices);CHKERRQ(ierr);
388 
389 #if defined PETSC_GAMG_USE_LOG
390       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
391 #endif
392       ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);
393 
394       /* output - repartitioned */
395       *a_P_inout = Pnew;
396     }
397     ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);
398 
399     *a_nactive_proc = new_size; /* output */
400   }
401   PetscFunctionReturn(0);
402 }
403 
404 /* -------------------------------------------------------------------------- */
405 /*
406    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
407                     by setting data structures and options.
408 
409    Input Parameter:
410 .  pc - the preconditioner context
411 
412 */
413 PetscErrorCode PCSetUp_GAMG(PC pc)
414 {
415   PetscErrorCode ierr;
416   PC_MG          *mg      = (PC_MG*)pc->data;
417   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
418   Mat            Pmat     = pc->pmat;
419   PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
420   MPI_Comm       comm;
421   PetscMPIInt    rank,size,nactivepe;
422   Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
423   IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
424   PetscLogDouble nnz0=0.,nnztot=0.;
425   MatInfo        info;
426   PetscBool      is_last = PETSC_FALSE;
427 
428   PetscFunctionBegin;
429   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
430   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
431   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
432 
433   if (pc_gamg->setup_count++ > 0) {
434     if ((PetscBool)(!pc_gamg->reuse_prol)) {
435       /* reset everything */
436       ierr = PCReset_MG(pc);CHKERRQ(ierr);
437       pc->setupcalled = 0;
438     } else {
439       PC_MG_Levels **mglevels = mg->levels;
440       /* just do Galerkin grids */
441       Mat          B,dA,dB;
442 
443       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
444       if (pc_gamg->Nlevels > 1) {
445         /* currently only handle case where mat and pmat are the same on coarser levels */
446         ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
447         /* (re)set to get dirty flag */
448         ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);
449 
450         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
451           /* 2nd solve: the matrix structure can change from repartitioning or process reduction, but we do not know here whether process reduction occurred. Should fix */
452           if (pc_gamg->setup_count==2 /* && pc_gamg->repart||reduction */) {
453             ierr = PetscInfo2(pc,"new RAP after first solve level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
454             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,2.0,&B);CHKERRQ(ierr);
455             ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
456             mglevels[level]->A = B;
457           } else {
458             ierr = PetscInfo2(pc,"RAP after first solve reusing matrix level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
459             ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
460             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
461           }
462           ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
463           dB   = B;
464         }
465       }
466 
467       ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
468       PetscFunctionReturn(0);
469     }
470   }
471 
472   if (!pc_gamg->data) {
473     if (pc_gamg->orig_data) {
474       ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
475       ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);
476 
477       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
478       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
479       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
480 
481       ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
482       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
483     } else {
484       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
485       ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
486     }
487   }
488 
489   /* cache original data for reuse */
490   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
491     ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
492     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
493     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
494     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
495   }
496 
497   /* get basic dims */
498   ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
499   ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);
500 
501   ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
502   nnz0   = info.nz_used;
503   nnztot = info.nz_used;
504   ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);
505 
506   /* Get A_i and R_i */
507   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
508     pc_gamg->current_level = level;
509     level1 = level + 1;
510 #if defined PETSC_GAMG_USE_LOG
511     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
512 #if (defined GAMG_STAGES)
513     ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
514 #endif
515 #endif
516     { /* construct prolongator */
517       Mat              Gmat;
518       PetscCoarsenData *agg_lists;
519       Mat              Prol11;
520 
521       ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
522       ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
523       ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);
524 
525       /* could have failed to create new level */
526       if (Prol11) {
527         /* get new block size of coarse matrices */
528         ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);
529 
530         if (pc_gamg->ops->optprolongator) {
531           /* smooth */
532           ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
533         }
534 
535         if (pc_gamg->use_aggs_in_asm) {
536           PetscInt bs;
537           ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
538           ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
539         }
540 
541         Parr[level1] = Prol11;
542       } else Parr[level1] = NULL; /* failed to coarsen */
543 
544       ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
545       ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
546     } /* construct prolongator scope */
547 #if defined PETSC_GAMG_USE_LOG
548     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
549 #endif
550     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
551     if (!Parr[level1]) { /* failed to coarsen */
552       ierr =  PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
553 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
554       ierr = PetscLogStagePop();CHKERRQ(ierr);
555 #endif
556       break;
557     }
558 #if defined PETSC_GAMG_USE_LOG
559     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
560 #endif
561     ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
562     if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: 'is_last' was already set on a previous level");
563     if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
564     if (level1 == pc_gamg->Nlevels-1) is_last = PETSC_TRUE;
565     ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);
566 
567 #if defined PETSC_GAMG_USE_LOG
568     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
569 #endif
570     ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
571     ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
572     nnztot += info.nz_used;
573     ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);
574 
575 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
576     ierr = PetscLogStagePop();CHKERRQ(ierr);
577 #endif
578     /* stop if one node or one proc -- could pull back for singular problems */
579     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
580       ierr =  PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
581       level++;
582       break;
583     }
584   } /* levels */
585   ierr                  = PetscFree(pc_gamg->data);CHKERRQ(ierr);
586 
587   ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
588   pc_gamg->Nlevels = level + 1;
589   fine_level       = level;
590   ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
591 
592   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
593     /* set default smoothers & set operators */
594     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
595       KSP smoother;
596       PC  subpc;
597 
598       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
599       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
600 
601       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
602       /* set ops */
603       ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
604       ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);
605 
606       /* set defaults */
607       ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);
608 
609       /* set blocks for ASM smoother that uses the 'aggregates' */
610       if (pc_gamg->use_aggs_in_asm) {
611         PetscInt sz;
612         IS       *iss;
613 
614         sz   = nASMBlocksArr[level];
615         iss  = ASMLocalIDsArr[level];
616         ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
617         ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
618         ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
619         if (!sz) {
620           IS       is;
621           ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
622           ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
623           ierr = ISDestroy(&is);CHKERRQ(ierr);
624         } else {
625           PetscInt kk;
626           ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
627           for (kk=0; kk<sz; kk++) {
628             ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
629           }
630           ierr = PetscFree(iss);CHKERRQ(ierr);
631         }
632         ASMLocalIDsArr[level] = NULL;
633         nASMBlocksArr[level]  = 0;
634       } else {
635         ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
636       }
637     }
638     {
639       /* coarse grid */
640       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
641       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
642       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
643       ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
644       if (!pc_gamg->use_parallel_coarse_grid_solver) {
645         ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
646         ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
647         ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
648         ierr = PCSetUp(subpc);CHKERRQ(ierr);
649         ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
650         if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
651         ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
652         ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
653         ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
654         ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
655         ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
656         /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
657          * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
658          * view every subdomain as though they were different. */
659         ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
660       }
661     }
662 
663     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
664     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
665     ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
666     ierr = PetscOptionsEnd();CHKERRQ(ierr);
667     ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
668 
669     /* clean up */
670     for (level=1; level<pc_gamg->Nlevels; level++) {
671       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
672       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
673     }
674     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
675   } else {
676     KSP smoother;
677     ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
678     ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
679     ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
680     ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
681     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
682   }
683   PetscFunctionReturn(0);
684 }
685 
686 /* ------------------------------------------------------------------------- */
687 /*
688  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
689    that was created with PCCreate_GAMG().
690 
691    Input Parameter:
692 .  pc - the preconditioner context
693 
694    Application Interface Routine: PCDestroy()
695 */
696 PetscErrorCode PCDestroy_GAMG(PC pc)
697 {
698   PetscErrorCode ierr;
699   PC_MG          *mg     = (PC_MG*)pc->data;
700   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;
701 
702   PetscFunctionBegin;
703   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
704   if (pc_gamg->ops->destroy) {
705     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
706   }
707   ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
708   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
709   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
710   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 /*@
715    PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.
716 
717    Logically Collective on PC
718 
719    Input Parameters:
720 +  pc - the preconditioner context
721 -  n - the number of equations
722 
723 
724    Options Database Key:
725 .  -pc_gamg_process_eq_limit <limit>
726 
727    Notes:
728     GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
729           that has degrees of freedom
730 
731    Level: intermediate
732 
733 .seealso: PCGAMGSetCoarseEqLim()
734 @*/
735 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
736 {
737   PetscErrorCode ierr;
738 
739   PetscFunctionBegin;
740   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
741   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
742   PetscFunctionReturn(0);
743 }
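
/*
   Example (a hedged sketch, not part of this source), assuming 'pc' has already been
   set to PCGAMG: aim for roughly 200 equations per active process on coarse grids.

     ierr = PCGAMGSetProcEqLim(pc,200);CHKERRQ(ierr);

   Equivalently on the command line: -pc_gamg_process_eq_limit 200
*/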
744 
745 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
746 {
747   PC_MG   *mg      = (PC_MG*)pc->data;
748   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
749 
750   PetscFunctionBegin;
751   if (n>0) pc_gamg->min_eq_proc = n;
752   PetscFunctionReturn(0);
753 }
754 
755 /*@
756    PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.
757 
758  Collective on PC
759 
760    Input Parameters:
761 +  pc - the preconditioner context
762 -  n - maximum number of equations to aim for
763 
764    Options Database Key:
765 .  -pc_gamg_coarse_eq_limit <limit>
766 
767    Notes: For example, -pc_gamg_coarse_eq_limit 1000 will stop coarsening once the coarse grid
768      has fewer than 1000 unknowns.
769 
770    Level: intermediate
771 
772 .seealso: PCGAMGSetProcEqLim()
773 @*/
774 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
775 {
776   PetscErrorCode ierr;
777 
778   PetscFunctionBegin;
779   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
780   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
781   PetscFunctionReturn(0);
782 }
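
/*
   Example (sketch, assuming 'pc' is a PCGAMG): cap the coarsest grid at about 500
   equations so the coarse direct solve stays cheap.

     ierr = PCGAMGSetCoarseEqLim(pc,500);CHKERRQ(ierr);
*/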
783 
784 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
785 {
786   PC_MG   *mg      = (PC_MG*)pc->data;
787   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
788 
789   PetscFunctionBegin;
790   if (n>0) pc_gamg->coarse_eq_limit = n;
791   PetscFunctionReturn(0);
792 }
793 
794 /*@
795    PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids
796 
797    Collective on PC
798 
799    Input Parameters:
800 +  pc - the preconditioner context
801 -  n - PETSC_TRUE or PETSC_FALSE
802 
803    Options Database Key:
804 .  -pc_gamg_repartition <true,false>
805 
806    Notes:
807     this will generally improve the load balancing of the work on each level
808 
809    Level: intermediate
810 
811 .seealso: ()
812 @*/
813 PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
814 {
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
819   ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
824 {
825   PC_MG   *mg      = (PC_MG*)pc->data;
826   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
827 
828   PetscFunctionBegin;
829   pc_gamg->repart = n;
830   PetscFunctionReturn(0);
831 }
832 
833 /*@
834    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner
835 
836    Collective on PC
837 
838    Input Parameters:
839 +  pc - the preconditioner context
840 -  n - PETSC_TRUE or PETSC_FALSE
841 
842    Options Database Key:
843 .  -pc_gamg_reuse_interpolation <true,false>
844 
845    Level: intermediate
846 
847    Notes:
848     this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
849           rebuilding the preconditioner more quickly.
850 
851 .seealso: ()
852 @*/
853 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
854 {
855   PetscErrorCode ierr;
856 
857   PetscFunctionBegin;
858   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
859   ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
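
/*
   Example (a hedged sketch; 'ksp', 'pc', 'Anew', 'b', and 'x' are assumed to exist):
   reuse the prolongation when the matrix values change but the nonzero structure does not.

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);               [first solve builds the hierarchy]
     ierr = KSPSetOperators(ksp,Anew,Anew);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);               [setup now only redoes the RAP products; see PCSetUp_GAMG above]
*/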
862 
863 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
864 {
865   PC_MG   *mg      = (PC_MG*)pc->data;
866   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
867 
868   PetscFunctionBegin;
869   pc_gamg->reuse_prol = n;
870   PetscFunctionReturn(0);
871 }
872 
873 /*@
874    PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.
875 
876    Collective on PC
877 
878    Input Parameters:
879 +  pc - the preconditioner context
880 -  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not
881 
882    Options Database Key:
883 .  -pc_gamg_asm_use_agg
884 
885    Level: intermediate
886 
887 .seealso: ()
888 @*/
889 PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
890 {
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
895   ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
896   PetscFunctionReturn(0);
897 }
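
/*
   Example (sketch, assuming 'pc' is a PCGAMG):

     ierr = PCGAMGASMSetUseAggs(pc,PETSC_TRUE);CHKERRQ(ierr);

   PCSetUp_GAMG above then selects PCASM for each level smoother and feeds it the
   aggregates as zero-overlap local subdomains.
*/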
898 
899 static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
900 {
901   PC_MG   *mg      = (PC_MG*)pc->data;
902   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
903 
904   PetscFunctionBegin;
905   pc_gamg->use_aggs_in_asm = flg;
906   PetscFunctionReturn(0);
907 }
908 
909 /*@
910    PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver
911 
912    Collective on PC
913 
914    Input Parameters:
915 +  pc - the preconditioner context
916 -  flg - PETSC_TRUE to not force coarse grid onto one processor
917 
918    Options Database Key:
919 .  -pc_gamg_use_parallel_coarse_grid_solver
920 
921    Level: intermediate
922 
923 .seealso: ()
924 @*/
925 PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
926 {
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
931   ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
936 {
937   PC_MG   *mg      = (PC_MG*)pc->data;
938   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
939 
940   PetscFunctionBegin;
941   pc_gamg->use_parallel_coarse_grid_solver = flg;
942   PetscFunctionReturn(0);
943 }
944 
945 /*@
946    PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use
947 
948    Not collective on PC
949 
950    Input Parameters:
951 +  pc - the preconditioner
952 -  n - the maximum number of levels to use
953 
954    Options Database Key:
955 .  -pc_mg_levels
956 
957    Level: intermediate
958 
959 .seealso: ()
960 @*/
961 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
962 {
963   PetscErrorCode ierr;
964 
965   PetscFunctionBegin;
966   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
967   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
968   PetscFunctionReturn(0);
969 }
970 
971 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
972 {
973   PC_MG   *mg      = (PC_MG*)pc->data;
974   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
975 
976   PetscFunctionBegin;
977   pc_gamg->Nlevels = n;
978   PetscFunctionReturn(0);
979 }
980 
981 /*@
982    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
983 
984    Not collective on PC
985 
986    Input Parameters:
987 +  pc - the preconditioner context
988 -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
989 
990    Options Database Key:
991 .  -pc_gamg_threshold <threshold>
992 
993    Notes:
994     Increasing the threshold decreases the rate of coarsening. Conversely, reducing the threshold increases the rate of coarsening (aggressive coarsening), which reduces the complexity of the coarse grids but generally results in slower solver convergence rates. Reducing coarse grid complexity also reduces the cost of Galerkin coarse grid construction considerably.
995     Before coarsening or aggregating the graph, GAMG removes small values from the graph with this threshold, thus reducing the coupling in the graph and producing a different (perhaps better) coarse set of points.
996 
997    Level: intermediate
998 
999 .seealso: PCGAMGFilterGraph(), PCGAMGSetSquareGraph()
1000 @*/
1001 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
1002 {
1003   PetscErrorCode ierr;
1004 
1005   PetscFunctionBegin;
1006   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1007   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
1008   PetscFunctionReturn(0);
1009 }
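
/*
   Example (sketch, assuming 'pc' is a PCGAMG): give explicit thresholds for the two
   finest levels; remaining levels are filled in using the threshold scale factor.

     PetscReal th[2] = {0.02,0.01};
     ierr = PCGAMGSetThreshold(pc,th,2);CHKERRQ(ierr);

   Equivalently on the command line: -pc_gamg_threshold 0.02,0.01
*/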
1010 
1011 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
1012 {
1013   PC_MG   *mg      = (PC_MG*)pc->data;
1014   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1015   PetscInt i;
1016   PetscFunctionBegin;
1017   for (i=0;i<n;i++) pc_gamg->threshold[i] = v[i];
1018   for (;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale; /* fill remaining levels by scaling the last given value */
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 /*@
1023    PCGAMGSetThresholdScale - Relative threshold reduction at each level
1024 
1025    Not collective on PC
1026 
1027    Input Parameters:
1028 +  pc - the preconditioner context
1029 -  scale - the threshold value reduction, usually < 1.0
1030 
1031    Options Database Key:
1032 .  -pc_gamg_threshold_scale <v>
1033 
1034    Level: advanced
1035 
1036 .seealso: ()
1037 @*/
1038 PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
1039 {
1040   PetscErrorCode ierr;
1041 
1042   PetscFunctionBegin;
1043   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1044   ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
1045   PetscFunctionReturn(0);
1046 }
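
/*
   Worked example (illustrative numbers): with -pc_gamg_threshold 0.02 and
   -pc_gamg_threshold_scale 0.5, the per-level thresholds become 0.02, 0.01,
   0.005, ... on successively coarser levels.
*/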
1047 
1048 static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
1049 {
1050   PC_MG   *mg      = (PC_MG*)pc->data;
1051   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1052   PetscFunctionBegin;
1053   pc_gamg->threshold_scale = v;
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 /*@C
1058    PCGAMGSetType - Set solution method
1059 
1060    Collective on PC
1061 
1062    Input Parameters:
1063 +  pc - the preconditioner context
1064 -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL
1065 
1066    Options Database Key:
1067 .  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply
1068 
1069    Level: intermediate
1070 
1071 .seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
1072 @*/
1073 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1074 {
1075   PetscErrorCode ierr;
1076 
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1079   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 /*@C
1084    PCGAMGGetType - Get solution method
1085 
1086    Collective on PC
1087 
1088    Input Parameter:
1089 .  pc - the preconditioner context
1090 
1091    Output Parameter:
1092 .  type - the type of algorithm used
1093 
1094    Level: intermediate
1095 
1096 .seealso: PCGAMGSetType(), PCGAMGType
1097 @*/
1098 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1099 {
1100   PetscErrorCode ierr;
1101 
1102   PetscFunctionBegin;
1103   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1104   ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
1105   PetscFunctionReturn(0);
1106 }
1107 
1108 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1109 {
1110   PC_MG          *mg      = (PC_MG*)pc->data;
1111   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1112 
1113   PetscFunctionBegin;
1114   *type = pc_gamg->type;
1115   PetscFunctionReturn(0);
1116 }
1117 
1118 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1119 {
1120   PetscErrorCode ierr,(*r)(PC);
1121   PC_MG          *mg      = (PC_MG*)pc->data;
1122   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1123 
1124   PetscFunctionBegin;
1125   pc_gamg->type = type;
1126   ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
1127   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1128   if (pc_gamg->ops->destroy) {
1129     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
1130     ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
1131     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1132     /* cleaning up common data in pc_gamg - this should disappear someday */
1133     pc_gamg->data_cell_cols = 0;
1134     pc_gamg->data_cell_rows = 0;
1135     pc_gamg->orig_data_cell_cols = 0;
1136     pc_gamg->orig_data_cell_rows = 0;
1137     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
1138     pc_gamg->data_sz = 0;
1139   }
1140   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
1141   ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
1142   ierr = (*r)(pc);CHKERRQ(ierr);
1143   PetscFunctionReturn(0);
1144 }
1145 
1146 /* -------------------------------------------------------------------------- */
1147 /*
1148    PCMGGetGridComplexity - compute coarse grid complexity of MG hierarchy
1149 
1150    Input Parameter:
1151 .  pc - the preconditioner context
1152 
1153    Output Parameter:
1154 .  gc - grid complexity = sum_i(nnz_i) / nnz_0
1155 
1156    Level: advanced
1157 */
1158 static PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc)
1159 {
1160   PetscErrorCode ierr;
1161   PC_MG          *mg      = (PC_MG*)pc->data;
1162   PC_MG_Levels   **mglevels = mg->levels;
1163   PetscInt       lev, nnz0 = -1;
1164   MatInfo        info;
1165   PetscFunctionBegin;
1166   if (!mg->nlevels) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MG has no levels");
1167   for (lev=0, *gc=0; lev<mg->nlevels; lev++) {
1168     Mat dB;
1169     ierr = KSPGetOperators(mglevels[lev]->smoothd,NULL,&dB);CHKERRQ(ierr);
1170     ierr = MatGetInfo(dB,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
1171     *gc += (PetscReal)info.nz_used;
1172     if (lev==mg->nlevels-1) nnz0 = info.nz_used;
1173   }
1174   if (nnz0) *gc /= (PetscReal)nnz0;
1175   else *gc = 0;
1176   PetscFunctionReturn(0);
1177 }
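
/*
   Worked example (illustrative numbers): a 3-level hierarchy with nnz counts of
   1000 (fine), 80, and 10 gives gc = (1000+80+10)/1000 = 1.09, i.e. the coarse
   grids add 9% to the fine-grid storage and work.
*/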
1178 
1179 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1180 {
1181   PetscErrorCode ierr; PetscInt i;
1182   PC_MG          *mg      = (PC_MG*)pc->data;
1183   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1184   PetscReal       gc=0;
1185   PetscFunctionBegin;
1186   ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
1187   ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
1188   for (i=0;i<pc_gamg->current_level;i++) {
1189     ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
1190   }
1191   ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
1192   ierr = PetscViewerASCIIPrintf(viewer,"      Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
1193   if (pc_gamg->use_aggs_in_asm) {
1194     ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
1195   }
1196   if (pc_gamg->use_parallel_coarse_grid_solver) {
1197     ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
1198   }
1199   if (pc_gamg->ops->view) {
1200     ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
1201   }
1202   ierr = PCMGGetGridComplexity(pc,&gc);CHKERRQ(ierr);
1203   ierr = PetscViewerASCIIPrintf(viewer,"      Complexity:    grid = %g\n",gc);CHKERRQ(ierr);
1204   PetscFunctionReturn(0);
1205 }
1206 
1207 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1208 {
1209   PetscErrorCode ierr;
1210   PC_MG          *mg      = (PC_MG*)pc->data;
1211   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1212   PetscBool      flag;
1213   MPI_Comm       comm;
1214   char           prefix[256];
1215   PetscInt       i,n;
1216   const char     *pcpre;
1217 
1218   PetscFunctionBegin;
1219   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1220   ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
1221   {
1222     char tname[256];
1223     ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
1224     if (flag) {
1225       ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
1226     }
1227     ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
1228     ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
1229     ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
1230     ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
1231     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
1232     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
1233     ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
1234     n = PETSC_GAMG_MAXLEVELS;
1235     ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
1236     if (!flag || n < PETSC_GAMG_MAXLEVELS) {
1237       if (!flag) n = 1;
1238       i = n;
1239       do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS);
1240     }
1241     ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
1242 
1243     /* set options for subtype */
1244     if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
1245   }
1246   ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
1247   ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
1248   ierr = PetscOptionsTail();CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 /* -------------------------------------------------------------------------- */
1253 /*MC
1254      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner
1255 
1256    Options Database Keys:
1257 +   -pc_gamg_type <type> - one of agg, geo, or classical
1258 .   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
1259 .   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
1260 .   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
1261 .   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
1262                                         equations on each process that has degrees of freedom
1263 .   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
1264 .   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
1265 -   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified
1266 
1267    Options Database Keys for default Aggregation:
1268 +  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation
1269 .  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
1270 -  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it
1271 
1272    Multigrid options:
1273 +  -pc_mg_cycles <v> - v or w, see PCMGSetCycleType()
1274 .  -pc_mg_distinct_smoothup - configure the up and down (pre and post) smoothers separately, see PCMGSetDistinctSmoothUp()
1275 .  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
1276 -  -pc_mg_levels <levels> - Number of levels of multigrid to use.
1277 
1278 
1279   Notes:
1280     In order to obtain good performance for PCGAMG for vector valued problems you must
1281        call MatSetBlockSize() to indicate the number of degrees of freedom per grid point, and
1282        call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator;
1283        see the Users Manual Chapter 4 for more details, and the usage sketch following this manual page
1284 
1285   Level: intermediate
1286 
1287 .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1288            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation()
1289 M*/
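
/*
   Usage sketch (illustrative, not part of this source): 3D elasticity with 3 dof per
   vertex. 'ksp', the assembled stiffness matrix 'A', vectors 'b' and 'x', and the
   coordinate array 'coords' with 'nvertices' local vertices are all assumed to exist.

     PC pc;
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);                      [3 dof per grid point]
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,nvertices,coords);CHKERRQ(ierr);   [builds the rigid-body near null space]
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/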
1290 
1291 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1292 {
1293   PetscErrorCode ierr; PetscInt i;
1294   PC_GAMG        *pc_gamg;
1295   PC_MG          *mg;
1296 
1297   PetscFunctionBegin;
1298    /* register AMG type */
1299   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1300 
1301   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1302   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
1303   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1304 
1305   /* create a supporting struct and attach it to pc */
1306   ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
1307   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
1308   mg           = (PC_MG*)pc->data;
1309   mg->innerctx = pc_gamg;
1310 
1311   ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);
1312 
1313   pc_gamg->setup_count = 0;
1314   /* these should be in subctx but repartitioning needs simple arrays */
1315   pc_gamg->data_sz = 0;
1316   pc_gamg->data    = NULL;
1317 
1318   /* override the PCMG methods with those of the derived class PCGAMG */
1319   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1320   pc->ops->setup          = PCSetUp_GAMG;
1321   pc->ops->reset          = PCReset_GAMG;
1322   pc->ops->destroy        = PCDestroy_GAMG;
1323   mg->view                = PCView_GAMG;
1324 
1325   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCMGGetLevels_C",PCMGGetLevels_MG);CHKERRQ(ierr);
1326   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCMGSetLevels_C",PCMGSetLevels_MG);CHKERRQ(ierr);
1327   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1328   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1329   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
1330   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
1331   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
1332   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
1333   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1334   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
1335   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1336   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
1337   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
1338   pc_gamg->repart           = PETSC_FALSE;
1339   pc_gamg->reuse_prol       = PETSC_FALSE;
1340   pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
1341   pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
1342   pc_gamg->min_eq_proc      = 50;
1343   pc_gamg->coarse_eq_limit  = 50;
1344   for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
1345   pc_gamg->threshold_scale  = 1.;
1346   pc_gamg->Nlevels          = PETSC_GAMG_MAXLEVELS;
1347   pc_gamg->current_level    = 0; /* don't need to init really */
1348   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1349 
1350   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1351   ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
1352   PetscFunctionReturn(0);
1353 }
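
/*
   Note on the PetscObjectComposeFunction() calls above: the public setters (e.g.,
   PCGAMGSetRepartition()) dispatch through these composed "_C" entries rather than
   casting pc->data directly. A rough sketch of the wrapper pattern (the exact bodies
   appear earlier in this file) is

     PetscErrorCode PCGAMGSetRepartition(PC pc,PetscBool n)
     {
       PetscErrorCode ierr;

       PetscFunctionBegin;
       PetscValidHeaderSpecific(pc,PC_CLASSID,1);
       ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }

   Because PetscTryMethod() does nothing when the queried function is absent, such
   setters are safe to call on a PC of any type.
*/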
1354 
1355 /*@C
1356  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1357     from PCInitializePackage().
1358 
1359  Level: developer
1360 
1361  .seealso: PetscInitialize()
1362 @*/
1363 PetscErrorCode PCGAMGInitializePackage(void)
1364 {
1365   PetscErrorCode ierr;
1366 
1367   PetscFunctionBegin;
1368   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1369   PCGAMGPackageInitialized = PETSC_TRUE;
1370   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1371   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1372   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1373   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1374 
1375   /* general events */
1376   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1377   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1378   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1379   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1380   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1381   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1382   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1383 
1384 #if defined PETSC_GAMG_USE_LOG
1385   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1386   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1387   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1388   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1389   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1390   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1391   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1392   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1393   ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1394   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1395   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1396   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1397   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1398   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1399   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1400   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1401   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1402 
1403   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1404   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1405   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1406   /* create timer stages */
1407 #if defined GAMG_STAGES
1408   {
1409     char     str[32];
1410     PetscInt lidx;
1411     sprintf(str,"MG Level %d (finest)",0);
1412     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1413     for (lidx=1; lidx<9; lidx++) {
1414       sprintf(str,"MG Level %d",(int)lidx); /* cast: PetscInt may be 64-bit */
1415       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1416     }
1417   }
1418 #endif
1419 #endif
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 /*@C
1424  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1425     called from PetscFinalize() automatically.
1426 
1427  Level: developer
1428 
1429  .seealso: PetscFinalize()
1430 @*/
1431 PetscErrorCode PCGAMGFinalizePackage(void)
1432 {
1433   PetscErrorCode ierr;
1434 
1435   PetscFunctionBegin;
1436   PCGAMGPackageInitialized = PETSC_FALSE;
1437   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1438   PetscFunctionReturn(0);
1439 }
1440 
1441 /*@C
1442  PCGAMGRegister - Register a PCGAMG implementation.
1443 
1444  Input Parameters:
1445  + type - string that will be used as the name of the GAMG type
1446  - create - function for creating the GAMG context
1447 
1448   Level: advanced
1449 
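 Sample usage (MyGAMGCreate is a hypothetical creation routine supplied by the caller):
.vb
   PCGAMGRegister("my_gamg", MyGAMGCreate);
.ve
 after which the new type may be selected with PCGAMGSetType(pc,"my_gamg") or the option -pc_gamg_type my_gamg.
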
1450  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1451 @*/
1452 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1453 {
1454   PetscErrorCode ierr;
1455 
1456   PetscFunctionBegin;
1457   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1458   ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
1459   PetscFunctionReturn(0);
1460 }
1461 
1462