/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = NULL;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create the coarse operator with RAP; repartition and/or reduce the
     number of active processes.

   Input Parameters:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt  bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get the number of processes to make active, 'new_size'; a reduction, can be any integer 1..P */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwired min. number of eq/proc */
    if (!new_size) new_size = 1; /* unlikely, but possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }
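
  /* A worked example of the heuristic above (hypothetical numbers, not from any run): with
     ncrs_eq_glob = 10000 global coarse equations and the default min_eq_proc = 50,
     new_size = (PetscMPIInt)(10000.0/50.0 + 0.5) = 200; if only 64 ranks are currently
     active, new_size is clamped to nactive = 64 and no reduction occurs. */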

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (new_size==nactive) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
    /* we know that the grid structure can be reused in MatPtAP */
  } else { /* reduce active processors - we know that the grid structure can NOT be reused in MatPtAP */
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of the primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) { /* disabled debugging code: change the test to enable */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find a reduction factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* defensive: kk=1 is always a candidate, so this should not be reached */
      }
      new_size = size/rfactor;
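
      /* A worked example of the factor search above (hypothetical numbers): with size = 12 ranks
         and a requested new_size = 5, the divisors of 12 give candidate active counts
         12, 6, 4, 3, 2, 1 with closeness factors 0.417, 0.833, 0.8, 0.6, 0.4, 0.2; the best is
         6 active ranks (rfactor = 2), so new_size is rounded to 6 rather than the non-divisor 5. */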

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS isscat;
      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node; one can think of the vectors as
        having a block size of that. Note, ISs are expanded into equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of pc_gamg->data
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif
    /*
      Invert for MatCreateSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      is_last = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just redo the Galerkin grids */
      Mat          B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* 2nd solve: the matrix structure can change from repartitioning or process reduction, but we don't know if we had process reduction here. Should fix */
          if (pc_gamg->setup_count==2 /* && pc_gamg->repart||reduction */) {
            ierr = PetscInfo2(pc,"new RAP after first solve level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,2.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
            mglevels[level]->A = B;
          } else {
            ierr = PetscInfo2(pc,"RAP after first solve reusing matrix level %D, %D setup\n",level,pc_gamg->setup_count);CHKERRQ(ierr);
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);

  ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        if (pc_gamg->use_aggs_in_asm) {
          PetscInt bs;
          ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
          ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL; /* failed to coarsen */

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) { /* failed to coarsen */
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
    if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coarsening continued past the level flagged as last");
    if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
    if (level1 == pc_gamg->Nlevels-1) is_last = PETSC_TRUE;
    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is a loop test variable */
    ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for ASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_asm) {
        PetscInt sz;
        IS       *iss;

        sz   = nASMBlocksArr[level];
        iss  = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
        if (!sz) {
          IS       is;
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(iss);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      if (!pc_gamg->use_parallel_coarse_grid_solver) {
        ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
        ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
        ierr = PCSetUp(subpc);CHKERRQ(ierr);
        ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
        if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
        ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
        ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
        ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
        ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
        ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
        /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
         * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
         * view every subdomain as though they were different. */
        ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
      }
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes:
    GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
          that has degrees of freedom

   Level: intermediate

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
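
/* A minimal usage sketch (kept in a comment so this file stays compilable; 'ksp' is assumed to
   be an existing KSP, and the limit of 200 is illustrative, not a recommendation):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetProcEqLim(pc,200);CHKERRQ(ierr);

   Equivalently on the command line: -pc_type gamg -pc_gamg_process_eq_limit 200 */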

static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Notes:
    For example, -pc_gamg_coarse_eq_limit 1000 will stop coarsening once the coarse grid
     has fewer than 1000 unknowns.

   Level: intermediate

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
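
/* How the two limits interact (a summary of the logic in PCSetUp_GAMG() and
   PCGAMGCreateLevel_GAMG() above, not new behavior): coarsening stops once the global coarse
   problem has at most coarse_eq_limit equations (or the level limit is reached), while process
   reduction keeps roughly min_eq_proc equations per active process on the way down. For example,
   -pc_gamg_coarse_eq_limit 1000 with -pc_gamg_process_eq_limit 50 would leave a ~1000-equation
   coarsest grid on about 20 active processes, unless it is forced onto one process for the
   coarse solve. */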

static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processes on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes:
    this will generally improve the load balancing of the work on each level

   Level: intermediate

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
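
/* Repartitioning is applied per level in PCGAMGCreateLevel_GAMG() above, where the
   MatPartitioning object is given the 'pc_gamg_' options prefix. A hedged command-line sketch of
   enabling it together with a partitioner choice (the partitioner shown is only an example and
   must be available in the PETSc build):

     -pc_type gamg -pc_gamg_repartition true -pc_gamg_mat_partitioning_type parmetis
*/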

static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetReuseInterpolation - Reuse the prolongation when rebuilding the algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes:
    this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
          rebuilding the preconditioner more quickly.

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
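
/* A hedged sketch of the intended use: a sequence of solves where the matrix values change but
   the sparsity (and near-null space) stay fixed, e.g. time stepping. 'ksp', 'A', 'b', 'x' and
   UpdateMatrixValues() are hypothetical application objects; see the reuse branch of
   PCSetUp_GAMG() above for what is actually reused (prolongators kept, Galerkin products redone).

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       ierr = UpdateMatrixValues(A);CHKERRQ(ierr);
       ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
       ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     }
*/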

static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
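
/* A hedged command-line sketch: with the flag below, PCSetUp_GAMG() above switches each level's
   smoother PC from the default PCSOR to PCASM with one subdomain per aggregate. The smoother KSP
   shown is only an example of something to pair with the ASM smoother:

     -pc_type gamg -pc_gamg_asm_use_agg true -mg_levels_ksp_type richardson
*/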

static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetUseParallelCoarseGridSolve - Allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force the coarse grid onto one process

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
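
/* By default the coarsest grid is reduced to one process and solved with LU (see the coarse grid
   block in PCSetUp_GAMG() above). A hedged sketch of keeping it distributed and picking a
   parallel coarse solver from the options database (the solver shown is only an example):

     -pc_type gamg -pc_gamg_use_parallel_coarse_grid_solver -mg_coarse_ksp_type gmres -mg_coarse_pc_type bjacobi
*/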

static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  v - array of threshold values, one per level (finest first); 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
-  n - number of threshold values provided in v

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes:
    Increasing the threshold decreases the rate of coarsening. Conversely, reducing the threshold increases the rate of coarsening (aggressive coarsening) and thereby reduces the complexity of the coarse grids, but generally results in slower solver convergence rates. Reducing coarse grid complexity reduces the cost of constructing the Galerkin coarse grids considerably.
    Before coarsening or aggregating the graph, GAMG removes small values from the graph with this threshold, thus reducing the coupling in the graph and producing a different (perhaps better) coarse set of points.
    Levels beyond the n provided values get the previous level's threshold multiplied by the scale set with PCGAMGSetThresholdScale().

   Level: intermediate

.seealso: PCGAMGFilterGraph(), PCGAMGSetSquareGraph()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
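
/* A hedged usage sketch (the values are illustrative only). Set the scale first, since
   PCGAMGSetThreshold_GAMG() below fills in the unspecified levels at call time: with a scale of
   0.5, the two explicit thresholds 0.02 and 0.01 continue geometrically as 0.005, 0.0025, ...
   on the coarser levels.

     PetscReal th[2] = {0.02,0.01};
     ierr = PCGAMGSetThresholdScale(pc,0.5);CHKERRQ(ierr);
     ierr = PCGAMGSetThreshold(pc,th,2);CHKERRQ(ierr);
*/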

static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt i;

  PetscFunctionBegin;
  for (i=0; i<n && i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = v[i];
  for (; i>0 && i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale; /* fill the remaining levels geometrically, staying in bounds */
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThresholdScale - Relative threshold reduction at each level

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  v - the threshold value reduction, usually < 1.0

   Options Database Key:
.  -pc_gamg_threshold_scale <v>

   Level: advanced

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold_scale = v;
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGSetType - Set the solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGGetType - Get the solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* clean up the common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols = 0;
    pc_gamg->data_cell_rows = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCMGGetGridComplexity - compute the grid complexity of the MG hierarchy

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  gc - grid complexity = sum_i(nnz_i) / nnz_0

   Level: advanced
*/
static PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_MG_Levels   **mglevels = mg->levels;
  PetscInt       lev, nnz0 = -1;
  MatInfo        info;

  PetscFunctionBegin;
  if (!mg->nlevels) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MG has no levels");
  for (lev=0, *gc=0; lev<mg->nlevels; lev++) {
    Mat dB;
    ierr = KSPGetOperators(mglevels[lev]->smoothd,NULL,&dB);CHKERRQ(ierr);
    ierr = MatGetInfo(dB,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
    *gc += (PetscReal)info.nz_used;
    if (lev==mg->nlevels-1) nnz0 = info.nz_used; /* level nlevels-1 is the finest */
  }
  if (nnz0) *gc /= (PetscReal)nnz0;
  else *gc = 0;
  PetscFunctionReturn(0);
}
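
/* A worked example of the formula above (hypothetical numbers): a 3-level hierarchy with
   nonzero counts 1000000 (finest), 200000, and 40000 gives
   gc = (1000000 + 200000 + 40000) / 1000000 = 1.24. */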

static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt       i;
  PetscReal      gc=0;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
  for (i=0;i<pc_gamg->current_level;i++) {
    ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  ierr = PCMGGetGridComplexity(pc,&gc);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Complexity:    grid = %g\n",gc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  PetscInt       i,n;
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
    n = PETSC_GAMG_MAXLEVELS;
    ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
    if (!flag || n < PETSC_GAMG_MAXLEVELS) {
      if (!flag) n = 1; /* no values given: keep threshold[0] and scale the rest from it */
      i = n;
      do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS);
    }
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for the subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
+   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
.   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
.   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
                                        equations on each process that has degrees of freedom
.   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
.   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
-   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified

   Options Database Keys for default Aggregation:
+  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
-  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

   Multigrid options:
+  -pc_mg_cycles <v> - v or w, see PCMGSetCycleType()
.  -pc_mg_distinct_smoothup - configure the up and down (pre and post) smoothers separately, see PCMGSetDistinctSmoothUp()
.  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
-  -pc_mg_levels <levels> - Number of levels of multigrid to use.

  Notes:
    In order to obtain good performance for PCGAMG for vector valued problems you must
       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
       See the Users Manual Chapter 4 for more details

  Level: intermediate

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
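
/* A hedged sketch of typical command-line usage for a 3D vector-valued (e.g. elasticity)
   problem, assuming the application has set the block size and near-null space on the matrix;
   the numerical values are illustrative only:

     -ksp_type cg -pc_type gamg -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 \
       -pc_gamg_threshold 0.01 -pc_gamg_process_eq_limit 200 -mg_levels_ksp_max_it 2
*/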
1286 
1287 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1288 {
1289   PetscErrorCode ierr,i;
1290   PC_GAMG        *pc_gamg;
1291   PC_MG          *mg;
1292 
1293   PetscFunctionBegin;
1294    /* register AMG type */
1295   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCMGGetLevels_C",PCMGGetLevels_MG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCMGSetLevels_C",PCMGSetLevels_MG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
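  /* default parameter values; users can change these with the PCGAMGSet*() routines composed above or the matching -pc_gamg_* options */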
  pc_gamg->repart           = PETSC_FALSE;
  pc_gamg->reuse_prol       = PETSC_FALSE;
  pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
  pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
  pc_gamg->min_eq_proc      = 50;
  pc_gamg->coarse_eq_limit  = 50;
  for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
  pc_gamg->threshold_scale  = 1.;
  pc_gamg->Nlevels          = PETSC_GAMG_MAXLEVELS;
  pc_gamg->current_level    = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
    from PCInitializePackage().

 Level: developer

.seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
  PCGAMGPackageInitialized = PETSC_TRUE;
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);

  /* general events */
  ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
  /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
  /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
  /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
  ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

  /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
  /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
  /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
  /* create timer stages */
#if defined GAMG_STAGES
  {
    char     str[32];
    PetscInt lidx;
    sprintf(str,"MG Level %d (finest)",0);
    ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
    /* register one logging stage per possible level; gamg_stages[] has PETSC_GAMG_MAXLEVELS entries */
    for (lidx=1; lidx<PETSC_GAMG_MAXLEVELS; lidx++) {
      sprintf(str,"MG Level %d",(int)lidx);
      ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
    }
  }
#endif
#endif
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
    called from PetscFinalize() automatically.

 Level: developer

.seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGRegister - Register a PCGAMG implementation.

 Input Parameters:
+ type - string that will be used as the name of the GAMG type
- create - function for creating the GAMG context
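
 Sample usage (a sketch; "my_gamg" and PCCreateGAMG_MyGAMG are hypothetical user-supplied names, not part of PETSc):
.vb
   PCGAMGRegister("my_gamg", PCCreateGAMG_MyGAMG);
.ve
 The new type can then be selected with PCGAMGSetType(pc,"my_gamg") or the option -pc_gamg_type my_gamg.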

  Level: advanced

.seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}