/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create the coarse operator with RAP; repartition and/or reduce the
     number of active processes.

   Input Parameters:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active processes
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt  bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get the number of processes to make active, 'new_size'; the reduction can be to any integer in [1,P] */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (!new_size) new_size = 1; /* unlikely, but possible */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }
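
  /*
     Illustrative arithmetic (the numbers are hypothetical, not from this file): with
     ncrs_eq_glob = 10000 global coarse equations and min_eq_proc = 50, the target is
     new_size = 10000/50 + 0.5 -> 200 active processes; if only nactive = 64 ranks are
     currently active, new_size is clamped back to 64 and no reduction occurs.
  */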

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* fallback; kk==1 is always a candidate, so this should not be reachable */
      }
      new_size = size/rfactor;
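
      /*
         Illustrative example (hypothetical numbers): with size = 16 ranks and a target of
         new_size = 5, the divisors kk of 16 give candidate active counts 16,8,4,2,1; the
         count whose ratio to the target 5 is closest to 1 is 4 (fact = 4/5 = 0.8), so
         rfactor = 4 and the reduced size becomes new_size = 16/4 = 4.
      */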

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO the new processes
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS             isscat;

      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node; one can think of the vectors as
        having a block size of 'node_data_sz'.  Note, ISs are expanded into equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
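      /*
        Illustrative mapping (hypothetical numbers): with cr_bs = 2 and node_data_sz = 3, a
        local node ii whose first equation maps to new global equation idx[ii*2] = 10 recovers
        node id = 10/2 = 5 and contributes tidx entries {15,16,17}, i.e. the three data items
        of node 5 in the new node-major ordering.
      */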
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of the gdata->ele
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatCreateSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      is_last = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat          B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* the first time through the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);

  ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL; /* failed to coarsen */

      if (pc_gamg->use_aggs_in_asm && Parr[level1]) { /* guard: Prol11 is NULL if coarsening failed */
        PetscInt bs;
        ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
        ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) { /* failed to coarsen */
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
    if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coarsening past the level already marked as last");
    if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
    ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for ASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_asm) {
        PetscInt sz;
        IS       *iss;

        sz   = nASMBlocksArr[level];
        iss  = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
        if (!sz) {
          IS is;
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(iss);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP      smoother,*k2;
      PC       subpc,pc2;
      PetscInt ii,first;
      Mat      Lmat = Aarr[(level=pc_gamg->Nlevels-1)];

      lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      if (!pc_gamg->use_parallel_coarse_grid_solver) {
        ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
        ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
        ierr = PCSetUp(subpc);CHKERRQ(ierr);
        ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
        if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
        ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
        ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
        ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
        ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
        ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
        /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
         * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
         * view every subdomain as though they were different. */
        ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
      }
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
          that has degrees of freedom

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
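
/*
   Usage sketch (illustrative only; 'A' and the limit of 1000 are hypothetical values, not from
   this file): raise the per-process equation target so process reduction kicks in earlier.

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);

   The same effect is available on the command line via -pc_gamg_process_eq_limit 1000.
*/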

static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processes on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes: this will generally improve the load balancing of the work on each level

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding the algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
          rebuilding the preconditioner more quickly.

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
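
/*
   Workflow sketch (illustrative; the time-stepping loop is hypothetical, not from this file):
   with reuse enabled, PCSetUp_GAMG() above skips regenerating the prolongators and only redoes
   the Galerkin coarse-grid products when the matrix values change:

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       (update the entries of A without changing its nonzero structure)
       ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
       ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     }
*/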

static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
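
/*
   Usage sketch (illustrative): with this flag set, PCSetUp_GAMG() selects PCASM on each level
   and hands it the aggregates as subdomains; pairing it with a Richardson smoother is a common
   choice (the option values below are examples only):

     -pc_type gamg -pc_gamg_asm_use_agg -mg_levels_ksp_type richardson
*/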

static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force coarse grid onto one processor

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  v - array of threshold values, one per level; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
-  n - number of threshold values provided in v

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes: Before aggregating the graph, GAMG removes small values from the graph, reducing the coupling in the graph and giving a different
    (perhaps better) coarser set of points.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGFilterGraph()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
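
/*
   Usage sketch (illustrative values): set explicit thresholds for the two finest levels and
   let the scale factor fill in the rest; with a scale of 0.5 the remaining levels get 0.005,
   0.0025, ...  Note the scale must be set first, since the fill-in happens when
   PCGAMGSetThreshold() is called:

     PetscReal th[2] = {0.02, 0.01};
     ierr = PCGAMGSetThresholdScale(pc,0.5);CHKERRQ(ierr);
     ierr = PCGAMGSetThreshold(pc,th,2);CHKERRQ(ierr);

   or, equivalently, -pc_gamg_threshold 0.02,0.01 -pc_gamg_threshold_scale 0.5
*/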

static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt i;

  PetscFunctionBegin;
  for (i=0;i<n;i++) pc_gamg->threshold[i] = v[i];
  /* fill the remaining levels by scaling the previous value; start at max(n,1) so we neither
     read threshold[-1] when n==0 nor write past the array when n==PETSC_GAMG_MAXLEVELS */
  for (i=PetscMax(n,1); i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThresholdScale - Relative threshold reduction at each level

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  v - the threshold reduction factor, usually < 1.0

   Options Database Key:
.  -pc_gamg_threshold_scale <v>

   Level: advanced

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold_scale = v;
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGSetType - Set solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
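
/*
   Usage sketch (illustrative): select the aggregation implementation programmatically,

     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);

   or on the command line with -pc_gamg_type agg (the default set in PCCreate_GAMG() below).
*/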

/*@C
   PCGAMGGetType - Get solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols = 0;
    pc_gamg->data_cell_rows = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
  for (i=0;i<pc_gamg->current_level;i++) {
    ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  PetscInt       i,n;
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregates from the coarsening process for the ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
    n = PETSC_GAMG_MAXLEVELS;
    ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
    if (!flag || n < PETSC_GAMG_MAXLEVELS) {
      if (!flag) n = 1;
      /* fill the unspecified levels by scaling the previous threshold */
      for (i=n; i<PETSC_GAMG_MAXLEVELS; i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;
    }
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
+   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
.   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
.   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
                                        equations on each process that has degrees of freedom
.   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
.   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
-   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified

   Options Database Keys for default Aggregation:
+  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
-  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

   Multigrid options (inherited):
+  -pc_mg_cycles <v> - v or w (PCMGSetCycleType())
.  -pc_mg_smoothup <1> - number of post-smoothing steps (PCMGSetNumberSmoothUp())
.  -pc_mg_smoothdown <1> - number of pre-smoothing steps (PCMGSetNumberSmoothDown())
.  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
-  -pc_mg_levels <levels> - number of levels of multigrid to use

  Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
$       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
$       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
$       See the Users Manual Chapter 4 for more details

  Level: intermediate

  Concepts: algebraic multigrid

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
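
/*
   Setup sketch for a vector-valued problem (e.g., 3D elasticity), following the Notes above.
   Illustrative only: 'A', 'coords', and 'nloc' are hypothetical application data, not part of
   this file.

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);               (3 dofs per grid point)
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,nloc,coords);CHKERRQ(ierr); (builds the rigid-body near null space)
*/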

PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
  PetscInt       i;

  PetscFunctionBegin;
  /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
  pc_gamg->repart           = PETSC_FALSE;
  pc_gamg->reuse_prol       = PETSC_FALSE;
  pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
  pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
  pc_gamg->min_eq_proc      = 50;
  pc_gamg->coarse_eq_limit  = 50;
  for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
  pc_gamg->threshold_scale  = 1.;
  pc_gamg->Nlevels          = PETSC_GAMG_MAXLEVELS;
  pc_gamg->current_level    = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
    from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
    when using static libraries.

 Level: developer

 .keywords: PC, PCGAMG, initialize, package
 .seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
  PCGAMGPackageInitialized = PETSC_TRUE;
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);

  /* general events */
  ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr); /* was registered with classid 0; use PC_CLASSID like the others */
  ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
  /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
  /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
  /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
  ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

  /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
  /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
  /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
  /* create timer stages */
#if defined GAMG_STAGES
  {
    char     str[32];
    PetscInt lidx;
    ierr = PetscSNPrintf(str,sizeof(str),"MG Level %d (finest)",0);CHKERRQ(ierr);
    ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
    /* register a stage for every possible level, not just the first 9, since
       PetscLogStagePush() above indexes gamg_stages[] by the current level */
    for (lidx=1; lidx<PETSC_GAMG_MAXLEVELS; lidx++) {
      ierr = PetscSNPrintf(str,sizeof(str),"MG Level %d",(int)lidx);CHKERRQ(ierr);
      ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
    }
  }
#endif
#endif
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
    called from PetscFinalize() automatically.

 Level: developer

 .keywords: Petsc, destroy, package
 .seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
 PCGAMGRegister - Register a PCGAMG implementation.

 Input Parameters:
 + type - string that will be used as the name of the GAMG type.
 - create - function for creating the gamg context.

  Level: advanced

 .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
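
/*
   Registration sketch (illustrative): a plugin registers its own implementation before
   PCGAMGSetType() is called; 'PCCreateGAMG_MyAMG' and "myamg" are hypothetical names, not
   part of PETSc:

     extern PetscErrorCode PCCreateGAMG_MyAMG(PC);
     ierr = PCGAMGRegister("myamg",PCCreateGAMG_MyAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc,"myamg");CHKERRQ(ierr);
*/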
1440