xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision c688d0420b4e513ff34944d1e1ad7d4e50aafa8d)
1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <petsc/private/matimpl.h>
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGraph_AGG;
14 PetscLogEvent PC_GAMGGraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptProlongator_AGG;
20 #endif
21 
22 #define GAMG_MAXLEVELS 30
23 
24 /* #define GAMG_STAGES */
25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
27 #endif
28 
29 static PetscFunctionList GAMGList = 0;
30 static PetscBool PCGAMGPackageInitialized;
31 
32 /* ----------------------------------------------------------------------------- */
33 #undef __FUNCT__
34 #define __FUNCT__ "PCReset_GAMG"
35 PetscErrorCode PCReset_GAMG(PC pc)
36 {
37   PetscErrorCode ierr;
38   PC_MG          *mg      = (PC_MG*)pc->data;
39   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
40 
41   PetscFunctionBegin;
42   if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
43   pc_gamg->data_sz = 0;
44   ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
45   PetscFunctionReturn(0);
46 }
47 
48 /* -------------------------------------------------------------------------- */
49 /*
50    PCGAMGCreateLevel_GAMG: create the coarse operator with a Galerkin triple product (RAP);
51      repartition and/or reduce the number of active processors.
52 
53    Input Parameters:
54    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
55           'pc_gamg->data_sz' are changed via repartitioning/reduction.
56    . Amat_fine - matrix on this fine (k) level
57    . cr_bs - coarse block size
58    Input/Output Parameters:
59    . a_P_inout - prolongation operator to the next level (k-->k-1)
60    . a_nactive_proc - number of active procs
61    Output Parameter:
62    . a_Amat_crs - coarse matrix that is created (k-1)
63 */
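/*
   The coarse operator below is formed in Galerkin fashion, A_{k-1} = P^T A_k P,
   via MatPtAP().  A minimal standalone sketch of that step (names illustrative):

     Mat Acoarse;
     ierr = MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);CHKERRQ(ierr);

   The fourth argument is only an estimate of the fill of the product relative to
   the inputs; 2.0 matches the call made in this routine.
*/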
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "PCGAMGCreateLevel_GAMG"
67 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
68 {
69   PetscErrorCode  ierr;
70   PC_MG           *mg         = (PC_MG*)pc->data;
71   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
72   Mat             Cmat,Pold=*a_P_inout;
73   MPI_Comm        comm;
74   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
75   PetscInt        ncrs_eq,ncrs,f_bs;
76 
77   PetscFunctionBegin;
78   ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
79   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
80   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
81   ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
82   ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);
83 
84   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
85   ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
86   if (pc_gamg->data_cell_rows>0) {
87     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
88   } else {
89     PetscInt  bs;
90     ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
91     ncrs = ncrs_eq/bs;
92   }
93 
94   /* compute the number of PEs to make active, 'new_size'; the reduction can be to any integer in [1,P] */
95   if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
96   else {
97     PetscInt ncrs_eq_glob;
98     ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
99     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
100     if (!new_size) new_size = 1; /* not likely, but possible? */
101     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
102   }
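  /* Worked example (a sketch): with ncrs_eq_glob = 10000 and min_eq_proc = 50 the
     target is new_size = (PetscMPIInt)(10000./50. + 0.5) = 200, which the branch
     above then caps at 'nactive'. */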
103 
104   if (Pcolumnperm) *Pcolumnperm = NULL;
105 
106   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
107   else {
108     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
109     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
110 
111     nloc_old = ncrs_eq/cr_bs;
112     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
113 #if defined PETSC_GAMG_USE_LOG
114     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
115 #endif
116     /* make 'is_eq_newproc' */
117     ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
118     if (pc_gamg->repart) {
119       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
120       Mat adj;
121 
122       ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);
123 
124       /* get 'adj' */
125       if (cr_bs == 1) {
126         ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
127       } else {
128         /* make a scalar matrix to partition (no Stokes here) */
129         Mat               tMat;
130         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
131         const PetscScalar *vals;
132         const PetscInt    *idx;
133         PetscInt          *d_nnz, *o_nnz, M, N;
134         static PetscInt   llev = 0;
135         MatType           mtype;
136 
137         ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
138         ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
139         ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
140         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
141           ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
142           d_nnz[jj] = ncols/cr_bs;
143           o_nnz[jj] = ncols/cr_bs;
144           ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
145           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
146           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
147         }
148 
149         ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
150         ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
151         ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
152         ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
153         ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
154         ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
155         ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
156 
157         for (ii = Istart_crs; ii < Iend_crs; ii++) {
158           PetscInt dest_row = ii/cr_bs;
159           ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
160           for (jj = 0; jj < ncols; jj++) {
161             PetscInt    dest_col = idx[jj]/cr_bs;
162             PetscScalar v        = 1.0;
163             ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
164           }
165           ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
166         }
167         ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
168         ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
169 
170         if (llev++ == -1) {
171           PetscViewer viewer; char fname[32];
172           ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
173           ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
174           ierr = MatView(tMat, viewer);CHKERRQ(ierr);
175           ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
176         }
177         ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
178         ierr = MatDestroy(&tMat);CHKERRQ(ierr);
179       } /* create 'adj' */
180 
181       { /* partition: get newproc_idx */
182         char            prefix[256];
183         const char      *pcpre;
184         const PetscInt  *is_idx;
185         MatPartitioning mpart;
186         IS              proc_is;
187         PetscInt        targetPE;
188 
189         ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
190         ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
191         ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
192         ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
193         ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
194         ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
195         ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
196         ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
197         ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
198 
199         /* collect IS info */
200         ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
201         ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
202         targetPE = 1; /* bring to "front" of machine */
203         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
204         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
205           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
206             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
207           }
208         }
209         ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
210         ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
211       }
212       ierr = MatDestroy(&adj);CHKERRQ(ierr);
213 
214       ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
215       ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
216     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
217       PetscInt rfactor,targetPE;
218 
219       /* find factor */
220       if (new_size == 1) rfactor = size; /* easy */
221       else {
222         PetscReal best_fact = 0.;
223         jj = -1;
224         for (kk = 1 ; kk <= size ; kk++) {
225           if (!(size%kk)) { /* a candidate */
226             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
227             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
228             if (fact > best_fact) {
229               best_fact = fact; jj = kk;
230             }
231           }
232         }
233         if (jj != -1) rfactor = jj;
234         else rfactor = 1; /* can happen, e.g., when size is prime */
235       }
236       new_size = size/rfactor;
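      /* Worked example (a sketch): size = 12, new_size = 5.  The divisors kk of 12
         give nactpe = 12/kk = 12,6,4,3,2,1 and fact = 0.417, 0.833, 0.8, 0.6, 0.4,
         0.2 respectively; the best is kk = 2, so rfactor = 2 and the reduction is
         to new_size = 12/2 = 6 active processes. */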
237 
238       if (new_size==nactive) {
239         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
240         ierr        = PetscFree(counts);CHKERRQ(ierr);
241         ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
242 #if defined PETSC_GAMG_USE_LOG
243         ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
244 #endif
245         PetscFunctionReturn(0);
246       }
247 
248       ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
249       targetPE = rank/rfactor;
250       ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
251     } /* end simple 'is_eq_newproc' */
252 
253     /*
254      Create an index set from the is_eq_newproc index set to indicate the mapping TO
255      */
256     ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
257     is_eq_num_prim = is_eq_num;
258     /*
259       Determine how many equations/vertices are assigned to each processor
260      */
261     ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
262     ncrs_eq_new = counts[rank];
263     ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
264     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */
265 
266     ierr = PetscFree(counts);CHKERRQ(ierr);
267 #if defined PETSC_GAMG_USE_LOG
268     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
269 #endif
270     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
271     {
272     Vec            src_crd, dest_crd;
273     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
274     VecScatter     vecscat;
275     PetscScalar    *array;
276     IS isscat;
277 
278     /* move data (for primal equations only) */
279     /* Create a vector to contain the newly ordered element information */
280     ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
281     ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
282     ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
283     /*
284      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
285      a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
286      */
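    /* Index-map example (a sketch): with cr_bs = 2 and node_data_sz = 3, local node
       ii has new node id = idx[2*ii]/2, and its data items land at entries id*3,
       id*3+1, id*3+2 of the scattered data vector. */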
287     ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
288     ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
289     for (ii=0,jj=0; ii<ncrs; ii++) {
290       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
291       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
292     }
293     ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
294     ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
295     ierr = PetscFree(tidx);CHKERRQ(ierr);
296     /*
297      Create a vector to contain the original vertex information for each element
298      */
299     ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
300     for (jj=0; jj<ndata_cols; jj++) {
301       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
302       for (ii=0; ii<ncrs; ii++) {
303         for (kk=0; kk<ndata_rows; kk++) {
304           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
305           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
306           ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
307         }
308       }
309     }
310     ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
311     ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
312     /*
313       Scatter the element vertex information (still in the original vertex ordering)
314       to the correct processor
315     */
316     ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
317     ierr = ISDestroy(&isscat);CHKERRQ(ierr);
318     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
319     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
320     ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
321     ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
322     /*
323       Put the element vertex data into a new allocation of the gdata->ele
324     */
325     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
326     ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);
327 
328     pc_gamg->data_sz = node_data_sz*ncrs_new;
329     strideNew        = ncrs_new*ndata_rows;
330 
331     ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
332     for (jj=0; jj<ndata_cols; jj++) {
333       for (ii=0; ii<ncrs_new; ii++) {
334         for (kk=0; kk<ndata_rows; kk++) {
335           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
336           pc_gamg->data[ix] = PetscRealPart(array[jx]);
337         }
338       }
339     }
340     ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
341     ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
342     }
343     /* move A and P (columns) with new layout */
344 #if defined PETSC_GAMG_USE_LOG
345     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
346 #endif
347 
348     /*
349       Invert for MatGetSubMatrix
350     */
351     ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
352     ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
353     ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
354     if (is_eq_num != is_eq_num_prim) {
355       ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
356     }
357     if (Pcolumnperm) {
358       ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
359       *Pcolumnperm = new_eq_indices;
360     }
361     ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
362 #if defined PETSC_GAMG_USE_LOG
363     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
364     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
365 #endif
366     /* 'a_Amat_crs' output */
367     {
368       Mat mat;
369       ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
370       *a_Amat_crs = mat;
371     }
372     ierr = MatDestroy(&Cmat);CHKERRQ(ierr);
373 
374 #if defined PETSC_GAMG_USE_LOG
375     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
376 #endif
377     /* prolongator */
378     {
379       IS       findices;
380       PetscInt Istart,Iend;
381       Mat      Pnew;
382 
383       ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
384 #if defined PETSC_GAMG_USE_LOG
385       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
386 #endif
387       ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
388       ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
389       ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
390       ierr = ISDestroy(&findices);CHKERRQ(ierr);
391 
392 #if defined PETSC_GAMG_USE_LOG
393       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
394 #endif
395       ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);
396 
397       /* output - repartitioned */
398       *a_P_inout = Pnew;
399     }
400     ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);
401 
402     *a_nactive_proc = new_size; /* output */
403   }
404   PetscFunctionReturn(0);
405 }
406 
407 /* -------------------------------------------------------------------------- */
408 /*
409    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
410                     by setting data structures and options.
411 
412    Input Parameter:
413 .  pc - the preconditioner context
414 
415 */
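/*
   A typical call sequence that reaches this routine (a sketch):

     ierr = KSPCreate(comm,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSetUp(ksp);CHKERRQ(ierr);

   KSPSetUp() calls PCSetUp(), which dispatches here for PCGAMG.
*/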
416 #undef __FUNCT__
417 #define __FUNCT__ "PCSetUp_GAMG"
418 PetscErrorCode PCSetUp_GAMG(PC pc)
419 {
420   PetscErrorCode ierr;
421   PC_MG          *mg      = (PC_MG*)pc->data;
422   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
423   Mat            Pmat     = pc->pmat;
424   PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
425   MPI_Comm       comm;
426   PetscMPIInt    rank,size,nactivepe;
427   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
428   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
429   PetscLogDouble nnz0=0.,nnztot=0.;
430   MatInfo        info;
431   PetscBool      is_last = PETSC_FALSE;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
435   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
436   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
437 
438   if (pc_gamg->setup_count++ > 0) {
439     if ((PetscBool)(!pc_gamg->reuse_prol)) {
440       /* reset everything */
441       ierr = PCReset_MG(pc);CHKERRQ(ierr);
442       pc->setupcalled = 0;
443     } else {
444       PC_MG_Levels **mglevels = mg->levels;
445       /* just do Galerkin grids */
446       Mat          B,dA,dB;
447 
448       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
449       if (pc_gamg->Nlevels > 1) {
450         /* currently only handle case where mat and pmat are the same on coarser levels */
451         ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
452         /* (re)set to get dirty flag */
453         ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);
454 
455         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
456           /* the first time through the matrix structure has changed from repartitioning */
457           if (pc_gamg->setup_count==2) {
458             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
459             ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
460 
461             mglevels[level]->A = B;
462           } else {
463             ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
464             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
465           }
466           ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
467           dB   = B;
468         }
469       }
470 
471       ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
472       PetscFunctionReturn(0);
473     }
474   }
475 
476   if (!pc_gamg->data) {
477     if (pc_gamg->orig_data) {
478       ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
479       ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);
480 
481       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
482       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
483       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
484 
485       ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
486       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
487     } else {
488       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
489       ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
490     }
491   }
492 
493   /* cache original data for reuse */
494   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
495     ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
496     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
497     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
498     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
499   }
500 
501   /* get basic dims */
502   ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
503   ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);
504 
505   ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
506   nnz0   = info.nz_used;
507   nnztot = info.nz_used;
508   ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);
509 
510   /* Get A_i and R_i */
511   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
512     pc_gamg->current_level = level;
513     level1 = level + 1;
514 #if defined PETSC_GAMG_USE_LOG
515     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
516 #if (defined GAMG_STAGES)
517     ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
518 #endif
519 #endif
520     { /* construct prolongator */
521       Mat              Gmat;
522       PetscCoarsenData *agg_lists;
523       Mat              Prol11;
524 
525       ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
526       ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
527       ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);
528 
529       /* could have failed to create new level */
530       if (Prol11) {
531         /* get new block size of coarse matrices */
532         ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);
533 
534         if (pc_gamg->ops->optprolongator) {
535           /* smooth */
536           ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
537         }
538 
539         Parr[level1] = Prol11;
540       } else Parr[level1] = NULL; /* failed to coarsen */
541 
542       if (pc_gamg->use_aggs_in_asm && Prol11) { /* Prol11 is NULL if coarsening failed */
543         PetscInt bs;
544         ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
545         ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
546       }
547 
548       ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
549       ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
550     } /* construct prolongator scope */
551 #if defined PETSC_GAMG_USE_LOG
552     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
553 #endif
554     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
555     if (!Parr[level1]) { /* failed to coarsen */
556       ierr =  PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
557 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
558       ierr = PetscLogStagePop();CHKERRQ(ierr);
559 #endif
560       break;
561     }
562 #if defined PETSC_GAMG_USE_LOG
563     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
564 #endif
565     ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next level's M, the loop test variable */
566     if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coarsening past the level already marked as last");
567     if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
568     ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);
569 
570 #if defined PETSC_GAMG_USE_LOG
571     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
572 #endif
573     ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
574     ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
575     nnztot += info.nz_used;
576     ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);
577 
578 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
579     ierr = PetscLogStagePop();CHKERRQ(ierr);
580 #endif
581     /* stop if one node or one proc -- could pull back for singular problems */
582     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
583       ierr =  PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
584       level++;
585       break;
586     }
587   } /* levels */
588   ierr                  = PetscFree(pc_gamg->data);CHKERRQ(ierr);
589 
590   ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
591   pc_gamg->Nlevels = level + 1;
592   fine_level       = level;
593   ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
594 
595   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
596     /* set default smoothers & set operators */
597     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
598       KSP smoother;
599       PC  subpc;
600 
601       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
602       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
603 
604       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
605       /* set ops */
606       ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
607       ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);
608 
609       /* set defaults */
610       ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);
611 
612       /* set blocks for ASM smoother that uses the 'aggregates' */
613       if (pc_gamg->use_aggs_in_asm) {
614         PetscInt sz;
615         IS       *iss;
616 
617         sz   = nASMBlocksArr[level];
618         iss  = ASMLocalIDsArr[level];
619         ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
620         ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
621         ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
622         if (!sz) {
623           IS       is;
624           ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
625           ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
626           ierr = ISDestroy(&is);CHKERRQ(ierr);
627         } else {
628           PetscInt kk;
629           ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
630           for (kk=0; kk<sz; kk++) {
631             ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
632           }
633           ierr = PetscFree(iss);CHKERRQ(ierr);
634         }
635         ASMLocalIDsArr[level] = NULL;
636         nASMBlocksArr[level]  = 0;
637       } else {
638         ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
639       }
640     }
641     {
642       /* coarse grid */
643       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
644       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
645       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
646       ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
647       if (!pc_gamg->use_parallel_coarse_grid_solver) {
648         ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
649         ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
650         ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
651         ierr = PCSetUp(subpc);CHKERRQ(ierr);
652         ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
653         if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
654         ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
655         ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
656         ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
657         ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
658         ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
659         /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
660          * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
661          * view every subdomain as though they were different. */
662         ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
663       }
664     }
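    /* These smoother defaults can be overridden per level from the options
       database, e.g. (a sketch):
         -mg_levels_ksp_type richardson -mg_levels_pc_type sor
         -mg_coarse_ksp_type preonly -mg_coarse_pc_type lu
    */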
665 
666     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
667     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
668     ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
669     ierr = PetscOptionsEnd();CHKERRQ(ierr);
670     ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
671 
672     /* clean up */
673     for (level=1; level<pc_gamg->Nlevels; level++) {
674       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
675       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
676     }
677     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
678   } else {
679     KSP smoother;
680     ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
681     ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
682     ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
683     ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
684     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
685   }
686   PetscFunctionReturn(0);
687 }
688 
689 /* ------------------------------------------------------------------------- */
690 /*
691  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
692    that was created with PCCreate_GAMG().
693 
694    Input Parameter:
695 .  pc - the preconditioner context
696 
697    Application Interface Routine: PCDestroy()
698 */
699 #undef __FUNCT__
700 #define __FUNCT__ "PCDestroy_GAMG"
701 PetscErrorCode PCDestroy_GAMG(PC pc)
702 {
703   PetscErrorCode ierr;
704   PC_MG          *mg     = (PC_MG*)pc->data;
705   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;
706 
707   PetscFunctionBegin;
708   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
709   if (pc_gamg->ops->destroy) {
710     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
711   }
712   ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
713   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
714   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
715   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
716   PetscFunctionReturn(0);
717 }
718 
719 #undef __FUNCT__
720 #define __FUNCT__ "PCGAMGSetProcEqLim"
721 /*@
722    PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.
723 
724    Logically Collective on PC
725 
726    Input Parameters:
727 +  pc - the preconditioner context
728 -  n - the number of equations
729 
730 
731    Options Database Key:
732 .  -pc_gamg_process_eq_limit <limit>
733 
734    Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
735           that has degrees of freedom
736 
737    Level: intermediate
738 
739    Concepts: Unstructured multigrid preconditioner
740 
741 .seealso: PCGAMGSetCoarseEqLim()
742 @*/
743 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
744 {
745   PetscErrorCode ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
749   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
750   PetscFunctionReturn(0);
751 }
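/* Example usage (a sketch): aim for roughly 1000 equations per active process on the
   coarse grids and stop coarsening near 200 global equations:

     ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);
     ierr = PCGAMGSetCoarseEqLim(pc,200);CHKERRQ(ierr);

   or equivalently: -pc_gamg_process_eq_limit 1000 -pc_gamg_coarse_eq_limit 200
*/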
752 
753 #undef __FUNCT__
754 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
755 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
756 {
757   PC_MG   *mg      = (PC_MG*)pc->data;
758   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
759 
760   PetscFunctionBegin;
761   if (n>0) pc_gamg->min_eq_proc = n;
762   PetscFunctionReturn(0);
763 }
764 
765 #undef __FUNCT__
766 #define __FUNCT__ "PCGAMGSetCoarseEqLim"
767 /*@
768    PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.
769 
770  Collective on PC
771 
772    Input Parameters:
773 +  pc - the preconditioner context
774 -  n - maximum number of equations to aim for
775 
776    Options Database Key:
777 .  -pc_gamg_coarse_eq_limit <limit>
778 
779    Level: intermediate
780 
781    Concepts: Unstructured multigrid preconditioner
782 
783 .seealso: PCGAMGSetProcEqLim()
784 @*/
785 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
786 {
787   PetscErrorCode ierr;
788 
789   PetscFunctionBegin;
790   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
791   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
792   PetscFunctionReturn(0);
793 }
794 
795 #undef __FUNCT__
796 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
797 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
798 {
799   PC_MG   *mg      = (PC_MG*)pc->data;
800   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
801 
802   PetscFunctionBegin;
803   if (n>0) pc_gamg->coarse_eq_limit = n;
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "PCGAMGSetRepartition"
809 /*@
810    PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids
811 
812    Collective on PC
813 
814    Input Parameters:
815 +  pc - the preconditioner context
816 -  n - PETSC_TRUE or PETSC_FALSE
817 
818    Options Database Key:
819 .  -pc_gamg_repartition <true,false>
820 
821    Notes: this will generally improve the load balancing of the work on each level
822 
823    Level: intermediate
824 
825    Concepts: Unstructured multigrid preconditioner
826 
827 .seealso: ()
828 @*/
829 PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
830 {
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
835   ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "PCGAMGSetRepartition_GAMG"
841 static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
842 {
843   PC_MG   *mg      = (PC_MG*)pc->data;
844   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
845 
846   PetscFunctionBegin;
847   pc_gamg->repart = n;
848   PetscFunctionReturn(0);
849 }
850 
851 #undef __FUNCT__
852 #define __FUNCT__ "PCGAMGSetReuseInterpolation"
853 /*@
854    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner
855 
856    Collective on PC
857 
858    Input Parameters:
859 +  pc - the preconditioner context
860 -  n - PETSC_TRUE or PETSC_FALSE
861 
862    Options Database Key:
863 .  -pc_gamg_reuse_interpolation <true,false>
864 
865    Level: intermediate
866 
867    Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
868           rebuilding the preconditioner quicker.
869 
870    Concepts: Unstructured multigrid preconditioner
871 
872 .seealso: ()
873 @*/
874 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
875 {
876   PetscErrorCode ierr;
877 
878   PetscFunctionBegin;
879   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
880   ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 #undef __FUNCT__
885 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
886 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
887 {
888   PC_MG   *mg      = (PC_MG*)pc->data;
889   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
890 
891   PetscFunctionBegin;
892   pc_gamg->reuse_prol = n;
893   PetscFunctionReturn(0);
894 }
895 
896 #undef __FUNCT__
897 #define __FUNCT__ "PCGAMGASMSetUseAggs"
898 /*@
899    PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.
900 
901    Collective on PC
902 
903    Input Parameters:
904 +  pc - the preconditioner context
905 -  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not
906 
907    Options Database Key:
908 .  -pc_gamg_asm_use_agg
909 
910    Level: intermediate
911 
912    Concepts: Unstructured multigrid preconditioner
913 
914 .seealso: ()
915 @*/
916 PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
917 {
918   PetscErrorCode ierr;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
922   ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
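/* Example (a sketch): have PCSetUp_GAMG() select PCASM on each level and hand it the
   aggregate index sets as subdomains:

     ierr = PCGAMGASMSetUseAggs(pc,PETSC_TRUE);CHKERRQ(ierr);

   or: -pc_gamg_asm_use_agg
*/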
925 
926 #undef __FUNCT__
927 #define __FUNCT__ "PCGAMGASMSetUseAggs_GAMG"
928 static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
929 {
930   PC_MG   *mg      = (PC_MG*)pc->data;
931   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
932 
933   PetscFunctionBegin;
934   pc_gamg->use_aggs_in_asm = flg;
935   PetscFunctionReturn(0);
936 }
937 
938 #undef __FUNCT__
939 #define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve"
940 /*@
941    PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver
942 
943    Collective on PC
944 
945    Input Parameters:
946 +  pc - the preconditioner context
947 -  flg - PETSC_TRUE to keep the coarse grid solve parallel rather than forcing it onto one process
948 
949    Options Database Key:
950 .  -pc_gamg_use_parallel_coarse_grid_solver
951 
952    Level: intermediate
953 
954    Concepts: Unstructured multigrid preconditioner
955 
956 .seealso: ()
957 @*/
958 PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
959 {
960   PetscErrorCode ierr;
961 
962   PetscFunctionBegin;
963   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
964   ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
965   PetscFunctionReturn(0);
966 }
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve_GAMG"
970 static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
971 {
972   PC_MG   *mg      = (PC_MG*)pc->data;
973   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
974 
975   PetscFunctionBegin;
976   pc_gamg->use_parallel_coarse_grid_solver = flg;
977   PetscFunctionReturn(0);
978 }
979 
980 #undef __FUNCT__
981 #define __FUNCT__ "PCGAMGSetNlevels"
982 /*@
983    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use
984 
985    Not collective on PC
986 
987    Input Parameters:
988 +  pc - the preconditioner
989 -  n - the maximum number of levels to use
990 
991    Options Database Key:
992 .  -pc_mg_levels
993 
994    Level: intermediate
995 
996    Concepts: Unstructured multigrid preconditioner
997 
998 .seealso: ()
999 @*/
1000 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1001 {
1002   PetscErrorCode ierr;
1003 
1004   PetscFunctionBegin;
1005   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1006   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
1007   PetscFunctionReturn(0);
1008 }
1009 
1010 #undef __FUNCT__
1011 #define __FUNCT__ "PCGAMGSetNlevels_GAMG"
1012 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1013 {
1014   PC_MG   *mg      = (PC_MG*)pc->data;
1015   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1016 
1017   PetscFunctionBegin;
1018   pc_gamg->Nlevels = n;
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 #undef __FUNCT__
1023 #define __FUNCT__ "PCGAMGSetThreshold"
1024 /*@
1025    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1026 
1027    Not collective on PC
1028 
1029    Input Parameters:
1030 +  pc - the preconditioner context
1031 -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
1032 
1033    Options Database Key:
1034 .  -pc_gamg_threshold <threshold>
1035 
1036    Notes: Before aggregating the graph, GAMG removes small values from it, thus reducing the coupling in the graph
1037     and producing a different (perhaps better) set of coarse points.
1038 
1039    Level: intermediate
1040 
1041    Concepts: Unstructured multigrid preconditioner
1042 
1043 .seealso: ()
1044 @*/
1045 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1046 {
1047   PetscErrorCode ierr;
1048 
1049   PetscFunctionBegin;
1050   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1051   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
1052   PetscFunctionReturn(0);
1053 }
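/* Example (a sketch): drop relatively small entries from the aggregation graph to
   coarsen more aggressively:

     ierr = PCGAMGSetThreshold(pc,0.05);CHKERRQ(ierr);

   or: -pc_gamg_threshold 0.05
*/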
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
1057 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1058 {
1059   PC_MG   *mg      = (PC_MG*)pc->data;
1060   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1061 
1062   PetscFunctionBegin;
1063   pc_gamg->threshold = n;
1064   PetscFunctionReturn(0);
1065 }
1066 
1067 #undef __FUNCT__
1068 #define __FUNCT__ "PCGAMGSetType"
1069 /*@
1070    PCGAMGSetType - Set solution method
1071 
1072    Collective on PC
1073 
1074    Input Parameters:
1075 +  pc - the preconditioner context
1076 -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL
1077 
1078    Options Database Key:
1079 .  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply
1080 
1081    Level: intermediate
1082 
1083    Concepts: Unstructured multigrid preconditioner
1084 
1085 .seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
1086 @*/
1087 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1088 {
1089   PetscErrorCode ierr;
1090 
1091   PetscFunctionBegin;
1092   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1093   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 #undef __FUNCT__
1098 #define __FUNCT__ "PCGAMGGetType"
1099 /*@
1100    PCGAMGGetType - Get solution method
1101 
1102    Collective on PC
1103 
1104    Input Parameter:
1105 .  pc - the preconditioner context
1106 
1107    Output Parameter:
1108 .  type - the type of algorithm used
1109 
1110    Level: intermediate
1111 
1112    Concepts: Unstructured multigrid preconditioner
1113 
1114 .seealso: PCGAMGSetType(), PCGAMGType
1115 @*/
1116 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1117 {
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1122   ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #undef __FUNCT__
1127 #define __FUNCT__ "PCGAMGGetType_GAMG"
1128 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1129 {
1130   PC_MG          *mg      = (PC_MG*)pc->data;
1131   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1132 
1133   PetscFunctionBegin;
1134   *type = pc_gamg->type;
1135   PetscFunctionReturn(0);
1136 }
1137 
1138 #undef __FUNCT__
1139 #define __FUNCT__ "PCGAMGSetType_GAMG"
1140 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1141 {
1142   PetscErrorCode ierr,(*r)(PC);
1143   PC_MG          *mg      = (PC_MG*)pc->data;
1144   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1145 
1146   PetscFunctionBegin;
1147   pc_gamg->type = type;
1148   ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
1149   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1150   if (pc_gamg->ops->destroy) {
1151     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
1152     ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
1153     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1154     /* cleaning up common data in pc_gamg - this should disappear someday */
1155     pc_gamg->data_cell_cols = 0;
1156     pc_gamg->data_cell_rows = 0;
1157     pc_gamg->orig_data_cell_cols = 0;
1158     pc_gamg->orig_data_cell_rows = 0;
1159     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
1160     pc_gamg->data_sz = 0;
1161   }
1162   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
1163   ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
1164   ierr = (*r)(pc);CHKERRQ(ierr);
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 #undef __FUNCT__
1169 #define __FUNCT__ "PCView_GAMG"
1170 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1171 {
1172   PetscErrorCode ierr;
1173   PC_MG          *mg      = (PC_MG*)pc->data;
1174   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1175 
1176   PetscFunctionBegin;
1177   ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
1178   ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr);
1179   if (pc_gamg->use_aggs_in_asm) {
1180     ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
1181   }
1182   if (pc_gamg->use_parallel_coarse_grid_solver) {
1183     ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
1184   }
1185   if (pc_gamg->ops->view) {
1186     ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
1187   }
1188   PetscFunctionReturn(0);
1189 }
1190 
1191 #undef __FUNCT__
1192 #define __FUNCT__ "PCSetFromOptions_GAMG"
1193 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1194 {
1195   PetscErrorCode ierr;
1196   PC_MG          *mg      = (PC_MG*)pc->data;
1197   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1198   PetscBool      flag;
1199   MPI_Comm       comm;
1200   char           prefix[256];
1201   const char     *pcpre;
1202 
1203   PetscFunctionBegin;
1204   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1205   ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
1206   {
1207     char tname[256];
1208     ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
1209     if (flag) {
1210       ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
1211     }
1212     ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
1213     ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
1214     ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
1215     ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
1216     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
1217     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
1218     ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr);
1219     ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
1220 
1221     /* set options for subtype */
1222     if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
1223   }
1224   ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
1225   ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
1226   ierr = PetscOptionsTail();CHKERRQ(ierr);
1227   PetscFunctionReturn(0);
1228 }
1229 
1230 /* -------------------------------------------------------------------------- */
1231 /*MC
1232      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner
1233 
1234    Options Database Keys:
1235 +   -pc_gamg_type <type> - one of agg, geo, or classical
1236 .   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
1237 .   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
1238 .   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
1239 .   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
1240                                         equations on each process that has degrees of freedom
1241 .   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
1242 -   -pc_gamg_threshold <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph thus reducing the coupling in the graph and producing a different (perhaps better) set of coarse points
1243 
1244    Options Database Keys for default Aggregation:
1245 +  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation
1246 .  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
1247 -  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it
1248 
1249    Multigrid options(inherited):
1250 +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1251 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp())
1252 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown())
1253 .  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1254 -  -pc_mg_levels <levels> - Number of levels of multigrid to use.
1255 
1256 
1257   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
1258 $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1259 $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1260 $       See the Users Manual Chapter 4 for more details
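$       For example, a minimal near-null-space setup for 3D elasticity (a sketch; 'coords' is a Vec of nodal coordinates):
$         MatNullSpace nsp;
$         ierr = MatNullSpaceCreateRigidBody(coords,&nsp);CHKERRQ(ierr);
$         ierr = MatSetNearNullSpace(A,nsp);CHKERRQ(ierr);
$         ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);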
1261 
1262   Level: intermediate
1263 
1264   Concepts: algebraic multigrid
1265 
1266 .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1267            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation()
1268 M*/
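/*
   Example command-line use (a sketch; './app' is a placeholder executable):

     ./app -ksp_type cg -pc_type gamg -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 \
           -pc_gamg_threshold 0.02 -pc_gamg_process_eq_limit 200
*/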
1269 
1270 #undef __FUNCT__
1271 #define __FUNCT__ "PCCreate_GAMG"
1272 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1273 {
1274   PetscErrorCode ierr;
1275   PC_GAMG        *pc_gamg;
1276   PC_MG          *mg;
1277 
1278   PetscFunctionBegin;
1279    /* register AMG type */
1280   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1281 
1282   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1283   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
1284   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1285 
1286   /* create a supporting struct and attach it to pc */
1287   ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
1288   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
1289   mg           = (PC_MG*)pc->data;
1290   mg->innerctx = pc_gamg;
1291 
1292   ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);
1293 
1294   pc_gamg->setup_count = 0;
1295   /* these should be in subctx but repartitioning needs simple arrays */
1296   pc_gamg->data_sz = 0;
1297   pc_gamg->data    = 0;
1298 
1299   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1300   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1301   pc->ops->setup          = PCSetUp_GAMG;
1302   pc->ops->reset          = PCReset_GAMG;
1303   pc->ops->destroy        = PCDestroy_GAMG;
1304   mg->view                = PCView_GAMG;
1305 
1306   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1307   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1308   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
1309   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
1310   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
1311   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
1312   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1313   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1314   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
1315   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
1316   pc_gamg->repart           = PETSC_FALSE;
1317   pc_gamg->reuse_prol       = PETSC_FALSE;
1318   pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
1319   pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
1320   pc_gamg->min_eq_proc      = 50;
1321   pc_gamg->coarse_eq_limit  = 50;
1322   pc_gamg->threshold        = 0.;
1323   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1324   pc_gamg->current_level    = 0; /* don't need to init really */
1325   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1326 
1327   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1328   ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 #undef __FUNCT__
1333 #define __FUNCT__ "PCGAMGInitializePackage"
1334 /*@C
1335  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1336     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1337     when using static libraries.
1338 
1339  Level: developer
1340 
1341  .keywords: PC, PCGAMG, initialize, package
1342  .seealso: PetscInitialize()
1343 @*/
1344 PetscErrorCode PCGAMGInitializePackage(void)
1345 {
1346   PetscErrorCode ierr;
1347 
1348   PetscFunctionBegin;
1349   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1350   PCGAMGPackageInitialized = PETSC_TRUE;
1351   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1352   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1353   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1354   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1355 
1356   /* general events */
1357   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1358   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1359   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1360   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1361   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1362   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1363   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1364 
1365 #if defined PETSC_GAMG_USE_LOG
1366   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1367   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1368   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1369   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1370   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1371   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1372   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1373   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1374   ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1375   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1376   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1377   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1378   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1379   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1380   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1381   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1382   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1383 
1384   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1385   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1386   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1387   /* create timer stages */
1388 #if defined GAMG_STAGES
1389   {
1390     char     str[32];
1391     PetscInt lidx;
1392     sprintf(str,"MG Level %d (finest)",0);
1393     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1394     for (lidx=1; lidx<9; lidx++) {
1395       sprintf(str,"MG Level %d",(int)lidx);
1396       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1397     }
1398   }
1399 #endif
1400 #endif
1401   PetscFunctionReturn(0);
1402 }
1403 
1404 #undef __FUNCT__
1405 #define __FUNCT__ "PCGAMGFinalizePackage"
1406 /*@C
1407  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1408     called from PetscFinalize() automatically.
1409 
1410  Level: developer
1411 
1412  .keywords: Petsc, destroy, package
1413  .seealso: PetscFinalize()
1414 @*/
1415 PetscErrorCode PCGAMGFinalizePackage(void)
1416 {
1417   PetscErrorCode ierr;
1418 
1419   PetscFunctionBegin;
1420   PCGAMGPackageInitialized = PETSC_FALSE;
1421   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 #undef __FUNCT__
1426 #define __FUNCT__ "PCGAMGRegister"
1427 /*@C
1428  PCGAMGRegister - Register a PCGAMG implementation.
1429 
1430  Input Parameters:
1431  + type - string that will be used as the name of the GAMG type.
1432  - create - function for creating the gamg context.
1433 
1434   Level: advanced
1435 
1436  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1437 @*/
1438 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1439 {
1440   PetscErrorCode ierr;
1441 
1442   PetscFunctionBegin;
1443   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1444   ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
1445   PetscFunctionReturn(0);
1446 }
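/* Example (a sketch; 'mytype' and PCCreateGAMG_MyType are hypothetical):

     static PetscErrorCode PCCreateGAMG_MyType(PC pc)
     {
       ...fill in pc_gamg->ops (graph, coarsen, prolongator, createdefaultdata)...
     }

     ierr = PCGAMGRegister("mytype",PCCreateGAMG_MyType);CHKERRQ(ierr);

   The new implementation is then selectable with -pc_gamg_type mytype.
*/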
1447 
1448