xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision aed5ffcbc31e41b96407a92d0c19d6464f10f960)
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;
/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   createLevel: create the coarse operator with RAP, then repartition and/or
     reduce the number of active processes.

   Input Parameters:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   . isLast - flag indicating the coarsest (last) level, which forces the coarse grid onto one process
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/
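/*
   A sketch of what createLevel() computes (notation only, not code): the
   Galerkin coarse operator is

       A_{k-1} = P^T A_k P

   formed with MatPtAP() below.  If the process count is reduced, the rows of
   A_{k-1} and the columns of P are then moved onto the surviving processes
   with MatGetSubMatrix() using a new equation numbering.
*/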

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,
                                  Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  const PetscBool repart      = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* RAP */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
    if (isLast) new_size = 1;
  }
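  /* Illustrative arithmetic (assumed values): with min_eq_proc = 50 and a
     coarse grid of 1000 global equations, new_size = 1000/50 = 20 processes;
     if instead ncrs_eq_glob < coarse_eq_limit, everything is gathered onto
     new_size = 1 process. */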

  if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc(size*sizeof(PetscInt), &counts);CHKERRQ(ierr);
    if (repart) {
      /* Repartition Cmat_{k} and move the columns of P^{k}_{k-1} and the coordinates of the primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
          PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
        }
      }

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;

        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);CHKERRQ(ierr);
        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim;
          if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim;
        }

        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs_prim, ncrs_prim,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree(d_nnz);CHKERRQ(ierr);
        ierr = PetscFree(o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }

        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);

        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      if (newproc_idx != 0) {
        ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
      }
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */

      PetscInt rfactor,targetPE;
      /* find factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (size%kk==0) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* should not happen unless size is prime */
      }
      new_size = size/rfactor;
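      /* Illustrative arithmetic (assumed values): with size = 16 and a
         requested new_size = 3, the divisors kk of 16 are tried; kk = 4
         gives 16/4 = 4 active PEs and fact = 3/4, the best fit, so
         rfactor = 4 and new_size becomes 4.  Ranks are then aggregated in
         groups of rfactor: targetPE = rank/rfactor below. */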

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        if (pc_gamg->verbose>0) {
          PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
     Create an index set from the is_eq_newproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
     */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_prim_new = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
    ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(dest_crd);CHKERRQ(ierr); /* this is needed! */
    /*
     There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as
     having a block size of node_data_sz).  Note, ISs are expanded into equation space by 'cr_bs'.
     */
    ierr = PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);CHKERRQ(ierr);
    ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    for (ii=0,jj=0; ii<ncrs_prim; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
    ierr = PetscFree(tidx);CHKERRQ(ierr);
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);CHKERRQ(ierr);
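    /* Layout note (description of the index mapping below): 'pc_gamg->data'
       stores each data column contiguously over all local nodes
       (column-major, stride 'ncrs_prim*ndata_rows'), while the scatter
       vectors interleave all 'node_data_sz' items of a node.  The ix/jx
       formulas below convert between the two layouts. */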
    for (jj=0; jj<ndata_cols; jj++) {
      const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
      for (ii=0; ii<ncrs_prim; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy(&isscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
    ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    ierr = PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr);

    pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
    strideNew        = ncrs_prim_new*ndata_rows;

    ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      for (ii=0; ii<ncrs_prim_new; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
    ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);

    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;

      if (!PETSC_TRUE) {
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr);
      }
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

      if (!PETSC_TRUE) {
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
      }
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }

  /* output matrix data (debugging; disabled) */
  if (!PETSC_TRUE) {
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if (llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      PetscViewerASCIIOpen(comm,fname,&viewer);
      ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    PetscViewerASCIIOpen(comm,fname,&viewer);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
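/*
   A minimal (illustrative) user-level sequence that reaches PCSetUp_GAMG()
   through PCSetUp() inside KSPSolve():

     KSPCreate(comm,&ksp);
     KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);      triggers PCSetUp() and hence PCSetUp_GAMG()
*/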
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
  PetscReal      emaxs[GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);

  if (pc_gamg->setup_count++ > 0) {
    if (redo_mesh_setup) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat          B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* the first time through the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2 && (pc_gamg->repart || level==0)) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);

      /* PCSetUp_MG seems to insist on setting this to GMRES */
      ierr = KSPSetType(mglevels[0]->smoothd, KSPPREONLY);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && redo_mesh_setup) {
    ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);

  ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr);
  if (pc_gamg->verbose) {
    PetscInt NN = M;
    if (pc_gamg->verbose==1) {
      ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr);
    } else {
      ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
    }
    nnz0   = info.nz_used;
    nnztot = info.nz_used;
    ierr   = PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                         rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
                         (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr);
  }

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
       level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit);  /* && (size==1 || nactivepe>1); */
       level++) {
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprol) {
          /* smooth */
          ierr = pc_gamg->ops->optprol(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL;

      if (pc_gamg->use_aggs_in_gasm) {
        ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    /* cache eigen estimate */
    if (pc_gamg->emax_id != -1) {
      PetscBool flag;
      ierr = PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr);
      if (!flag) emaxs[level] = -1.;
    } else emaxs[level] = -1.;
    if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) {
      if (pc_gamg->verbose) {
        ierr = PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr);
      }
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif

    ierr = createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
                       &Parr[level1], &Aarr[level1], &nactivepe);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr);

    if (pc_gamg->verbose > 0) {
      PetscInt NN = M;
      if (pc_gamg->verbose==1) {
        ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr);
        ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr);
      } else {
        ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
      }

      nnztot += info.nz_used;
      ierr    = PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                            rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
                            (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr);
    }

    /* stop if one node -- could pull back for singular problems */
    if (M/pc_gamg->data_cell_cols < 2) {
      level++;
      break;
    }
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
  } /* levels */

  if (pc_gamg->data) {
    ierr          = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data = NULL;
  }

  if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  /* simple setup */
  if (!PETSC_TRUE) {
    PC_MG_Levels **mglevels = mg->levels;
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--) {
      ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr);
      ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr);

    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2;
         lidx <= fine_level;
         lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* override defaults and command line args (!) */
      if (pc_gamg->use_aggs_in_gasm) {
        PetscInt sz;
        IS       *is;

        sz   = nASMBlocksArr[level];
        is   = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr);
        if (sz==0) {
          IS       is;
          PetscInt my0,kk;
          ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr);
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&is[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(is);CHKERRQ(ierr);
        }
        ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr);

        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
        ierr                  = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr);
      } else {
        ierr = PCSetType(subpc, PCJACOBI);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
      ierr = PCSetUp(subpc);CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
      if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
      ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
      ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
      /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
       * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
       * view every subdomain as though they were different. */
      ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

    /* create Chebyshev smoothers */
    for (lidx = 1, level = pc_gamg->Nlevels-2;
         lidx <= fine_level;
         lidx++, level--) {
      KSP       smoother;
      PetscBool flag;
      PC        subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      /* do my own cheby */
      ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr);
      if (flag) {
        PetscReal emax, emin;
        ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr);
        if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* cached eigen estimate is only valid for a diagonal PC */
        else { /* eigen estimate 'emax' */
          KSP eksp;
          Mat Lmat = Aarr[level];
          Vec bb, xx;

          ierr = MatGetVecs(Lmat, &bb, 0);CHKERRQ(ierr);
          ierr = MatGetVecs(Lmat, &xx, 0);CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(comm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr);
          }

          /* zeroing out BC rows -- needed for crazy matrices */
          {
            PetscInt    Istart,Iend,ncols,jj,Ii;
            PetscScalar zero = 0.0;
            ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr);
            for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
              ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr);
              if (ncols <= 1) {
                ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr);
              }
              ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr);
            }
            ierr = VecAssemblyBegin(bb);CHKERRQ(ierr);
            ierr = VecAssemblyEnd(bb);CHKERRQ(ierr);
          }

          ierr = KSPCreate(comm, &eksp);CHKERRQ(ierr);
          ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr);
          ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr);
          ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr);
          ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr);
          ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr);
          ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr);

          /* set PC type to be same as smoother */
          ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr);

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr);

          ierr = VecDestroy(&xx);CHKERRQ(ierr);
          ierr = VecDestroy(&bb);CHKERRQ(ierr);
          ierr = KSPDestroy(&eksp);CHKERRQ(ierr);

          if (pc_gamg->verbose > 0) {
            PetscInt N1, tt;
            ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr);
            PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
          }
        }
        {
          PetscInt N1, N0;
          ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr);
          ierr = MatGetSize(Aarr[level+1], &N0, NULL);CHKERRQ(ierr);
          /* heuristic - may need revisiting */
          /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
          emin  = emax * pc_gamg->eigtarget[0];
          emax *= pc_gamg->eigtarget[1];
        }
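        /* Illustrative arithmetic (hypothetical eigtarget values): with
           eigtarget = {0.05, 1.1} and an estimated emax of 2.0, the
           Chebyshev smoother below targets [emin, emax] = [0.1, 2.2]. */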
        ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr);
      } /* setup Chebyshev flag */
    } /* non-coarse levels */

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }

    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);

    if (PETSC_TRUE) {
      KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
      ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
      ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    }
  } else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on
   coarse grids when reducing the number of active processes.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations to aim for per process

   Options Database Key:
.  -pc_gamg_process_eq_limit <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

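   Example (illustrative usage; equivalent to the option -pc_gamg_process_eq_limit 1000):
.vb
      PCGAMGSetProcEqLim(pc,1000);
.ve
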
.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids to improve load balance.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

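   Example (illustrative usage; equivalent to the option -pc_gamg_repartition):
.vb
      PCGAMGSetRepartitioning(pc,PETSC_TRUE);
.ve
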
.seealso: PCGAMGSetProcEqLim(), PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl"
/*@
   PCGAMGSetReuseProl - Reuse the prolongation operator when the preconditioner is rebuilt.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to reuse the prolongation

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetRepartitioning()
@*/
PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl_GAMG"
static PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates to define the subdomains of the GASM smoother.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates

   Options Database Key:
.  -pc_gamg_use_agg_gasm <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGASM
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCMGSetLevels()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Set the relative threshold used for dropping edges in the aggregation graph.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

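   Example (illustrative usage; equivalent to the option -pc_gamg_threshold 0.05):
.vb
      PCGAMGSetThreshold(pc,0.05);
.ve
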
.seealso: PCGAMGSetType()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set the type of the GAMG method; calls the creation routine of the subtype.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type (e.g., PCGAMGAGG)

   Options Database Key:
.  -pc_gamg_type <type>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

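   Example (illustrative usage; equivalent to the option -pc_gamg_type agg):
.vb
      PCGAMGSetType(pc,PCGAMGAGG);
.ve
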
.seealso: PCGAMG
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  PetscInt       two   = 2;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr);
  {
    /* -pc_gamg_type */
    {
      char tname[256] = PCGAMGAGG;
      const char *deftype = pc_gamg->gamg_type_name ? pc_gamg->gamg_type_name : tname;
      ierr = PetscOptionsList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), &flag);CHKERRQ(ierr);
      /* call PCCreateGAMG_XYZ */
      if (flag || !pc_gamg->gamg_type_name) {
        ierr = PCGAMGSetType(pc, flag ? tname : deftype);CHKERRQ(ierr);
      }
    }
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, NULL);CHKERRQ(ierr);
    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_reuse_interpolation */
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation",
                            "Reuse prolongation operator (true)",
                            "PCGAMGReuseProl",
                            pc_gamg->reuse_prol,
                            &pc_gamg->reuse_prol,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_use_agg_gasm */
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
                            "Use aggregation aggregates for GASM smoother (false)",
                            "PCGAMGUseASMAggs",
                            pc_gamg->use_aggs_in_gasm,
                            &pc_gamg->use_aggs_in_gasm,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag);CHKERRQ(ierr);
    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag);CHKERRQ(ierr);
    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag);CHKERRQ(ierr);
    if (flag && pc_gamg->verbose) {
      ierr = PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr);
    }
    /* -pc_gamg_eigtarget */
    ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(pc);CHKERRQ(ierr);}
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
       - This is the entry point to GAMG, registered in pcregis.c

   Options Database Keys:
   Multigrid options (inherited):
+  -pc_mg_cycles <1>: 1 for V-cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1362 
1363   Level: intermediate
1364 
1365   Concepts: multigrid
1366 
1367 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1368            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1369            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1370            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1371            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1372 M*/
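
/*
   A minimal programmatic usage sketch (assumes a Mat A and Vecs b,x are already
   assembled; KSPSetOperators() is the 4-argument form of this PETSc release):

     KSP            ksp;
     PC             pc;
     PetscErrorCode ierr;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetThreshold(pc,0.05);CHKERRQ(ierr);   wrapper for the PCGAMGSetThreshold_C method composed below
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);        allow -pc_gamg_* command-line overrides
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
*/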
1373 
1374 #undef __FUNCT__
1375 #define __FUNCT__ "PCCreate_GAMG"
1376 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1377 {
1378   PetscErrorCode ierr;
1379   PC_GAMG        *pc_gamg;
1380   PC_MG          *mg;
1381 #if defined PETSC_GAMG_USE_LOG
1382   static long count = 0;
1383 #endif
1384 
1385   PetscFunctionBegin;
1386   /* PCGAMG is derived from PCMG, so initialize pc as a PCMG first */
1387   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
1388   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1389 
1390   /* create a supporting struct and attach it to pc */
1391   ierr         = PetscNewLog(pc, PC_GAMG, &pc_gamg);CHKERRQ(ierr);
1392   mg           = (PC_MG*)pc->data;
1393   mg->galerkin = 2;             /* use Galerkin, but the coarse operators are computed externally by GAMG */
1394   mg->innerctx = pc_gamg;
1395 
1396   ierr = PetscNewLog(pc,struct _PCGAMGOps,&pc_gamg->ops);CHKERRQ(ierr);
1397 
1398   pc_gamg->setup_count = 0;
1399   /* these should be in subctx but repartitioning needs simple arrays */
1400   pc_gamg->data_sz = 0;
1401   pc_gamg->data    = 0;
1402 
1403   /* register AMG type */
1404 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
1405   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1406 #endif
1407 
1408   /* overwrite the method table inherited from PCMG with the derived-class PCGAMG implementations */
1409   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1410   pc->ops->setup          = PCSetUp_GAMG;
1411   pc->ops->reset          = PCReset_GAMG;
1412   pc->ops->destroy        = PCDestroy_GAMG;
1413 
1414   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1415   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1416   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);
1417   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseProl_C",PCGAMGSetReuseProl_GAMG);CHKERRQ(ierr);
1418   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);
1419   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1420   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1421   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
1422   pc_gamg->repart           = PETSC_FALSE;
1423   pc_gamg->reuse_prol       = PETSC_FALSE;
1424   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1425   pc_gamg->min_eq_proc      = 50;
1426   pc_gamg->coarse_eq_limit  = 800;
1427   pc_gamg->threshold        = 0.;
1428   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1429   pc_gamg->verbose          = 0;
1430   pc_gamg->emax_id          = -1;
1431   pc_gamg->eigtarget[0]     = 0.05;
1432   pc_gamg->eigtarget[1]     = 1.05;
1433 
1434   /* private events */
1435 #if defined PETSC_GAMG_USE_LOG
1436   if (count++ == 0) {
1437     ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1438     ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1439     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1440     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1441     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1442     ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1443     ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1444     ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1445     ierr = PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1446     ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1447     ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1448     ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1449     ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1450     ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1451     ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1452     ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1453     ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1454 
1455     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1456     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1457     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1458     /* create timer stages */
1459 #if defined GAMG_STAGES
1460     {
1461       char     str[32];
1462       PetscInt lidx;
1463       sprintf(str,"MG Level %d (finest)",0);
1464       ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1465       for (lidx=1; lidx<9; lidx++) {  /* stages are registered for the first 9 levels only */
1466         sprintf(str,"MG Level %d",(int)lidx);  /* cast: PetscInt may be 64-bit but %d expects int */
1467         ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1468       }
1469     }
1470 #endif
1471   }
1472 #endif
1473 
1474   PetscFunctionReturn(0);
1475 }
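
/*
   The PetscObjectComposeFunction() calls in PCCreate_GAMG() implement PETSc's
   usual virtual-method pattern: each public PCGAMGSetXXX() wrapper simply looks
   the "_C" method up on the object and invokes it when present, so the calls
   are safe no-ops on non-GAMG PCs. A sketch of the shape of such a wrapper
   (the real ones live earlier in this file):

     PetscErrorCode PCGAMGSetThreshold(PC pc,PetscReal v)
     {
       PetscErrorCode ierr;

       PetscFunctionBegin;
       PetscValidHeaderSpecific(pc,PC_CLASSID,1);
       ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/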
1476 
1477 #undef __FUNCT__
1478 #define __FUNCT__ "PCGAMGInitializePackage"
1479 /*@C
1480  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1481  from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1482  when using static libraries.
1483 
1484  Level: developer
1485 
1486  .keywords: PC, PCGAMG, initialize, package
1487  .seealso: PetscInitialize()
1488 @*/
1489 PetscErrorCode PCGAMGInitializePackage(void)
1490 {
1491   PetscErrorCode ierr;
1492 
1493   PetscFunctionBegin;
1494   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1495   PCGAMGPackageInitialized = PETSC_TRUE;
1496   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1497   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1498   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1499   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1500 
1501   /* general events */
1502 #if defined PETSC_USE_LOG
1503   ierr = PetscLogEventRegister("PCGAMGgraph_AGG", PC_CLASSID, &PC_GAMGGgraph_AGG);CHKERRQ(ierr);
1504   ierr = PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);CHKERRQ(ierr);
1505   ierr = PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1506   ierr = PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1507   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1508   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1509   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);CHKERRQ(ierr);
1510 #endif
1511 
1512   PetscFunctionReturn(0);
1513 }
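
/*
   GAMGList maps a GAMG type name to its constructor; PCGAMGSetType_GAMG()
   (composed in PCCreate_GAMG() above) reduces to a list lookup plus a call.
   A sketch of that dispatch, given a type string "type":

     PetscErrorCode (*r)(PC);
     ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
     if (!r) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s",type);
     ierr = (*r)(pc);CHKERRQ(ierr);
*/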
1514 
1515 #undef __FUNCT__
1516 #define __FUNCT__ "PCGAMGFinalizePackage"
1517 /*@C
1518  PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1519  called automatically from PetscFinalize() via PetscRegisterFinalize(); users need not call it directly.
1520 
1521  Level: developer
1522 
1523  .keywords: Petsc, destroy, package
1524  .seealso: PetscFinalize()
1525 @*/
1526 PetscErrorCode PCGAMGFinalizePackage(void)
1527 {
1528   PetscErrorCode ierr;
1529 
1530   PetscFunctionBegin;
1531   PCGAMGPackageInitialized = PETSC_FALSE;
1532   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1533   PetscFunctionReturn(0);
1534 }
1535