1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include "petsc-private/matimpl.h"
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <petsc-private/kspimpl.h>
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGgraph_AGG;
14 PetscLogEvent PC_GAMGGgraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptprol_AGG;
20 #endif
21 
22 #define GAMG_MAXLEVELS 30
23 
24 /* #define GAMG_STAGES */
25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
27 #endif
28 
29 static PetscFList GAMGList = 0;
30 
31 /* ----------------------------------------------------------------------------- */
32 #undef __FUNCT__
33 #define __FUNCT__ "PCReset_GAMG"
34 PetscErrorCode PCReset_GAMG(PC pc)
35 {
36   PetscErrorCode  ierr;
37   PC_MG           *mg = (PC_MG*)pc->data;
38   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
39 
40   PetscFunctionBegin;
41   if( pc_gamg->data != 0 ) { /* this should not happen, cleaned up in SetUp */
42     ierr = PetscFree(pc_gamg->data); CHKERRQ(ierr);
43   }
44   pc_gamg->data = 0; pc_gamg->data_sz = 0;
45   PetscFunctionReturn(0);
46 }
47 
48 /* -------------------------------------------------------------------------- */
49 /*
50    createLevel: create the coarse operator with a Galerkin product (RAP), then
51      repartition and/or reduce the number of active processes if requested.
52 
53    Input Parameters:
54    . pc - the preconditioner context
55    . Amat_fine - matrix on this fine (k) level
56    . cbs - coarse block size
57    . isLast - this is the coarsest level, so move all equations to one process
58    Input/Output Parameters:
59    . a_P_inout - prolongation operator to the next level (k-1)
60    . a_nactive_proc - number of active procs
61    Output Parameter:
62    . a_Amat_crs - coarse matrix that is created (k-1)
63 */
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "createLevel"
67 static PetscErrorCode createLevel( const PC pc,
68                             const Mat Amat_fine,
69                             const PetscInt cbs,
70                             const PetscBool isLast,
71                             Mat *a_P_inout,
72                             PetscMPIInt *a_nactive_proc,
73                             Mat *a_Amat_crs
74                             )
75 {
76   PC_MG           *mg = (PC_MG*)pc->data;
77   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
78   const PetscBool  repart = pc_gamg->repart;
79   const PetscInt   min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
80   PetscErrorCode   ierr;
81   Mat              Cmat,Pnew,Pold=*a_P_inout;
82   IS               new_indices,isnum;
83   MPI_Comm         wcomm = ((PetscObject)Amat_fine)->comm;
84   PetscMPIInt      mype,npe,new_npe,nactive = *a_nactive_proc;
85   PetscInt         neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0,rfactor;
86 
87   PetscFunctionBegin;
88   ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
89   ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
90 
91   /* RAP */
92 #ifdef USE_R
93   /* make R with brute force for now */
94   ierr = MatTranspose( Pold, MAT_INITIAL_MATRIX, &Pnew ); CHKERRQ(ierr);
95   ierr = MatDestroy( &Pold );  CHKERRQ(ierr);
96   ierr = MatRARt( Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
97   Pold = Pnew;
98 #else
99   ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
100 #endif
101   ierr = MatSetBlockSize( Cmat, cbs );      CHKERRQ(ierr);
102   ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
103   ncrs0 = (Iend0-Istart0)/cbs; assert((Iend0-Istart0)%cbs == 0);
104 
105   /* get number of PEs to make active, reduce */
106   ierr = MatGetSize( Cmat, &neq, &NN );  CHKERRQ(ierr);
107   new_npe = (PetscMPIInt)((float)neq/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
108   if( new_npe == 0 || neq < coarse_max ) new_npe = 1;
109   else if (new_npe >= nactive ) new_npe = nactive; /* no change, rare */
110   if( isLast ) new_npe = 1;
111 
112   if( !repart && new_npe==nactive ) {
113     *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
114   }
115   else {
116     /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
117     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,data_sz=ndata_rows*ndata_cols;
118     const PetscInt  stride0=ncrs0*pc_gamg->data_cell_rows;
119     PetscInt        *counts,is_sz,*newproc_idx,ii,jj,kk,strideNew,*tidx,inpe,targetPE;
120     IS              isnewproc;
121     VecScatter      vecscat;
122     PetscScalar    *array;
123     Vec             src_crd, dest_crd;
124     IS              isscat;
125 
126     ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
127 #if defined PETSC_GAMG_USE_LOG
128       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
129 #endif
130     if( repart ) {
131       /* create sub communicator  */
132       Mat             adj;
133 
134       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
135 
136       /* MatPartitioningApply calls MatConvert, which is collective */
137       if( cbs == 1 ) {
138 	ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
139       }
140       else{
141 	/* make a scalar matrix to partition */
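	/* A brief note on what follows: each cbs x cbs block of Cmat is collapsed to a
	   single vertex so the partitioner works on nodes rather than individual
	   equations; the node partition is expanded back to equation space below. */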
142 	Mat tMat;
143 	PetscInt ncols,jj,Ii;
144 	const PetscScalar *vals;
145 	const PetscInt *idx;
146 	PetscInt *d_nnz, *o_nnz;
147 	static PetscInt llev = 0;
148 
149 	ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
150 	ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
151 	for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += cbs, jj++ ) {
152 	  ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
153 	  d_nnz[jj] = ncols/cbs;
154 	  o_nnz[jj] = ncols/cbs;
155 	  ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
156 	  if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
157 	  if( o_nnz[jj] > (neq/cbs-ncrs0) ) o_nnz[jj] = neq/cbs-ncrs0;
158 	}
159 
160 	ierr = MatCreateAIJ( wcomm, ncrs0, ncrs0,
161                              PETSC_DETERMINE, PETSC_DETERMINE,
162                              0, d_nnz, 0, o_nnz,
163                              &tMat );
164 	CHKERRQ(ierr);
165 	ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
166 	ierr = PetscFree( o_nnz ); CHKERRQ(ierr);
167 
168 	for ( ii = Istart0; ii < Iend0; ii++ ) {
169 	  PetscInt dest_row = ii/cbs;
170 	  ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
171 	  for( jj = 0 ; jj < ncols ; jj++ ){
172 	    PetscInt dest_col = idx[jj]/cbs;
173 	    PetscScalar v = 1.0;
174 	    ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
175 	  }
176 	  ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
177 	}
178 	ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
179 	ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
180 
181 	if( llev++ == -1 ) {
182 	  PetscViewer viewer; char fname[32];
183 	  ierr = PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);CHKERRQ(ierr);
184 	  PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
185 	  ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
186 	  ierr = PetscViewerDestroy( &viewer );
187 	}
188 
189 	ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
190 
191 	ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
192       }
193 
194       { /* partition: get newproc_idx, set is_sz */
195 	char prefix[256];
196 	const char *pcpre;
197 	const PetscInt *is_idx;
198 	MatPartitioning  mpart;
199 	IS proc_is;
200 
201 	ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
202 	ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
203 	ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr);
204 	ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
205 	ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
206 	ierr = MatPartitioningSetFromOptions( mpart );    CHKERRQ(ierr);
207 	ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
208 	ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
209 	ierr = MatPartitioningDestroy( &mpart );          CHKERRQ(ierr);
210 
211 	/* collect IS info */
212 	ierr = ISGetLocalSize( proc_is, &is_sz );       CHKERRQ(ierr);
213 	ierr = PetscMalloc( cbs*is_sz*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
214 	ierr = ISGetIndices( proc_is, &is_idx );        CHKERRQ(ierr);
215 	NN = 1; /* bring to "front" of machine */
216 	/*NN = npe/new_npe;*/ /* spread partitioning across machine */
217 	for( kk = jj = 0 ; kk < is_sz ; kk++ ){
218 	  for( ii = 0 ; ii < cbs ; ii++, jj++ ){
219 	    newproc_idx[jj] = is_idx[kk] * NN; /* distribution */
220 	  }
221 	}
222 	ierr = ISRestoreIndices( proc_is, &is_idx );     CHKERRQ(ierr);
223 	ierr = ISDestroy( &proc_is );                    CHKERRQ(ierr);
224 
225 	is_sz *= cbs;
226       }
227       ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
228 
229       ierr = ISCreateGeneral( wcomm, is_sz, newproc_idx, PETSC_COPY_VALUES, &isnewproc );
230       CHKERRQ(ierr);
231       if( newproc_idx != 0 ) {
232 	ierr = PetscFree( newproc_idx );  CHKERRQ(ierr);
233       }
234     }
235     else { /* simple aggregation of parts */
236       /* find factor */
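      /* Note on the search below: among the divisors kk of npe, pick the one whose
         resulting process count npe/kk is closest (as a ratio) to the requested new_npe.
         Keeping the factor a divisor of npe lets equations be folded onto
         targetPE = mype/rfactor below without invoking a graph partitioner. */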
237       if( new_npe == 1 ) rfactor = npe; /* easy */
238       else {
239 	PetscReal best_fact = 0.;
240 	jj = -1;
241 	for( kk = 1 ; kk <= npe ; kk++ ){
242 	  if( npe%kk==0 ) { /* a candidate */
243 	    PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
244 	    if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
245 	    if( fact > best_fact ) {
246 	      best_fact = fact; jj = kk;
247 	    }
248 	  }
249 	}
250 	if(jj!=-1) rfactor = jj;
251 	else rfactor = 1; /* prime? */
252       }
253       new_npe = npe/rfactor;
254 
255       if( new_npe==nactive ) {
256 	*a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
257 	ierr = PetscFree( counts );  CHKERRQ(ierr);
258 	*a_nactive_proc = new_npe; /* output */
259 	if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq=%d\n",mype,__FUNCT__,new_npe,neq);
260 	PetscFunctionReturn(0);
261       }
262 
263       /* reduce - isnewproc */
264       targetPE = mype/rfactor;
265 
266       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors: npe: %d --> %d, neq=%d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
267       is_sz = ncrs0*cbs;
268       ierr = ISCreateStride( wcomm, is_sz, targetPE, 0, &isnewproc );
269       CHKERRQ(ierr);
270     }
271 
272     /*
273       Create an index set from the isnewproc index set to indicate the mapping TO
274     */
275     ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
276     /*
277       Determine how many elements are assigned to each processor
278     */
279     inpe = npe;
280     ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
281     ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
282     ncrs_new = counts[mype]/cbs;
283     strideNew = ncrs_new*ndata_rows;
284 #if defined PETSC_GAMG_USE_LOG
285       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);   CHKERRQ(ierr);
286 #endif
287     /* Create a vector to contain the newly ordered element information */
287     ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
289     ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
290     ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
291     /*
292      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
293      a block size of ...).  Note, ISs are expanded into equation space by 'cbs'.
294      */
295     ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
296     ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
297     for(ii=0,jj=0; ii<ncrs0 ; ii++) {
298       PetscInt id = idx[ii*cbs]/cbs; /* get node back */
299       for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
300     }
301     ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
302     ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
303     CHKERRQ(ierr);
304     ierr = PetscFree( tidx );  CHKERRQ(ierr);
305     /*
306      Create a vector to contain the original vertex information for each element
307      */
308     ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
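    /* The loop below repacks pc_gamg->data for the scatter: 'ix' reads the current
       layout, in which each of the ndata_cols data columns is stored contiguously over
       the local nodes (stride0 = ncrs0*ndata_rows), while 'jx' writes a node-interleaved
       layout so that all data_sz values of a node travel together to their new owner. */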
309     for( jj=0; jj<ndata_cols ; jj++ ) {
310       for( ii=0 ; ii<ncrs0 ; ii++) {
311 	for( kk=0; kk<ndata_rows ; kk++ ) {
312 	  PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*ndata_cols + jj;
313           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
314 	  ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );  CHKERRQ(ierr);
315 	}
316       }
317     }
318     ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
319     ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
320     /*
321       Scatter the element vertex information (still in the original vertex ordering)
322       to the correct processor
323     */
324     ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
325     CHKERRQ(ierr);
326     ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
327     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
328     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
329     ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
330     ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
331     /*
332       Put the element vertex data into a new allocation of the gdata->ele
333     */
334     ierr = PetscFree( pc_gamg->data );    CHKERRQ(ierr);
335     ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), &pc_gamg->data );    CHKERRQ(ierr);
336     pc_gamg->data_sz = data_sz*ncrs_new;
337     ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
338     for( jj=0; jj<ndata_cols ; jj++ ) {
339       for( ii=0 ; ii<ncrs_new ; ii++) {
340 	for( kk=0; kk<ndata_rows ; kk++ ) {
341 	  PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*ndata_cols + jj;
342 	  pc_gamg->data[ix] = PetscRealPart(array[jx]);
343 	  array[jx] = 1.e300;
344 	}
345       }
346     }
347     ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
348     ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
349 #if defined PETSC_GAMG_USE_LOG
350     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
351 #endif
352     /*
353       Invert for MatGetSubMatrix
354     */
355     ierr = ISInvertPermutation( isnum, ncrs_new*cbs, &new_indices ); CHKERRQ(ierr);
356     ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
357     ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
358 #if defined PETSC_GAMG_USE_LOG
359     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
360     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
361 #endif
362     /* A_crs output */
363     ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
364     CHKERRQ(ierr);
365 
366     ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
367     Cmat = *a_Amat_crs; /* output */
368     ierr = MatSetBlockSize( Cmat, cbs );      CHKERRQ(ierr);
369 #if defined PETSC_GAMG_USE_LOG
370     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
371 #endif
372     /* prolongator */
373     ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
374     {
375       IS findices;
376 #if defined PETSC_GAMG_USE_LOG
377       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
378 #endif
379       ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
380 #ifdef USE_R
381       ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
382 #else
383       ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
384 #endif
385       CHKERRQ(ierr);
386       ierr = ISDestroy( &findices ); CHKERRQ(ierr);
387 #if defined PETSC_GAMG_USE_LOG
388       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
389 #endif
390     }
391     ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
392     *a_P_inout = Pnew; /* output - repartitioned */
393     ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
394     ierr = PetscFree( counts );  CHKERRQ(ierr);
395   }
396 
397   *a_nactive_proc = new_npe; /* output */
398 
399   if( !PETSC_TRUE ) {
400     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
401     if(llev==0) {
402       sprintf(fname,"Cmat_%d.m",llev++);
403       PetscViewerASCIIOpen(wcomm,fname,&viewer);
404       ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
405       ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr);
406       ierr = PetscViewerDestroy( &viewer );
407     }
408     sprintf(fname,"Cmat_%d.m",llev++);
409     PetscViewerASCIIOpen(wcomm,fname,&viewer);
410     ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
411     ierr = MatView(Cmat, viewer ); CHKERRQ(ierr);
412     ierr = PetscViewerDestroy( &viewer );
413   }
414 
415   PetscFunctionReturn(0);
416 }
417 
418 /* -------------------------------------------------------------------------- */
419 /*
420    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
421                     by setting data structures and options.
422 
423    Input Parameter:
424 .  pc - the preconditioner context
425 
426    Application Interface Routine: PCSetUp()
427 
428    Notes:
429    The interface routine PCSetUp() is not usually called directly by
430    the user, but instead is called by PCApply() if necessary.
431 */
432 #undef __FUNCT__
433 #define __FUNCT__ "PCSetUp_GAMG"
434 PetscErrorCode PCSetUp_GAMG( PC pc )
435 {
436   PetscErrorCode  ierr;
437   PC_MG           *mg = (PC_MG*)pc->data;
438   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
439   Mat              Pmat = pc->pmat;
440   PetscInt         fine_level,level,level1,M,N,bs,nloc,lidx,Istart,Iend;
441   MPI_Comm         wcomm = ((PetscObject)pc)->comm;
442   PetscMPIInt      mype,npe,nactivepe;
443   Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
444   PetscReal        emaxs[GAMG_MAXLEVELS];
445   PetscLogDouble   nnz0=0,nnztot=0;
446   MatInfo          info;
447 
448   PetscFunctionBegin;
449   ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
450   ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
451   if( pc_gamg->setup_count++ > 0 ) {
452     PC_MG_Levels   **mglevels = mg->levels;
453     /* just do Galerkin grids */
454     Mat B,dA,dB;
455     assert(pc->setupcalled);
456 
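    /* On re-setup the prolongators are kept; only the coarse operators are rebuilt
       (or updated in place) by Galerkin products with the new fine-grid matrix. */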
457     if( pc_gamg->Nlevels > 1 ) {
458       /* currently only handle case where mat and pmat are the same on coarser levels */
459       ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
460       /* (re)set to get dirty flag */
461       ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
462 
463       for (level=pc_gamg->Nlevels-2; level>-1; level--) {
464         /* the first time through the matrix structure has changed from repartitioning */
465         if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
466           ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
467           ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
468           mglevels[level]->A = B;
469         }
470         else {
471           ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
472           ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
473         }
474         ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
475         dB = B;
476         }
477     }
478 
479     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
480 
481     /* PCSetUp_MG seems to insist on setting this to GMRES */
482     ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);
483 
484     PetscFunctionReturn(0);
485   }
486   assert(pc->setupcalled == 0);
487 
488   /* GAMG requires the fine-grid matrix as input; GAMG itself determines the number of levels. */
489   ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr);
490   ierr = MatGetSize( Pmat, &M, &N );CHKERRQ(ierr);
491   ierr = MatGetOwnershipRange( Pmat, &Istart, &Iend ); CHKERRQ(ierr);
492   nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);
493 
494   if( pc_gamg->data == 0 && nloc > 0 ) {
495     if(!pc_gamg->createdefaultdata){
496       SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_LIB,"'createdefaultdata' not set?!?! need to support NULL data!!!");
497     }
498     ierr = pc_gamg->createdefaultdata( pc ); CHKERRQ(ierr);
499   }
500 
501   /* Get A_i and R_i */
502   if (pc_gamg->verbose) {
503     if(pc_gamg->verbose==1) ierr =  MatGetInfo(Pmat,MAT_LOCAL,&info);
504     else ierr =  MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
505     CHKERRQ(ierr);
506     nnz0 = info.nz_used;
507     nnztot = info.nz_used;
508     PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
509                 mype,__FUNCT__,0,N,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
510                 (int)(nnz0/(PetscReal)N),npe);
511   }
512 
513   for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
514         level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
515         level++ ){
516     level1 = level + 1;
517 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
518     ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
519 #endif
520 #if defined PETSC_GAMG_USE_LOG
521     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr);
522 #endif
523     { /* construct prolongator */
524       Mat Gmat;
525       PetscCoarsenData *agg_lists;
526 
527       ierr = pc_gamg->graph( pc, Aarr[level], &Gmat ); CHKERRQ(ierr);
528       ierr = pc_gamg->coarsen( pc, &Gmat, &agg_lists ); CHKERRQ(ierr);
529       ierr = pc_gamg->prolongator( pc, Aarr[level], Gmat, agg_lists, &Parr[level1] );
530       CHKERRQ(ierr);
531 
532       if( Parr[level1] ){
533         /* get new block size of coarse matrices */
534         if( pc_gamg->col_bs_id != -1 ){
535           PetscBool flag;
536           ierr = PetscObjectComposedDataGetInt( (PetscObject)Parr[level1], pc_gamg->col_bs_id, bs, flag );
537           CHKERRQ( ierr ); assert(flag);
538         }
539       }
540 
541       if( pc_gamg->optprol && Parr[level1] ){
542         /* smooth */
543         ierr = pc_gamg->optprol( pc, Aarr[level], &Parr[level1] ); CHKERRQ(ierr);
544       }
545 
546       ierr = MatDestroy( &Gmat );      CHKERRQ(ierr);
547       ierr = PetscCDDestroy( agg_lists );  CHKERRQ(ierr);
548     }
549 #if defined PETSC_GAMG_USE_LOG
550     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
551 #endif
552     /* cache eigen estimate */
553     if( pc_gamg->emax_id != -1 ){
554       PetscBool flag;
555       ierr = PetscObjectComposedDataGetReal( (PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag );
556       CHKERRQ( ierr );
557       if( !flag ) emaxs[level] = -1.;
558     }
559     else emaxs[level] = -1.;
560     if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
561     if( !Parr[level1] ) {
562       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
563       break;
564     }
565 #if defined PETSC_GAMG_USE_LOG
566     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
567 #endif
568     ierr = createLevel( pc, Aarr[level], bs,
569                         (PetscBool)(level==pc_gamg->Nlevels-2),
570                         &Parr[level1], &nactivepe, &Aarr[level1] );
571     CHKERRQ(ierr);
572 #if defined PETSC_GAMG_USE_LOG
573     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
574 #endif
575     ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
576     if (pc_gamg->verbose){
577       PetscInt NN = M;
578       if(pc_gamg->verbose==1) {
579         ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
580         ierr = MatGetLocalSize( Aarr[level1], &NN, &N );CHKERRQ(ierr);
581       }
582       else ierr = MatGetInfo(Aarr[level1],MAT_GLOBAL_SUM,&info);
583       CHKERRQ(ierr);
584       nnztot += info.nz_used;
585       PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
586                   mype,__FUNCT__,(int)level1,N,pc_gamg->data_cell_cols,
587                   (int)(info.nz_used/(PetscReal)NN), nactivepe );
588       CHKERRQ(ierr);
589     }
590     /* stop if one node */
591     if( M/pc_gamg->data_cell_cols < 2 ) {
592       level++;
593       break;
594     }
595     /* Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id; */
596     /* v = 1.e-10; /\* LU factor has hard wired numbers for small diags so this needs to match (yuk) *\/ */
597     /* ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr); */
598     /* nloceq = Iend-Istart; */
599     /* ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr); */
600     /* ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr); */
601     /* ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr); */
602     /* for(kk=0;kk<nloceq;kk++){ */
603     /*   if(data_arr[kk]==0.0) { */
604     /*     id = kk + Istart; */
605     /*     ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES); */
606     /*     CHKERRQ(ierr); */
607     /*     PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1); */
608     /*   } */
609     /* } */
610     /* ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr); */
611     /* ierr = VecDestroy( &diag );                CHKERRQ(ierr); */
612     /* ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */
613     /* ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */
614 
615 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
616     ierr = PetscLogStagePop(); CHKERRQ( ierr );
617 #endif
618   } /* levels */
619 
620   if( pc_gamg->data ) {
621     ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr );
622     pc_gamg->data = 0;
623   }
624 
625   if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
626   pc_gamg->Nlevels = level + 1;
627   fine_level = level;
628   ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);
629 
630   /* simple setup */
631   if( !PETSC_TRUE ){
632     PC_MG_Levels **mglevels = mg->levels;
633     for (lidx=0,level=pc_gamg->Nlevels-1;
634          lidx<fine_level;
635          lidx++, level--){
636       ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
637       ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
638       ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
639       ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
640     }
641     ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
642 
643     ierr = PCSetUp_MG( pc );  CHKERRQ( ierr );
644   }
645   else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
646     /* set default smoothers & set operators */
647     for ( lidx = 1, level = pc_gamg->Nlevels-2;
648           lidx <= fine_level;
649           lidx++, level--) {
650       KSP smoother; PC subpc;
651       /* set defaults */
652       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
653       ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
654       ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
655       ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
656       ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
657       /* set ops */
658       ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
659       ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr);
660     }
661     {
662       /* coarse grid */
663       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
664       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
665       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
666       ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
667       ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
668       ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
669       ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
670       ierr = PCSetUp( subpc ); CHKERRQ(ierr);
671       ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);      assert(ii==1);
672       ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
673       ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
674     }
675 
676     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
677     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
678     ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr);
679     ierr = PetscOptionsEnd();CHKERRQ(ierr);
680     if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
681 
682     /* create cheby smoothers */
683     for ( lidx = 1, level = pc_gamg->Nlevels-2;
684           lidx <= fine_level;
685           lidx++, level--) {
686       KSP smoother;
687       PetscBool isCheb;
688 
689       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
690       /* do my own cheby */
691       ierr = PetscTypeCompare( (PetscObject)smoother, KSPCHEBYCHEV, &isCheb ); CHKERRQ(ierr);
692       if( isCheb ) {
693         PetscReal emax, emin;
694         PC subpc;
695 
696         ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
697         ierr = PetscTypeCompare( (PetscObject)subpc, PCJACOBI, &isCheb ); CHKERRQ(ierr);
698         if( isCheb && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
699         else{ /* eigen estimate 'emax' */
700           KSP eksp; Mat Lmat = Aarr[level];
701           Vec bb, xx;
702 
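          /* Estimate the extreme eigenvalues by running a few iterations of a separate
             KSP (options prefix "est_") on a random right-hand side, using the same PC
             type as the smoother, and reading off the computed singular values; the
             result feeds the Chebyshev bounds set below. */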
703           ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
704           ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
705           {
706             PetscRandom    rctx;
707             ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
708             ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
709             ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
710             ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
711           }
712           ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
713           ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
714           CHKERRQ(ierr);
715           ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);
716 
717           ierr = KSPAppendOptionsPrefix( eksp, "est_");         CHKERRQ(ierr);
718           ierr = KSPSetFromOptions( eksp );    CHKERRQ(ierr);
719 
720           ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
721           ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
722           ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
723 
724           { /* set PC type to be same as smoother - does not get all parameters!!! */
725             const PCType type;    PC epc;
726             ierr = PCGetType( subpc, &type );   CHKERRQ(ierr);
727             ierr = KSPGetPC( eksp, &epc );CHKERRQ( ierr );
728             ierr = PCSetType( epc, type ); CHKERRQ(ierr);
729           }
730 
731           /* solve - keep stuff out of logging */
732           ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
733           ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
734           ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
735           ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
736           ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);
737 
738           ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
739 
740           ierr = VecDestroy( &xx );       CHKERRQ(ierr);
741           ierr = VecDestroy( &bb );       CHKERRQ(ierr);
742           ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
743 
744           if (pc_gamg->verbose) {
745             PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e\n",__FUNCT__,emax,emin);
746           }
747         }
748         {
749           PetscInt N1, N0, tt;
750           ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
751           ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
752 	  /* heuristic: set emin to emax divided by the coarsening ratio (crude estimate) */
753           emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
754           emax *= 1.05;
755         }
756         ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
757       }
758     }
759 
760     /* clean up */
761     for(level=1;level<pc_gamg->Nlevels;level++){
762       ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
763       ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
764     }
765 
766     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
767 
768     {
769       KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
770       ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
771       ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
772     }
773   }
774   else {
775     KSP smoother;
776     if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
777     ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
778     ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
779     ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
780     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
781   }
782 
783   PetscFunctionReturn(0);
784 }
785 
786 /* ------------------------------------------------------------------------- */
787 /*
788  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
789    that was created with PCCreate_GAMG().
790 
791    Input Parameter:
792 .  pc - the preconditioner context
793 
794    Application Interface Routine: PCDestroy()
795 */
796 #undef __FUNCT__
797 #define __FUNCT__ "PCDestroy_GAMG"
798 PetscErrorCode PCDestroy_GAMG( PC pc )
799 {
800   PetscErrorCode  ierr;
801   PC_MG           *mg = (PC_MG*)pc->data;
802   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
803 
804   PetscFunctionBegin;
805   ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
806   ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
807   ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
808   PetscFunctionReturn(0);
809 }
810 
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "PCGAMGSetProcEqLim"
814 /*@
815    PCGAMGSetProcEqLim - Set the number of equations to aim for per process on coarse
816    grids; used to decide when to reduce the number of active processes.
817 
818    Not Collective on PC
819 
820    Input Parameters:
821 +  pc - the preconditioner context
822 -  n - the number of equations per process to aim for
823 
824    Options Database Key:
825 .  -pc_gamg_process_eq_limit
826 
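   Example Usage (a minimal sketch; error checking omitted and 'pc' assumed to come from KSPGetPC()):
.vb
     PCSetType(pc,PCGAMG);
     PCGAMGSetProcEqLim(pc,1000);  /* aim for roughly 1000 equations per process on coarse grids */
.ve
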
827    Level: intermediate
828 
829    Concepts: Unstructured multigrid preconditioner
830 
831 .seealso: ()
832 @*/
833 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
834 {
835   PetscErrorCode ierr;
836 
837   PetscFunctionBegin;
838   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
839   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
840   PetscFunctionReturn(0);
841 }
842 
843 EXTERN_C_BEGIN
844 #undef __FUNCT__
845 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
846 PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
847 {
848   PC_MG           *mg = (PC_MG*)pc->data;
849   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
850 
851   PetscFunctionBegin;
852   if(n>0) pc_gamg->min_eq_proc = n;
853   PetscFunctionReturn(0);
854 }
855 EXTERN_C_END
856 
857 #undef __FUNCT__
858 #define __FUNCT__ "PCGAMGSetCoarseEqLim"
859 /*@
860    PCGAMGSetCoarseEqLim - Set the maximum number of equations for the coarsest grid; coarsening stops near this size.
861 
862  Collective on PC
863 
864    Input Parameters:
865 +  pc - the preconditioner context
866 -  n - the maximum number of equations for the coarsest grid
867 
868    Options Database Key:
869 .  -pc_gamg_coarse_eq_limit
870 
871    Level: intermediate
872 
873    Concepts: Unstructured multigrid preconditioner
874 
875 .seealso: ()
876  @*/
877 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
878 {
879   PetscErrorCode ierr;
880 
881   PetscFunctionBegin;
882   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
883   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
884   PetscFunctionReturn(0);
885 }
886 
887 EXTERN_C_BEGIN
888 #undef __FUNCT__
889 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
890 PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
891 {
892   PC_MG           *mg = (PC_MG*)pc->data;
893   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
894 
895   PetscFunctionBegin;
896   if(n>0) pc_gamg->coarse_eq_limit = n;
897   PetscFunctionReturn(0);
898 }
899 EXTERN_C_END
900 
901 #undef __FUNCT__
902 #define __FUNCT__ "PCGAMGSetRepartitioning"
903 /*@
904    PCGAMGSetRepartitioning - Repartition the coarse grids
905 
906    Collective on PC
907 
908    Input Parameters:
909 +  pc - the preconditioner context
910 -  n - PETSC_TRUE to repartition the coarse grids
911 
912    Options Database Key:
913 .  -pc_gamg_repartition
914 
915    Level: intermediate
916 
917    Concepts: Unstructured multigrid preconditioner
918 
919 .seealso: ()
920 @*/
921 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
922 {
923   PetscErrorCode ierr;
924 
925   PetscFunctionBegin;
926   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
927   ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
928   PetscFunctionReturn(0);
929 }
930 
931 EXTERN_C_BEGIN
932 #undef __FUNCT__
933 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
934 PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
935 {
936   PC_MG           *mg = (PC_MG*)pc->data;
937   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
938 
939   PetscFunctionBegin;
940   pc_gamg->repart = n;
941   PetscFunctionReturn(0);
942 }
943 EXTERN_C_END
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "PCGAMGSetNlevels"
947 /*@
948    PCGAMGSetNlevels - Set the maximum number of multigrid levels.
949 
950    Not collective on PC
951 
952    Input Parameters:
953 +  pc - the preconditioner context
954 -  n - the maximum number of levels
955 
956    Options Database Key:
957 .  -pc_mg_levels
958 
959    Level: intermediate
960 
961    Concepts: Unstructured multigrid preconditioner
962 
963 .seealso: ()
964 @*/
965 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
966 {
967   PetscErrorCode ierr;
968 
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
971   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
972   PetscFunctionReturn(0);
973 }
974 
975 EXTERN_C_BEGIN
976 #undef __FUNCT__
977 #define __FUNCT__ "PCGAMGSetNlevels_GAMG"
978 PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
979 {
980   PC_MG           *mg = (PC_MG*)pc->data;
981   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
982 
983   PetscFunctionBegin;
984   pc_gamg->Nlevels = n;
985   PetscFunctionReturn(0);
986 }
987 EXTERN_C_END
988 
989 #undef __FUNCT__
990 #define __FUNCT__ "PCGAMGSetThreshold"
991 /*@
992    PCGAMGSetThreshold - Set the relative threshold used for dropping edges in the aggregation graph
993 
994    Not collective on PC
995 
996    Input Parameters:
997 +  pc - the preconditioner context
998 -  n - the relative threshold; weaker graph edges are dropped
999 
1000    Options Database Key:
1001 .  -pc_gamg_threshold
1002 
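   Example Usage (a minimal sketch; error checking omitted):
.vb
     PCSetType(pc,PCGAMG);
     PCGAMGSetThreshold(pc,0.08);  /* drop relatively weak edges below 0.08 from the graph */
.ve
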
1003    Level: intermediate
1004 
1005    Concepts: Unstructured multigrid preconditioner
1006 
1007 .seealso: ()
1008 @*/
1009 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1010 {
1011   PetscErrorCode ierr;
1012 
1013   PetscFunctionBegin;
1014   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1015   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 EXTERN_C_BEGIN
1020 #undef __FUNCT__
1021 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
1022 PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1023 {
1024   PC_MG           *mg = (PC_MG*)pc->data;
1025   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1026 
1027   PetscFunctionBegin;
1028   pc_gamg->threshold = n;
1029   PetscFunctionReturn(0);
1030 }
1031 EXTERN_C_END
1032 
1033 #undef __FUNCT__
1034 #define __FUNCT__ "PCGAMGSetType"
1035 /*@
1036    PCGAMGSetType - Set the GAMG method; calls the create method of the selected subtype
1037 
1038    Collective on PC
1039 
1040    Input Parameters:
1041 +  pc - the preconditioner context
1042 -  type - the GAMG type (e.g. GAMGAGG or GAMGGEO)
1043 
1044    Options Database Key:
1045 .  -pc_gamg_type
1046 
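   Example Usage (a minimal sketch; error checking omitted):
.vb
     PCSetType(pc,PCGAMG);
     PCGAMGSetType(pc,GAMGAGG);    /* select the aggregation implementation */
.ve
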
1047    Level: intermediate
1048 
1049    Concepts: Unstructured multigrid preconditioner
1050 
1051 .seealso: ()
1052 @*/
1053 PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
1054 {
1055   PetscErrorCode ierr;
1056 
1057   PetscFunctionBegin;
1058   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1059   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
1060   CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 EXTERN_C_BEGIN
1065 #undef __FUNCT__
1066 #define __FUNCT__ "PCGAMGSetType_GAMG"
1067 PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
1068 {
1069   PetscErrorCode ierr,(*r)(PC);
1070 
1071   PetscFunctionBegin;
1072   ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
1073   CHKERRQ(ierr);
1074 
1075   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1076 
1077   /* call sub create method */
1078   ierr = (*r)(pc); CHKERRQ(ierr);
1079 
1080   PetscFunctionReturn(0);
1081 }
1082 EXTERN_C_END
1083 
1084 #undef __FUNCT__
1085 #define __FUNCT__ "PCSetFromOptions_GAMG"
1086 PetscErrorCode PCSetFromOptions_GAMG( PC pc )
1087 {
1088   PetscErrorCode  ierr;
1089   PC_MG           *mg = (PC_MG*)pc->data;
1090   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1091   PetscBool        flag;
1092   MPI_Comm         wcomm = ((PetscObject)pc)->comm;
1093 
1094   PetscFunctionBegin;
1095   ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
1096   {
1097     /* -pc_gamg_verbose */
1098     ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1099                            "none", pc_gamg->verbose,
1100                            &pc_gamg->verbose, PETSC_NULL );
1101     CHKERRQ(ierr);
1102 
1103     /* -pc_gamg_repartition */
1104     ierr = PetscOptionsBool("-pc_gamg_repartition",
1105                             "Repartition coarse grids (false)",
1106                             "PCGAMGSetRepartitioning",
1107                             pc_gamg->repart,
1108                             &pc_gamg->repart,
1109                             &flag);
1110     CHKERRQ(ierr);
1111 
1112     /* -pc_gamg_process_eq_limit */
1113     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
1114                            "Limit (goal) on number of equations per process on coarse grids",
1115                            "PCGAMGSetProcEqLim",
1116                            pc_gamg->min_eq_proc,
1117                            &pc_gamg->min_eq_proc,
1118                            &flag );
1119     CHKERRQ(ierr);
1120 
1121     /* -pc_gamg_coarse_eq_limit */
1122     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1123                            "Limit on number of equations for the coarse grid",
1124                            "PCGAMGSetCoarseEqLim",
1125                            pc_gamg->coarse_eq_limit,
1126                            &pc_gamg->coarse_eq_limit,
1127                            &flag );
1128     CHKERRQ(ierr);
1129 
1130     /* -pc_gamg_threshold */
1131     ierr = PetscOptionsReal("-pc_gamg_threshold",
1132                             "Relative threshold to use for dropping edges in aggregation graph",
1133                             "PCGAMGSetThreshold",
1134                             pc_gamg->threshold,
1135                             &pc_gamg->threshold,
1136                             &flag );
1137     CHKERRQ(ierr);
1138     if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1139 
1140     ierr = PetscOptionsInt("-pc_mg_levels",
1141                            "Set number of MG levels",
1142                            "PCGAMGSetNlevels",
1143                            pc_gamg->Nlevels,
1144                            &pc_gamg->Nlevels,
1145                            &flag ); CHKERRQ(ierr);
1146   }
1147   ierr = PetscOptionsTail();CHKERRQ(ierr);
1148 
1149   PetscFunctionReturn(0);
1150 }
1151 
1152 /* -------------------------------------------------------------------------- */
1153 /*MC
1154      PCCreate_GAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1155        - This is the entry point to GAMG, registered in pcregis.c
1156 
1157    Options Database Keys:
1158    Multigrid options (inherited)
1159 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
1160 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1161 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1162 -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
1163 
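   A typical command-line invocation might look like the following (a sketch; the type
   name and numeric values are illustrative, and the limits shown are the defaults set
   in PCCreate_GAMG()):
.vb
     -ksp_type cg -pc_type gamg -pc_gamg_type agg -pc_gamg_threshold 0.05
       -pc_gamg_process_eq_limit 100 -pc_gamg_coarse_eq_limit 800
.ve
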
1164   Level: intermediate
1165 
1166   Concepts: multigrid
1167 
1168 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1169            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1170            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1171            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1172            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1173 M*/
1174 EXTERN_C_BEGIN
1175 #undef __FUNCT__
1176 #define __FUNCT__ "PCCreate_GAMG"
1177 PetscErrorCode  PCCreate_GAMG( PC pc )
1178 {
1179   PetscErrorCode  ierr;
1180   PC_GAMG         *pc_gamg;
1181   PC_MG           *mg;
1182 
1183 #if defined PETSC_GAMG_USE_LOG
1184   static long count = 0;
1185 #endif
1186 
1187   PetscFunctionBegin;
1188   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1189   ierr = PCSetType( pc, PCMG );  CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
1190   ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);
1191 
1192   /* create a supporting struct and attach it to pc */
1193   ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
1194   mg = (PC_MG*)pc->data;
1195   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
1196   mg->innerctx = pc_gamg;
1197 
1198   pc_gamg->setup_count = 0;
1199   /* these should be in subctx but repartitioning needs simple arrays */
1200   pc_gamg->data_sz = 0;
1201   pc_gamg->data = 0;
1202 
1203   /* register AMG type */
1204   if( !GAMGList ){
1205     ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
1206     ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
1207   }
1208 
1209   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1210   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1211   pc->ops->setup          = PCSetUp_GAMG;
1212   pc->ops->reset          = PCReset_GAMG;
1213   pc->ops->destroy        = PCDestroy_GAMG;
1214 
1215   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1216 					    "PCGAMGSetProcEqLim_C",
1217 					    "PCGAMGSetProcEqLim_GAMG",
1218 					    PCGAMGSetProcEqLim_GAMG);
1219   CHKERRQ(ierr);
1220 
1221   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1222 					    "PCGAMGSetCoarseEqLim_C",
1223 					    "PCGAMGSetCoarseEqLim_GAMG",
1224 					    PCGAMGSetCoarseEqLim_GAMG);
1225   CHKERRQ(ierr);
1226 
1227   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1228 					    "PCGAMGSetRepartitioning_C",
1229 					    "PCGAMGSetRepartitioning_GAMG",
1230 					    PCGAMGSetRepartitioning_GAMG);
1231   CHKERRQ(ierr);
1232 
1233   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1234 					    "PCGAMGSetThreshold_C",
1235 					    "PCGAMGSetThreshold_GAMG",
1236 					    PCGAMGSetThreshold_GAMG);
1237   CHKERRQ(ierr);
1238 
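  /* PCGAMGSetNlevels() looks up "PCGAMGSetNlevels_C" via PetscTryMethod(), so compose
     it here as well; this registration appears to be missing above. */
  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
					    "PCGAMGSetNlevels_C",
					    "PCGAMGSetNlevels_GAMG",
					    PCGAMGSetNlevels_GAMG);
  CHKERRQ(ierr);
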
1239   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1240 					    "PCGAMGSetType_C",
1241 					    "PCGAMGSetType_GAMG",
1242 					    PCGAMGSetType_GAMG);
1243   CHKERRQ(ierr);
1244 
1245   pc_gamg->repart = PETSC_FALSE;
1246   pc_gamg->min_eq_proc = 100;
1247   pc_gamg->coarse_eq_limit = 800;
1248   pc_gamg->threshold = 0.05;
1249   pc_gamg->Nlevels = GAMG_MAXLEVELS;
1250   pc_gamg->verbose = 0;
1251   pc_gamg->emax_id = -1;
1252   pc_gamg->col_bs_id = -1;
1253 
1254   /* instantiate derived type */
1255   ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
1256   {
1257     char tname[256] = GAMGAGG;
1258     ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
1259                             GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
1260     CHKERRQ(ierr);
1261     ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
1262   }
1263   ierr = PetscOptionsTail();CHKERRQ(ierr);
1264 
1265   /* private events */
1266 #if defined PETSC_GAMG_USE_LOG
1267   if( count++ == 0 ) {
1268     PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1269     PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1270     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1271     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1272     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1273     PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1274     PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1275     PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1276     PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1277     PetscLogEventRegister("  SA: collect data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1278     PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1279     PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1280     PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1281     PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1282     PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1283     PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1284     PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1285 
1286     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1287     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1288     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1289     /* create timer stages */
1290 #if defined GAMG_STAGES
1291     {
1292       char str[32];
1293       PetscInt lidx;
1294       sprintf(str,"MG Level %d (finest)",0);
1295       PetscLogStageRegister(str, &gamg_stages[0]);
1296       for (lidx=1;lidx<9;lidx++){
1297 	sprintf(str,"MG Level %d",lidx);
1298 	PetscLogStageRegister(str, &gamg_stages[lidx]);
1299       }
1300     }
1301 #endif
1302   }
1303 #endif
1304   /* general events */
1305 #if defined PETSC_USE_LOG
1306   PetscLogEventRegister("PCGAMGgraph_AGG", PC_CLASSID, &PC_GAMGGgraph_AGG);
1307   PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1308   PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1309   PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1310   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1311   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1312   PetscLogEventRegister("PCGAMGProlOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1313 #endif
1314 
1315   PetscFunctionReturn(0);
1316 }
1317 EXTERN_C_END
1318