xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 4e5d74a76bd8fa54fede1e0a3eb6ccefe767a7da)
1 /*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
/* Private context for the GAMG preconditioner */
typedef struct gamg_TAG {
  PetscInt       m_dim;      /* spatial dimension of the coordinate data (2 or 3) */
  PetscInt       m_Nlevels;  /* number of multigrid levels built by PCSetUp_GAMG */
  PetscInt       m_data_sz;  /* number of entries in m_data (local rows/bs * m_dim) */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;

/* grids with fewer than this many global equations are not coarsened further */
#define TOP_GRID_LIM 1000
14 
15 /* -----------------------------------------------------------------------------*/
16 #undef __FUNCT__
17 #define __FUNCT__ "PCReset_GAMG"
18 PetscErrorCode PCReset_GAMG(PC pc)
19 {
20   PetscErrorCode  ierr;
21   PC_MG           *mg = (PC_MG*)pc->data;
22   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
23 
24   PetscFunctionBegin;
25   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
26   PetscFunctionReturn(0);
27 }
28 
29 /* -------------------------------------------------------------------------- */
30 /*
31    PCSetCoordinates_GAMG
32 
33    Input Parameter:
34    .  pc - the preconditioner context
35 */
36 EXTERN_C_BEGIN
37 #undef __FUNCT__
38 #define __FUNCT__ "PCSetCoordinates_GAMG"
39 PetscErrorCode PCSetCoordinates_GAMG(PC pc, const int ndm,PetscReal *coords )
40 {
41   PC_MG          *mg = (PC_MG*)pc->data;
42   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
43   PetscErrorCode ierr;
44   PetscInt       bs, my0, tt;
45   Mat            mat = pc->pmat;
46   PetscInt       arrsz;
47 
48   PetscFunctionBegin;
49   ierr  = MatGetBlockSize( mat, &bs );               CHKERRQ( ierr );
50   ierr  = MatGetOwnershipRange( mat, &my0, &tt ); CHKERRQ(ierr);
51   arrsz = (tt-my0)/bs*ndm;
52 
53   // put coordinates
54   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
55     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
56     ierr = PetscMalloc(arrsz*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
57   }
58 
59   /* copy data in */
60   for(tt=0;tt<arrsz;tt++){
61     pc_gamg->m_data[tt] = coords[tt];
62   }
63   pc_gamg->m_data_sz = arrsz;
64   pc_gamg->m_dim = ndm;
65 
66   PetscFunctionReturn(0);
67 }
68 EXTERN_C_END
69 
/* -------------------------------------------------------------------------- */
/*
   partitionLevel - Forms the Galerkin coarse operator A_{k-1} = P^T A_k P,
   repartitions it onto a (possibly smaller) set of active processes, and
   permutes the prolongator columns and coarse coordinates to match the new
   row ownership.

   Input Parameter:
   . a_Amat_fine - matrix on this fine (k) level
   . a_dim - spatial dimension of the coordinates (2 or 3)
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-1); replaced by
                 its column-permuted version
   . a_coarse_crds - coarse coordinates that need to be moved; reallocated to
                 hold a_dim reals per locally owned coarse row on exit
   . a_active_proc - number of active procs; updated for the coarse level
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_dim,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_crds,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                            )
{
  PetscErrorCode   ierr;
  Mat              Amat, Pnew, Pold = *a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,bs=1; /* bs ??? assumes scalar problems (see PCSetUp_GAMG check) */

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* RAP */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Amat ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart0, &Iend0 );    CHKERRQ(ierr); /* x2 size of 'a_coarse_crds' */
  ncrs0 = (Iend0 - Istart0)/bs; /* local coarse rows before repartitioning */

  /* Repartition Amat_{k} and move colums of P^{k}_{k-1} and coordinates accordingly */
  {
    PetscInt        neq,N,counts[npe]; /* VLA: one slot per process in wcomm */
    IS              isnewproc;
    PetscMPIInt     new_npe,targ_npe;

    ierr = MatGetSize( Amat, &neq, &N );CHKERRQ(ierr);
#define MIN_EQ_PROC 100
    nactive_procs = *a_active_proc;
    targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
    if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
    else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
    else {
      PetscMPIInt     factstart,fact;
      new_npe = -9999;
      factstart = nactive_procs;
      for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
        if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
          /* the final assignment wins: the largest divisor count of
             nactive_procs that still keeps > MIN_EQ_PROC equations/proc */
          new_npe = nactive_procs/fact;
        }
      }
      assert(new_npe != -9999);
    }
    *a_active_proc = new_npe; /* output for next time */
PetscPrintf(PETSC_COMM_WORLD,"\t\t%s num active procs = %d (N loc = %d)\n",__FUNCT__,new_npe,ncrs0);
    { /* partition: get 'isnewproc' */
      MatPartitioning  mpart;
      Mat              adj;
      const PetscInt  *is_idx;
      PetscInt         is_sz,kk,jj,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx;
      /* create sub communicator: one rank in every 'old_fact' ranks is active
         (membershipKey == 0); the rest participate only in collectives */
      MPI_Comm cm,new_comm;
      int membershipKey = mype % old_fact;
      ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
      ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
      ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

      /* MatPartitioningApply call MatConvert, which is collective */
      ierr = MatConvert(Amat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      if( membershipKey == 0 ) {
        /* hack to fix global data that pmetis.c uses in 'adj': compress the
           npe-sized range array down to the active-rank sub-communicator */
        for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
          adj->rmap->range[jj] = adj->rmap->range[kk];
        }
        ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
        /* collect IS info */
        ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
        ierr = PetscMalloc( is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        for(kk=0;kk<is_sz;kk++){
          /* spread partitioning across machine - probably the right thing to do but machine specific */
          isnewproc_idx[kk] = is_idx[kk] * new_fact;
        }
        ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      }
      else {
        /* idle ranks contribute an empty slice to the global IS below */
        isnewproc_idx = 0;
        is_sz = 0;
      }
      ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
      ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
      /* NOTE(review): return code of this ISCreateGeneral is not CHKERRQ-checked */
      ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc );
      if( membershipKey == 0 ) {
        ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
      }
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]; /* local coarse rows after repartitioning */
  }

  { /* Create a vector to contain the newly ordered element information */
    const PetscInt *idx;
    PetscInt        i,j,k;
    IS              isscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *coords = *a_coarse_crds;
    VecScatter      vecscat;

    /* NOTE(review): VecCreate return code is not CHKERRQ-checked */
    ierr = VecCreate( wcomm, &dest_crd );
    ierr = VecSetSizes( dest_crd, a_dim*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
    /*
     There are a_dim data items per coarse row: its coordinates (converted to
     PetscScalar so the VecScatter machinery can move them); one can think of
     the vectors as having block size a_dim, with one index in idx[] per row
     */
    {
      PetscInt tidx[ncrs0*a_dim]; /* VLA sized by local row count */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for (i=0,j=0; i<ncrs0; i++) {
        for (k=0; k<a_dim; k++) tidx[j++] = idx[i]*a_dim + k;
      }
      ierr = ISCreateGeneral( wcomm, a_dim*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    }
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq( PETSC_COMM_SELF, a_dim*ncrs0, &src_crd ); CHKERRQ(ierr);
    ierr = VecGetArray( src_crd, &array );                        CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs0; i++) array[i] = coords[i];
    ierr = VecRestoreArray( src_crd, &array );                     CHKERRQ(ierr);
    /*
     Scatter the element vertex information (still in the original vertex ordering)
     to the correct processor
     */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
    /*
     Put the newly arrived coordinate data into a fresh allocation of
     *a_coarse_crds (the old buffer is freed here)
     */
    ierr = PetscFree( *a_coarse_crds );    CHKERRQ(ierr);
    ierr = PetscMalloc( a_dim*ncrs_new*sizeof(PetscReal), a_coarse_crds );    CHKERRQ(ierr);
    coords = *a_coarse_crds; /* convient */
    ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs_new; i++) coords[i] = PetscRealPart(array[i]);
    ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
  }
  /*
   Invert for MatGetSubMatrix
   */
  ierr = ISInvertPermutation( isnum, ncrs_new, &new_indices ); CHKERRQ(ierr);
  ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
  ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
  /* A_crs output: extract the permuted coarse operator */
  ierr = MatGetSubMatrix( Amat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
  CHKERRQ(ierr);
  ierr = MatDestroy( &Amat ); CHKERRQ(ierr);
  Amat = *a_Amat_crs;
  /* prolongator: rows keep their ordering, columns follow the new coarse ordering */
  ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
  {
    IS findices;
    ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
    ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
    CHKERRQ(ierr);
    ierr = ISDestroy( &findices ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
  *a_P_inout = Pnew; /* output */

  ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
278 
#define GAMG_MAXLEVELS 20
#if defined(PETSC_USE_LOG)
/* per-level log stages, registered in PCSetUp_GAMG; sized by the level cap
   (was a hard-coded 20 that could silently diverge from GAMG_MAXLEVELS) */
PetscLogStage  gamg_stages[GAMG_MAXLEVELS];
#endif
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options: builds the level
                    hierarchy (prolongators + Galerkin coarse operators),
                    configures smoothers, then hands off to PCSetUp_MG.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
   Requires PCSetCoordinates() to have been called; consumes (frees) the
   cached coordinate data.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = pc->mat, Pmat = pc->pmat;
  PetscBool        isSeq, isMPI;
  PetscInt         fine_level, level, level1, M, N, bs, lidx;
  MPI_Comm         wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool isOK;

  /* Aarr/Parr are indexed fine-to-coarse (0 = finest) during construction */
  Mat Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];  PetscReal *coarse_crds = 0, *crds = pc_gamg->m_data;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc->setupcalled){
    /* no state data in GAMG to destroy (now) */
    ierr = PCReset_MG(pc); CHKERRQ(ierr);
  }
  if (!pc_gamg->m_data) SETERRQ(wcomm,PETSC_ERR_SUP,"PCSetUp_GAMG called before PCSetCoordinates");
  /* setup special features of PCGAMG */
  ierr = PetscTypeCompare((PetscObject)Amat, MATSEQAIJ, &isSeq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)Amat, MATMPIAIJ, &isMPI);CHKERRQ(ierr);
  if (isMPI) {
  } else if (isSeq) {
  } else SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "Matrix type '%s' cannot be used with GAMG. GAMG can only handle AIJ matrices.",((PetscObject)Amat)->type_name);

  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  if(bs!=1) SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "GAMG only supports scalar prblems bs = '%d'.",bs);

  /* Get A_i and R_i: coarsen until the grid is small or coarsening fails.
     gamg_setup_stages/SET1/SET2 and createProlongation are presumably
     declared in gamg.h -- TODO confirm */
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s level %d N=%d\n",0,__FUNCT__,0,N);
  for (level=0, Aarr[0] = Pmat, nactivepe = npe;
       level < GAMG_MAXLEVELS-1 && (level==0 || M>TOP_GRID_LIM); /* hard wired stopping logic */
       level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    ierr = createProlongation( Aarr[level], crds, pc_gamg->m_dim,
                               &Parr[level1], &coarse_crds, &isOK );
    CHKERRQ(ierr);
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    /* this level's coordinates are no longer needed (first pass frees m_data) */
    ierr = PetscFree( crds ); CHKERRQ( ierr );
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = partitionLevel( Aarr[level], pc_gamg->m_dim, &Parr[level1], &coarse_crds, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s done level %d N=%d, active pe = %d\n",0,__FUNCT__,level1,N,nactivepe);
    }
    else{
      /* coarsening failed/stalled -- stop with the levels built so far */
      break;
    }
    crds = coarse_crds;
  }
  ierr = PetscFree( coarse_crds ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebychev/Jacobi on every level but the coarsest */
  PetscReal emax = 2.0, emin;
  for (level=1; level<=fine_level; level++){
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother(pc,level,&smoother);CHKERRQ(ierr);
    ierr = KSPSetType(smoother,KSPCHEBYCHEV);CHKERRQ(ierr);
    emin = emax/10.0; /* fix!!! hard-wired eigenvalue guess, should be estimated */
    ierr = KSPGetPC(smoother,&subpc);CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCJACOBI);CHKERRQ(ierr);
    ierr = KSPChebychevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); /* need auto !!!!*/
  }
  ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr); /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }
  {
    char str[32];
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  /* create coarse level and the interpolation between the levels;
     note PCMG numbers levels coarse-to-fine, so lidx runs opposite to level */
  for (level=0,lidx=pc_gamg->m_Nlevels-1; level<fine_level; level++,lidx--){
    level1 = level + 1;
    ierr = PCMGSetInterpolation(pc,level1,Parr[lidx]);CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* disabled debug dump of level operators in MATLAB format */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Amat_%d.m",lidx);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[lidx], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
    }
    ierr = MatDestroy( &Parr[lidx] );  CHKERRQ(ierr);
    {
      KSP smoother;
      ierr = PCMGGetSmoother(pc,level,&smoother); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Aarr[lidx], Aarr[lidx], DIFFERENT_NONZERO_PATTERN );
      CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
    }
    ierr = MatDestroy( &Aarr[lidx] );  CHKERRQ(ierr);
    {
      char str[32];
      sprintf(str,"MG Level %d (%d)",level+1,lidx-1);
      PetscLogStageRegister(str, &gamg_stages[lidx-1]);
    }
  }
  { /* fine level (no P) */
    KSP smoother;
    ierr = PCMGGetSmoother(pc,fine_level,&smoother); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* setupcalled is set to 0 so that MG is setup from scratch */
  pc->setupcalled = 0;
  ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
430 
431 /* -------------------------------------------------------------------------- */
432 /*
433    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
434    that was created with PCCreate_GAMG().
435 
436    Input Parameter:
437 .  pc - the preconditioner context
438 
439    Application Interface Routine: PCDestroy()
440 */
441 #undef __FUNCT__
442 #define __FUNCT__ "PCDestroy_GAMG"
443 PetscErrorCode PCDestroy_GAMG(PC pc)
444 {
445   PetscErrorCode  ierr;
446   PC_MG           *mg = (PC_MG*)pc->data;
447   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
448 
449   PetscFunctionBegin;
450   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
451   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
452   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
453   PetscFunctionReturn(0);
454 }
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "PCSetFromOptions_GAMG"
458 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
459 {
460   /* PetscErrorCode  ierr; */
461   /* PC_MG           *mg = (PC_MG*)pc->data; */
462   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
463   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
464 
465   PetscFunctionBegin;
466   PetscFunctionReturn(0);
467 }
468 
469 /* -------------------------------------------------------------------------- */
470 /*
471  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
472 
473    Input Parameter:
474 .  pc - the preconditioner context
475 
476    Application Interface Routine: PCCreate()
477 
478   */
479  /* MC
480      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
481        fine grid discretization matrix and coordinates on the fine grid.
482 
483    Options Database Key:
484    Multigrid options(inherited)
485 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
486 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
487 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
488    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
489    GAMG options:
490 
491    Level: intermediate
492   Concepts: multigrid
493 
494 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
496            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
497            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
498            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
499 M */
500 
501 EXTERN_C_BEGIN
502 #undef __FUNCT__
503 #define __FUNCT__ "PCCreate_GAMG"
504 PetscErrorCode  PCCreate_GAMG(PC pc)
505 {
506   PetscErrorCode  ierr;
507   PC_GAMG         *pc_gamg;
508   PC_MG           *mg;
509   PetscClassId     cookie;
510 
511   PetscFunctionBegin;
512   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
513   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
514   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
515 
516   /* create a supporting struct and attach it to pc */
517   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
518   mg = (PC_MG*)pc->data;
519   mg->innerctx = pc_gamg;
520 
521   pc_gamg->m_Nlevels    = -1;
522 
523   /* overwrite the pointers of PCMG by the functions of PCGAMG */
524   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
525   pc->ops->setup          = PCSetUp_GAMG;
526   pc->ops->reset          = PCReset_GAMG;
527   pc->ops->destroy        = PCDestroy_GAMG;
528 
529   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
530 					    "PCSetCoordinates_C",
531 					    "PCSetCoordinates_GAMG",
532 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
533 
534   PetscClassIdRegister("GAMG Setup",&cookie);
535   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
536   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
537   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
538   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
539   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
540   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
541   PetscLogEventRegister("GAMG-find-prol", cookie, &gamg_setup_stages[FIND_V]);
542 
543   PetscFunctionReturn(0);
544 }
545 EXTERN_C_END
546