xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 3564f093082c4e39e657fbd7ab2e39df4067bb32)
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
6 PetscLogEvent gamg_setup_stages[NUM_SET];
7 
/* Private context for the GAMG preconditioner (stored in PC_MG's innerctx) */
typedef struct gamg_TAG {
  PetscInt       m_dim;     /* spatial dimension of the cached vertex data (set by PCSetCoordinates_GAMG) */
  PetscInt       m_Nlevels; /* number of multigrid levels built by PCSetUp_GAMG (-1 until setup) */
  PetscInt       m_data_sz; /* number of PetscReal entries in m_data (0 when m_data is null) */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
15 
16 #define TOP_GRID_LIM 100
17 
18 /* -----------------------------------------------------------------------------*/
19 #undef __FUNCT__
20 #define __FUNCT__ "PCReset_GAMG"
21 PetscErrorCode PCReset_GAMG(PC pc)
22 {
23   PetscErrorCode  ierr;
24   PC_MG           *mg = (PC_MG*)pc->data;
25   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
26 
27   PetscFunctionBegin;
28   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
29   pc_gamg->m_data = 0; pc_gamg->m_data_sz = 0;
30   PetscFunctionReturn(0);
31 }
32 
33 /* -------------------------------------------------------------------------- */
34 /*
35    PCSetCoordinates_GAMG
36 
37    Input Parameter:
38    .  pc - the preconditioner context
39 */
40 EXTERN_C_BEGIN
41 #undef __FUNCT__
42 #define __FUNCT__ "PCSetCoordinates_GAMG"
43 PetscErrorCode PCSetCoordinates_GAMG( PC a_pc, PetscInt a_ndm, PetscReal *a_coords )
44 {
45   PC_MG          *mg = (PC_MG*)a_pc->data;
46   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
47   PetscErrorCode ierr;
48   PetscInt       arrsz,bs,my0,kk,ii,jj,nloc,sz,M;
49   Mat            Amat = a_pc->pmat;
50   PetscBool      useSA = PETSC_FALSE, flag;
51   char           str[16];
52 
53   PetscFunctionBegin;
54   ierr  = MatGetBlockSize( Amat, &bs );               CHKERRQ( ierr );
55   ierr  = MatGetOwnershipRange( Amat, &my0, &kk ); CHKERRQ(ierr);
56   nloc = (kk-my0)/bs; M = kk - my0;
57   if((kk-my0)%bs!=0) SETERRQ1(((PetscObject)Amat)->comm,PETSC_ERR_ARG_WRONG, "Bad local size %d.",nloc);
58 
59   ierr  = PetscOptionsGetString(PETSC_NULL,"-pc_gamg_type",str,16,&flag);    CHKERRQ( ierr );
60   useSA = (PetscBool)(flag && strcmp(str,"sa") == 0);
61 
62   if(a_coords == 0) useSA = PETSC_TRUE; /* use SA if no data */
63   if( !useSA ) sz = a_ndm; /* coordinates */
64   else{ /* SA: null space vectors */
65     if(a_coords != 0) sz = (a_ndm==2 ? 3*bs : 6*bs); /* elasticity */
66     else sz = bs*bs; /* no data, force SA with constant null space vectors */
67     if(a_coords != 0 && bs==1 ) sz = 1; /* scalar problem with coords and SA (not needed) */
68   }
69   arrsz = nloc*sz;
70 
71   /* create data - syntactic sugar that should be refactored at some point */
72   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
73     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
74     ierr = PetscMalloc((arrsz+1)*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
75   }
76   for(kk=0;kk<arrsz;kk++)pc_gamg->m_data[kk] = -999.;
77   pc_gamg->m_data[arrsz] = -99.;
78   /* copy data in - column oriented */
79   if( useSA ) {
80     for(kk=0;kk<nloc;kk++){
81       PetscReal *data = &pc_gamg->m_data[kk*bs];
82       if( sz==1 ) *data = 1.0;
83       else {
84         for(ii=0;ii<bs;ii++)
85 	  for(jj=0;jj<bs;jj++)
86 	    if(ii==jj)data[ii*M + jj] = 1.0; /* translational modes */
87 	    else data[ii*M + jj] = 0.0;
88         if( a_coords != 0 ) {
89           if( a_ndm == 2 ){ /* rotational modes */
90             data += 2*M;
91             data[0] = -a_coords[2*kk+1];
92             data[1] =  a_coords[2*kk];
93           }
94           else {
95             data += 3*M;
96             data[0] = 0.0;               data[M+0] =  a_coords[3*kk+2]; data[2*M+0] = -a_coords[3*kk+1];
97             data[1] = -a_coords[3*kk+2]; data[M+1] = 0.0;               data[2*M+1] =  a_coords[3*kk];
98             data[2] =  a_coords[3*kk+1]; data[M+2] = -a_coords[3*kk];   data[2*M+2] = 0.0;
99           }
100         }
101       }
102     }
103   }
104   else {
105     for( kk = 0 ; kk < nloc ; kk++ ){
106       for( ii = 0 ; ii < a_ndm ; ii++ ) {
107         pc_gamg->m_data[ii*nloc + kk] =  a_coords[kk*a_ndm + ii];
108       }
109     }
110   }
111 
112   assert(pc_gamg->m_data[arrsz] == -99.);
113   for(kk=0;kk<arrsz;kk++){
114     assert(pc_gamg->m_data[kk] != -999.); // debug
115   }
116 
117   pc_gamg->m_data_sz = arrsz;
118   pc_gamg->m_dim = a_ndm;
119 
120   PetscFunctionReturn(0);
121 }
122 EXTERN_C_END
123 
124 /* -------------------------------------------------------------------------- */
125 /*
126    partitionLevel
127 
128    Input Parameter:
129    . a_Amat_fine - matrix on this fine (k) level
130    . a_data_sz - size of data to move (coarse grid)
131    In/Output Parameter:
132    . a_P_inout - prolongation operator to the next level (k-1)
133    . a_coarse_data - data that need to be moved
134    . a_active_proc - number of active procs
135    Output Parameter:
136    . a_Amat_crs - coarse matrix that is created (k-1)
137 */
138 
#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
/* Form the coarse operator A_{k-1} = P^T A_k P, then (optionally shrinking the
   number of active processes) repartition it and permute P and the coarse vertex
   data to match.  Collective on the communicator of a_Amat_fine. */
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_data_sz,
			       PetscInt a_cbs,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_data,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                               )
{
  PetscErrorCode   ierr;
  Mat              Cmat,Pnew,Pold=*a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,fbs;
  PetscInt         neq,NN;
  PetscMPIInt      new_npe,targ_npe;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
  ierr = MatGetBlockSize( a_Amat_fine, &fbs ); CHKERRQ(ierr);
  /* RAP: Cmat = Pold^T * A_fine * Pold */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
  ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);

  ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
  /* ncrs0 = number of local coarse nodes (blocks) before repartitioning */
  ncrs0 = (Iend0-Istart0)/a_cbs; assert((Iend0-Istart0)%a_cbs == 0);

  /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
  ierr = MatGetSize( Cmat, &neq, &NN );CHKERRQ(ierr);
#define MIN_EQ_PROC 100
  nactive_procs = *a_active_proc;
  targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
  if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
  else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
  else {
    /* pick a divisor of nactive_procs so the fewer active procs still
       satisfy the MIN_EQ_PROC load target (last satisfying divisor wins) */
    PetscMPIInt factstart,fact;
    new_npe = -9999;
    factstart = nactive_procs;
    for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
      if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
        new_npe = nactive_procs/fact;
      }
    }
    if(new_npe == -9999) new_npe = nactive_procs;
  }

  if( PETSC_TRUE ) { /* partition: get 'isnewproc' */
    MatPartitioning  mpart;
    Mat              adj;
    const PetscInt  *is_idx;
    /* NOTE(review): counts[npe] is a VLA sized by the communicator -- stack
       overflow risk at large process counts; consider PetscMalloc */
    PetscInt         is_sz,kk,jj,ii,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx,counts[npe];
    /* create sub communicator  */
    MPI_Comm         cm,new_comm;
    PetscInt         membershipKey = mype % old_fact;
    IS               isnewproc;

    *a_active_proc = new_npe; /* output for next time */
    /* sub-communicator of ranks with membershipKey==0: the "active" procs */
    ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
    ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
    ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

    /* MatPartitioningApply call MatConvert, which is collective */
    if( a_cbs==1) {
      ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
    }
    else {
      /* make a scalar (one row per block) matrix to partition */
      Mat tMat;
      PetscInt Ii,ncols; const PetscScalar *vals; const PetscInt *idx;
      /* NOTE(review): return code of MatCreateMPIAIJ is not checked here */
      ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
                              PETSC_DETERMINE, PETSC_DETERMINE,
                              25, PETSC_NULL, 10, PETSC_NULL,
                              &tMat );

      for ( Ii = Istart0; Ii < Iend0; Ii++ ) {
        PetscInt dest_row = Ii/a_cbs;
        ierr = MatGetRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        for( jj = 0 ; jj < ncols ; jj++ ){
          PetscInt dest_col = idx[jj]/a_cbs;
          PetscScalar v = 1.0;
          ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
        }
        ierr = MatRestoreRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

      ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);

      ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
    }
    if( membershipKey == 0 ) {
      /* only the active procs run the partitioner */
      if(ncrs0==0)SETERRQ(wcomm,PETSC_ERR_LIB,"zero local nodes -- increase min");
      /* hack to fix global data that pmetis.c uses in 'adj' */
      for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
        adj->rmap->range[jj] = adj->rmap->range[kk];
      }
      ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
      ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
      ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
      ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
      /* collect IS info */
      ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
      ierr = PetscMalloc( a_cbs*is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
      ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
      /* spread partitioning across machine - probably the right thing to do but machine spec. */
      for(kk=0,jj=0;kk<is_sz;kk++){
        for(ii=0 ; ii<a_cbs ; ii++, jj++ ) { /* expand for equation level by 'bs' */
          isnewproc_idx[jj] = is_idx[kk] * new_fact;
        }
      }
      ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
      ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      is_sz *= a_cbs;
    }
    else {
      /* inactive procs contribute an empty partitioning */
      isnewproc_idx = 0;
      is_sz = 0;
    }
    ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
    ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
    /* NOTE(review): return code of ISCreateGeneral is not checked here */
    ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc );
    if( membershipKey == 0 ) {
      ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]/a_cbs;

    { /* Create a vector to contain the newly ordered element information */
      const PetscInt *idx;
      PetscInt        ii,jj,kk;
      IS              isscat;
      PetscScalar    *array;
      Vec             src_crd, dest_crd;
      PetscReal      *data = *a_coarse_data;
      VecScatter      vecscat;
      /* NOTE(review): VLA sized by local node count -- stack overflow risk for
         large local problems; consider PetscMalloc */
      PetscInt        tidx[ncrs0*a_data_sz];

      /* NOTE(review): return code of VecCreate is not checked here */
      ierr = VecCreate( wcomm, &dest_crd );
      ierr = VecSetSizes( dest_crd, a_data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
      ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
      /*
       There are 'a_data_sz' data items per node, (one can think of the vectors of having a
       block size of 'a_data_sz').  Note, ISs are expanded into equation space by 'a_cbs'.
       */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for(ii=0,jj=0; ii<ncrs0 ; ii++) {
        PetscInt id = idx[ii*a_cbs]/a_cbs;
        for( kk=0; kk<a_data_sz ; kk++, jj++) tidx[jj] = id*a_data_sz + kk;
      }
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
      ierr = ISCreateGeneral( wcomm, a_data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      /*
       Create a vector to contain the original vertex information for each element
       */
      ierr = VecCreateSeq( PETSC_COMM_SELF, a_data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
      for(ii=0, jj=0; ii<ncrs0 ; ii++) {
        for( kk=0; kk < a_data_sz ; kk++, jj++) {
          ierr = VecSetValues( src_crd, 1, &jj, &data[kk*ncrs0 + ii], INSERT_VALUES );  CHKERRQ(ierr);
        }
      }
      ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
      /*
       Scatter the element vertex information (still in the original vertex ordering)
       to the correct processor
       */
      ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
      CHKERRQ(ierr);
      ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
      ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
      /*
       Put the element vertex data into a new allocation of the gdata->ele
       */
      ierr = PetscFree( *a_coarse_data );    CHKERRQ(ierr);
      ierr = PetscMalloc( a_data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data );    CHKERRQ(ierr);
      /* NOTE(review): return code of VecGetLocalSize is not checked here */
      VecGetLocalSize( dest_crd, &ii ); assert(ii==a_data_sz*ncrs_new);

      ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
      data = *a_coarse_data;
      for(ii=0, jj=0; ii<ncrs_new ; ii++) {
        for( kk=0; kk < a_data_sz ; kk++, jj++) {
          data[kk*ncrs_new + ii] = PetscRealPart(array[jj]);
        }
      }
      ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
      ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
    }
    /*
     Invert for MatGetSubMatrix
     */
    ierr = ISInvertPermutation( isnum, ncrs_new*a_cbs, &new_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
    ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
    /* A_crs output */
    ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
    CHKERRQ(ierr);

    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
    Cmat = *a_Amat_crs; /* output */
    ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);

    /* prolongator: permute columns of P to the new coarse ordering */
    ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
    {
      IS findices;
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
      ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
      CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
    }
    ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
    *a_P_inout = Pnew; /* output */
    ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
  }
  else {
    *a_Amat_crs = Cmat; /* output */
  }

  PetscFunctionReturn(0);
}
379 
380 #define GAMG_MAXLEVELS 30
381 #if defined(PETSC_USE_LOG)
382 PetscLogStage  gamg_stages[20];
383 #endif
384 /* -------------------------------------------------------------------------- */
385 /*
386    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
387                     by setting data structures and options.
388 
389    Input Parameter:
390 .  pc - the preconditioner context
391 
392    Application Interface Routine: PCSetUp()
393 
394    Notes:
395    The interface routine PCSetUp() is not usually called directly by
396    the user, but instead is called by PCApply() if necessary.
397 */
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC a_pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)a_pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = a_pc->mat, Pmat = a_pc->pmat;
  PetscInt         fine_level, level, level1, M, N, bs, nloc, lidx, data_sz, Istart, Iend;
  MPI_Comm         wcomm = ((PetscObject)a_pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool        isOK;
  Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS]; /* operators/prolongators; index 0 = finest */
  PetscReal       *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];

  PetscFunctionBegin;
  if( a_pc->setupcalled ) {
    /* no state data in GAMG to destroy */
    ierr = PCReset_MG( a_pc ); CHKERRQ(ierr);
  }
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);

  /* create default vertex data if the user did not provide any */
  if( pc_gamg->m_data == 0 && nloc > 0 ) {
    ierr  = PCSetCoordinates_GAMG( a_pc, -1, 0 );    CHKERRQ( ierr );
  }
  data = pc_gamg->m_data;
  /* NOTE(review): divides by nloc -- faults when a rank has an empty local
     partition (nloc==0); confirm callers guarantee nloc>0 */
  data_sz = pc_gamg->m_data_sz/nloc; assert(data!=0 || data_sz==0);

  /* Get A_i and R_i: coarsen level-by-level until grid is small or procs run out */
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, data size=%d m_data_sz=%d nloc=%d, np=%d\n",0,__FUNCT__,0,N,data_sz,pc_gamg->m_data_sz,nloc,npe);
  for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < GAMG_MAXLEVELS-1 && (level==0 || M/bs>TOP_GRID_LIM) && (npe==1 || nactivepe>1);
        level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    ierr = createProlongation( Aarr[level], data, pc_gamg->m_dim, &bs, &data_sz,
                               &Parr[level1], &coarse_data, &isOK, &emaxs[level] );
    CHKERRQ(ierr);
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    /* vertex data for this level is consumed; coarse_data takes over below */
    ierr = PetscFree( data ); CHKERRQ( ierr );
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = partitionLevel( Aarr[level], data_sz, bs,
                             &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
      PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, %d data size, %d active pes\n",0,__FUNCT__,level1,N,data_sz,nactivepe);
      /* coarse grids with SA can have zero row/cols from singleton aggregates */
      /* aggregation method can probably guarantee this does not happen! - be safe for now */
      if( PETSC_TRUE ){
        Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloc,id;
        ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr);
        nloc = Iend-Istart;
        ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr);
        ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr);
        ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr);
        for(kk=0;kk<nloc;kk++){
          if(data_arr[kk]==0.0) {
            /* patch zero diagonal with 1.0 so smoothers don't divide by zero */
            id = kk + Istart; v = 1.0;
            ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES);
            CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_SELF,"[%d]%s warning: added diag to zero (%d)\n",mype,__FUNCT__,id);
          }
        }
        ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr);
        ierr = VecDestroy( &diag );                CHKERRQ(ierr);
        ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
    }
    else{
      break;
    }
    data = coarse_data;
  }
  ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(a_pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebychev with estimated eigenvalue bounds */
  for ( lidx=1, level = pc_gamg->m_Nlevels-2;
        lidx <= fine_level;
        lidx++, level--) {
    PetscReal emax, emin;
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
    if( emaxs[level] > 0.0 ) emax = emaxs[level];
    else{ /* eigen estimate 'emax' via a few CG iterations on a random RHS */
      KSP eksp; Mat Lmat = Aarr[level];
      Vec bb, xx; PC pc;

      ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
      ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
      {
	PetscRandom    rctx;
	ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
	ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
	ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
	ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
      }
      ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
      ierr = KSPSetType( eksp, KSPCG );                      CHKERRQ(ierr);
      ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
      ierr = KSPSetOperators( eksp, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN ); CHKERRQ( ierr );
      ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
      ierr = PCSetType( pc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr); /* should be same as above */
      ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
      CHKERRQ(ierr);
      //ierr = KSPSetConvergenceTest( eksp, KSPSkipConverged, 0, 0 ); CHKERRQ(ierr);
      ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);

      ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
      ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
      ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
      ierr = VecDestroy( &xx );       CHKERRQ(ierr);
      ierr = VecDestroy( &bb );       CHKERRQ(ierr);
      ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
      PetscPrintf(PETSC_COMM_WORLD,"%s max eigen = %e min = %e\n",__FUNCT__,emax,emin);
    }
    {
      PetscInt N1, N0, tt;
      ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
      emin = .5*emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
      emax *= 1.05;
PetscPrintf(PETSC_COMM_WORLD,"\t%s eigen reduction factor = %g\n",__FUNCT__,emax/emin);
    }

    /* NOTE(review): return code of this KSPSetOperators is not checked */
    ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], DIFFERENT_NONZERO_PATTERN );
    ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
    ierr = KSPSetTolerances(smoother,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,2); CHKERRQ(ierr);
    ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
    ierr = PCSetType( subpc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  {
    KSP smoother; /* coarse grid */
    Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
    ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  ierr = PCSetFromOptions_MG(a_pc); CHKERRQ(ierr);
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( a_pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }

  /* set interpolation between the levels, create timer stages, clean up */
  if( PETSC_FALSE ) {
    char str[32];
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  for (lidx=0,level=pc_gamg->m_Nlevels-1;
       lidx<fine_level;
       lidx++, level--){
    ierr = PCMGSetInterpolation( a_pc, lidx+1, Parr[level] );CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* dead debug branch: dump P and A in MATLAB format */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Pmat_%d.m",level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Parr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
      sprintf(fname,"Amat_%d.m",level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
    }
    ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
    ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
    if( PETSC_FALSE ) {
      char str[32];
      sprintf(str,"MG Level %d (%d)",lidx+1,level-1);
      PetscLogStageRegister(str, &gamg_stages[level-1]);
    }
  }

  /* setupcalled is set to 0 so that MG is setup from scratch */
  a_pc->setupcalled = 0;
  ierr = PCSetUp_MG(a_pc);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
603 
604 /* ------------------------------------------------------------------------- */
605 /*
606    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
607    that was created with PCCreate_GAMG().
608 
609    Input Parameter:
610 .  pc - the preconditioner context
611 
612    Application Interface Routine: PCDestroy()
613 */
614 #undef __FUNCT__
615 #define __FUNCT__ "PCDestroy_GAMG"
616 PetscErrorCode PCDestroy_GAMG(PC pc)
617 {
618   PetscErrorCode  ierr;
619   PC_MG           *mg = (PC_MG*)pc->data;
620   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
621 
622   PetscFunctionBegin;
623   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
624   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
625   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
626   PetscFunctionReturn(0);
627 }
628 
629 #undef __FUNCT__
630 #define __FUNCT__ "PCSetFromOptions_GAMG"
631 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
632 {
633   /* PetscErrorCode  ierr; */
634   /* PC_MG           *mg = (PC_MG*)pc->data; */
635   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
636   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
637 
638   PetscFunctionBegin;
639   PetscFunctionReturn(0);
640 }
641 
642 /* -------------------------------------------------------------------------- */
643 /*
644  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
645 
646    Input Parameter:
647 .  pc - the preconditioner context
648 
649    Application Interface Routine: PCCreate()
650 
651   */
652  /* MC
653      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
654        fine grid discretization matrix and coordinates on the fine grid.
655 
656    Options Database Key:
657    Multigrid options(inherited)
658 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
659 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
660 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
661    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
662    GAMG options:
663 
664    Level: intermediate
665   Concepts: multigrid
666 
667 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
669            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
670            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
671            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
672 M */
673 
674 EXTERN_C_BEGIN
675 #undef __FUNCT__
676 #define __FUNCT__ "PCCreate_GAMG"
677 PetscErrorCode  PCCreate_GAMG(PC pc)
678 {
679   PetscErrorCode  ierr;
680   PC_GAMG         *pc_gamg;
681   PC_MG           *mg;
682   PetscClassId     cookie;
683 
684   PetscFunctionBegin;
685   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
686   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
687   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
688 
689   /* create a supporting struct and attach it to pc */
690   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
691   mg = (PC_MG*)pc->data;
692   mg->innerctx = pc_gamg;
693 
694   pc_gamg->m_Nlevels    = -1;
695 
696   /* overwrite the pointers of PCMG by the functions of PCGAMG */
697   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
698   pc->ops->setup          = PCSetUp_GAMG;
699   pc->ops->reset          = PCReset_GAMG;
700   pc->ops->destroy        = PCDestroy_GAMG;
701 
702   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
703 					    "PCSetCoordinates_C",
704 					    "PCSetCoordinates_GAMG",
705 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
706 
707   PetscClassIdRegister("GAMG Setup",&cookie);
708   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
709   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
710   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
711   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
712   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
713   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
714   PetscLogEventRegister("GAMG-sa-init", cookie, &gamg_setup_stages[SET7]);
715   PetscLogEventRegister("GAMG-sa-frmProl0", cookie, &gamg_setup_stages[SET8]);
716   PetscLogEventRegister("GAMG-sa-smooth", cookie, &gamg_setup_stages[SET9]);
717 
718   PetscFunctionReturn(0);
719 }
720 EXTERN_C_END
721