xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision 94ef8dde638caef1d0cd84a7dc8a2db65fcda8b6)
1 
2 #include <petscdmda.h>   /*I "petscdmda.h" I*/
3 #include <petsc/private/pcmgimpl.h>   /*I "petscksp.h" I*/
4 #include <petscctable.h>
5 
/* Private context for the PCEXOTIC preconditioner (a two-level PCMG with a special coarse space) */
typedef struct {
  PCExoticType type;         /* coarse space flavor: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* inner solver used on Aii when directSolve is false */
} PC_Exotic;
12 
13 const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};
14 
15 
16 /*
17       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
18 
19 */
/*
   DMDAGetWireBasketInterpolation - Builds the wirebasket coarse-space interpolation P for a
   3d single-dof DMDA. Each process contributes 26 coarse basis columns (vertices, edges and
   faces of its subdomain, see the gl[] fill below); their surface values are set to 1 and
   extended into the subdomain interior by solving with the interior block Aii (direct LU or
   exotic->ksp, depending on exotic->directSolve).

   Input:  da - the DMDA, exotic - the PC context, Aglobal - fine grid operator,
           reuse - MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX for *P
   Output: P - the parallel interpolation matrix
*/
PetscErrorCode DMDAGetWireBasketInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS                     isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* istart/jstart/kstart are reused as flags: -1 when this process has a ghost layer on the
     low side of that axis, 0 otherwise. Thus (m-istart) etc. below is the extent of the owned
     region plus that one ghost layer. */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  /* N = all local points, Nint = strictly interior points, Nsurf = points on the subdomain surface */
  N     = (m - istart)*(n - jstart)*(p - kstart);
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
  Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
  Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
  Nsurf = Nface + Nwire;
  /* 26 coarse basis columns per subdomain: 8 vertices + 12 edges + 6 faces (see gl[26] below) */
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,NULL,&Xint);CHKERRQ(ierr);
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,NULL,&Xsurf);CHKERRQ(ierr);
  ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Fill Xsurf, stored column-major (column q entry is xsurf[row + q*Nsurf]): sweep the surface
     points in their natural ordering (cnt is the surface row) and put a 1 in the column of the
     vertex/edge/face the point belongs to. The column order matches the gl[] fill further below. */
  cnt = 0;

  xsurf[cnt++] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2*Nsurf] = 1;

  for (j=1; j<n-1-jstart; j++) {
    xsurf[cnt++ + 3*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
    xsurf[cnt++ + 5*Nsurf] = 1;
  }

  xsurf[cnt++ + 6*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
  xsurf[cnt++ + 8*Nsurf] = 1;

  for (k=1; k<p-1-kstart; k++) {
    xsurf[cnt++ + 9*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
    xsurf[cnt++ + 11*Nsurf] = 1;

    for (j=1; j<n-1-jstart; j++) {
      xsurf[cnt++ + 12*Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 13*Nsurf] = 1;
    }

    xsurf[cnt++ + 14*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
    xsurf[cnt++ + 16*Nsurf] = 1;
  }

  xsurf[cnt++ + 17*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
  xsurf[cnt++ + 19*Nsurf] = 1;

  for (j=1;j<n-1-jstart;j++) {
    xsurf[cnt++ + 20*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
    xsurf[cnt++ + 22*Nsurf] = 1;
  }

  xsurf[cnt++ + 23*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
  xsurf[cnt++ + 25*Nsurf] = 1;


  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
#endif
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
/* Endpoint(a,start,b): true when index a lies on the low or high boundary of the local extent */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  /* sanity checks: the three index sets must exactly partition the local box */
  if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  /* translate the local (ghosted) DMDA indices to global PETSc indices, in place */
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  /* NOTE(review): unlike DMDAGetFaceInterpolation below, 'is' is NOT sorted with ISSort()
     before the submatrix extraction here -- confirm whether that is intentional */
  ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  /* split A into interior/interior, interior/surface and surface/interior blocks */
  ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint
  */
  /* Xint solves Aii*Xint = -Ais*Xsurf, i.e. discrete-harmonic extension of the surface values */
  ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
  if (exotic->directSolve) {
    /* exact LU factorization of Aii, then a multi-right-hand-side solve */
    ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(&row);CHKERRQ(ierr);
    ierr = ISDestroy(&col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(&iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative solve, one KSP solve per coarse basis column; x and b are wrapper vectors
       placed over the dense column storage of Xint and Xint_tmp */
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
    for (i=0; i<26; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) tmp += xint[i+j*Nint];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;

  gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
  { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
  gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
  {
    gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
  }
  gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
  gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc1(26*mp*np*pp,&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  /* keys are shifted by +1 before insertion (presumably because the table cannot store a zero
     key -- note the matching gl[i]-- after lookup below) */
  ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
  ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
  for (i=0; i<26*mp*np*pp; i++) {
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<26; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;
  }
  ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint+Nsurf,NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense Xint/Xsurf arrays are column-major, hence column-oriented insertion */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    /* NOTE(review): VecDestroy(x) looks like it should be VecDestroy(&x) (likewise y); harmless
       today only because PETSC_USE_DEBUG_foo is never defined */
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(&Aii);CHKERRQ(ierr);
  ierr = MatDestroy(&Ais);CHKERRQ(ierr);
  ierr = MatDestroy(&Asi);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&isint);CHKERRQ(ierr);
  ierr = ISDestroy(&issurf);CHKERRQ(ierr);
  ierr = MatDestroy(&Xint);CHKERRQ(ierr);
  ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
323 
324 /*
325       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
326 
327 */
/*
   DMDAGetFaceInterpolation - Builds the face-based coarse-space interpolation P for a
   3d single-dof DMDA. Each process contributes 6 coarse basis columns (one per subdomain
   face; vertices and edges are ignored); the surface values are set to 1 on each face and
   extended into the subdomain interior by solving with the interior block Aii (direct LU or
   exotic->ksp, depending on exotic->directSolve).

   Input:  da - the DMDA, exotic - the PC context, Aglobal - fine grid operator,
           reuse - MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX for *P
   Output: P - the parallel interpolation matrix
*/
PetscErrorCode DMDAGetFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS                     isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* istart/jstart/kstart are reused as flags: -1 when this process has a ghost layer on the
     low side of that axis, 0 otherwise. Thus (m-istart) etc. below is the extent of the owned
     region plus that one ghost layer. */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  /* N = all local points, Nint = strictly interior points, Nsurf = points on the subdomain surface */
  N     = (m - istart)*(n - jstart)*(p - kstart);
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
  Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
  Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
  Nsurf = Nface + Nwire;
  /* only 6 coarse basis columns here: one per face of the subdomain */
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint);CHKERRQ(ierr);
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf);CHKERRQ(ierr);
  ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Fill Xsurf, stored column-major (column q entry is xsurf[row + q*Nsurf]): sweep the surface
     points in their natural ordering (cnt is the surface row) and put a 1 in the column of the
     face the point lies on; edge and vertex rows get no entry. Column order matches gl[] below. */
  cnt = 0;
  for (j=1; j<n-1-jstart; j++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
  }

  for (k=1; k<p-1-kstart; k++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
    for (j=1; j<n-1-jstart; j++) {
      xsurf[cnt++ + 2*Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 3*Nsurf] = 1;
    }
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
  }
  for (j=1;j<n-1-jstart;j++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
  }

#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
#endif
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
/* Endpoint(a,start,b): true when index a lies on the low or high boundary of the local extent
   (identical redefinition of the macro defined in DMDAGetWireBasketInterpolation) */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  /* sanity checks: the three index sets must exactly partition the local box */
  if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  /* translate the local (ghosted) DMDA indices to global PETSc indices, in place */
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  /* sort the index set before the parallel submatrix extraction (the wirebasket variant above
     does not do this) */
  ierr = ISSort(is);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  /* split A into interior/interior, interior/surface and surface/interior blocks */
  ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint
  */
  /* Xint solves Aii*Xint = -Ais*Xsurf, i.e. discrete-harmonic extension of the surface values */
  ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);

  if (exotic->directSolve) {
    /* exact LU factorization of Aii, then a multi-right-hand-side solve */
    ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(&row);CHKERRQ(ierr);
    ierr = ISDestroy(&col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(&iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative solve, one KSP solve per coarse basis column; x and b are wrapper vectors
       placed over the dense column storage of Xint and Xint_tmp */
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
    for (i=0; i<6; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) tmp += xint[i+j*Nint];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /*         total faces    */
  Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  /* gl[] holds one representative local (ghosted) point per face, in the same column order as Xsurf */
  cnt = 0;
  { gl[cnt++] = mwidth+1;}
  {
    { gl[cnt++] = mwidth*nwidth+1;}
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
  }
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc1(6*mp*np*pp,&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  /* keys are shifted by +1 before insertion (presumably because the table cannot store a zero
     key -- note the matching gl[i]-- after lookup below) */
  ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
  ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
  for (i=0; i<6*mp*np*pp; i++) {
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<6; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;
  }
  ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense Xint/Xsurf arrays are column-major, hence column-oriented insertion */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);


#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    /* NOTE(review): VecDestroy(x) looks like it should be VecDestroy(&x) (likewise y); harmless
       today only because PETSC_USE_DEBUG_foo is never defined */
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(&Aii);CHKERRQ(ierr);
  ierr = MatDestroy(&Ais);CHKERRQ(ierr);
  ierr = MatDestroy(&Asi);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&isint);CHKERRQ(ierr);
  ierr = ISDestroy(&issurf);CHKERRQ(ierr);
  ierr = MatDestroy(&Xint);CHKERRQ(ierr);
  ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
600 
601 
602 /*@
603    PCExoticSetType - Sets the type of coarse grid interpolation to use
604 
605    Logically Collective on PC
606 
607    Input Parameters:
608 +  pc - the preconditioner context
609 -  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)
610 
611    Notes: The face based interpolation has 1 degree of freedom per face and ignores the
612      edge and vertex values completely in the coarse problem. For any seven point
613      stencil the interpolation of a constant on all faces into the interior is that constant.
614 
615      The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
616      per face. A constant on the subdomain boundary is interpolated as that constant
617      in the interior of the domain.
618 
619      The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
620      if A is nonsingular A_c is also nonsingular.
621 
622      Both interpolations are suitable for only scalar problems.
623 
624    Level: intermediate
625 
626 
627 .seealso: PCEXOTIC, PCExoticType()
628 @*/
/* Public entry point; manpage is in the comment block above. Validates its arguments and then
   dispatches to the type-specific implementation (PCExoticSetType_Exotic) via the method table;
   PetscTryMethod is a no-op if the PC is not of type PCEXOTIC. */
PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* type must be the same on all ranks of the PC's communicator */
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
639 
640 static PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
641 {
642   PC_MG     *mg  = (PC_MG*)pc->data;
643   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
644 
645   PetscFunctionBegin;
646   ctx->type = type;
647   PetscFunctionReturn(0);
648 }
649 
650 PetscErrorCode PCSetUp_Exotic(PC pc)
651 {
652   PetscErrorCode ierr;
653   Mat            A;
654   PC_MG          *mg   = (PC_MG*)pc->data;
655   PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
656   MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
657 
658   PetscFunctionBegin;
659   if (!pc->dm) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
660   ierr = PCGetOperators(pc,NULL,&A);CHKERRQ(ierr);
661   if (ex->type == PC_EXOTIC_FACE) {
662     ierr = DMDAGetFaceInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
663   } else if (ex->type == PC_EXOTIC_WIREBASKET) {
664     ierr = DMDAGetWireBasketInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
665   } else SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
666   ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr);
667   /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
668   ierr = PCSetDM(pc,NULL);CHKERRQ(ierr);
669   ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
670   PetscFunctionReturn(0);
671 }
672 
673 PetscErrorCode PCDestroy_Exotic(PC pc)
674 {
675   PetscErrorCode ierr;
676   PC_MG          *mg  = (PC_MG*)pc->data;
677   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
678 
679   PetscFunctionBegin;
680   ierr = MatDestroy(&ctx->P);CHKERRQ(ierr);
681   ierr = KSPDestroy(&ctx->ksp);CHKERRQ(ierr);
682   ierr = PetscFree(ctx);CHKERRQ(ierr);
683   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
684   PetscFunctionReturn(0);
685 }
686 
687 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
688 {
689   PC_MG          *mg = (PC_MG*)pc->data;
690   PetscErrorCode ierr;
691   PetscBool      iascii;
692   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
693 
694   PetscFunctionBegin;
695   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
696   if (iascii) {
697     ierr = PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
698     if (ctx->directSolve) {
699       ierr = PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n");CHKERRQ(ierr);
700     } else {
701       PetscViewer sviewer;
702       PetscMPIInt rank;
703 
704       ierr = PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
705       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
706       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
707       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
708       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
709       if (!rank) {
710         ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
711       }
712       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
713       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
714       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
715     }
716   }
717   ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 PetscErrorCode PCSetFromOptions_Exotic(PetscOptionItems *PetscOptionsObject,PC pc)
722 {
723   PetscErrorCode ierr;
724   PetscBool      flg;
725   PC_MG          *mg = (PC_MG*)pc->data;
726   PCExoticType   mgctype;
727   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
728 
729   PetscFunctionBegin;
730   ierr = PetscOptionsHead(PetscOptionsObject,"Exotic coarse space options");CHKERRQ(ierr);
731   ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
732   if (flg) {
733     ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
734   }
735   ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,NULL);CHKERRQ(ierr);
736   if (!ctx->directSolve) {
737     if (!ctx->ksp) {
738       const char *prefix;
739       ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
740       ierr = KSPSetErrorIfNotConverged(ctx->ksp,pc->erroriffailure);CHKERRQ(ierr);
741       ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
742       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ctx->ksp);CHKERRQ(ierr);
743       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
744       ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
745       ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
746     }
747     ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
748   }
749   ierr = PetscOptionsTail();CHKERRQ(ierr);
750   PetscFunctionReturn(0);
751 }
752 
753 
754 /*MC
755      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
756 
757      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
758    grid spaces.
759 
760    Notes: By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
761 
762    References:
763 +  1. - These coarse grid spaces originate in the work of Bramble, Pasciak  and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring. IV", Mathematics of Computation, volume 53, 1989.
765 .  2. - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
766    New York University, 1990.
767 .  3. - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
   of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions", SIAM Journal on Numerical
769    Analysis, volume 31. 1994. These were developed in the context of iterative substructuring preconditioners.
770 .  4. - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
771    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
772    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
773    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
774    of the 17th International Conference on Domain Decomposition Methods in
775    Science and Engineering, held in Strobl, Austria, 2006, number 60 in
776    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
777 .  5. -  Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
778    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
779    of the 17th International Conference on Domain Decomposition Methods
780    in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
781    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
782 .  6. - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
783    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
784    Numer. Anal., 46(4), 2008.
785 -  7. - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
786    algorithm for almost incompressible elasticity. Technical Report
787    TR2008 912, Department of Computer Science, Courant Institute
   of Mathematical Sciences, New York University, May 2008.
789 
790    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
791       -pc_mg_type <type>
792 
793    Level: advanced
794 
795 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
796 M*/
797 
798 PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
799 {
800   PetscErrorCode ierr;
801   PC_Exotic      *ex;
802   PC_MG          *mg;
803 
804   PetscFunctionBegin;
805   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
806   if (pc->ops->destroy) {
807     ierr =  (*pc->ops->destroy)(pc);CHKERRQ(ierr);
808     pc->data = 0;
809   }
810   ierr = PetscFree(((PetscObject)pc)->type_name);CHKERRQ(ierr);
811   ((PetscObject)pc)->type_name = 0;
812 
813   ierr         = PCSetType(pc,PCMG);CHKERRQ(ierr);
814   ierr         = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);
815   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT);CHKERRQ(ierr);
816   ierr         = PetscNew(&ex);CHKERRQ(ierr); \
817   ex->type     = PC_EXOTIC_FACE;
818   mg           = (PC_MG*) pc->data;
819   mg->innerctx = ex;
820 
821 
822   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
823   pc->ops->view           = PCView_Exotic;
824   pc->ops->destroy        = PCDestroy_Exotic;
825   pc->ops->setup          = PCSetUp_Exotic;
826 
827   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCExoticSetType_C",PCExoticSetType_Exotic);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830