xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision e5a36eccef3d6b83a2c625c30d0dfd5adb4001f2)
1 
2 #include <petscdmda.h>   /*I "petscdmda.h" I*/
3 #include <petsc/private/pcmgimpl.h>   /*I "petscksp.h" I*/
4 #include <petscctable.h>
5 
typedef struct {
  PCExoticType type;         /* which coarse space: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* solver used to construct the interpolation when directSolve is false */
} PC_Exotic;
12 
13 const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};
14 
15 
16 /*
17       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
18 
19 */
20 PetscErrorCode DMDAGetWireBasketInterpolation(PC pc,DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
21 {
22   PetscErrorCode         ierr;
23   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
24   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
25   Mat                    Xint, Xsurf,Xint_tmp;
26   IS                     isint,issurf,is,row,col;
27   ISLocalToGlobalMapping ltg;
28   MPI_Comm               comm;
29   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
30   MatFactorInfo          info;
31   PetscScalar            *xsurf,*xint;
32 #if defined(PETSC_USE_DEBUG_foo)
33   PetscScalar            tmp;
34 #endif
35   PetscTable             ht;
36 
37   PetscFunctionBegin;
38   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
39   if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
40   if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
41   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
42   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
43   istart = istart ? -1 : 0;
44   jstart = jstart ? -1 : 0;
45   kstart = kstart ? -1 : 0;
46 
47   /*
48     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
49     to all the local degrees of freedom (this includes the vertices, edges and faces).
50 
51     Xint are the subset of the interpolation into the interior
52 
53     Xface are the interpolation onto faces but not into the interior
54 
55     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
56                                       Xint
57     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
58                                       Xsurf
59   */
60   N     = (m - istart)*(n - jstart)*(p - kstart);
61   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
62   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
63   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
64   Nsurf = Nface + Nwire;
65   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,NULL,&Xint);CHKERRQ(ierr);
66   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,NULL,&Xsurf);CHKERRQ(ierr);
67   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
68 
69   /*
70      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
71      Xsurf will be all zero (thus making the coarse matrix singular).
72   */
73   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
74   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
75   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
76 
77   cnt = 0;
78 
79   xsurf[cnt++] = 1;
80   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
81   xsurf[cnt++ + 2*Nsurf] = 1;
82 
83   for (j=1; j<n-1-jstart; j++) {
84     xsurf[cnt++ + 3*Nsurf] = 1;
85     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
86     xsurf[cnt++ + 5*Nsurf] = 1;
87   }
88 
89   xsurf[cnt++ + 6*Nsurf] = 1;
90   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
91   xsurf[cnt++ + 8*Nsurf] = 1;
92 
93   for (k=1; k<p-1-kstart; k++) {
94     xsurf[cnt++ + 9*Nsurf] = 1;
95     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
96     xsurf[cnt++ + 11*Nsurf] = 1;
97 
98     for (j=1; j<n-1-jstart; j++) {
99       xsurf[cnt++ + 12*Nsurf] = 1;
100       /* these are the interior nodes */
101       xsurf[cnt++ + 13*Nsurf] = 1;
102     }
103 
104     xsurf[cnt++ + 14*Nsurf] = 1;
105     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
106     xsurf[cnt++ + 16*Nsurf] = 1;
107   }
108 
109   xsurf[cnt++ + 17*Nsurf] = 1;
110   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
111   xsurf[cnt++ + 19*Nsurf] = 1;
112 
113   for (j=1;j<n-1-jstart;j++) {
114     xsurf[cnt++ + 20*Nsurf] = 1;
115     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
116     xsurf[cnt++ + 22*Nsurf] = 1;
117   }
118 
119   xsurf[cnt++ + 23*Nsurf] = 1;
120   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
121   xsurf[cnt++ + 25*Nsurf] = 1;
122 
123 
124   /* interpolations only sum to 1 when using direct solver */
125 #if defined(PETSC_USE_DEBUG_foo)
126   for (i=0; i<Nsurf; i++) {
127     tmp = 0.0;
128     for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
129     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
130   }
131 #endif
132   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
133   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
134 
135 
136   /*
137        I are the indices for all the needed vertices (in global numbering)
138        Iint are the indices for the interior values, I surf for the surface values
139             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
140              is NOT the local DMDA ordering.)
141        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
142   */
143 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
144   ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
145   ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
146   for (k=0; k<p-kstart; k++) {
147     for (j=0; j<n-jstart; j++) {
148       for (i=0; i<m-istart; i++) {
149         II[c++] = i + j*mwidth + k*mwidth*nwidth;
150 
151         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
152           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
153           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
154         } else {
155           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
156           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
157         }
158       }
159     }
160   }
161   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
162   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
163   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
164   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
165   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
166   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
167   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
168   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
169   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
170   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
171   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
172   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
173 
174   ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
175   A    = *Aholder;
176   ierr = PetscFree(Aholder);CHKERRQ(ierr);
177 
178   ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
179   ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
180   ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
181 
182   /*
183      Solve for the interpolation onto the interior Xint
184   */
185   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
186   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
187   if (exotic->directSolve) {
188     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
189     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
190     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
191     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
192     ierr = ISDestroy(&row);CHKERRQ(ierr);
193     ierr = ISDestroy(&col);CHKERRQ(ierr);
194     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
195     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
196     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
197   } else {
198     Vec         b,x;
199     PetscScalar *xint_tmp;
200 
201     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
202     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
203     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
204     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
205     ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
206     for (i=0; i<26; i++) {
207       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
208       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
209       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
210       ierr = KSPCheckSolve(exotic->ksp,pc,x);CHKERRQ(ierr);
211       ierr = VecResetArray(x);CHKERRQ(ierr);
212       ierr = VecResetArray(b);CHKERRQ(ierr);
213     }
214     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
215     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
216     ierr = VecDestroy(&x);CHKERRQ(ierr);
217     ierr = VecDestroy(&b);CHKERRQ(ierr);
218   }
219   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
220 
221 #if defined(PETSC_USE_DEBUG_foo)
222   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
223   for (i=0; i<Nint; i++) {
224     tmp = 0.0;
225     for (j=0; j<26; j++) tmp += xint[i+j*Nint];
226 
227     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
228   }
229   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
230   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
231 #endif
232 
233 
234   /*         total vertices             total faces                                  total edges */
235   Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);
236 
237   /*
238       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
239   */
240   cnt = 0;
241 
242   gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
243   { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
244   gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
245   {
246     gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
247     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
248     gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
249   }
250   gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
251   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
252   gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;
253 
254   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
255   /* convert that to global numbering and get them on all processes */
256   ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
257   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
258   ierr = PetscMalloc1(26*mp*np*pp,&globals);CHKERRQ(ierr);
259   ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
260 
261   /* Number the coarse grid points from 0 to Ntotal */
262   ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
263   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
264   for (i=0; i<26*mp*np*pp; i++) {
265     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
266   }
267   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
268   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
269   ierr = PetscFree(globals);CHKERRQ(ierr);
270   for (i=0; i<26; i++) {
271     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
272     gl[i]--;
273   }
274   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
275   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
276 
277   /* construct global interpolation matrix */
278   ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
279   if (reuse == MAT_INITIAL_MATRIX) {
280     ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint+Nsurf,NULL,P);CHKERRQ(ierr);
281   } else {
282     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
283   }
284   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
285   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
286   ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
287   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
288   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
289   ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
290   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
291   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
292   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
293   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
294 
295 #if defined(PETSC_USE_DEBUG_foo)
296   {
297     Vec         x,y;
298     PetscScalar *yy;
299     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
300     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
301     ierr = VecSet(x,1.0);CHKERRQ(ierr);
302     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
303     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
304     for (i=0; i<Ng; i++) {
305       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
306     }
307     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
308     ierr = VecDestroy(x);CHKERRQ(ierr);
309     ierr = VecDestroy(y);CHKERRQ(ierr);
310   }
311 #endif
312 
313   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
314   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
315   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
316   ierr = MatDestroy(&A);CHKERRQ(ierr);
317   ierr = ISDestroy(&is);CHKERRQ(ierr);
318   ierr = ISDestroy(&isint);CHKERRQ(ierr);
319   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
320   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
321   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
322   PetscFunctionReturn(0);
323 }
324 
325 /*
326       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
327 
328 */
329 PetscErrorCode DMDAGetFaceInterpolation(PC pc,DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
330 {
331   PetscErrorCode         ierr;
332   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
333   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
334   Mat                    Xint, Xsurf,Xint_tmp;
335   IS                     isint,issurf,is,row,col;
336   ISLocalToGlobalMapping ltg;
337   MPI_Comm               comm;
338   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
339   MatFactorInfo          info;
340   PetscScalar            *xsurf,*xint;
341 #if defined(PETSC_USE_DEBUG_foo)
342   PetscScalar            tmp;
343 #endif
344   PetscTable             ht;
345 
346   PetscFunctionBegin;
347   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
348   if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
349   if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
350   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
351   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
352   istart = istart ? -1 : 0;
353   jstart = jstart ? -1 : 0;
354   kstart = kstart ? -1 : 0;
355 
356   /*
357     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
358     to all the local degrees of freedom (this includes the vertices, edges and faces).
359 
360     Xint are the subset of the interpolation into the interior
361 
362     Xface are the interpolation onto faces but not into the interior
363 
364     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
365                                       Xint
366     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
367                                       Xsurf
368   */
369   N     = (m - istart)*(n - jstart)*(p - kstart);
370   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
371   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
372   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
373   Nsurf = Nface + Nwire;
374   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint);CHKERRQ(ierr);
375   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf);CHKERRQ(ierr);
376   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
377 
378   /*
379      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
380      Xsurf will be all zero (thus making the coarse matrix singular).
381   */
382   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
383   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
384   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
385 
386   cnt = 0;
387   for (j=1; j<n-1-jstart; j++) {
388     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
389   }
390 
391   for (k=1; k<p-1-kstart; k++) {
392     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
393     for (j=1; j<n-1-jstart; j++) {
394       xsurf[cnt++ + 2*Nsurf] = 1;
395       /* these are the interior nodes */
396       xsurf[cnt++ + 3*Nsurf] = 1;
397     }
398     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
399   }
400   for (j=1;j<n-1-jstart;j++) {
401     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
402   }
403 
404 #if defined(PETSC_USE_DEBUG_foo)
405   for (i=0; i<Nsurf; i++) {
406     tmp = 0.0;
407     for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];
408 
409     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
410   }
411 #endif
412   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
413   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
414 
415 
416   /*
417        I are the indices for all the needed vertices (in global numbering)
418        Iint are the indices for the interior values, I surf for the surface values
419             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
420              is NOT the local DMDA ordering.)
421        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
422   */
423 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
424   ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
425   ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
426   for (k=0; k<p-kstart; k++) {
427     for (j=0; j<n-jstart; j++) {
428       for (i=0; i<m-istart; i++) {
429         II[c++] = i + j*mwidth + k*mwidth*nwidth;
430 
431         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
432           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
433           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
434         } else {
435           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
436           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
437         }
438       }
439     }
440   }
441   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
442   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
443   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
444   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
445   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
446   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
447   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
448   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
449   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
450   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
451   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
452   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
453 
454   ierr = ISSort(is);CHKERRQ(ierr);
455   ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
456   A    = *Aholder;
457   ierr = PetscFree(Aholder);CHKERRQ(ierr);
458 
459   ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
460   ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
461   ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
462 
463   /*
464      Solve for the interpolation onto the interior Xint
465   */
466   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
467   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
468 
469   if (exotic->directSolve) {
470     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
471     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
472     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
473     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
474     ierr = ISDestroy(&row);CHKERRQ(ierr);
475     ierr = ISDestroy(&col);CHKERRQ(ierr);
476     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
477     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
478     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
479   } else {
480     Vec         b,x;
481     PetscScalar *xint_tmp;
482 
483     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
484     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
485     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
486     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
487     ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
488     for (i=0; i<6; i++) {
489       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
490       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
491       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
492       ierr = KSPCheckSolve(exotic->ksp,pc,x);CHKERRQ(ierr);
493       ierr = VecResetArray(x);CHKERRQ(ierr);
494       ierr = VecResetArray(b);CHKERRQ(ierr);
495     }
496     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
497     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
498     ierr = VecDestroy(&x);CHKERRQ(ierr);
499     ierr = VecDestroy(&b);CHKERRQ(ierr);
500   }
501   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
502 
503 #if defined(PETSC_USE_DEBUG_foo)
504   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
505   for (i=0; i<Nint; i++) {
506     tmp = 0.0;
507     for (j=0; j<6; j++) tmp += xint[i+j*Nint];
508 
509     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
510   }
511   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
512   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
513 #endif
514 
515 
516   /*         total faces    */
517   Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);
518 
519   /*
520       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
521   */
522   cnt = 0;
523   { gl[cnt++] = mwidth+1;}
524   {
525     { gl[cnt++] = mwidth*nwidth+1;}
526     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
527     { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
528   }
529   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}
530 
531   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
532   /* convert that to global numbering and get them on all processes */
533   ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
534   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
535   ierr = PetscMalloc1(6*mp*np*pp,&globals);CHKERRQ(ierr);
536   ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
537 
538   /* Number the coarse grid points from 0 to Ntotal */
539   ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
540   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
541   for (i=0; i<6*mp*np*pp; i++) {
542     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
543   }
544   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
545   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
546   ierr = PetscFree(globals);CHKERRQ(ierr);
547   for (i=0; i<6; i++) {
548     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
549     gl[i]--;
550   }
551   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
552   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
553 
554   /* construct global interpolation matrix */
555   ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
556   if (reuse == MAT_INITIAL_MATRIX) {
557     ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P);CHKERRQ(ierr);
558   } else {
559     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
560   }
561   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
562   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
563   ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
564   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
565   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
566   ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
567   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
568   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
569   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
570   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
571 
572 
573 #if defined(PETSC_USE_DEBUG_foo)
574   {
575     Vec         x,y;
576     PetscScalar *yy;
577     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
578     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
579     ierr = VecSet(x,1.0);CHKERRQ(ierr);
580     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
581     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
582     for (i=0; i<Ng; i++) {
583       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
584     }
585     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
586     ierr = VecDestroy(x);CHKERRQ(ierr);
587     ierr = VecDestroy(y);CHKERRQ(ierr);
588   }
589 #endif
590 
591   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
592   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
593   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
594   ierr = MatDestroy(&A);CHKERRQ(ierr);
595   ierr = ISDestroy(&is);CHKERRQ(ierr);
596   ierr = ISDestroy(&isint);CHKERRQ(ierr);
597   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
598   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
599   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 
604 /*@
605    PCExoticSetType - Sets the type of coarse grid interpolation to use
606 
607    Logically Collective on PC
608 
609    Input Parameters:
610 +  pc - the preconditioner context
611 -  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)
612 
613    Notes:
614     The face based interpolation has 1 degree of freedom per face and ignores the
615      edge and vertex values completely in the coarse problem. For any seven point
616      stencil the interpolation of a constant on all faces into the interior is that constant.
617 
618      The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
619      per face. A constant on the subdomain boundary is interpolated as that constant
620      in the interior of the domain.
621 
622      The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
623      if A is nonsingular A_c is also nonsingular.
624 
625      Both interpolations are suitable for only scalar problems.
626 
627    Level: intermediate
628 
629 
630 .seealso: PCEXOTIC, PCExoticType()
631 @*/
632 PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
633 {
634   PetscErrorCode ierr;
635 
636   PetscFunctionBegin;
637   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
638   PetscValidLogicalCollectiveEnum(pc,type,2);
639   ierr = PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type));CHKERRQ(ierr);
640   PetscFunctionReturn(0);
641 }
642 
643 static PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
644 {
645   PC_MG     *mg  = (PC_MG*)pc->data;
646   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
647 
648   PetscFunctionBegin;
649   ctx->type = type;
650   PetscFunctionReturn(0);
651 }
652 
653 PetscErrorCode PCSetUp_Exotic(PC pc)
654 {
655   PetscErrorCode ierr;
656   Mat            A;
657   PC_MG          *mg   = (PC_MG*)pc->data;
658   PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
659   MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
660 
661   PetscFunctionBegin;
662   if (!pc->dm) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
663   ierr = PCGetOperators(pc,NULL,&A);CHKERRQ(ierr);
664   if (ex->type == PC_EXOTIC_FACE) {
665     ierr = DMDAGetFaceInterpolation(pc,pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
666   } else if (ex->type == PC_EXOTIC_WIREBASKET) {
667     ierr = DMDAGetWireBasketInterpolation(pc,pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
668   } else SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
669   ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr);
670   /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
671   ierr = PCSetDM(pc,NULL);CHKERRQ(ierr);
672   ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
673   PetscFunctionReturn(0);
674 }
675 
676 PetscErrorCode PCDestroy_Exotic(PC pc)
677 {
678   PetscErrorCode ierr;
679   PC_MG          *mg  = (PC_MG*)pc->data;
680   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
681 
682   PetscFunctionBegin;
683   ierr = MatDestroy(&ctx->P);CHKERRQ(ierr);
684   ierr = KSPDestroy(&ctx->ksp);CHKERRQ(ierr);
685   ierr = PetscFree(ctx);CHKERRQ(ierr);
686   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
687   PetscFunctionReturn(0);
688 }
689 
690 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
691 {
692   PC_MG          *mg = (PC_MG*)pc->data;
693   PetscErrorCode ierr;
694   PetscBool      iascii;
695   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
696 
697   PetscFunctionBegin;
698   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
699   if (iascii) {
700     ierr = PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
701     if (ctx->directSolve) {
702       ierr = PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n");CHKERRQ(ierr);
703     } else {
704       PetscViewer sviewer;
705       PetscMPIInt rank;
706 
707       ierr = PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
708       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
709       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
710       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
711       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
712       if (!rank) {
713         ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
714       }
715       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
716       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
717       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
718     }
719   }
720   ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
721   PetscFunctionReturn(0);
722 }
723 
724 PetscErrorCode PCSetFromOptions_Exotic(PetscOptionItems *PetscOptionsObject,PC pc)
725 {
726   PetscErrorCode ierr;
727   PetscBool      flg;
728   PC_MG          *mg = (PC_MG*)pc->data;
729   PCExoticType   mgctype;
730   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
731 
732   PetscFunctionBegin;
733   ierr = PetscOptionsHead(PetscOptionsObject,"Exotic coarse space options");CHKERRQ(ierr);
734   ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
735   if (flg) {
736     ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
737   }
738   ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,NULL);CHKERRQ(ierr);
739   if (!ctx->directSolve) {
740     if (!ctx->ksp) {
741       const char *prefix;
742       ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
743       ierr = KSPSetErrorIfNotConverged(ctx->ksp,pc->erroriffailure);CHKERRQ(ierr);
744       ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
745       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ctx->ksp);CHKERRQ(ierr);
746       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
747       ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
748       ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
749     }
750     ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
751   }
752   ierr = PetscOptionsTail();CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 
757 /*MC
758      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
759 
760      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
761    grid spaces.
762 
763    Notes:
764     By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
765 
766    References:
767 +  1. - These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
768    of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
769 .  2. - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
770    New York University, 1990.
771 .  3. - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
772    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical
773    Analysis, volume 31. 1994. These were developed in the context of iterative substructuring preconditioners.
774 .  4. - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
775    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
776    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
777    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
778    of the 17th International Conference on Domain Decomposition Methods in
779    Science and Engineering, held in Strobl, Austria, 2006, number 60 in
780    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
781 .  5. -  Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
782    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
783    of the 17th International Conference on Domain Decomposition Methods
784    in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
785    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
786 .  6. - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
787    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
788    Numer. Anal., 46(4), 2008.
789 -  7. - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
790    algorithm for almost incompressible elasticity. Technical Report
791    TR2008 912, Department of Computer Science, Courant Institute
792    of Mathematical Sciences, New York University, May 2008.
793 
794    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
795       -pc_mg_type <type>
796 
797    Level: advanced
798 
799 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
800 M*/
801 
802 PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
803 {
804   PetscErrorCode ierr;
805   PC_Exotic      *ex;
806   PC_MG          *mg;
807 
808   PetscFunctionBegin;
809   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
810   if (pc->ops->destroy) {
811     ierr =  (*pc->ops->destroy)(pc);CHKERRQ(ierr);
812     pc->data = 0;
813   }
814   ierr = PetscFree(((PetscObject)pc)->type_name);CHKERRQ(ierr);
815   ((PetscObject)pc)->type_name = 0;
816 
817   ierr         = PCSetType(pc,PCMG);CHKERRQ(ierr);
818   ierr         = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);
819   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT);CHKERRQ(ierr);
820   ierr         = PetscNew(&ex);CHKERRQ(ierr); \
821   ex->type     = PC_EXOTIC_FACE;
822   mg           = (PC_MG*) pc->data;
823   mg->innerctx = ex;
824 
825 
826   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
827   pc->ops->view           = PCView_Exotic;
828   pc->ops->destroy        = PCDestroy_Exotic;
829   pc->ops->setup          = PCSetUp_Exotic;
830 
831   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCExoticSetType_C",PCExoticSetType_Exotic);CHKERRQ(ierr);
832   PetscFunctionReturn(0);
833 }
834