xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03)
1 
2 #include <petscpcmg.h>   /*I "petscpcmg.h" I*/
3 #include <petscdmda.h>   /*I "petscdmda.h" I*/
4 #include <../src/ksp/pc/impls/mg/mgimpl.h>
5 
/* Private context for the PCEXOTIC preconditioner: a two-level multigrid
   method whose coarse space is either face based or wirebasket based */
typedef struct {
  PCExoticType type;         /* PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET coarse space */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* iterative solver used for the local solves when directSolve is false */
} PC_Exotic;
12 
13 const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};
14 
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "DMDAGetWireBasketInterpolation"
18 /*
19       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
20 
21 */
22 PetscErrorCode DMDAGetWireBasketInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
23 {
24   PetscErrorCode         ierr;
25   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
26   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
27   Mat                    Xint, Xsurf,Xint_tmp;
28   IS                     isint,issurf,is,row,col;
29   ISLocalToGlobalMapping ltg;
30   MPI_Comm               comm;
31   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
32   MatFactorInfo          info;
33   PetscScalar            *xsurf,*xint;
34 #if defined(PETSC_USE_DEBUG_foo)
35   PetscScalar            tmp;
36 #endif
37   PetscTable             ht;
38 
39   PetscFunctionBegin;
40   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
41   if (dof != 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only for single field problems");
42   if (dim != 3) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only coded for 3d problems");
43   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
44   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
45   istart = istart ? -1 : 0;
46   jstart = jstart ? -1 : 0;
47   kstart = kstart ? -1 : 0;
48 
49   /*
50     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
51     to all the local degrees of freedom (this includes the vertices, edges and faces).
52 
53     Xint are the subset of the interpolation into the interior
54 
55     Xface are the interpolation onto faces but not into the interior
56 
57     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
58                                       Xint
59     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
60                                       Xsurf
61   */
62   N     = (m - istart)*(n - jstart)*(p - kstart);
63   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
64   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
65   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
66   Nsurf = Nface + Nwire;
67   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,PETSC_NULL,&Xint);CHKERRQ(ierr);
68   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,PETSC_NULL,&Xsurf);CHKERRQ(ierr);
69   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
70 
71   /*
72      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
73      Xsurf will be all zero (thus making the coarse matrix singular).
74   */
75   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
76   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
77   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
78 
79   cnt = 0;
80 
81   xsurf[cnt++] = 1;
82   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
83   xsurf[cnt++ + 2*Nsurf] = 1;
84 
85   for (j=1; j<n-1-jstart; j++) {
86     xsurf[cnt++ + 3*Nsurf] = 1;
87     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
88     xsurf[cnt++ + 5*Nsurf] = 1;
89   }
90 
91   xsurf[cnt++ + 6*Nsurf] = 1;
92   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
93   xsurf[cnt++ + 8*Nsurf] = 1;
94 
95   for (k=1; k<p-1-kstart; k++) {
96     xsurf[cnt++ + 9*Nsurf] = 1;
97     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
98     xsurf[cnt++ + 11*Nsurf] = 1;
99 
100     for (j=1; j<n-1-jstart; j++) {
101       xsurf[cnt++ + 12*Nsurf] = 1;
102       /* these are the interior nodes */
103       xsurf[cnt++ + 13*Nsurf] = 1;
104     }
105 
106     xsurf[cnt++ + 14*Nsurf] = 1;
107     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
108     xsurf[cnt++ + 16*Nsurf] = 1;
109   }
110 
111   xsurf[cnt++ + 17*Nsurf] = 1;
112   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
113   xsurf[cnt++ + 19*Nsurf] = 1;
114 
115   for (j=1;j<n-1-jstart;j++) {
116     xsurf[cnt++ + 20*Nsurf] = 1;
117     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
118     xsurf[cnt++ + 22*Nsurf] = 1;
119   }
120 
121   xsurf[cnt++ + 23*Nsurf] = 1;
122   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
123   xsurf[cnt++ + 25*Nsurf] = 1;
124 
125 
126   /* interpolations only sum to 1 when using direct solver */
127 #if defined(PETSC_USE_DEBUG_foo)
128   for (i=0; i<Nsurf; i++) {
129     tmp = 0.0;
130     for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
131     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
132   }
133 #endif
134   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
135   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
136 
137 
138   /*
139        I are the indices for all the needed vertices (in global numbering)
140        Iint are the indices for the interior values, I surf for the surface values
141             (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
142              is NOT the local DMDA ordering.)
143        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
144   */
145 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
146   ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
147   ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
148   for (k=0; k<p-kstart; k++) {
149     for (j=0; j<n-jstart; j++) {
150       for (i=0; i<m-istart; i++) {
151         II[c++] = i + j*mwidth + k*mwidth*nwidth;
152 
153         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
154           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
155           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
156         } else {
157           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
158           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
159         }
160       }
161     }
162   }
163   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
164   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
165   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
166   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
167   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
168   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
169   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
170   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
171   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
172   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
173   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
174   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
175 
176   ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
177   A    = *Aholder;
178   ierr = PetscFree(Aholder);CHKERRQ(ierr);
179 
180   ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
181   ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
182   ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
183 
184   /*
185      Solve for the interpolation onto the interior Xint
186   */
187   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
188   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
189   if (exotic->directSolve) {
190     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
191     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
192     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
193     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
194     ierr = ISDestroy(&row);CHKERRQ(ierr);
195     ierr = ISDestroy(&col);CHKERRQ(ierr);
196     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
197     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
198     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
199   } else {
200     Vec         b,x;
201     PetscScalar *xint_tmp;
202 
203     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
204     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
205     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
206     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
207     ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
208     for (i=0; i<26; i++) {
209       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
210       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
211       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
212       ierr = VecResetArray(x);CHKERRQ(ierr);
213       ierr = VecResetArray(b);CHKERRQ(ierr);
214     }
215     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
216     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
217     ierr = VecDestroy(&x);CHKERRQ(ierr);
218     ierr = VecDestroy(&b);CHKERRQ(ierr);
219   }
220   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
221 
222 #if defined(PETSC_USE_DEBUG_foo)
223   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
224   for (i=0; i<Nint; i++) {
225     tmp = 0.0;
226     for (j=0; j<26; j++) tmp += xint[i+j*Nint];
227 
228     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
229   }
230   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
231   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
232 #endif
233 
234 
235   /*         total vertices             total faces                                  total edges */
236   Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);
237 
238   /*
239       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
240   */
241   cnt = 0;
242 
243   gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
244   { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
245   gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
246   {
247     gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
248     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
249     gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
250   }
251   gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
252   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
253   gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;
254 
255   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
256   /* convert that to global numbering and get them on all processes */
257   ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
258   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
259   ierr = PetscMalloc(26*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
260   ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
261 
262   /* Number the coarse grid points from 0 to Ntotal */
263   ierr = MatGetSize(Aglobal,&Nt,PETSC_NULL);CHKERRQ(ierr);
264   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
265   for (i=0; i<26*mp*np*pp; i++) {
266     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
267   }
268   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
269   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
270   ierr = PetscFree(globals);CHKERRQ(ierr);
271   for (i=0; i<26; i++) {
272     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
273     gl[i]--;
274   }
275   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
276   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
277 
278   /* construct global interpolation matrix */
279   ierr = MatGetLocalSize(Aglobal,&Ng,PETSC_NULL);CHKERRQ(ierr);
280   if (reuse == MAT_INITIAL_MATRIX) {
281     ierr = MatCreateAIJ(((PetscObject)da)->comm,Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,PETSC_NULL,Nint+Nsurf,PETSC_NULL,P);CHKERRQ(ierr);
282   } else {
283     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
284   }
285   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
286   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
287   ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
288   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
289   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
290   ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
291   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
292   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
293   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
294   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
295 
296 #if defined(PETSC_USE_DEBUG_foo)
297   {
298     Vec         x,y;
299     PetscScalar *yy;
300     ierr = VecCreateMPI(((PetscObject)da)->comm,Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
301     ierr = VecCreateMPI(((PetscObject)da)->comm,PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
302     ierr = VecSet(x,1.0);CHKERRQ(ierr);
303     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
304     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
305     for (i=0; i<Ng; i++) {
306       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
307     }
308     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
309     ierr = VecDestroy(x);CHKERRQ(ierr);
310     ierr = VecDestroy(y);CHKERRQ(ierr);
311   }
312 #endif
313 
314   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
315   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
316   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
317   ierr = MatDestroy(&A);CHKERRQ(ierr);
318   ierr = ISDestroy(&is);CHKERRQ(ierr);
319   ierr = ISDestroy(&isint);CHKERRQ(ierr);
320   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
321   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
322   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
323   PetscFunctionReturn(0);
324 }
325 
326 #undef __FUNCT__
327 #define __FUNCT__ "DMDAGetFaceInterpolation"
328 /*
329       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
330 
331 */
332 PetscErrorCode DMDAGetFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
333 {
334   PetscErrorCode         ierr;
335   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
336   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
337   Mat                    Xint, Xsurf,Xint_tmp;
338   IS                     isint,issurf,is,row,col;
339   ISLocalToGlobalMapping ltg;
340   MPI_Comm               comm;
341   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
342   MatFactorInfo          info;
343   PetscScalar            *xsurf,*xint;
344 #if defined(PETSC_USE_DEBUG_foo)
345   PetscScalar            tmp;
346 #endif
347   PetscTable             ht;
348 
349   PetscFunctionBegin;
350   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
351   if (dof != 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only for single field problems");
352   if (dim != 3) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP,"Only coded for 3d problems");
353   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
354   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
355   istart = istart ? -1 : 0;
356   jstart = jstart ? -1 : 0;
357   kstart = kstart ? -1 : 0;
358 
359   /*
360     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
361     to all the local degrees of freedom (this includes the vertices, edges and faces).
362 
363     Xint are the subset of the interpolation into the interior
364 
365     Xface are the interpolation onto faces but not into the interior
366 
367     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
368                                       Xint
369     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
370                                       Xsurf
371   */
372   N     = (m - istart)*(n - jstart)*(p - kstart);
373   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
374   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
375   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
376   Nsurf = Nface + Nwire;
377   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,PETSC_NULL,&Xint);CHKERRQ(ierr);
378   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,PETSC_NULL,&Xsurf);CHKERRQ(ierr);
379   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
380 
381   /*
382      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
383      Xsurf will be all zero (thus making the coarse matrix singular).
384   */
385   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
386   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
387   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
388 
389   cnt = 0;
390   for (j=1; j<n-1-jstart; j++) {
391     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
392   }
393 
394   for (k=1; k<p-1-kstart; k++) {
395     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
396     for (j=1; j<n-1-jstart; j++) {
397       xsurf[cnt++ + 2*Nsurf] = 1;
398       /* these are the interior nodes */
399       xsurf[cnt++ + 3*Nsurf] = 1;
400     }
401     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
402   }
403   for (j=1;j<n-1-jstart;j++) {
404     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
405   }
406 
407 #if defined(PETSC_USE_DEBUG_foo)
408   for (i=0; i<Nsurf; i++) {
409     tmp = 0.0;
410     for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];
411 
412     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
413   }
414 #endif
415   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
416   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
417 
418 
419   /*
420        I are the indices for all the needed vertices (in global numbering)
421        Iint are the indices for the interior values, I surf for the surface values
422             (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
423              is NOT the local DMDA ordering.)
424        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
425   */
426 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
427   ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
428   ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
429   for (k=0; k<p-kstart; k++) {
430     for (j=0; j<n-jstart; j++) {
431       for (i=0; i<m-istart; i++) {
432         II[c++] = i + j*mwidth + k*mwidth*nwidth;
433 
434         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
435           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
436           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
437         } else {
438           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
439           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
440         }
441       }
442     }
443   }
444   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
445   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
446   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
447   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
448   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
449   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
450   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
451   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
452   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
453   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
454   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
455   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
456 
457   ierr = ISSort(is);CHKERRQ(ierr);
458   ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
459   A    = *Aholder;
460   ierr = PetscFree(Aholder);CHKERRQ(ierr);
461 
462   ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
463   ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
464   ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
465 
466   /*
467      Solve for the interpolation onto the interior Xint
468   */
469   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
470   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
471 
472   if (exotic->directSolve) {
473     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
474     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
475     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
476     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
477     ierr = ISDestroy(&row);CHKERRQ(ierr);
478     ierr = ISDestroy(&col);CHKERRQ(ierr);
479     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
480     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
481     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
482   } else {
483     Vec         b,x;
484     PetscScalar *xint_tmp;
485 
486     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
487     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
488     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
489     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
490     ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
491     for (i=0; i<6; i++) {
492       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
493       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
494       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
495       ierr = VecResetArray(x);CHKERRQ(ierr);
496       ierr = VecResetArray(b);CHKERRQ(ierr);
497     }
498     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
499     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
500     ierr = VecDestroy(&x);CHKERRQ(ierr);
501     ierr = VecDestroy(&b);CHKERRQ(ierr);
502   }
503   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
504 
505 #if defined(PETSC_USE_DEBUG_foo)
506   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
507   for (i=0; i<Nint; i++) {
508     tmp = 0.0;
509     for (j=0; j<6; j++) tmp += xint[i+j*Nint];
510 
511     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
512   }
513   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
514   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
515 #endif
516 
517 
518   /*         total faces    */
519   Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);
520 
521   /*
522       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
523   */
524   cnt = 0;
525   { gl[cnt++] = mwidth+1;}
526   {
527     { gl[cnt++] = mwidth*nwidth+1;}
528     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
529     { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
530   }
531   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}
532 
533   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
534   /* convert that to global numbering and get them on all processes */
535   ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
536   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
537   ierr = PetscMalloc(6*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
538   ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
539 
540   /* Number the coarse grid points from 0 to Ntotal */
541   ierr = MatGetSize(Aglobal,&Nt,PETSC_NULL);CHKERRQ(ierr);
542   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
543   for (i=0; i<6*mp*np*pp; i++) {
544     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
545   }
546   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
547   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
548   ierr = PetscFree(globals);CHKERRQ(ierr);
549   for (i=0; i<6; i++) {
550     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
551     gl[i]--;
552   }
553   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
554   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
555 
556   /* construct global interpolation matrix */
557   ierr = MatGetLocalSize(Aglobal,&Ng,PETSC_NULL);CHKERRQ(ierr);
558   if (reuse == MAT_INITIAL_MATRIX) {
559     ierr = MatCreateAIJ(((PetscObject)da)->comm,Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,PETSC_NULL,Nint,PETSC_NULL,P);CHKERRQ(ierr);
560   } else {
561     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
562   }
563   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
564   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
565   ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
566   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
567   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
568   ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
569   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
570   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
571   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
572   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
573 
574 
575 #if defined(PETSC_USE_DEBUG_foo)
576   {
577     Vec         x,y;
578     PetscScalar *yy;
579     ierr = VecCreateMPI(((PetscObject)da)->comm,Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
580     ierr = VecCreateMPI(((PetscObject)da)->comm,PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
581     ierr = VecSet(x,1.0);CHKERRQ(ierr);
582     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
583     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
584     for (i=0; i<Ng; i++) {
585       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
586     }
587     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
588     ierr = VecDestroy(x);CHKERRQ(ierr);
589     ierr = VecDestroy(y);CHKERRQ(ierr);
590   }
591 #endif
592 
593   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
594   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
595   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
596   ierr = MatDestroy(&A);CHKERRQ(ierr);
597   ierr = ISDestroy(&is);CHKERRQ(ierr);
598   ierr = ISDestroy(&isint);CHKERRQ(ierr);
599   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
600   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
601   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
602   PetscFunctionReturn(0);
603 }
604 
605 
#undef __FUNCT__
#define __FUNCT__ "PCExoticSetType"
/*@
   PCExoticSetType - Sets the type of coarse grid interpolation to use

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)

   Notes: The face based interpolation has 1 degree of freedom per face and ignores the
     edge and vertex values completely in the coarse problem. For any seven point
     stencil the interpolation of a constant on all faces into the interior is that constant.

     The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
     per face. A constant on the subdomain boundary is interpolated as that constant
     in the interior of the domain.

     The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
     if A is nonsingular A_c is also nonsingular.

     Both interpolations are suitable for only scalar problems.

   Level: intermediate


.seealso: PCEXOTIC, PCExoticType()
@*/
PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to PCExoticSetType_Exotic() if the PC is of type PCEXOTIC; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "PCExoticSetType_Exotic"
648 PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
649 {
650   PC_MG     *mg  = (PC_MG*)pc->data;
651   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
652 
653   PetscFunctionBegin;
654   ctx->type = type;
655   PetscFunctionReturn(0);
656 }
657 
658 #undef __FUNCT__
659 #define __FUNCT__ "PCSetUp_Exotic"
660 PetscErrorCode PCSetUp_Exotic(PC pc)
661 {
662   PetscErrorCode ierr;
663   Mat            A;
664   PC_MG          *mg   = (PC_MG*)pc->data;
665   PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
666   MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
667 
668   PetscFunctionBegin;
669   if (!pc->dm) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
670   ierr = PCGetOperators(pc,PETSC_NULL,&A,PETSC_NULL);CHKERRQ(ierr);
671   if (ex->type == PC_EXOTIC_FACE) {
672     ierr = DMDAGetFaceInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
673   } else if (ex->type == PC_EXOTIC_WIREBASKET) {
674     ierr = DMDAGetWireBasketInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
675   } else SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
676   ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr);
677   /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
678   ierr = PCSetDM(pc,PETSC_NULL);CHKERRQ(ierr);
679   ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
680   PetscFunctionReturn(0);
681 }
682 
683 #undef __FUNCT__
684 #define __FUNCT__ "PCDestroy_Exotic"
685 PetscErrorCode PCDestroy_Exotic(PC pc)
686 {
687   PetscErrorCode ierr;
688   PC_MG          *mg  = (PC_MG*)pc->data;
689   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
690 
691   PetscFunctionBegin;
692   ierr = MatDestroy(&ctx->P);CHKERRQ(ierr);
693   ierr = KSPDestroy(&ctx->ksp);CHKERRQ(ierr);
694   ierr = PetscFree(ctx);CHKERRQ(ierr);
695   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
696   PetscFunctionReturn(0);
697 }
698 
699 #undef __FUNCT__
700 #define __FUNCT__ "PCView_Exotic"
701 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
702 {
703   PC_MG          *mg = (PC_MG*)pc->data;
704   PetscErrorCode ierr;
705   PetscBool      iascii;
706   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
707 
708   PetscFunctionBegin;
709   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
710   if (iascii) {
711     ierr = PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
712     if (ctx->directSolve) {
713       ierr = PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n");CHKERRQ(ierr);
714     } else {
715       PetscViewer sviewer;
716       PetscMPIInt rank;
717 
718       ierr = PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
719       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
720       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
721       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
722       ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
723       if (!rank) {
724         ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
725       }
726       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
727       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
728       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
729     }
730   }
731   ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
732   PetscFunctionReturn(0);
733 }
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "PCSetFromOptions_Exotic"
737 PetscErrorCode PCSetFromOptions_Exotic(PC pc)
738 {
739   PetscErrorCode ierr;
740   PetscBool      flg;
741   PC_MG          *mg = (PC_MG*)pc->data;
742   PCExoticType   mgctype;
743   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
744 
745   PetscFunctionBegin;
746   ierr = PetscOptionsHead("Exotic coarse space options");CHKERRQ(ierr);
747   ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
748   if (flg) {
749     ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
750   }
751   ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,PETSC_NULL);CHKERRQ(ierr);
752   if (!ctx->directSolve) {
753     if (!ctx->ksp) {
754       const char *prefix;
755       ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
756       ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
757       ierr = PetscLogObjectParent(pc,ctx->ksp);CHKERRQ(ierr);
758       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
759       ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
760       ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
761     }
762     ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
763   }
764   ierr = PetscOptionsTail();CHKERRQ(ierr);
765   PetscFunctionReturn(0);
766 }
767 
768 
769 /*MC
770      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
771 
772      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
773    grid spaces.
774 
775    Notes: By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
776 
777    References: These coarse grid spaces originate in the work of Bramble, Pasciak  and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53 pages 1--24, 1989.
779    They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
780    New York University, 1990. They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
781    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical
782    Analysis, volume 31. pages 1662-1694, 1994. These were developed in the context of iterative substructuring preconditioners.
783    They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
784    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
785    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
786    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
787    of the 17th International Conference on Domain Decomposition Methods in
788    Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in
789    Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 255-261, 2007.
   Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing
   coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
792    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
793    of the 17th International Conference on Domain Decomposition Methods
794    in Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in
795    Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 247-254, 2007
796    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
797    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
798    Numer. Anal., 46(4):2153-2168, 2008.
799    Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
800    algorithm for almost incompressible elasticity. Technical Report
801    TR2008-912, Department of Computer Science, Courant Institute
802    of Mathematical Sciences, New York University, May 2008. URL:
803    http://cs.nyu.edu/csweb/Research/TechReports/TR2008-912/TR2008-912.pdf
804 
805    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
806       -pc_mg_type <type>
807 
808    Level: advanced
809 
810 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
811 M*/
812 
813 EXTERN_C_BEGIN
814 #undef __FUNCT__
815 #define __FUNCT__ "PCCreate_Exotic"
816 PetscErrorCode  PCCreate_Exotic(PC pc)
817 {
818   PetscErrorCode ierr;
819   PC_Exotic      *ex;
820   PC_MG          *mg;
821 
822   PetscFunctionBegin;
823   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
824   if (pc->ops->destroy) {
825     ierr =  (*pc->ops->destroy)(pc);CHKERRQ(ierr);
826     pc->data = 0;
827   }
828   ierr = PetscFree(((PetscObject)pc)->type_name);CHKERRQ(ierr);
829   ((PetscObject)pc)->type_name = 0;
830 
831   ierr         = PCSetType(pc,PCMG);CHKERRQ(ierr);
832   ierr         = PCMGSetLevels(pc,2,PETSC_NULL);CHKERRQ(ierr);
833   ierr         = PCMGSetGalerkin(pc,PETSC_TRUE);CHKERRQ(ierr);
834   ierr         = PetscNew(PC_Exotic,&ex);CHKERRQ(ierr); \
835   ex->type     = PC_EXOTIC_FACE;
836   mg           = (PC_MG*) pc->data;
837   mg->innerctx = ex;
838 
839 
840   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
841   pc->ops->view           = PCView_Exotic;
842   pc->ops->destroy        = PCDestroy_Exotic;
843   pc->ops->setup          = PCSetUp_Exotic;
844 
845   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCExoticSetType_C","PCExoticSetType_Exotic",PCExoticSetType_Exotic);CHKERRQ(ierr);
846   PetscFunctionReturn(0);
847 }
848 EXTERN_C_END
849