xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 
2 #include <petscdmda.h>              /*I "petscdmda.h" I*/
3 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
4 #include <petscctable.h>
5 
typedef struct {
  PCExoticType type;        /* coarse space flavor: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;           /* the constructed interpolation matrix */
  PetscBool    directSolve; /* use direct LU factorization to construct interpolation */
  KSP          ksp;         /* iterative solver used to build the interpolation when directSolve is false */
} PC_Exotic;
12 
/* String names for PCExoticType; the trailing "PCExoticType"/"PC_Exotic" entries appear to follow
   the PetscEnum convention (enum type name and prefix after the values) — confirm against PetscOptionsEnum() usage */
const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};
14 
15 /*
16       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
17 
18 */
19 PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P) {
20   PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
21   PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf, Nt;
22   Mat                    Xint, Xsurf, Xint_tmp;
23   IS                     isint, issurf, is, row, col;
24   ISLocalToGlobalMapping ltg;
25   MPI_Comm               comm;
26   Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
27   MatFactorInfo          info;
28   PetscScalar           *xsurf, *xint;
29   const PetscScalar     *rxint;
30 #if defined(PETSC_USE_DEBUG_foo)
31   PetscScalar tmp;
32 #endif
33   PetscTable ht;
34 
35   PetscFunctionBegin;
36   PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
37   PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
38   PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
39   PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
40   PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
41   istart = istart ? -1 : 0;
42   jstart = jstart ? -1 : 0;
43   kstart = kstart ? -1 : 0;
44 
45   /*
46     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
47     to all the local degrees of freedom (this includes the vertices, edges and faces).
48 
49     Xint are the subset of the interpolation into the interior
50 
51     Xface are the interpolation onto faces but not into the interior
52 
53     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
54                                       Xint
55     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
56                                       Xsurf
57   */
58   N     = (m - istart) * (n - jstart) * (p - kstart);
59   Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
60   Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
61   Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
62   Nsurf = Nface + Nwire;
63   PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
64   PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
65   PetscCall(MatDenseGetArray(Xsurf, &xsurf));
66 
67   /*
68      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
69      Xsurf will be all zero (thus making the coarse matrix singular).
70   */
71   PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
72   PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
73   PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");
74 
75   cnt = 0;
76 
77   xsurf[cnt++] = 1;
78   for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
79   xsurf[cnt++ + 2 * Nsurf] = 1;
80 
81   for (j = 1; j < n - 1 - jstart; j++) {
82     xsurf[cnt++ + 3 * Nsurf] = 1;
83     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
84     xsurf[cnt++ + 5 * Nsurf] = 1;
85   }
86 
87   xsurf[cnt++ + 6 * Nsurf] = 1;
88   for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
89   xsurf[cnt++ + 8 * Nsurf] = 1;
90 
91   for (k = 1; k < p - 1 - kstart; k++) {
92     xsurf[cnt++ + 9 * Nsurf] = 1;
93     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
94     xsurf[cnt++ + 11 * Nsurf] = 1;
95 
96     for (j = 1; j < n - 1 - jstart; j++) {
97       xsurf[cnt++ + 12 * Nsurf] = 1;
98       /* these are the interior nodes */
99       xsurf[cnt++ + 13 * Nsurf] = 1;
100     }
101 
102     xsurf[cnt++ + 14 * Nsurf] = 1;
103     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
104     xsurf[cnt++ + 16 * Nsurf] = 1;
105   }
106 
107   xsurf[cnt++ + 17 * Nsurf] = 1;
108   for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
109   xsurf[cnt++ + 19 * Nsurf] = 1;
110 
111   for (j = 1; j < n - 1 - jstart; j++) {
112     xsurf[cnt++ + 20 * Nsurf] = 1;
113     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
114     xsurf[cnt++ + 22 * Nsurf] = 1;
115   }
116 
117   xsurf[cnt++ + 23 * Nsurf] = 1;
118   for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
119   xsurf[cnt++ + 25 * Nsurf] = 1;
120 
121   /* interpolations only sum to 1 when using direct solver */
122 #if defined(PETSC_USE_DEBUG_foo)
123   for (i = 0; i < Nsurf; i++) {
124     tmp = 0.0;
125     for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
126     PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
127   }
128 #endif
129   PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
130   /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
131 
132   /*
133        I are the indices for all the needed vertices (in global numbering)
134        Iint are the indices for the interior values, I surf for the surface values
135             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
136              is NOT the local DMDA ordering.)
137        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
138   */
139 #define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
140   PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
141   PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
142   for (k = 0; k < p - kstart; k++) {
143     for (j = 0; j < n - jstart; j++) {
144       for (i = 0; i < m - istart; i++) {
145         II[c++] = i + j * mwidth + k * mwidth * nwidth;
146 
147         if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
148           IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
149           Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
150         } else {
151           IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
152           Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
153         }
154       }
155     }
156   }
157   PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
158   PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
159   PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
160   PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
161   PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
162   PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
163   PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
164   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
165   PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
166   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
167   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
168   PetscCall(PetscFree3(II, Iint, Isurf));
169 
170   PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
171   A = *Aholder;
172   PetscCall(PetscFree(Aholder));
173 
174   PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
175   PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
176   PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));
177 
178   /*
179      Solve for the interpolation onto the interior Xint
180   */
181   PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
182   PetscCall(MatScale(Xint_tmp, -1.0));
183   if (exotic->directSolve) {
184     PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
185     PetscCall(MatFactorInfoInitialize(&info));
186     PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
187     PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
188     PetscCall(ISDestroy(&row));
189     PetscCall(ISDestroy(&col));
190     PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
191     PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
192     PetscCall(MatDestroy(&iAii));
193   } else {
194     Vec          b, x;
195     PetscScalar *xint_tmp;
196 
197     PetscCall(MatDenseGetArray(Xint, &xint));
198     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
199     PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
200     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
201     PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
202     for (i = 0; i < 26; i++) {
203       PetscCall(VecPlaceArray(x, xint + i * Nint));
204       PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
205       PetscCall(KSPSolve(exotic->ksp, b, x));
206       PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
207       PetscCall(VecResetArray(x));
208       PetscCall(VecResetArray(b));
209     }
210     PetscCall(MatDenseRestoreArray(Xint, &xint));
211     PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
212     PetscCall(VecDestroy(&x));
213     PetscCall(VecDestroy(&b));
214   }
215   PetscCall(MatDestroy(&Xint_tmp));
216 
217 #if defined(PETSC_USE_DEBUG_foo)
218   PetscCall(MatDenseGetArrayRead(Xint, &rxint));
219   for (i = 0; i < Nint; i++) {
220     tmp = 0.0;
221     for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];
222 
223     PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
224   }
225   PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
226   /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
227 #endif
228 
229   /*         total vertices             total faces                                  total edges */
230   Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);
231 
232   /*
233       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
234   */
235   cnt = 0;
236 
237   gl[cnt++] = 0;
238   { gl[cnt++] = 1; }
239   gl[cnt++] = m - istart - 1;
240   {
241     gl[cnt++] = mwidth;
242     { gl[cnt++] = mwidth + 1; }
243     gl[cnt++] = mwidth + m - istart - 1;
244   }
245   gl[cnt++] = mwidth * (n - jstart - 1);
246   { gl[cnt++] = mwidth * (n - jstart - 1) + 1; }
247   gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
248   {
249     gl[cnt++] = mwidth * nwidth;
250     { gl[cnt++] = mwidth * nwidth + 1; }
251     gl[cnt++] = mwidth * nwidth + m - istart - 1;
252     {
253       gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
254       gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
255     }
256     gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
257     { gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1; }
258     gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
259   }
260   gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
261   { gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1; }
262   gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
263   {
264     gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
265     { gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1; }
266     gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
267   }
268   gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
269   { gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1; }
270   gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;
271 
272   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
273   /* convert that to global numbering and get them on all processes */
274   PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
275   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
276   PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
277   PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));
278 
279   /* Number the coarse grid points from 0 to Ntotal */
280   PetscCall(MatGetSize(Aglobal, &Nt, NULL));
281   PetscCall(PetscTableCreate(Ntotal / 3, Nt + 1, &ht));
282   for (i = 0; i < 26 * mp * np * pp; i++) { PetscCall(PetscTableAddCount(ht, globals[i] + 1)); }
283   PetscCall(PetscTableGetCount(ht, &cnt));
284   PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
285   PetscCall(PetscFree(globals));
286   for (i = 0; i < 26; i++) {
287     PetscCall(PetscTableFind(ht, gl[i] + 1, &gl[i]));
288     gl[i]--;
289   }
290   PetscCall(PetscTableDestroy(&ht));
291   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
292 
293   /* construct global interpolation matrix */
294   PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
295   if (reuse == MAT_INITIAL_MATRIX) {
296     PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
297   } else {
298     PetscCall(MatZeroEntries(*P));
299   }
300   PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
301   PetscCall(MatDenseGetArrayRead(Xint, &rxint));
302   PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
303   PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
304   PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
305   PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
306   PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
307   PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
308   PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
309   PetscCall(PetscFree2(IIint, IIsurf));
310 
311 #if defined(PETSC_USE_DEBUG_foo)
312   {
313     Vec          x, y;
314     PetscScalar *yy;
315     PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
316     PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
317     PetscCall(VecSet(x, 1.0));
318     PetscCall(MatMult(*P, x, y));
319     PetscCall(VecGetArray(y, &yy));
320     for (i = 0; i < Ng; i++) { PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i])); }
321     PetscCall(VecRestoreArray(y, &yy));
322     PetscCall(VecDestroy(x));
323     PetscCall(VecDestroy(y));
324   }
325 #endif
326 
327   PetscCall(MatDestroy(&Aii));
328   PetscCall(MatDestroy(&Ais));
329   PetscCall(MatDestroy(&Asi));
330   PetscCall(MatDestroy(&A));
331   PetscCall(ISDestroy(&is));
332   PetscCall(ISDestroy(&isint));
333   PetscCall(ISDestroy(&issurf));
334   PetscCall(MatDestroy(&Xint));
335   PetscCall(MatDestroy(&Xsurf));
336   PetscFunctionReturn(0);
337 }
338 
339 /*
340       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
341 
342 */
343 PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P) {
344   PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
345   PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf, Nt;
346   Mat                    Xint, Xsurf, Xint_tmp;
347   IS                     isint, issurf, is, row, col;
348   ISLocalToGlobalMapping ltg;
349   MPI_Comm               comm;
350   Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
351   MatFactorInfo          info;
352   PetscScalar           *xsurf, *xint;
353   const PetscScalar     *rxint;
354 #if defined(PETSC_USE_DEBUG_foo)
355   PetscScalar tmp;
356 #endif
357   PetscTable ht;
358 
359   PetscFunctionBegin;
360   PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
361   PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
362   PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
363   PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
364   PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
365   istart = istart ? -1 : 0;
366   jstart = jstart ? -1 : 0;
367   kstart = kstart ? -1 : 0;
368 
369   /*
370     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
371     to all the local degrees of freedom (this includes the vertices, edges and faces).
372 
373     Xint are the subset of the interpolation into the interior
374 
375     Xface are the interpolation onto faces but not into the interior
376 
377     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
378                                       Xint
379     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
380                                       Xsurf
381   */
382   N     = (m - istart) * (n - jstart) * (p - kstart);
383   Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
384   Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
385   Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
386   Nsurf = Nface + Nwire;
387   PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
388   PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
389   PetscCall(MatDenseGetArray(Xsurf, &xsurf));
390 
391   /*
392      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
393      Xsurf will be all zero (thus making the coarse matrix singular).
394   */
395   PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
396   PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
397   PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");
398 
399   cnt = 0;
400   for (j = 1; j < n - 1 - jstart; j++) {
401     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
402   }
403 
404   for (k = 1; k < p - 1 - kstart; k++) {
405     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
406     for (j = 1; j < n - 1 - jstart; j++) {
407       xsurf[cnt++ + 2 * Nsurf] = 1;
408       /* these are the interior nodes */
409       xsurf[cnt++ + 3 * Nsurf] = 1;
410     }
411     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
412   }
413   for (j = 1; j < n - 1 - jstart; j++) {
414     for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
415   }
416 
417 #if defined(PETSC_USE_DEBUG_foo)
418   for (i = 0; i < Nsurf; i++) {
419     tmp = 0.0;
420     for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];
421 
422     PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
423   }
424 #endif
425   PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
426   /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
427 
428   /*
429        I are the indices for all the needed vertices (in global numbering)
430        Iint are the indices for the interior values, I surf for the surface values
431             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
432              is NOT the local DMDA ordering.)
433        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
434   */
435 #define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
436   PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
437   PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
438   for (k = 0; k < p - kstart; k++) {
439     for (j = 0; j < n - jstart; j++) {
440       for (i = 0; i < m - istart; i++) {
441         II[c++] = i + j * mwidth + k * mwidth * nwidth;
442 
443         if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
444           IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
445           Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
446         } else {
447           IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
448           Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
449         }
450       }
451     }
452   }
453   PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
454   PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
455   PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
456   PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
457   PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
458   PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
459   PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
460   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
461   PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
462   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
463   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
464   PetscCall(PetscFree3(II, Iint, Isurf));
465 
466   PetscCall(ISSort(is));
467   PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
468   A = *Aholder;
469   PetscCall(PetscFree(Aholder));
470 
471   PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
472   PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
473   PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));
474 
475   /*
476      Solve for the interpolation onto the interior Xint
477   */
478   PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
479   PetscCall(MatScale(Xint_tmp, -1.0));
480 
481   if (exotic->directSolve) {
482     PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
483     PetscCall(MatFactorInfoInitialize(&info));
484     PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
485     PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
486     PetscCall(ISDestroy(&row));
487     PetscCall(ISDestroy(&col));
488     PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
489     PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
490     PetscCall(MatDestroy(&iAii));
491   } else {
492     Vec          b, x;
493     PetscScalar *xint_tmp;
494 
495     PetscCall(MatDenseGetArray(Xint, &xint));
496     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
497     PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
498     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
499     PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
500     for (i = 0; i < 6; i++) {
501       PetscCall(VecPlaceArray(x, xint + i * Nint));
502       PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
503       PetscCall(KSPSolve(exotic->ksp, b, x));
504       PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
505       PetscCall(VecResetArray(x));
506       PetscCall(VecResetArray(b));
507     }
508     PetscCall(MatDenseRestoreArray(Xint, &xint));
509     PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
510     PetscCall(VecDestroy(&x));
511     PetscCall(VecDestroy(&b));
512   }
513   PetscCall(MatDestroy(&Xint_tmp));
514 
515 #if defined(PETSC_USE_DEBUG_foo)
516   PetscCall(MatDenseGetArrayRead(Xint, &rxint));
517   for (i = 0; i < Nint; i++) {
518     tmp = 0.0;
519     for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];
520 
521     PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
522   }
523   PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
524   /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
525 #endif
526 
527   /*         total faces    */
528   Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);
529 
530   /*
531       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
532   */
533   cnt = 0;
534   { gl[cnt++] = mwidth + 1; }
535   {{gl[cnt++] = mwidth * nwidth + 1;
536 }
537 {
538   gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
539   gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
540 }
541 { gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1; }
542 }
543 { gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1; }
544 
545 /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
546 /* convert that to global numbering and get them on all processes */
547 PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
548 /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
549 PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
550 PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));
551 
552 /* Number the coarse grid points from 0 to Ntotal */
553 PetscCall(MatGetSize(Aglobal, &Nt, NULL));
554 PetscCall(PetscTableCreate(Ntotal / 3, Nt + 1, &ht));
555 for (i = 0; i < 6 * mp * np * pp; i++) { PetscCall(PetscTableAddCount(ht, globals[i] + 1)); }
556 PetscCall(PetscTableGetCount(ht, &cnt));
557 PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
558 PetscCall(PetscFree(globals));
559 for (i = 0; i < 6; i++) {
560   PetscCall(PetscTableFind(ht, gl[i] + 1, &gl[i]));
561   gl[i]--;
562 }
563 PetscCall(PetscTableDestroy(&ht));
564 /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
565 
566 /* construct global interpolation matrix */
567 PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
568 if (reuse == MAT_INITIAL_MATRIX) {
569   PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
570 } else {
571   PetscCall(MatZeroEntries(*P));
572 }
573 PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
574 PetscCall(MatDenseGetArrayRead(Xint, &rxint));
575 PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
576 PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
577 PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
578 PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
579 PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
580 PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
581 PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
582 PetscCall(PetscFree2(IIint, IIsurf));
583 
584 #if defined(PETSC_USE_DEBUG_foo)
585 {
586   Vec          x, y;
587   PetscScalar *yy;
588   PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
589   PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
590   PetscCall(VecSet(x, 1.0));
591   PetscCall(MatMult(*P, x, y));
592   PetscCall(VecGetArray(y, &yy));
593   for (i = 0; i < Ng; i++) { PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i])); }
594   PetscCall(VecRestoreArray(y, &yy));
595   PetscCall(VecDestroy(x));
596   PetscCall(VecDestroy(y));
597 }
598 #endif
599 
600 PetscCall(MatDestroy(&Aii));
601 PetscCall(MatDestroy(&Ais));
602 PetscCall(MatDestroy(&Asi));
603 PetscCall(MatDestroy(&A));
604 PetscCall(ISDestroy(&is));
605 PetscCall(ISDestroy(&isint));
606 PetscCall(ISDestroy(&issurf));
607 PetscCall(MatDestroy(&Xint));
608 PetscCall(MatDestroy(&Xsurf));
609 PetscFunctionReturn(0);
610 }
611 
612 /*@
613    PCExoticSetType - Sets the type of coarse grid interpolation to use
614 
615    Logically Collective on PC
616 
617    Input Parameters:
618 +  pc - the preconditioner context
619 -  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)
620 
621    Notes:
622     The face based interpolation has 1 degree of freedom per face and ignores the
623      edge and vertex values completely in the coarse problem. For any seven point
624      stencil the interpolation of a constant on all faces into the interior is that constant.
625 
626      The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
627      per face. A constant on the subdomain boundary is interpolated as that constant
628      in the interior of the domain.
629 
630      The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
631      if A is nonsingular A_c is also nonsingular.
632 
     Both interpolations are suitable only for scalar problems.
634 
635    Level: intermediate
636 
.seealso: `PCEXOTIC`, `PCExoticType`, `PCExoticGetType()`
638 @*/
PetscErrorCode PCExoticSetType(PC pc, PCExoticType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the type-specific implementation if one is composed on this PC; a no-op otherwise */
  PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
  PetscFunctionReturn(0);
}
646 
/* Implementation of PCExoticSetType() for the PCEXOTIC preconditioner: records the coarse-space flavor */
static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type) {
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  ctx->type = type; /* takes effect at the next PCSetUp() */
  PetscFunctionReturn(0);
}
655 
/* Builds (or rebuilds) the coarse-space interpolation for PCEXOTIC and hands it to the inner PCMG */
PetscErrorCode PCSetUp_Exotic(PC pc) {
  Mat        A;
  PC_MG     *mg    = (PC_MG *)pc->data;
  PC_Exotic *ex    = (PC_Exotic *)mg->innerctx;
  /* reuse the previously built interpolation matrix on repeated setups */
  MatReuse   reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
  PetscCall(PCGetOperators(pc, NULL, &A));
  if (ex->type == PC_EXOTIC_FACE) {
    PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else if (ex->type == PC_EXOTIC_WIREBASKET) {
    PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
  PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
  /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(0);
}
676 
677 PetscErrorCode PCDestroy_Exotic(PC pc) {
678   PC_MG     *mg  = (PC_MG *)pc->data;
679   PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
680 
681   PetscFunctionBegin;
682   PetscCall(MatDestroy(&ctx->P));
683   PetscCall(KSPDestroy(&ctx->ksp));
684   PetscCall(PetscFree(ctx));
685   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
686   PetscCall(PCDestroy_MG(pc));
687   PetscFunctionReturn(0);
688 }
689 
690 PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer) {
691   PC_MG     *mg = (PC_MG *)pc->data;
692   PetscBool  iascii;
693   PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;
694 
695   PetscFunctionBegin;
696   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
697   if (iascii) {
698     PetscCall(PetscViewerASCIIPrintf(viewer, "    Exotic type = %s\n", PCExoticTypes[ctx->type]));
699     if (ctx->directSolve) {
700       PetscCall(PetscViewerASCIIPrintf(viewer, "      Using direct solver to construct interpolation\n"));
701     } else {
702       PetscViewer sviewer;
703       PetscMPIInt rank;
704 
705       PetscCall(PetscViewerASCIIPrintf(viewer, "      Using iterative solver to construct interpolation\n"));
706       PetscCall(PetscViewerASCIIPushTab(viewer));
707       PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
708       PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
709       PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
710       if (rank == 0) { PetscCall(KSPView(ctx->ksp, sviewer)); }
711       PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
712       PetscCall(PetscViewerASCIIPopTab(viewer));
713       PetscCall(PetscViewerASCIIPopTab(viewer));
714     }
715   }
716   PetscCall(PCView_MG(pc, viewer));
717   PetscFunctionReturn(0);
718 }
719 
720 PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject) {
721   PetscBool    flg;
722   PC_MG       *mg = (PC_MG *)pc->data;
723   PCExoticType mgctype;
724   PC_Exotic   *ctx = (PC_Exotic *)mg->innerctx;
725 
726   PetscFunctionBegin;
727   PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
728   PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
729   if (flg) PetscCall(PCExoticSetType(pc, mgctype));
730   PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
731   if (!ctx->directSolve) {
732     if (!ctx->ksp) {
733       const char *prefix;
734       PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
735       PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
736       PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
737       PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)ctx->ksp));
738       PetscCall(PCGetOptionsPrefix(pc, &prefix));
739       PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
740       PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
741     }
742     PetscCall(KSPSetFromOptions(ctx->ksp));
743   }
744   PetscOptionsHeadEnd();
745   PetscFunctionReturn(0);
746 }
747 
748 /*MC
749      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
750 
751      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
752    grid spaces.
753 
754    Notes:
755     By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
756 
757    References:
+  * - These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
760 .  * - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
761    New York University, 1990.
762 .  * - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
763    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical
764    Analysis, volume 31. 1994. These were developed in the context of iterative substructuring preconditioners.
765 .  * - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
766    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
767    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
768    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
769    of the 17th International Conference on Domain Decomposition Methods in
770    Science and Engineering, held in Strobl, Austria, 2006, number 60 in
771    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
772 .  * -  Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
773    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
774    of the 17th International Conference on Domain Decomposition Methods
775    in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
776    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
777 .  * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
778    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
779    Numer. Anal., 46(4), 2008.
780 -  * - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
781    algorithm for almost incompressible elasticity. Technical Report
782    TR2008 912, Department of Computer Science, Courant Institute
   of Mathematical Sciences, New York University, May 2008.
784 
785    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
786       -pc_mg_type <type>
787 
788    Level: advanced
789 
790 .seealso: `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
791 M*/
792 
793 PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc) {
794   PC_Exotic *ex;
795   PC_MG     *mg;
796 
797   PetscFunctionBegin;
798   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
799   PetscTryTypeMethod(pc, destroy);
800   pc->data = NULL;
801 
802   PetscCall(PetscFree(((PetscObject)pc)->type_name));
803   ((PetscObject)pc)->type_name = NULL;
804 
805   PetscCall(PCSetType(pc, PCMG));
806   PetscCall(PCMGSetLevels(pc, 2, NULL));
807   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
808   PetscCall(PetscNew(&ex));
809   ex->type     = PC_EXOTIC_FACE;
810   mg           = (PC_MG *)pc->data;
811   mg->innerctx = ex;
812 
813   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
814   pc->ops->view           = PCView_Exotic;
815   pc->ops->destroy        = PCDestroy_Exotic;
816   pc->ops->setup          = PCSetUp_Exotic;
817 
818   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
819   PetscFunctionReturn(0);
820 }
821