xref: /petsc/src/snes/tests/ex69.c (revision 4e8208cbcbc709572b8abe32f33c78b69c819375)
1 static char help[] = "Tests recovery from domain errors in MatMult() and PCApply()\n\n";
2 
3 /*
4       See src/ksp/ksp/tutorials/ex19.c from which this was copied
5 */
6 
7 #include <petscsnes.h>
8 #include <petscdm.h>
9 #include <petscdmda.h>
10 
11 /*
12    User-defined routines and data structures
13 */
14 typedef struct {
15   PetscScalar u, v, omega, temp;
16 } Field;
17 
18 PetscErrorCode FormFunctionLocal(DMDALocalInfo *, Field **, Field **, void *);
19 
20 typedef struct {
21   PetscReal lidvelocity, prandtl, grashof; /* physical parameters */
22   PetscBool draw_contours;                 /* flag - 1 indicates drawing contours */
23   PetscBool errorindomain;
24   PetscBool errorindomainmf;
25   SNES      snes;
26 } AppCtx;
27 
28 typedef struct {
29   Mat Jmf;
30 } MatShellCtx;
31 
32 extern PetscErrorCode FormInitialGuess(AppCtx *, DM, Vec);
33 extern PetscErrorCode MatMult_MyShell(Mat, Vec, Vec);
34 extern PetscErrorCode MatAssemblyEnd_MyShell(Mat, MatAssemblyType);
35 extern PetscErrorCode PCApply_MyShell(PC, Vec, Vec);
36 extern PetscErrorCode SNESComputeJacobian_MyShell(SNES, Vec, Mat, Mat, void *);
37 
main(int argc,char ** argv)38 int main(int argc, char **argv)
39 {
40   AppCtx      user; /* user-defined work context */
41   PetscInt    mx, my;
42   MPI_Comm    comm;
43   DM          da;
44   Vec         x;
45   Mat         J = NULL, Jmf = NULL;
46   MatShellCtx matshellctx;
47   PetscInt    mlocal, nlocal;
48   PC          pc;
49   KSP         ksp;
50   PetscBool   errorinmatmult = PETSC_FALSE, errorinpcapply = PETSC_FALSE, errorinpcsetup = PETSC_FALSE;
51 
52   PetscFunctionBeginUser;
53   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
54   PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_matmult", &errorinmatmult, NULL));
55   PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_pcapply", &errorinpcapply, NULL));
56   PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_pcsetup", &errorinpcsetup, NULL));
57   user.errorindomain = PETSC_FALSE;
58   PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_domain", &user.errorindomain, NULL));
59   user.errorindomainmf = PETSC_FALSE;
60   PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_domainmf", &user.errorindomainmf, NULL));
61 
62   comm = PETSC_COMM_WORLD;
63   PetscCall(SNESCreate(comm, &user.snes));
64 
65   /*
66       Create distributed array object to manage parallel grid and vectors
67       for principal unknowns (x) and governing residuals (f)
68   */
69   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 4, 1, 0, 0, &da));
70   PetscCall(DMSetFromOptions(da));
71   PetscCall(DMSetUp(da));
72   PetscCall(SNESSetDM(user.snes, da));
73 
74   PetscCall(DMDAGetInfo(da, 0, &mx, &my, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
75   /*
76      Problem parameters (velocity of lid, prandtl, and grashof numbers)
77   */
78   user.lidvelocity = 1.0 / (mx * my);
79   user.prandtl     = 1.0;
80   user.grashof     = 1.0;
81 
82   PetscCall(PetscOptionsGetReal(NULL, NULL, "-lidvelocity", &user.lidvelocity, NULL));
83   PetscCall(PetscOptionsGetReal(NULL, NULL, "-prandtl", &user.prandtl, NULL));
84   PetscCall(PetscOptionsGetReal(NULL, NULL, "-grashof", &user.grashof, NULL));
85   PetscCall(PetscOptionsHasName(NULL, NULL, "-contours", &user.draw_contours));
86 
87   PetscCall(DMDASetFieldName(da, 0, "x_velocity"));
88   PetscCall(DMDASetFieldName(da, 1, "y_velocity"));
89   PetscCall(DMDASetFieldName(da, 2, "Omega"));
90   PetscCall(DMDASetFieldName(da, 3, "temperature"));
91 
92   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
93      Create user context, set problem data, create vector data structures.
94      Also, compute the initial guess.
95      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
96 
97   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98      Create nonlinear solver context
99 
100      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
101   PetscCall(DMSetApplicationContext(da, &user));
102   PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (PetscErrorCode (*)(DMDALocalInfo *, void *, void *, void *))FormFunctionLocal, &user));
103 
104   if (errorinmatmult) {
105     PetscCall(MatCreateSNESMF(user.snes, &Jmf));
106     PetscCall(MatSetFromOptions(Jmf));
107     PetscCall(MatGetLocalSize(Jmf, &mlocal, &nlocal));
108     matshellctx.Jmf = Jmf;
109     PetscCall(MatCreateShell(PetscObjectComm((PetscObject)Jmf), mlocal, nlocal, PETSC_DECIDE, PETSC_DECIDE, &matshellctx, &J));
110     PetscCall(MatShellSetOperation(J, MATOP_MULT, (PetscErrorCodeFn *)MatMult_MyShell));
111     PetscCall(MatShellSetOperation(J, MATOP_ASSEMBLY_END, (PetscErrorCodeFn *)MatAssemblyEnd_MyShell));
112     PetscCall(SNESSetJacobian(user.snes, J, J, MatMFFDComputeJacobian, NULL));
113   }
114 
115   PetscCall(SNESSetFromOptions(user.snes));
116   PetscCall(PetscPrintf(comm, "lid velocity = %g, prandtl # = %g, grashof # = %g\n", (double)user.lidvelocity, (double)user.prandtl, (double)user.grashof));
117 
118   if (errorinpcapply) {
119     PetscCall(SNESGetKSP(user.snes, &ksp));
120     PetscCall(KSPGetPC(ksp, &pc));
121     PetscCall(PCSetType(pc, PCSHELL));
122     PetscCall(PCShellSetApply(pc, PCApply_MyShell));
123   }
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Solve the nonlinear system
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   PetscCall(DMCreateGlobalVector(da, &x));
129   PetscCall(FormInitialGuess(&user, da, x));
130 
131   if (errorinpcsetup) {
132     PetscCall(SNESSetUp(user.snes));
133     PetscCall(SNESSetJacobian(user.snes, NULL, NULL, SNESComputeJacobian_MyShell, NULL));
134   }
135   PetscCall(SNESSolve(user.snes, NULL, x));
136 
137   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138      Free work space.  All PETSc objects should be destroyed when they
139      are no longer needed.
140      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
141   PetscCall(MatDestroy(&J));
142   PetscCall(MatDestroy(&Jmf));
143   PetscCall(VecDestroy(&x));
144   PetscCall(DMDestroy(&da));
145   PetscCall(SNESDestroy(&user.snes));
146   PetscCall(PetscFinalize());
147   return 0;
148 }
149 
150 /*
151    FormInitialGuess - Forms initial approximation.
152 
153    Input Parameters:
154    user - user-defined application context
155    X - vector
156 
157    Output Parameter:
158    X - vector
159 */
FormInitialGuess(AppCtx * user,DM da,Vec X)160 PetscErrorCode FormInitialGuess(AppCtx *user, DM da, Vec X)
161 {
162   PetscInt  i, j, mx, xs, ys, xm, ym;
163   PetscReal grashof, dx;
164   Field   **x;
165 
166   PetscFunctionBeginUser;
167   grashof = user->grashof;
168 
169   PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
170   dx = 1.0 / (mx - 1);
171 
172   /*
173      Get local grid boundaries (for 2-dimensional DMDA):
174        xs, ys   - starting grid indices (no ghost points)
175        xm, ym   - widths of local grid (no ghost points)
176   */
177   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
178 
179   /*
180      Get a pointer to vector data.
181        - For default PETSc vectors, VecGetArray() returns a pointer to
182          the data array.  Otherwise, the routine is implementation dependent.
183        - You MUST call VecRestoreArray() when you no longer need access to
184          the array.
185   */
186   PetscCall(DMDAVecGetArray(da, X, &x));
187 
188   /*
189      Compute initial guess over the locally owned part of the grid
190      Initial condition is motionless fluid and equilibrium temperature
191   */
192   for (j = ys; j < ys + ym; j++) {
193     for (i = xs; i < xs + xm; i++) {
194       x[j][i].u     = 0.0;
195       x[j][i].v     = 0.0;
196       x[j][i].omega = 0.0;
197       x[j][i].temp  = (grashof > 0) * i * dx;
198     }
199   }
200 
201   /*
202      Restore vector
203   */
204   PetscCall(DMDAVecRestoreArray(da, X, &x));
205   PetscFunctionReturn(PETSC_SUCCESS);
206 }
207 
/*
   FormFunctionLocal - Evaluates the nonlinear residual of the driven-cavity
   problem (x/y velocity, vorticity omega, temperature) on the local patch
   described by info, using upwinded finite differences on a uniform grid.
   After a fixed number of calls it can raise a (deliberate, fake) function
   domain error on rank 0 to exercise SNES error recovery.

   Input Parameters:
   info - DMDA local grid information (global sizes, owned ranges)
   x    - ghosted local array of the current iterate
   ptr  - AppCtx carrying physical parameters and error-injection flags

   Output Parameter:
   f - local array receiving the residual
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, Field **x, Field **f, void *ptr)
{
  AppCtx         *user = (AppCtx *)ptr;
  PetscInt        xints, xinte, yints, yinte, i, j;
  PetscReal       hx, hy, dhx, dhy, hxdhy, hydhx;
  PetscReal       grashof, prandtl, lid;
  PetscScalar     u, uxx, uyy, vx, vy, avx, avy, vxp, vxm, vyp, vym;
  static PetscInt fail = 0; /* call counter shared across all invocations */

  PetscFunctionBeginUser;
  /* NOTE(review): `fail` is incremented once or twice per call depending on
     short-circuiting (well-defined, since || is a sequence point). The
     thresholds 7 and 36 pace when the injected domain error fires for the
     matrix-free vs. assembled test cases — presumably tuned to match the
     expected test outputs; confirm before changing. Only rank 0 flags the
     error, testing that the failure propagates collectively. */
  if ((fail++ > 7 && user->errorindomainmf) || (fail++ > 36 && user->errorindomain)) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)user->snes), &rank));
    if (rank == 0) PetscCall(SNESSetFunctionDomainError(user->snes));
  }
  grashof = user->grashof;
  prandtl = user->prandtl;
  lid     = user->lidvelocity;

  /*
     Define mesh intervals ratios for uniform grid.

     Note: FD formulae below are normalized by multiplying through by
     local volume element (i.e. hx*hy) to obtain coefficients O(1) in two dimensions.

  */
  dhx   = (PetscReal)(info->mx - 1);
  dhy   = (PetscReal)(info->my - 1);
  hx    = 1.0 / dhx;
  hy    = 1.0 / dhy;
  hxdhy = hx * dhy; /* scaled 1/hy^2 for the y second difference */
  hydhx = hy * dhx; /* scaled 1/hx^2 for the x second difference */

  /* owned index ranges; shrunk below when a boundary row/column is handled */
  xints = info->xs;
  xinte = info->xs + info->xm;
  yints = info->ys;
  yinte = info->ys + info->ym;

  /* Test whether we are on the bottom edge of the global array */
  if (yints == 0) {
    j     = 0;
    yints = yints + 1; /* boundary row handled here, excluded from interior loop */
    /* bottom edge: no-slip wall, insulated (zero temperature gradient) */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j + 1][i].u - x[j][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j + 1][i].temp;
    }
  }

  /* Test whether we are on the top edge of the global array */
  if (yinte == info->my) {
    j     = info->my - 1;
    yinte = yinte - 1;
    /* top edge: moving lid (u = lid), insulated */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u - lid;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j][i].u - x[j - 1][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j - 1][i].temp;
    }
  }

  /* Test whether we are on the left edge of the global array */
  if (xints == 0) {
    i     = 0;
    xints = xints + 1;
    /* left edge: no-slip wall, fixed temperature 0 */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i + 1].v - x[j][i].v) * dhx;
      f[j][i].temp  = x[j][i].temp;
    }
  }

  /* Test whether we are on the right edge of the global array */
  if (xinte == info->mx) {
    i     = info->mx - 1;
    xinte = xinte - 1;
    /* right edge: no-slip wall, temperature fixed to 1 when grashof > 0 */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i].v - x[j][i - 1].v) * dhx;
      f[j][i].temp  = x[j][i].temp - (PetscReal)(grashof > 0);
    }
  }

  /* Compute over the interior points */
  for (j = yints; j < yinte; j++) {
    for (i = xints; i < xinte; i++) {
      /*
       convective coefficients for upwinding: split velocity into
       positive (vxp, vyp) and negative (vxm, vym) parts
      */
      vx  = x[j][i].u;
      avx = PetscAbsScalar(vx);
      vxp = .5 * (vx + avx);
      vxm = .5 * (vx - avx);
      vy  = x[j][i].v;
      avy = PetscAbsScalar(vy);
      vyp = .5 * (vy + avy);
      vym = .5 * (vy - avy);

      /* U velocity: Laplacian plus vorticity coupling */
      u         = x[j][i].u;
      uxx       = (2.0 * u - x[j][i - 1].u - x[j][i + 1].u) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].u - x[j + 1][i].u) * hxdhy;
      f[j][i].u = uxx + uyy - .5 * (x[j + 1][i].omega - x[j - 1][i].omega) * hx;

      /* V velocity */
      u         = x[j][i].v;
      uxx       = (2.0 * u - x[j][i - 1].v - x[j][i + 1].v) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].v - x[j + 1][i].v) * hxdhy;
      f[j][i].v = uxx + uyy + .5 * (x[j][i + 1].omega - x[j][i - 1].omega) * hy;

      /* Omega: diffusion + upwinded convection + buoyancy source */
      u             = x[j][i].omega;
      uxx           = (2.0 * u - x[j][i - 1].omega - x[j][i + 1].omega) * hydhx;
      uyy           = (2.0 * u - x[j - 1][i].omega - x[j + 1][i].omega) * hxdhy;
      f[j][i].omega = uxx + uyy + (vxp * (u - x[j][i - 1].omega) + vxm * (x[j][i + 1].omega - u)) * hy + (vyp * (u - x[j - 1][i].omega) + vym * (x[j + 1][i].omega - u)) * hx - .5 * grashof * (x[j][i + 1].temp - x[j][i - 1].temp) * hy;

      /* Temperature: diffusion + upwinded convection scaled by prandtl */
      u            = x[j][i].temp;
      uxx          = (2.0 * u - x[j][i - 1].temp - x[j][i + 1].temp) * hydhx;
      uyy          = (2.0 * u - x[j - 1][i].temp - x[j + 1][i].temp) * hxdhy;
      f[j][i].temp = uxx + uyy + prandtl * ((vxp * (u - x[j][i - 1].temp) + vxm * (x[j][i + 1].temp - u)) * hy + (vyp * (u - x[j - 1][i].temp) + vym * (x[j + 1][i].temp - u)) * hx);
    }
  }

  /*
     Flop count (multiply-adds are counted as 2 operations)
  */
  PetscCall(PetscLogFlops(84.0 * info->ym * info->xm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
345 
MatMult_MyShell(Mat A,Vec x,Vec y)346 PetscErrorCode MatMult_MyShell(Mat A, Vec x, Vec y)
347 {
348   MatShellCtx    *matshellctx;
349   static PetscInt fail = 0;
350   PetscMPIInt     rank;
351 
352   PetscFunctionBegin;
353   PetscCall(MatShellGetContext(A, &matshellctx));
354   PetscCall(MatMult(matshellctx->Jmf, x, y));
355   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
356   if (fail++ > 5) {
357     PetscCall(VecFlag(y, rank == 0));
358     PetscCall(VecAssemblyBegin(y));
359     PetscCall(VecAssemblyEnd(y));
360   }
361   PetscFunctionReturn(PETSC_SUCCESS);
362 }
363 
MatAssemblyEnd_MyShell(Mat A,MatAssemblyType tp)364 PetscErrorCode MatAssemblyEnd_MyShell(Mat A, MatAssemblyType tp)
365 {
366   MatShellCtx *matshellctx;
367 
368   PetscFunctionBegin;
369   PetscCall(MatShellGetContext(A, &matshellctx));
370   PetscCall(MatAssemblyEnd(matshellctx->Jmf, tp));
371   PetscFunctionReturn(PETSC_SUCCESS);
372 }
373 
PCApply_MyShell(PC pc,Vec x,Vec y)374 PetscErrorCode PCApply_MyShell(PC pc, Vec x, Vec y)
375 {
376   static PetscInt fail = 0;
377   PetscMPIInt     rank;
378 
379   PetscFunctionBegin;
380   PetscCall(VecCopy(x, y));
381   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
382   if (fail++ > 3) {
383     PetscCall(VecFlag(y, rank == 0));
384     PetscCall(VecAssemblyBegin(y));
385     PetscCall(VecAssemblyEnd(y));
386   }
387   PetscFunctionReturn(PETSC_SUCCESS);
388 }
389 
390 PETSC_EXTERN PetscErrorCode SNESComputeJacobian_DMDA(SNES, Vec, Mat, Mat, void *);
391 
SNESComputeJacobian_MyShell(SNES snes,Vec X,Mat A,Mat B,PetscCtx ctx)392 PetscErrorCode SNESComputeJacobian_MyShell(SNES snes, Vec X, Mat A, Mat B, PetscCtx ctx)
393 {
394   static PetscInt fail = 0;
395 
396   PetscFunctionBegin;
397   PetscCall(SNESComputeJacobian_DMDA(snes, X, A, B, ctx));
398   if (fail++ > 0) PetscCall(MatZeroEntries(A));
399   PetscFunctionReturn(PETSC_SUCCESS);
400 }
401 
402 /*TEST
403 
404    test:
405       args: -snes_converged_reason -ksp_converged_reason
406 
407    test:
408       suffix: 2
409       args: -snes_converged_reason -ksp_converged_reason -error_in_matmult -fp_trap 0
410 
411    test:
412       suffix: 3
413       args: -snes_converged_reason -ksp_converged_reason -error_in_pcapply -fp_trap 0
414 
415    test:
416       suffix: 4
417       args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -fp_trap 0
418 
419    test:
420       suffix: 5
421       args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type bjacobi -fp_trap 0
422 
423    test:
424       suffix: 5_fieldsplit
425       args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type fieldsplit -fp_trap 0
426       output_file: output/ex69_5.out
427 
428    test:
429       suffix: 6
430       args: -snes_converged_reason -ksp_converged_reason -error_in_domainmf -snes_mf -pc_type none -fp_trap 0
431 
432    test:
433       suffix: 7
434       args: -snes_converged_reason -ksp_converged_reason -error_in_domain -fp_trap 0
435 
436    test:
437       suffix: 8
438       args: -snes_converged_reason -ksp_converged_reason -error_in_domain -snes_mf -pc_type none
439 
440 TEST*/
441