/* xref: /petsc/src/snes/tutorials/ex56.c (revision ef1023bda9d7138933c4c6fa7b7cf4a26d60c86d) */
/* Help text printed by PETSc when the program is run with -help */
static char help[] = "3D, tensor hexahedra (Q1-K), displacement finite element formulation\n\
of linear elasticity.  E=1.0, nu=1/3.\n\
Unit cube domain with Dirichlet boundary\n\n";
4 
5 #include <petscdmplex.h>
6 #include <petscsnes.h>
7 #include <petscds.h>
8 #include <petscdmforest.h>
9 
static PetscReal s_soft_alpha=1.e-3; /* factor applied to mu and lambda in the "soft" region (runtime option -alpha) */
static PetscReal s_mu=0.4;           /* Lame shear modulus mu (E=1, nu=1/3) */
static PetscReal s_lambda=0.4;       /* Lame first parameter lambda (E=1, nu=1/3) */
13 
14 static void f0_bd_u_3d(PetscInt dim, PetscInt Nf, PetscInt NfAux,
15                        const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
16                        const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
17                        PetscReal t, const PetscReal x[], const PetscReal n[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
18 {
19   f0[0] = 1;     /* x direction pull */
20   f0[1] = -x[2]; /* add a twist around x-axis */
21   f0[2] =  x[1];
22 }
23 
24 static void f1_bd_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
25                     const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
26                     const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
27                     PetscReal t, const PetscReal x[], const PetscReal n[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
28 {
29   const PetscInt Ncomp = dim;
30   PetscInt       comp, d;
31   for (comp = 0; comp < Ncomp; ++comp) {
32     for (d = 0; d < dim; ++d) {
33       f1[comp*dim+d] = 0.0;
34     }
35   }
36 }
37 
38 /* gradU[comp*dim+d] = {u_x, u_y} or {u_x, u_y, u_z} */
39 static void f1_u_3d_alpha(PetscInt dim, PetscInt Nf, PetscInt NfAux,
40                           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
41                           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
42                           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
43 {
44   PetscReal trace,mu=s_mu,lambda=s_lambda,rad;
45   PetscInt i,j;
46   for (i=0,rad=0.;i<dim;i++) {
47     PetscReal t=x[i];
48     rad += t*t;
49   }
50   rad = PetscSqrtReal(rad);
51   if (rad>0.25) {
52     mu *= s_soft_alpha;
53     lambda *= s_soft_alpha; /* we could keep the bulk the same like rubberish */
54   }
55   for (i=0,trace=0; i < dim; ++i) {
56     trace += PetscRealPart(u_x[i*dim+i]);
57   }
58   for (i=0; i < dim; ++i) {
59     for (j=0; j < dim; ++j) {
60       f1[i*dim+j] = mu*(u_x[i*dim+j]+u_x[j*dim+i]);
61     }
62     f1[i*dim+i] += lambda*trace;
63   }
64 }
65 
66 /* gradU[comp*dim+d] = {u_x, u_y} or {u_x, u_y, u_z} */
67 static void f1_u_3d(PetscInt dim, PetscInt Nf, PetscInt NfAux,
68                     const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
69                     const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
70                     PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
71 {
72   PetscReal trace,mu=s_mu,lambda=s_lambda;
73   PetscInt i,j;
74   for (i=0,trace=0; i < dim; ++i) {
75     trace += PetscRealPart(u_x[i*dim+i]);
76   }
77   for (i=0; i < dim; ++i) {
78     for (j=0; j < dim; ++j) {
79       f1[i*dim+j] = mu*(u_x[i*dim+j]+u_x[j*dim+i]);
80     }
81     f1[i*dim+i] += lambda*trace;
82   }
83 }
84 
85 /* 3D elasticity */
86 #define IDX(ii,jj,kk,ll) (27*ii+9*jj+3*kk+ll)
87 
88 void g3_uu_3d_private( PetscScalar g3[], const PetscReal mu, const PetscReal lambda)
89 {
90   if (1) {
91     g3[0] += lambda;
92     g3[0] += mu;
93     g3[0] += mu;
94     g3[4] += lambda;
95     g3[8] += lambda;
96     g3[10] += mu;
97     g3[12] += mu;
98     g3[20] += mu;
99     g3[24] += mu;
100     g3[28] += mu;
101     g3[30] += mu;
102     g3[36] += lambda;
103     g3[40] += lambda;
104     g3[40] += mu;
105     g3[40] += mu;
106     g3[44] += lambda;
107     g3[50] += mu;
108     g3[52] += mu;
109     g3[56] += mu;
110     g3[60] += mu;
111     g3[68] += mu;
112     g3[70] += mu;
113     g3[72] += lambda;
114     g3[76] += lambda;
115     g3[80] += lambda;
116     g3[80] += mu;
117     g3[80] += mu;
118   } else {
119     int        i,j,k,l;
120     static int cc=-1;
121     cc++;
122     for (i = 0; i < 3; ++i) {
123       for (j = 0; j < 3; ++j) {
124         for (k = 0; k < 3; ++k) {
125           for (l = 0; l < 3; ++l) {
126             if (k==l && i==j) g3[IDX(i,j,k,l)] += lambda;
127             if (i==k && j==l) g3[IDX(i,j,k,l)] += mu;
128             if (i==l && j==k) g3[IDX(i,j,k,l)] += mu;
129             if (k==l && i==j && !cc) (void) PetscPrintf(PETSC_COMM_WORLD,"g3[%d] += lambda;\n",IDX(i,j,k,l));
130             if (i==k && j==l && !cc) (void) PetscPrintf(PETSC_COMM_WORLD,"g3[%d] += mu;\n",IDX(i,j,k,l));
131             if (i==l && j==k && !cc) (void) PetscPrintf(PETSC_COMM_WORLD,"g3[%d] += mu;\n",IDX(i,j,k,l));
132           }
133         }
134       }
135     }
136   }
137 }
138 
139 static void g3_uu_3d_alpha(PetscInt dim, PetscInt Nf, PetscInt NfAux,
140                            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
141                            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
142                            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
143 {
144   PetscReal mu=s_mu, lambda=s_lambda,rad;
145   PetscInt i;
146   for (i=0,rad=0.;i<dim;i++) {
147     PetscReal t=x[i];
148     rad += t*t;
149   }
150   rad = PetscSqrtReal(rad);
151   if (rad>0.25) {
152     mu *= s_soft_alpha;
153     lambda *= s_soft_alpha; /* we could keep the bulk the same like rubberish */
154   }
155   g3_uu_3d_private(g3,mu,lambda);
156 }
157 
158 static void g3_uu_3d(PetscInt dim, PetscInt Nf, PetscInt NfAux,
159                      const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
160                      const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
161                      PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
162 {
163   g3_uu_3d_private(g3,s_mu,s_lambda);
164 }
165 
166 static void f0_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
167                  const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
168                  const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
169                  PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
170 {
171   const    PetscInt Ncomp = dim;
172   PetscInt comp;
173 
174   for (comp = 0; comp < Ncomp; ++comp) f0[comp] = 0.0;
175 }
176 
177 /* PI_i (x_i^4 - x_i^2) */
178 static void f0_u_x4(PetscInt dim, PetscInt Nf, PetscInt NfAux,
179                     const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
180                     const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
181                     PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
182 {
183   const    PetscInt Ncomp = dim;
184   PetscInt comp,i;
185 
186   for (comp = 0; comp < Ncomp; ++comp) {
187     f0[comp] = 1e5;
188     for (i = 0; i < Ncomp; ++i) {
189       f0[comp] *= /* (comp+1)* */(x[i]*x[i]*x[i]*x[i] - x[i]*x[i]); /* assumes (0,1]^D domain */
190     }
191   }
192 }
193 
194 PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
195 {
196   const PetscInt Ncomp = dim;
197   PetscInt       comp;
198 
199   for (comp = 0; comp < Ncomp; ++comp) u[comp] = 0;
200   return 0;
201 }
202 
/*
  Driver: build a box hex mesh, discretize 3D linear elasticity with Q_k
  displacement elements, then run a convergence study of max_conv_its solves
  at successively finer refinement, reporting the max displacement, the
  error against a reference value, and the observed convergence rate.
  run_type 0: cantilever with a twisting boundary load and a soft region;
  run_type 1: body-force problem with homogeneous Dirichlet walls.
*/
int main(int argc,char **args)
{
  Mat                Amat;
  SNES               snes;
  KSP                ksp;
  MPI_Comm           comm;
  PetscMPIInt        rank;
#if defined(PETSC_USE_LOG)
  PetscLogStage      stage[17]; /* stages 0..9 time the per-refinement solves; stage[16] times mesh setup */
#endif
  PetscBool          test_nonzero_cols = PETSC_FALSE,use_nearnullspace = PETSC_TRUE,attach_nearnullspace = PETSC_FALSE;
  Vec                xx,bb;
  PetscInt           iter,i,N,dim = 3,cells[3] = {1,1,1},max_conv_its,local_sizes[7],run_type = 1;
  DM                 dm,distdm,basedm;
  PetscBool          flg;
  char               convType[256];
  PetscReal          Lx,mdisp[10],err[10];
  const char * const options[10] = {"-ex56_dm_refine 0",
                                    "-ex56_dm_refine 1",
                                    "-ex56_dm_refine 2",
                                    "-ex56_dm_refine 3",
                                    "-ex56_dm_refine 4",
                                    "-ex56_dm_refine 5",
                                    "-ex56_dm_refine 6",
                                    "-ex56_dm_refine 7",
                                    "-ex56_dm_refine 8",
                                    "-ex56_dm_refine 9"};
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* options */
  PetscOptionsBegin(comm,NULL,"3D bilinear Q1 elasticity options","");
  {
    i = 3;
    PetscCall(PetscOptionsIntArray("-cells", "Number of (flux tube) processor in each dimension", "ex56.c", cells, &i, NULL));

    Lx = 1.; /* or ne for rod */
    max_conv_its = 3;
    PetscCall(PetscOptionsInt("-max_conv_its","Number of iterations in convergence study","",max_conv_its,&max_conv_its,NULL));
    PetscCheck(max_conv_its > 0 && max_conv_its < 7,PETSC_COMM_WORLD, PETSC_ERR_USER, "Bad number of iterations for convergence test (%" PetscInt_FMT ")",max_conv_its);
    PetscCall(PetscOptionsReal("-lx","Length of domain","",Lx,&Lx,NULL));
    PetscCall(PetscOptionsReal("-alpha","material coefficient inside circle","",s_soft_alpha,&s_soft_alpha,NULL));
    PetscCall(PetscOptionsBool("-test_nonzero_cols","nonzero test","",test_nonzero_cols,&test_nonzero_cols,NULL));
    PetscCall(PetscOptionsBool("-use_mat_nearnullspace","MatNearNullSpace API test","",use_nearnullspace,&use_nearnullspace,NULL));
    PetscCall(PetscOptionsBool("-attach_mat_nearnullspace","MatNearNullSpace API test (via MatSetNearNullSpace)","",attach_nearnullspace,&attach_nearnullspace,NULL));
    PetscCall(PetscOptionsInt("-run_type","0: twisting load on cantalever, 1: 3rd order accurate convergence test","",run_type,&run_type,NULL));
  }
  PetscOptionsEnd();
  PetscCall(PetscLogStageRegister("Mesh Setup", &stage[16]));
  for (iter=0 ; iter<max_conv_its ; iter++) {
    char str[] = "Solve 0";
    str[6] += iter; /* label stages "Solve 0", "Solve 1", ... */
    PetscCall(PetscLogStageRegister(str, &stage[iter]));
  }
  /* create DM, Plex calls DMSetup */
  PetscCall(PetscLogStagePush(stage[16]));
  PetscCall(DMPlexCreateBoxMesh(comm, dim, PETSC_FALSE, cells, NULL, NULL, NULL, PETSC_TRUE, &dm));
  {
    DMLabel         label;
    IS              is;
    PetscCall(DMCreateLabel(dm, "boundary"));
    PetscCall(DMGetLabel(dm, "boundary", &label));
    PetscCall(DMPlexMarkBoundaryFaces(dm, 1, label));
    if (run_type == 0) {
      /* Tag each outer face of the cube in the "Faces" label with value
         d*2+b+1 (direction d, bound b in {0,1}), giving face ids 1..6 used
         below for the fixed (fid) and loaded (pid) boundary conditions. */
      PetscCall(DMGetStratumIS(dm, "boundary", 1,  &is));
      PetscCall(DMCreateLabel(dm,"Faces"));
      if (is) {
        PetscInt        d, f, Nf;
        const PetscInt *faces;
        PetscInt        csize;
        PetscSection    cs;
        Vec             coordinates ;
        DM              cdm;
        PetscCall(ISGetLocalSize(is, &Nf));
        PetscCall(ISGetIndices(is, &faces));
        PetscCall(DMGetCoordinatesLocal(dm, &coordinates));
        PetscCall(DMGetCoordinateDM(dm, &cdm));
        PetscCall(DMGetLocalSection(cdm, &cs));
        /* Check for each boundary face if any component of its centroid is either 0.0 or 1.0 */
        for (f = 0; f < Nf; ++f) {
          PetscReal   faceCoord;
          PetscInt    b,v;
          PetscScalar *coords = NULL;
          PetscInt    Nv;
          PetscCall(DMPlexVecGetClosure(cdm, cs, coordinates, faces[f], &csize, &coords));
          Nv   = csize/dim; /* Calculate mean coordinate vector */
          for (d = 0; d < dim; ++d) {
            faceCoord = 0.0;
            for (v = 0; v < Nv; ++v) faceCoord += PetscRealPart(coords[v*dim+d]);
            faceCoord /= Nv;
            for (b = 0; b < 2; ++b) {
              if (PetscAbs(faceCoord - b) < PETSC_SMALL) { /* domain have not been set yet, still [0,1]^3 */
                PetscCall(DMSetLabelValue(dm, "Faces", faces[f], d*2+b+1));
              }
            }
          }
          PetscCall(DMPlexVecRestoreClosure(cdm, cs, coordinates, faces[f], &csize, &coords));
        }
        PetscCall(ISRestoreIndices(is, &faces));
      }
      PetscCall(ISDestroy(&is));
      PetscCall(DMGetLabel(dm, "Faces", &label));
      PetscCall(DMPlexLabelComplete(dm, label));
    }
  }
  {
    /* Rescale coordinates from [0,1]^3 to the requested bounds:
       x in [0,Lx], y and z in [-.5,.5] for run_type 0; left at [0,1]^3 for run_type 1 */
    PetscInt    dimEmbed, i;
    PetscInt    nCoords;
    PetscScalar *coords,bounds[] = {0,1,-.5,.5,-.5,.5,}; /* x_min,x_max,y_min,y_max,z_min,z_max */
    Vec         coordinates;
    bounds[1] = Lx;
    if (run_type==1) {
      for (i = 0; i < 2*dim; i++) bounds[i] = (i%2) ? 1 : 0;
    }
    PetscCall(DMGetCoordinatesLocal(dm,&coordinates));
    PetscCall(DMGetCoordinateDim(dm,&dimEmbed));
    PetscCheck(dimEmbed == dim,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"dimEmbed != dim %" PetscInt_FMT,dimEmbed);
    PetscCall(VecGetLocalSize(coordinates,&nCoords));
    PetscCheck((nCoords % dimEmbed) == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Coordinate vector the wrong size");
    PetscCall(VecGetArray(coordinates,&coords));
    for (i = 0; i < nCoords; i += dimEmbed) {
      PetscInt    j;
      PetscScalar *coord = &coords[i];
      for (j = 0; j < dimEmbed; j++) {
        coord[j] = bounds[2 * j] + coord[j] * (bounds[2 * j + 1] - bounds[2 * j]);
      }
    }
    PetscCall(VecRestoreArray(coordinates,&coords));
    PetscCall(DMSetCoordinatesLocal(dm,coordinates));
  }

  /* convert to p4est, and distribute */
  PetscOptionsBegin(comm, "", "Mesh conversion options", "DMPLEX");
  PetscCall(PetscOptionsFList("-dm_type","Convert DMPlex to another format (should not be Plex!)","ex56.c",DMList,DMPLEX,convType,256,&flg));
  PetscOptionsEnd();
  if (flg) {
    DM newdm;
    PetscCall(DMConvert(dm,convType,&newdm));
    if (newdm) {
      const char *prefix;
      PetscBool isForest;
      PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm,&prefix));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)newdm,prefix));
      PetscCall(DMIsForest(newdm,&isForest));
      PetscCheck(isForest,PETSC_COMM_WORLD, PETSC_ERR_USER, "Converted to non Forest?");
      PetscCall(DMDestroy(&dm));
      dm   = newdm;
    } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Convert failed?");
  } else {
    PetscPartitioner part;
    /* Plex Distribute mesh over processes */
    PetscCall(DMPlexGetPartitioner(dm,&part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &distdm));
    if (distdm) {
      const char *prefix;
      PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm,&prefix));
      PetscCall(PetscObjectSetOptionsPrefix((PetscObject)distdm,prefix));
      PetscCall(DMDestroy(&dm));
      dm   = distdm;
    }
  }
  PetscCall(PetscLogStagePop());
  basedm = dm; dm = NULL; /* keep the coarse mesh; each iteration below clones and refines it */

  for (iter=0 ; iter<max_conv_its ; iter++) {
    PetscCall(PetscLogStagePush(stage[16]));
    /* make new DM */
    PetscCall(DMClone(basedm, &dm));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dm, "ex56_"));
    PetscCall(PetscObjectSetName( (PetscObject)dm,"Mesh"));
    if (max_conv_its > 1) {
      /* If max_conv_its == 1, then we are not doing a convergence study. */
      PetscCall(PetscOptionsInsertString(NULL,options[iter]));
    }
    PetscCall(DMSetFromOptions(dm)); /* refinement done here in Plex, p4est */
    /* snes */
    PetscCall(SNESCreate(comm, &snes));
    PetscCall(SNESSetDM(snes, dm));
    /* fem */
    {
      const PetscInt Ncomp = dim;
      const PetscInt components[] = {0,1,2};
      const PetscInt Nfid = 1, Npid = 1;
      const PetscInt fid[] = {1}; /* The fixed faces (x=0) */
      const PetscInt pid[] = {2}; /* The faces with loading (x=L_x) */
      PetscFE        fe;
      PetscDS        prob;
      DMLabel        label;
      DM             cdm = dm;

      PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject) dm), dim, dim, PETSC_FALSE, NULL, PETSC_DECIDE, &fe)); /* elasticity */
      PetscCall(PetscObjectSetName((PetscObject) fe, "deformation"));
      /* FEM prob */
      PetscCall(DMSetField(dm, 0, NULL, (PetscObject) fe));
      PetscCall(DMCreateDS(dm));
      PetscCall(DMGetDS(dm, &prob));
      /* setup problem: pointwise residual/Jacobian functions per run type */
      if (run_type==1) {
        PetscCall(PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, g3_uu_3d));
        PetscCall(PetscDSSetResidual(prob, 0, f0_u_x4, f1_u_3d));
      } else {
        PetscWeakForm wf;
        PetscInt      bd, i;

        PetscCall(PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, g3_uu_3d_alpha));
        PetscCall(PetscDSSetResidual(prob, 0, f0_u, f1_u_3d_alpha));

        PetscCall(DMGetLabel(dm, "Faces", &label));
        PetscCall(DMAddBoundary(dm, DM_BC_NATURAL, "traction", label, Npid, pid, 0, Ncomp, components, NULL, NULL, NULL, &bd));
        PetscCall(PetscDSGetBoundary(prob, bd, &wf, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
        for (i = 0; i < Npid; ++i) PetscCall(PetscWeakFormSetIndexBdResidual(wf, label, pid[i], 0, 0, 0, f0_bd_u_3d, 0, f1_bd_u));
      }
      /* bcs */
      if (run_type==1) {
        PetscInt id = 1;
        PetscCall(DMGetLabel(dm, "boundary", &label));
        PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void)) zero, NULL, NULL, NULL));
      } else {
        PetscCall(DMGetLabel(dm, "Faces", &label));
        PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "fixed", label, Nfid, fid, 0, Ncomp, components, (void (*)(void)) zero, NULL, NULL, NULL));
      }
      /* propagate the discretization to any coarse DMs (forest hierarchies) */
      while (cdm) {
        PetscCall(DMCopyDisc(dm, cdm));
        PetscCall(DMGetCoarseDM(cdm, &cdm));
      }
      PetscCall(PetscFEDestroy(&fe));
    }
    /* vecs & mat */
    PetscCall(DMCreateGlobalVector(dm,&xx));
    PetscCall(VecDuplicate(xx, &bb));
    PetscCall(PetscObjectSetName((PetscObject) bb, "b"));
    PetscCall(PetscObjectSetName((PetscObject) xx, "u"));
    PetscCall(DMCreateMatrix(dm, &Amat));
    PetscCall(MatSetOption(Amat,MAT_SYMMETRIC,PETSC_TRUE));        /* Some matrix kernels can take advantage of symmetry if we set this. */
    PetscCall(MatSetOption(Amat,MAT_SYMMETRY_ETERNAL,PETSC_TRUE)); /* Inform PETSc that Amat is always symmetric, so info set above isn't lost. */
    PetscCall(MatSetBlockSize(Amat,3));
    PetscCall(MatSetOption(Amat,MAT_SPD,PETSC_TRUE));
    PetscCall(MatSetOption(Amat,MAT_SPD_ETERNAL,PETSC_TRUE));
    PetscCall(VecGetSize(bb,&N));
    local_sizes[iter] = N;
    PetscCall(PetscInfo(snes,"%" PetscInt_FMT " global equations, %" PetscInt_FMT " vertices\n",N,N/dim));
    if ((use_nearnullspace || attach_nearnullspace) && N/dim > 1) {
      /* Set up the near null space (a.k.a. rigid body modes) that will be used by the multigrid preconditioner */
      DM           subdm;
      MatNullSpace nearNullSpace;
      PetscInt     fields = 0;
      PetscObject  deformation;
      PetscCall(DMCreateSubDM(dm, 1, &fields, NULL, &subdm));
      PetscCall(DMPlexCreateRigidBody(subdm, 0, &nearNullSpace));
      PetscCall(DMGetField(dm, 0, NULL, &deformation));
      PetscCall(PetscObjectCompose(deformation, "nearnullspace", (PetscObject) nearNullSpace));
      PetscCall(DMDestroy(&subdm));
      if (attach_nearnullspace) PetscCall(MatSetNearNullSpace(Amat,nearNullSpace));
      PetscCall(MatNullSpaceDestroy(&nearNullSpace)); /* created by DM and destroyed by Mat */
    }
    PetscCall(DMPlexSetSNESLocalFEM(dm,NULL,NULL,NULL));
    PetscCall(SNESSetJacobian(snes, Amat, Amat, NULL, NULL));
    PetscCall(SNESSetFromOptions(snes));
    PetscCall(DMSetUp(dm));
    PetscCall(PetscLogStagePop());
    PetscCall(PetscLogStagePush(stage[16]));
    /* ksp */
    PetscCall(SNESGetKSP(snes, &ksp));
    PetscCall(KSPSetComputeSingularValues(ksp,PETSC_TRUE));
    /* test BCs: a nonzero entry in x exercises elimination of BC columns */
    PetscCall(VecZeroEntries(xx));
    if (test_nonzero_cols) {
      if (rank == 0) {
        PetscCall(VecSetValue(xx,0,1.0,INSERT_VALUES));
      }
      PetscCall(VecAssemblyBegin(xx));
      PetscCall(VecAssemblyEnd(xx));
    }
    PetscCall(VecZeroEntries(bb));
    PetscCall(VecGetSize(bb,&i));
    local_sizes[iter] = i;
    PetscCall(PetscInfo(snes,"%" PetscInt_FMT " equations in vector, %" PetscInt_FMT " vertices\n",i,i/dim));
    PetscCall(PetscLogStagePop());
    /* solve */
    PetscCall(PetscLogStagePush(stage[iter]));
    PetscCall(SNESSolve(snes, bb, xx));
    PetscCall(PetscLogStagePop());
    PetscCall(VecNorm(xx,NORM_INFINITY,&mdisp[iter]));
    PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
    {
      PetscViewer       viewer = NULL;
      PetscViewerFormat fmt;
      PetscCall(PetscOptionsGetViewer(comm,NULL,"ex56_","-vec_view",&viewer,&fmt,&flg));
      if (flg) {
        PetscCall(PetscViewerPushFormat(viewer,fmt));
        PetscCall(VecView(xx,viewer));
        PetscCall(VecView(bb,viewer));
        PetscCall(PetscViewerPopFormat(viewer));
      }
      PetscCall(PetscViewerDestroy(&viewer));
    }
    /* Free work space */
    PetscCall(DMDestroy(&dm));
    PetscCall(SNESDestroy(&snes));
    PetscCall(VecDestroy(&xx));
    PetscCall(VecDestroy(&bb));
    PetscCall(MatDestroy(&Amat));
  }
  PetscCall(DMDestroy(&basedm));
  if (run_type==1) err[0] = 59.975208 - mdisp[0]; /* error with what I think is the exact solution */
  else             err[0] = 171.038 - mdisp[0];
  for (iter=1 ; iter<max_conv_its ; iter++) {
    if (run_type==1) err[iter] = 59.975208 - mdisp[iter];
    else             err[iter] = 171.038 - mdisp[iter];
    /* rate = log2(err[i-1]/err[i]): expected convergence order between refinements */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"[%d] %" PetscInt_FMT ") N=%12" PetscInt_FMT ", max displ=%9.7e, disp diff=%9.2e, error=%4.3e, rate=%3.2g\n",rank,iter,local_sizes[iter],(double)mdisp[iter],
                          (double)(mdisp[iter]-mdisp[iter-1]),(double)err[iter],(double)(PetscLogReal(err[iter-1]/err[iter])/PetscLogReal(2.))));
  }

  PetscCall(PetscFinalize());
  return 0;
}
521 
522 /*TEST
523 
524   test:
525     suffix: 0
526     nsize: 4
527     requires: !single
528     args: -cells 2,2,1 -max_conv_its 2 -petscspace_degree 3 -snes_max_it 1 -ksp_max_it 100 -ksp_type cg -ksp_rtol 1.e-10 -ksp_norm_type unpreconditioned -pc_type gamg -pc_gamg_coarse_eq_limit 10 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 0 -pc_gamg_threshold 0.001 -ksp_converged_reason -snes_converged_reason -use_mat_nearnullspace true -mg_levels_ksp_max_it 2 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.2,0,1.1 -mg_levels_pc_type jacobi -petscpartitioner_type simple -ex56_dm_view -snes_lag_jacobian -2 -snes_type ksponly -use_gpu_aware_mpi true
529     timeoutfactor: 2
530 
531   # HYPRE PtAP broken with complex numbers
532   test:
533     suffix: hypre
534     requires: hypre !single !complex !defined(PETSC_HAVE_HYPRE_DEVICE)
535     nsize: 4
536     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_no_CF true -pc_hypre_boomeramg_agg_nl 1 -pc_hypre_boomeramg_coarsen_type HMIS -pc_hypre_boomeramg_interp_type ext+i -ksp_converged_reason -use_mat_nearnullspace true -petscpartitioner_type simple
537 
538   test:
539     suffix: ml
540     requires: ml !single
541     nsize: 4
542     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_converged_reason -ksp_rtol 1.e-8 -pc_type ml -mg_levels_ksp_type chebyshev -mg_levels_ksp_max_it 3 -mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.05 -mg_levels_pc_type sor -petscpartitioner_type simple -use_mat_nearnullspace
543 
544   test:
545     suffix: hpddm
546     requires: hpddm slepc !single defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
547     nsize: 4
548     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type fgmres -ksp_monitor_short -ksp_converged_reason -ksp_rtol 1.e-8 -pc_type hpddm -petscpartitioner_type simple -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 6 -pc_hpddm_coarse_p 1 -pc_hpddm_coarse_pc_type svd
549 
550   test:
551     suffix: repart
552     nsize: 4
553     requires: parmetis !single
554     args: -cells 8,2,2 -max_conv_its 1 -petscspace_degree 2 -snes_max_it 4 -ksp_max_it 100 -ksp_type cg -ksp_rtol 1.e-2 -ksp_norm_type unpreconditioned -snes_rtol 1.e-3 -pc_type gamg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -use_mat_nearnullspace true -mg_levels_ksp_max_it 2 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.05 -mg_levels_pc_type jacobi -pc_gamg_mat_partitioning_type parmetis -pc_gamg_repartition true -snes_converged_reason -pc_gamg_process_eq_limit 20 -pc_gamg_coarse_eq_limit 10 -ksp_converged_reason -snes_converged_reason -pc_gamg_reuse_interpolation true
555 
556   test:
557     suffix: bddc
558     nsize: 4
559     requires: !single
560     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -matis_localmat_type {{sbaij baij aij}} -pc_type bddc
561 
562   testset:
563     nsize: 4
564     requires: !single
565     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-10 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -matis_localmat_type aij -pc_type bddc -attach_mat_nearnullspace {{0 1}separate output}
566     test:
567       suffix: bddc_approx_gamg
568       args: -pc_bddc_switch_static -prefix_push pc_bddc_dirichlet_ -approximate -pc_type gamg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -prefix_pop -prefix_push pc_bddc_neumann_ -approximate -pc_type gamg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_coarse_eq_limit 10 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -prefix_pop
569     # HYPRE PtAP broken with complex numbers
570     test:
571       requires: hypre !complex !defined(PETSC_HAVE_HYPRE_DEVICE)
572       suffix: bddc_approx_hypre
573       args: -pc_bddc_switch_static -prefix_push pc_bddc_dirichlet_ -pc_type hypre -pc_hypre_boomeramg_no_CF true -pc_hypre_boomeramg_strong_threshold 0.75 -pc_hypre_boomeramg_agg_nl 1 -pc_hypre_boomeramg_coarsen_type HMIS -pc_hypre_boomeramg_interp_type ext+i -prefix_pop -prefix_push pc_bddc_neumann_ -pc_type hypre -pc_hypre_boomeramg_no_CF true -pc_hypre_boomeramg_strong_threshold 0.75 -pc_hypre_boomeramg_agg_nl 1 -pc_hypre_boomeramg_coarsen_type HMIS -pc_hypre_boomeramg_interp_type ext+i -prefix_pop
574     test:
575       requires: ml
576       suffix: bddc_approx_ml
577       args: -pc_bddc_switch_static -prefix_push pc_bddc_dirichlet_ -approximate -pc_type ml -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -prefix_pop -prefix_push pc_bddc_neumann_ -approximate -pc_type ml -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -prefix_pop
578 
579   test:
580     suffix: fetidp
581     nsize: 4
582     requires: !single
583     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type fetidp -fetidp_ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -matis_localmat_type {{sbaij baij aij}}
584 
585   test:
586     suffix: bddc_elast
587     nsize: 4
588     requires: !single
589     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -matis_localmat_type sbaij -pc_type bddc -pc_bddc_monolithic -attach_mat_nearnullspace
590 
591   test:
592     suffix: fetidp_elast
593     nsize: 4
594     requires: !single
595     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type fetidp -fetidp_ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -matis_localmat_type sbaij -fetidp_bddc_pc_bddc_monolithic -attach_mat_nearnullspace
596 
597   test:
598     suffix: gdsw
599     nsize: 4
600     requires: !single
601     args: -cells 2,2,1 -max_conv_its 2 -lx 1. -alpha .01 -petscspace_degree 2 -ksp_type cg -ksp_monitor_short -ksp_rtol 1.e-8 -ksp_converged_reason -petscpartitioner_type simple -ex56_dm_mat_type is -attach_mat_nearnullspace \
602           -pc_type mg -pc_mg_galerkin -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_levels 2 -mg_levels_pc_type bjacobi -mg_levels_sub_pc_type icc
603 
604   testset:
605     nsize: 4
606     requires: !single
607     args: -cells 2,2,1 -max_conv_its 2 -petscspace_degree 2 -snes_max_it 2 -ksp_max_it 100 -ksp_type cg -ksp_rtol 1.e-10 -ksp_norm_type unpreconditioned -snes_rtol 1.e-10 -pc_type gamg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_coarse_eq_limit 10 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -use_mat_nearnullspace true -mg_levels_ksp_max_it 2 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.05 -mg_levels_pc_type jacobi -ksp_monitor_short -ksp_converged_reason -snes_converged_reason -snes_monitor_short -ex56_dm_view -petscpartitioner_type simple -pc_gamg_process_eq_limit 20
608     output_file: output/ex56_cuda.out
609 
610     test:
611       suffix: cuda
612       requires: cuda
613       args: -ex56_dm_mat_type aijcusparse -ex56_dm_vec_type cuda
614 
615     test:
616       suffix: viennacl
617       requires: viennacl
618       args: -ex56_dm_mat_type aijviennacl -ex56_dm_vec_type viennacl
619 
620     test:
621       suffix: kokkos
622       requires: !sycl kokkos_kernels
623       args: -ex56_dm_mat_type aijkokkos -ex56_dm_vec_type kokkos
624   # Don't run AIJMKL caes with complex scalars because of convergence issues.
625   # Note that we need to test both single and multiple MPI rank cases, because these use different sparse MKL routines to implement the PtAP operation.
626   test:
627     suffix: seqaijmkl
628     nsize: 1
629     requires: defined(PETSC_HAVE_MKL_SPARSE_OPTIMIZE) !single !complex
630     args: -cells 2,2,1 -max_conv_its 2 -petscspace_degree 2 -snes_max_it 2 -ksp_max_it 100 -ksp_type cg -ksp_rtol 1.e-11 -ksp_norm_type unpreconditioned -snes_rtol 1.e-10 -pc_type gamg -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_coarse_eq_limit 1000 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -ksp_converged_reason -snes_monitor_short -ksp_monitor_short -snes_converged_reason -use_mat_nearnullspace true -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.1 -mg_levels_pc_type jacobi -petscpartitioner_type simple -mat_block_size 3 -ex56_dm_view -run_type 1 -mat_seqaij_type seqaijmkl
631     timeoutfactor: 2
632 
633   test:
634     suffix: mpiaijmkl
635     nsize: 2
636     requires: defined(PETSC_HAVE_MKL_SPARSE_OPTIMIZE) !single !complex
637     args: -cells 2,2,1 -max_conv_its 2 -petscspace_degree 2 -snes_max_it 2 -ksp_max_it 100 -ksp_type cg -ksp_rtol 1.e-11 -ksp_norm_type unpreconditioned -snes_rtol 1.e-10 -pc_type gamg -pc_gamg_type agg -pc_gamg_agg_nsmooths 1 -pc_gamg_coarse_eq_limit 1000 -pc_gamg_reuse_interpolation true -pc_gamg_aggressive_coarsening 1 -pc_gamg_threshold 0.05 -pc_gamg_threshold_scale .0 -ksp_converged_reason -snes_monitor_short -ksp_monitor_short -snes_converged_reason -use_mat_nearnullspace true -mg_levels_ksp_max_it 1 -mg_levels_ksp_type chebyshev -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -mg_levels_ksp_chebyshev_esteig 0,0.05,0,1.1 -mg_levels_pc_type jacobi -petscpartitioner_type simple -mat_block_size 3 -ex56_dm_view -run_type 1 -mat_seqaij_type seqaijmkl
638     timeoutfactor: 2
639 
640 TEST*/
641