xref: /petsc/src/dm/impls/swarm/tests/ex7.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 static char help[] = "Example program demonstrating projection between particle and finite element spaces using OpenMP in 2D cylindrical coordinates\n";
2 
3 #include "petscdmplex.h"
4 #include "petscds.h"
5 #include "petscdmswarm.h"
6 #include "petscksp.h"
7 #include <petsc/private/petscimpl.h>
8 #if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_THREADSAFETY)
9 #include <omp.h>
10 #endif
11 
12 typedef struct {
13   Mat MpTrans;
14   Mat Mp;
15   Vec ff;
16   Vec uu;
17 } MatShellCtx;
18 
19 PetscErrorCode MatMultMtM_SeqAIJ(Mat MtM,Vec xx,Vec yy)
20 {
21   MatShellCtx    *matshellctx;
22 
23   PetscFunctionBeginUser;
24   PetscCall(MatShellGetContext(MtM,&matshellctx));
25   PetscCheck(matshellctx,PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "No context");
26   PetscCall(MatMult(matshellctx->Mp, xx, matshellctx->ff));
27   PetscCall(MatMult(matshellctx->MpTrans, matshellctx->ff, yy));
28   PetscFunctionReturn(0);
29 }
30 
31 PetscErrorCode MatMultAddMtM_SeqAIJ(Mat MtM,Vec xx, Vec yy, Vec zz)
32 {
33   MatShellCtx    *matshellctx;
34 
35   PetscFunctionBeginUser;
36   PetscCall(MatShellGetContext(MtM,&matshellctx));
37   PetscCheck(matshellctx,PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "No context");
38   PetscCall(MatMult(matshellctx->Mp, xx, matshellctx->ff));
39   PetscCall(MatMultAdd(matshellctx->MpTrans, matshellctx->ff, yy, zz));
40   PetscFunctionReturn(0);
41 }
42 
43 PetscErrorCode createSwarm(const DM dm, DM *sw)
44 {
45   PetscInt Nc = 1, dim = 2;
46 
47   PetscFunctionBeginUser;
48   PetscCall(DMCreate(PETSC_COMM_SELF, sw));
49   PetscCall(DMSetType(*sw, DMSWARM));
50   PetscCall(DMSetDimension(*sw, dim));
51   PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC));
52   PetscCall(DMSwarmSetCellDM(*sw, dm));
53   PetscCall(DMSwarmRegisterPetscDatatypeField(*sw, "w_q", Nc, PETSC_SCALAR));
54   PetscCall(DMSwarmFinalizeFieldRegister(*sw));
55   PetscCall(DMSetFromOptions(*sw));
56   PetscFunctionReturn(0);
57 }
58 
59 PetscErrorCode gridToParticles(const DM dm, DM sw, PetscReal *moments, Vec rhs, Mat M_p)
60 {
61   PetscBool      is_lsqr;
62   KSP            ksp;
63   Mat            PM_p=NULL,MtM,D;
64   Vec            ff;
65   PetscInt       Np, timestep = 0, bs, N, M, nzl;
66   PetscReal      time = 0.0;
67   PetscDataType  dtype;
68   MatShellCtx    *matshellctx;
69 
70   PetscFunctionBeginUser;
71   PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
72   PetscCall(KSPSetOptionsPrefix(ksp, "ftop_"));
73   PetscCall(KSPSetFromOptions(ksp));
74   PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPLSQR,&is_lsqr));
75   if (!is_lsqr) {
76     PetscCall(MatGetLocalSize(M_p, &M, &N));
77     if (N>M) {
78       PC        pc;
79       PetscCall(PetscInfo(ksp, " M (%D) < M (%D) -- skip revert to lsqr\n",M,N));
80       is_lsqr = PETSC_TRUE;
81       PetscCall(KSPSetType(ksp,KSPLSQR));
82       PetscCall(KSPGetPC(ksp,&pc));
83       PetscCall(PCSetType(pc,PCNONE)); // could put in better solver -ftop_pc_type bjacobi -ftop_sub_pc_type lu -ftop_sub_pc_factor_shift_type nonzero
84     } else {
85       PetscCall(PetscNew(&matshellctx));
86       PetscCall(MatCreateShell(PetscObjectComm((PetscObject)dm),N,N,PETSC_DECIDE,PETSC_DECIDE,matshellctx,&MtM));
87       PetscCall(MatTranspose(M_p,MAT_INITIAL_MATRIX,&matshellctx->MpTrans));
88       matshellctx->Mp = M_p;
89       PetscCall(MatShellSetOperation(MtM, MATOP_MULT, (void (*)(void))MatMultMtM_SeqAIJ));
90       PetscCall(MatShellSetOperation(MtM, MATOP_MULT_ADD, (void (*)(void))MatMultAddMtM_SeqAIJ));
91       PetscCall(MatCreateVecs(M_p,&matshellctx->uu,&matshellctx->ff));
92       PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,1,NULL,&D));
93       for (int i=0 ; i<N ; i++) {
94         const PetscScalar *vals;
95         const PetscInt    *cols;
96         PetscScalar dot = 0;
97         PetscCall(MatGetRow(matshellctx->MpTrans,i,&nzl,&cols,&vals));
98         for (int ii=0 ; ii<nzl ; ii++) dot += PetscSqr(vals[ii]);
99         PetscCheckFalse(dot==0.0,PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Row %D is empty", i);
100         PetscCall(MatSetValue(D,i,i,dot,INSERT_VALUES));
101       }
102       PetscCall(MatAssemblyBegin(D, MAT_FINAL_ASSEMBLY));
103       PetscCall(MatAssemblyEnd(D, MAT_FINAL_ASSEMBLY));
104       PetscInfo(M_p,"createMtMKSP Have %D eqs, nzl = %D\n",N,nzl);
105       PetscCall(KSPSetOperators(ksp, MtM, D));
106       PetscCall(MatViewFromOptions(D,NULL,"-ftop2_D_mat_view"));
107       PetscCall(MatViewFromOptions(M_p,NULL,"-ftop2_Mp_mat_view"));
108       PetscCall(MatViewFromOptions(matshellctx->MpTrans,NULL,"-ftop2_MpTranspose_mat_view"));
109     }
110   }
111   if (is_lsqr) {
112     PC        pc;
113     PetscBool is_bjac;
114     PetscCall(KSPGetPC(ksp,&pc));
115     PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCBJACOBI,&is_bjac));
116     if (is_bjac) {
117       PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PM_p));
118       PetscCall(KSPSetOperators(ksp, M_p, PM_p));
119     } else {
120       PetscCall(KSPSetOperators(ksp, M_p, M_p));
121     }
122   }
123   PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &ff)); // this grabs access !!!!!
124   if (!is_lsqr) {
125     PetscCall(KSPSolve(ksp, rhs, matshellctx->uu));
126     PetscCall(MatMult(M_p, matshellctx->uu, ff));
127     PetscCall(MatDestroy(&matshellctx->MpTrans));
128     PetscCall(VecDestroy(&matshellctx->ff));
129     PetscCall(VecDestroy(&matshellctx->uu));
130     PetscCall(MatDestroy(&D));
131     PetscCall(MatDestroy(&MtM));
132     PetscCall(PetscFree(matshellctx));
133   } else {
134     PetscCall(KSPSolveTranspose(ksp, rhs, ff));
135   }
136   PetscCall(KSPDestroy(&ksp));
137   /* Visualize particle field */
138   PetscCall(DMSetOutputSequenceNumber(sw, timestep, time));
139   PetscCall(VecViewFromOptions(ff, NULL, "-weights_view"));
140   PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &ff));
141 
142   /* compute energy */
143   if (moments) {
144     PetscReal *wq, *coords;
145     PetscCall(DMSwarmGetLocalSize(sw,&Np));
146     PetscCall(DMSwarmGetField(sw, "w_q", &bs, &dtype, (void**)&wq));
147     PetscCall(DMSwarmGetField(sw, "DMSwarmPIC_coor", &bs, &dtype, (void**)&coords));
148     moments[0] = moments[1] = moments[2] = 0;
149     for (int p=0;p<Np;p++) {
150       moments[0] += wq[p];
151       moments[1] += wq[p] * coords[p*2+0]; // x-momentum
152       moments[2] += wq[p] * (PetscSqr(coords[p*2+0])+PetscSqr(coords[p*2+1]));
153     }
154     PetscCall(DMSwarmRestoreField(sw, "DMSwarmPIC_coor", &bs, &dtype, (void**)&coords));
155     PetscCall(DMSwarmRestoreField(sw, "w_q", &bs, &dtype, (void**)&wq));
156   }
157   PetscCall(MatDestroy(&PM_p));
158   PetscFunctionReturn(0);
159 }
160 
161 PetscErrorCode particlesToGrid(const DM dm, DM sw, const PetscInt Np, const PetscInt a_tid, const PetscInt dim, const PetscInt target,
162                                const PetscReal xx[], const PetscReal yy[], const PetscReal a_wp[], Vec rho, Mat *Mp_out)
163 {
164 
165   PetscBool      removePoints = PETSC_TRUE;
166   PetscReal      *wq, *coords;
167   PetscDataType  dtype;
168   Mat            M_p;
169   Vec            ff;
170   PetscInt       bs,p,zero=0;
171 
172   PetscFunctionBeginUser;
173   PetscCall(DMSwarmSetLocalSizes(sw, Np, zero));
174   PetscCall(DMSwarmGetField(sw, "w_q", &bs, &dtype, (void**)&wq));
175   PetscCall(DMSwarmGetField(sw, "DMSwarmPIC_coor", &bs, &dtype, (void**)&coords));
176   for (p=0;p<Np;p++) {
177     coords[p*2+0]  = xx[p];
178     coords[p*2+1]  = yy[p];
179     wq[p]          = a_wp[p];
180   }
181   PetscCall(DMSwarmRestoreField(sw, "DMSwarmPIC_coor", &bs, &dtype, (void**)&coords));
182   PetscCall(DMSwarmRestoreField(sw, "w_q", &bs, &dtype, (void**)&wq));
183   PetscCall(DMSwarmMigrate(sw, removePoints));
184   PetscCall(PetscObjectSetName((PetscObject)sw, "Particle Grid"));
185   if (a_tid==target) PetscCall(DMViewFromOptions(sw, NULL, "-swarm_view"));
186 
187   /* Project particles to field */
188   /* This gives M f = \int_\Omega \phi f, which looks like a rhs for a PDE */
189   PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
190   PetscCall(PetscObjectSetName((PetscObject)rho, "rho"));
191 
192   PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &ff)); // this grabs access !!!!!
193   PetscCall(PetscObjectSetName((PetscObject)ff, "weights"));
194   PetscCall(MatMultTranspose(M_p, ff, rho));
195   PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &ff));
196 
197   /* Visualize mesh field */
198   if (a_tid==target) PetscCall(VecViewFromOptions(rho, NULL, "-rho_view"));
199   // output
200   *Mp_out = M_p;
201 
202   PetscFunctionReturn(0);
203 }
204 static PetscErrorCode maxwellian(PetscInt dim, const PetscReal x[], PetscReal kt_m, PetscReal n, PetscScalar *u)
205 {
206   PetscInt      i;
207   PetscReal     v2 = 0, theta = 2*kt_m; /* theta = 2kT/mc^2 */
208 
209   PetscFunctionBegin;
210   /* compute the exponents, v^2 */
211   for (i = 0; i < dim; ++i) v2 += x[i]*x[i];
212   /* evaluate the Maxwellian */
213   u[0] = n*PetscPowReal(PETSC_PI*theta,-1.5)*(PetscExpReal(-v2/theta)) * 2.*PETSC_PI*x[1]; // radial term for 2D axi-sym.
214   PetscFunctionReturn(0);
215 }
#define NUM_SOLVE_LOOPS 100
#define MAX_NUM_THRDS 12
/* go - Driver: create one mesh + FE space + particle set per OpenMP thread,
   run particle->grid->particle projections, verify moment conservation on the
   target thread, then repeat the solve NUM_SOLVE_LOOPS times for timing.
   NOTE(review): logging symbols (stage, *_ev) are declared only under
   PETSC_USE_LOG but used unconditionally below -- compiles only when logging
   is enabled; confirm intended. */
PetscErrorCode go()
{
  DM              dm_t[MAX_NUM_THRDS], sw_t[MAX_NUM_THRDS];
  PetscFE         fe;
  PetscInt        dim = 2, Nc = 1, timestep = 0, i, faces[3];
  PetscInt        Np[2] = {10,10}, Np2[2], field = 0, target = 0, Np_t[MAX_NUM_THRDS];
  PetscReal       time = 0.0, moments_0[3], moments_1[3], vol;
  PetscReal       lo[3] = {-5,0,-5}, hi[3] = {5,5,5}, h[3], hp[3], *xx_t[MAX_NUM_THRDS], *yy_t[MAX_NUM_THRDS], *wp_t[MAX_NUM_THRDS], solve_time = 0;
  Vec             rho_t[MAX_NUM_THRDS], rhs_t[MAX_NUM_THRDS];
  Mat             M_p_t[MAX_NUM_THRDS];
#if defined PETSC_USE_LOG
  PetscLogStage   stage;
  PetscLogEvent   swarm_create_ev, solve_ev, solve_loop_ev;
#endif
#if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_THREADSAFETY)
  PetscInt        numthreads = PetscNumOMPThreads;
  double          starttime, endtime;
#else
  PetscInt        numthreads = 1;
#endif

  PetscFunctionBeginUser;
#if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_THREADSAFETY)
  PetscCheckFalse(numthreads>MAX_NUM_THRDS,PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Too many threads %D > %D", numthreads, MAX_NUM_THRDS);
  PetscCheckFalse(numthreads<=0,PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "No threads %D > %D ", numthreads,  MAX_NUM_THRDS);
#endif
  /* target thread does diagnostics/viewing; clamp into range */
  if (target >= numthreads) target = numthreads-1;
  PetscCall(PetscLogEventRegister("Create Swarm", DM_CLASSID, &swarm_create_ev));
  PetscCall(PetscLogEventRegister("Single solve", DM_CLASSID, &solve_ev));
  PetscCall(PetscLogEventRegister("Solve loop", DM_CLASSID, &solve_loop_ev));
  PetscCall(PetscLogStageRegister("Solve", &stage));
  /* NOTE(review): faces[] is only defined when -dm_plex_box_faces is given
     (the tests always pass it); otherwise it is read uninitialized below. */
  i    = dim;
  PetscCall(PetscOptionsGetIntArray(NULL, NULL, "-dm_plex_box_faces", faces, &i, NULL));
  i    = dim;
  PetscCall(PetscOptionsGetIntArray(NULL, NULL, "-np", Np,  &i, NULL));
  /* Create thread meshes */
  for (int tid=0; tid<numthreads; tid++) {
    // setup mesh dm_t, could use PETSc's Landau create velocity space mesh here to get dm_t[tid]
    PetscCall(DMCreate(PETSC_COMM_SELF, &dm_t[tid]));
    PetscCall(DMSetType(dm_t[tid], DMPLEX));
    PetscCall(DMSetFromOptions(dm_t[tid]));
    PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, Nc, PETSC_FALSE, "", PETSC_DECIDE, &fe));
    PetscCall(PetscFESetFromOptions(fe));
    PetscCall(PetscObjectSetName((PetscObject)fe, "fe"));
    PetscCall(DMSetField(dm_t[tid], field, NULL, (PetscObject)fe));
    PetscCall(DMCreateDS(dm_t[tid]));
    PetscCall(PetscFEDestroy(&fe));
    // helper vectors
    PetscCall(DMSetOutputSequenceNumber(dm_t[tid], timestep, time)); // not used
    PetscCall(DMCreateGlobalVector(dm_t[tid], &rho_t[tid]));
    PetscCall(DMCreateGlobalVector(dm_t[tid], &rhs_t[tid]));
    // this mimics application code
    PetscCall(DMGetBoundingBox(dm_t[tid], lo, hi));
    if (tid==target) {
      PetscCall(DMViewFromOptions(dm_t[tid], NULL, "-dm_view"));
      /* NOTE(review): vol and hp[] are computed only on the target iteration
         but consumed by every thread below -- this is safe only because
         target==0 is the first loop iteration; verify if target changes. */
      for (i=0,vol=1;i<dim;i++) {
        h[i] = (hi[i] - lo[i])/faces[i];
        hp[i] = (hi[i] - lo[i])/Np[i];
        vol *= (hi[i] - lo[i]);
        PetscCall(PetscInfo(dm_t[tid]," lo = %g hi = %g n = %D h = %g hp = %g\n",lo[i],hi[i],faces[i],h[i],hp[i]));
      }
    }
  }
  // prepare particle data for problems. This mimics application code
  PetscCall(PetscLogEventBegin(swarm_create_ev,0,0,0,0));
  Np2[0] = Np[0]; Np2[1] = Np[1];
  for (int tid=0; tid<numthreads; tid++) { // change size of particle list a little
    Np_t[tid] = Np2[0]*Np2[1];
    PetscCall(PetscMalloc3(Np_t[tid],&xx_t[tid],Np_t[tid],&yy_t[tid],Np_t[tid],&wp_t[tid]));
    if (tid==target) {moments_0[0] = moments_0[1] = moments_0[2] = 0;}
    /* lay particles on a uniform pi x pj lattice, cell-centered in hp boxes */
    for (int pi=0, pp=0;pi<Np2[0];pi++) {
      for (int pj=0;pj<Np2[1];pj++,pp++) {
        xx_t[tid][pp] = lo[0] + hp[0]/2. + pi*hp[0];
        yy_t[tid][pp] = lo[1] + hp[1]/2. + pj*hp[1];
        {
          PetscReal x[] = {xx_t[tid][pp],yy_t[tid][pp]};
          PetscCall(maxwellian(2, x, 1.0, vol/(PetscReal)Np_t[tid], &wp_t[tid][pp]));
        }
        /* reference moments (number, x-momentum, energy) for conservation check */
        if (tid==target) { //energy_0 += wp_t[tid][pp]*(PetscSqr(xx_t[tid][pp])+PetscSqr(yy_t[tid][pp]));
          moments_0[0] += wp_t[tid][pp];
          moments_0[1] += wp_t[tid][pp] * xx_t[tid][pp]; // x-momentum
          moments_0[2] += wp_t[tid][pp] * (PetscSqr(xx_t[tid][pp]) + PetscSqr(yy_t[tid][pp]));
        }
      }
    }
    Np2[0]++; Np2[1]++; /* each thread gets a slightly different particle count */
  }
  PetscCall(PetscLogEventEnd(swarm_create_ev,0,0,0,0));
  PetscCall(PetscLogEventBegin(solve_ev,0,0,0,0));
  /* Create particle swarm */
  PetscPragmaOMP(parallel for)
  for (int tid=0; tid<numthreads; tid++) {
    PetscCallAbort(PETSC_COMM_SELF,createSwarm(dm_t[tid], &sw_t[tid]));
  }
  PetscPragmaOMP(parallel for)
  for (int tid=0; tid<numthreads; tid++) {
    PetscCallAbort(PETSC_COMM_SELF,particlesToGrid(dm_t[tid], sw_t[tid], Np_t[tid], tid, dim, target, xx_t[tid], yy_t[tid], wp_t[tid], rho_t[tid], &M_p_t[tid]));
  }
  /* Project field to particles */
  /*   This gives f_p = M_p^+ M f */
  PetscPragmaOMP(parallel for)
  for (int tid=0; tid<numthreads; tid++) {
    PetscCallAbort(PETSC_COMM_SELF,VecCopy(rho_t[tid], rhs_t[tid])); /* Identity: M^1 M rho */
  }
  PetscPragmaOMP(parallel for)
  for (int tid=0; tid<numthreads; tid++) {
    PetscCallAbort(PETSC_COMM_SELF,gridToParticles(dm_t[tid], sw_t[tid], (tid==target) ?  moments_1 : NULL, rhs_t[tid], M_p_t[tid]));
  }
  /* Cleanup */
  for (int tid=0; tid<numthreads; tid++) {
    PetscCall(MatDestroy(&M_p_t[tid]));
    PetscCall(DMDestroy(&sw_t[tid]));
  }
  PetscCall(PetscLogEventEnd(solve_ev,0,0,0,0));
  /* for timing */
  PetscCall(PetscOptionsClearValue(NULL,"-ftop_ksp_converged_reason"));
  PetscCall(PetscOptionsClearValue(NULL,"-ftop_ksp_monitor"));
  PetscCall(PetscOptionsClearValue(NULL,"-ftop_ksp_view"));
  PetscCall(PetscOptionsClearValue(NULL,"-info"));
  // repeat solve many times to get warmed up data
  PetscCall(PetscLogStagePush(stage));
#if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_THREADSAFETY)
  starttime = MPI_Wtime();
#endif
  PetscCall(PetscLogEventBegin(solve_loop_ev,0,0,0,0));
  for (int d=0; d<NUM_SOLVE_LOOPS; d++) {
  /* Create particle swarm */
    PetscPragmaOMP(parallel for)
    for (int tid=0; tid<numthreads; tid++) {
      PetscCallAbort(PETSC_COMM_SELF,createSwarm(dm_t[tid], &sw_t[tid]));
    }
    PetscPragmaOMP(parallel for)
    for (int tid=0; tid<numthreads; tid++) {
      PetscCallAbort(PETSC_COMM_SELF,particlesToGrid(dm_t[tid], sw_t[tid], Np_t[tid], tid, dim, target, xx_t[tid], yy_t[tid], wp_t[tid], rho_t[tid], &M_p_t[tid]));
    }
    PetscPragmaOMP(parallel for)
    for (int tid=0; tid<numthreads; tid++) {
      PetscCallAbort(PETSC_COMM_SELF,VecCopy(rho_t[tid], rhs_t[tid])); /* Identity: M^1 M rho */
    }
    PetscPragmaOMP(parallel for)
    for (int tid=0; tid<numthreads; tid++) {
      PetscCallAbort(PETSC_COMM_SELF,gridToParticles(dm_t[tid], sw_t[tid], NULL, rhs_t[tid], M_p_t[tid]));
    }
    /* Cleanup */
    for (int tid=0; tid<numthreads; tid++) {
      PetscCall(MatDestroy(&M_p_t[tid]));
      PetscCall(DMDestroy(&sw_t[tid]));
    }
  }
  PetscCall(PetscLogEventEnd(solve_loop_ev,0,0,0,0));
#if defined(PETSC_HAVE_OPENMP) && defined(PETSC_HAVE_THREADSAFETY)
  endtime = MPI_Wtime();
  solve_time += (endtime - starttime);
#endif
  PetscCall(PetscLogStagePop());
  //
  /* report conservation: moments_1 comes from the target thread's projection */
  PetscCall(PetscPrintf(PETSC_COMM_SELF,"Total number density: %20.12e (%20.12e); x-momentum = %g (%g); energy = %g error = %e, %D particles. Use %D threads, Solve time: %g\n", moments_1[0], moments_0[0], moments_1[1], moments_0[1], moments_1[2], (moments_1[2]-moments_0[2])/moments_0[2],Np[0]*Np[1],numthreads,solve_time));
  /* Cleanup */
  for (int tid=0; tid<numthreads; tid++) {
    PetscCall(VecDestroy(&rho_t[tid]));
    PetscCall(VecDestroy(&rhs_t[tid]));
    PetscCall(DMDestroy(&dm_t[tid]));
    PetscCall(PetscFree3(xx_t[tid],yy_t[tid],wp_t[tid]));
  }
  PetscFunctionReturn(0);
}
384 
385 int main(int argc, char **argv)
386 {
387   PetscCall(PetscInitialize(&argc, &argv, NULL,help));
388   PetscCall(go());
389   PetscCall(PetscFinalize());
390   return 0;
391 }
392 
393 /*TEST
394 
395   build:
396     requires: !complex
397 
398   test:
399     suffix: 0
400     requires: double triangle
401     args: -dm_plex_simplex 0 -dm_plex_box_faces 4,2 -dm_plex_box_lower -2.0,0.0 -dm_plex_box_upper 2.0,2.0 -petscspace_degree 2 -ftop_ksp_type lsqr -ftop_pc_type none -dm_view -ftop_ksp_converged_reason -ftop_ksp_rtol 1.e-14
402     filter: grep -v DM_ | grep -v atomic
403 
404   test:
405     suffix: 1
406     requires: double triangle
407     args: -dm_plex_simplex 0 -dm_plex_box_faces 4,2 -dm_plex_box_lower -2.0,0.0 -dm_plex_box_upper 2.0,2.0 -petscspace_degree 2 -dm_plex_hash_location -ftop_ksp_type lsqr -ftop_pc_type bjacobi -ftop_sub_pc_type lu -ftop_sub_pc_factor_shift_type nonzero -dm_view -ftop_ksp_converged_reason -ftop_ksp_rtol 1.e-14
408     filter: grep -v DM_ | grep -v atomic
409 
410   test:
411     suffix: 2
412     requires: double triangle
413     args: -dm_plex_simplex 0 -dm_plex_box_faces 4,2 -dm_plex_box_lower -2.0,0.0 -dm_plex_box_upper 2.0,2.0 -petscspace_degree 2 -dm_plex_hash_location -ftop_ksp_type cg -ftop_pc_type jacobi -dm_view -ftop_ksp_converged_reason -ftop_ksp_rtol 1.e-14
414     filter: grep -v DM_ | grep -v atomic
415 
416 TEST*/
417