xref: /petsc/src/ts/tutorials/network/pipes.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2)
/* PETSc help strings conventionally end with a real newline; the previous
   "\n\\n" printed a literal backslash-n after the text. */
static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\n";
2 /*
3   Example: mpiexec -n <np> ./pipes -ts_max_steps 10
4 */
5 
6 #include "wash.h"
7 
8 /*
9   WashNetworkDistribute - proc[0] distributes sequential wash object
10    Input Parameters:
11 .  comm - MPI communicator
12 .  wash - wash context with all network data in proc[0]
13 
14    Output Parameter:
15 .  wash - wash context with nedge, nvertex and edgelist distributed
16 
17    Note: The routine is used for testing parallel generation of dmnetwork, then redistribute.
18 */
19 PetscErrorCode WashNetworkDistribute(MPI_Comm comm,Wash wash)
20 {
21   PetscErrorCode ierr;
22   PetscMPIInt    rank,size,tag=0;
23   PetscInt       i,e,v,numEdges,numVertices,nedges,*eowners=NULL,estart,eend,*vtype=NULL,nvertices;
24   PetscInt       *edgelist = wash->edgelist,*nvtx=NULL,*vtxDone=NULL;
25 
26   PetscFunctionBegin;
27   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
28   if (size == 1) PetscFunctionReturn(0);
29 
30   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
31   numEdges    = wash->nedge;
32   numVertices = wash->nvertex;
33 
34   /* (1) all processes get global and local number of edges */
35   ierr = MPI_Bcast(&numEdges,1,MPIU_INT,0,comm);CHKERRMPI(ierr);
36   nedges = numEdges/size; /* local nedges */
37   if (rank == 0) {
38     nedges += numEdges - size*(numEdges/size);
39   }
40   wash->Nedge = numEdges;
41   wash->nedge = nedges;
42   /* ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges);CHKERRQ(ierr); */
43 
44   ierr = PetscCalloc3(size+1,&eowners,size,&nvtx,numVertices,&vtxDone);CHKERRQ(ierr);
45   ierr = MPI_Allgather(&nedges,1,MPIU_INT,eowners+1,1,MPIU_INT,PETSC_COMM_WORLD);CHKERRMPI(ierr);
46   eowners[0] = 0;
47   for (i=2; i<=size; i++) {
48     eowners[i] += eowners[i-1];
49   }
50 
51   estart = eowners[rank];
52   eend   = eowners[rank+1];
53   /* ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend);CHKERRQ(ierr); */
54 
55   /* (2) distribute row block edgelist to all processors */
56   if (rank == 0) {
57     vtype = wash->vtype;
58     for (i=1; i<size; i++) {
59       /* proc[0] sends edgelist to proc[i] */
60       ierr = MPI_Send(edgelist+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);CHKERRMPI(ierr);
61 
62       /* proc[0] sends vtype to proc[i] */
63       ierr = MPI_Send(vtype+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);CHKERRMPI(ierr);
64     }
65   } else {
66     MPI_Status      status;
67     ierr = PetscMalloc1(2*(eend-estart),&vtype);CHKERRQ(ierr);
68     ierr = PetscMalloc1(2*(eend-estart),&edgelist);CHKERRQ(ierr);
69 
70     ierr = MPI_Recv(edgelist,2*(eend-estart),MPIU_INT,0,tag,comm,&status);CHKERRMPI(ierr);
71     ierr = MPI_Recv(vtype,2*(eend-estart),MPIU_INT,0,tag,comm,&status);CHKERRMPI(ierr);
72   }
73 
74   wash->edgelist = edgelist;
75 
76   /* (3) all processes get global and local number of vertices, without ghost vertices */
77   if (rank == 0) {
78     for (i=0; i<size; i++) {
79       for (e=eowners[i]; e<eowners[i+1]; e++) {
80         v = edgelist[2*e];
81         if (!vtxDone[v]) {
82           nvtx[i]++; vtxDone[v] = 1;
83         }
84         v = edgelist[2*e+1];
85         if (!vtxDone[v]) {
86           nvtx[i]++; vtxDone[v] = 1;
87         }
88       }
89     }
90   }
91   ierr = MPI_Bcast(&numVertices,1,MPIU_INT,0,PETSC_COMM_WORLD);CHKERRMPI(ierr);
92   ierr = MPI_Scatter(nvtx,1,MPIU_INT,&nvertices,1,MPIU_INT,0,PETSC_COMM_WORLD);CHKERRMPI(ierr);
93   ierr = PetscFree3(eowners,nvtx,vtxDone);CHKERRQ(ierr);
94 
95   wash->Nvertex = numVertices;
96   wash->nvertex = nvertices;
97   wash->vtype   = vtype;
98   PetscFunctionReturn(0);
99 }
100 
101 PetscErrorCode WASHIFunction(TS ts,PetscReal t,Vec X,Vec Xdot,Vec F,void* ctx)
102 {
103   PetscErrorCode ierr;
104   Wash           wash=(Wash)ctx;
105   DM             networkdm;
106   Vec            localX,localXdot,localF, localXold;
107   const PetscInt *cone;
108   PetscInt       vfrom,vto,offsetfrom,offsetto,varoffset;
109   PetscInt       v,vStart,vEnd,e,eStart,eEnd;
110   PetscInt       nend,type;
111   PetscBool      ghost;
112   PetscScalar    *farr,*juncf, *pipef;
113   PetscReal      dt;
114   Pipe           pipe;
115   PipeField      *pipex,*pipexdot,*juncx;
116   Junction       junction;
117   DMDALocalInfo  info;
118   const PetscScalar *xarr,*xdotarr, *xoldarr;
119 
120   PetscFunctionBegin;
121   localX    = wash->localX;
122   localXdot = wash->localXdot;
123 
124   ierr = TSGetSolution(ts,&localXold);CHKERRQ(ierr);
125   ierr = TSGetDM(ts,&networkdm);CHKERRQ(ierr);
126   ierr = TSGetTimeStep(ts,&dt);CHKERRQ(ierr);
127   ierr = DMGetLocalVector(networkdm,&localF);CHKERRQ(ierr);
128 
129   /* Set F and localF as zero */
130   ierr = VecSet(F,0.0);CHKERRQ(ierr);
131   ierr = VecSet(localF,0.0);CHKERRQ(ierr);
132 
133   /* Update ghost values of locaX and locaXdot */
134   ierr = DMGlobalToLocalBegin(networkdm,X,INSERT_VALUES,localX);CHKERRQ(ierr);
135   ierr = DMGlobalToLocalEnd(networkdm,X,INSERT_VALUES,localX);CHKERRQ(ierr);
136 
137   ierr = DMGlobalToLocalBegin(networkdm,Xdot,INSERT_VALUES,localXdot);CHKERRQ(ierr);
138   ierr = DMGlobalToLocalEnd(networkdm,Xdot,INSERT_VALUES,localXdot);CHKERRQ(ierr);
139 
140   ierr = VecGetArrayRead(localX,&xarr);CHKERRQ(ierr);
141   ierr = VecGetArrayRead(localXdot,&xdotarr);CHKERRQ(ierr);
142   ierr = VecGetArrayRead(localXold,&xoldarr);CHKERRQ(ierr);
143   ierr = VecGetArray(localF,&farr);CHKERRQ(ierr);
144 
145    /* junction->type == JUNCTION:
146            juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout)
147        junction->type == RESERVOIR (upper stream):
148            juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout)
149        junction->type == VALVE (down stream):
150            juncf[0] =  -qJ + sum(qin); juncf[1] = qJ
151   */
152   /* Vertex/junction initialization */
153   ierr = DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);CHKERRQ(ierr);
154   for (v=vStart; v<vEnd; v++) {
155     ierr = DMNetworkIsGhostVertex(networkdm,v,&ghost);CHKERRQ(ierr);
156     if (ghost) continue;
157 
158     ierr = DMNetworkGetComponent(networkdm,v,0,&type,(void**)&junction,NULL);CHKERRQ(ierr);
159     ierr = DMNetworkGetLocalVecOffset(networkdm,v,ALL_COMPONENTS,&varoffset);CHKERRQ(ierr);
160     juncx      = (PipeField*)(xarr + varoffset);
161     juncf      = (PetscScalar*)(farr + varoffset);
162 
163     juncf[0] = -juncx[0].q;
164     juncf[1] =  juncx[0].q;
165 
166     if (junction->type == RESERVOIR) { /* upstream reservoir */
167       juncf[0] = juncx[0].h - wash->H0;
168     }
169   }
170 
171   /* Edge/pipe */
172   ierr = DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);CHKERRQ(ierr);
173   for (e=eStart; e<eEnd; e++) {
174     ierr = DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);CHKERRQ(ierr);
175     ierr = DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset);CHKERRQ(ierr);
176     pipex    = (PipeField*)(xarr + varoffset);
177     pipexdot = (PipeField*)(xdotarr + varoffset);
178     pipef    = (PetscScalar*)(farr + varoffset);
179 
180     /* Get some data into the pipe structure: note, some of these operations
181      * might be redundant. Will it consume too much time? */
182     pipe->dt   = dt;
183     pipe->xold = (PipeField*)(xoldarr + varoffset);
184 
185     /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */
186     ierr = DMDAGetLocalInfo(pipe->da,&info);CHKERRQ(ierr);
187     ierr = PipeIFunctionLocal_Lax(&info,t,pipex,pipexdot,pipef,pipe);CHKERRQ(ierr);
188 
189     /* Get boundary values from connected vertices */
190     ierr = DMNetworkGetConnectedVertices(networkdm,e,&cone);CHKERRQ(ierr);
191     vfrom = cone[0]; /* local ordering */
192     vto   = cone[1];
193     ierr = DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom);CHKERRQ(ierr);
194     ierr = DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto);CHKERRQ(ierr);
195 
196     /* Evaluate upstream boundary */
197     ierr = DMNetworkGetComponent(networkdm,vfrom,0,&type,(void**)&junction,NULL);CHKERRQ(ierr);
198     if (junction->type != JUNCTION && junction->type != RESERVOIR) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
199     juncx = (PipeField*)(xarr + offsetfrom);
200     juncf = (PetscScalar*)(farr + offsetfrom);
201 
202     pipef[0] = pipex[0].h - juncx[0].h;
203     juncf[1] -= pipex[0].q;
204 
205     /* Evaluate downstream boundary */
206     ierr = DMNetworkGetComponent(networkdm,vto,0,&type,(void**)&junction,NULL);CHKERRQ(ierr);
207     if (junction->type != JUNCTION && junction->type != VALVE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
208     juncx = (PipeField*)(xarr + offsetto);
209     juncf = (PetscScalar*)(farr + offsetto);
210     nend  = pipe->nnodes - 1;
211 
212     pipef[2*nend + 1] = pipex[nend].h - juncx[0].h;
213     juncf[0] += pipex[nend].q;
214   }
215 
216   ierr = VecRestoreArrayRead(localX,&xarr);CHKERRQ(ierr);
217   ierr = VecRestoreArrayRead(localXdot,&xdotarr);CHKERRQ(ierr);
218   ierr = VecRestoreArray(localF,&farr);CHKERRQ(ierr);
219 
220   ierr = DMLocalToGlobalBegin(networkdm,localF,ADD_VALUES,F);CHKERRQ(ierr);
221   ierr = DMLocalToGlobalEnd(networkdm,localF,ADD_VALUES,F);CHKERRQ(ierr);
222   ierr = DMRestoreLocalVector(networkdm,&localF);CHKERRQ(ierr);
223   /*
224    ierr = PetscPrintf(PETSC_COMM_WORLD("F:\n");CHKERRQ(ierr);
225    ierr = VecView(F,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
226    */
227   PetscFunctionReturn(0);
228 }
229 
/*
  WASHSetInitialSolution - fills X with the steady-state initial guess.

  Each local edge computes its pipe's steady state and copies it into the
  edge's segment of the local vector; reservoir/valve boundary vertices get
  H0/QL. The local vector is then assembled into X with ADD_VALUES.
  NOTE(review): with ADD_VALUES a vertex value written on both owner and
  ghost ranks would be summed; presumably RESERVOIR/VALVE end vertices are
  only touched once in the tested configurations -- verify if the network
  topology changes.
*/
PetscErrorCode WASHSetInitialSolution(DM networkdm,Vec X,Wash wash)
{
  PetscErrorCode ierr;
  PetscInt       k,nx,vkey,vfrom,vto,offsetfrom,offsetto;
  PetscInt       type,varoffset;
  PetscInt       e,eStart,eEnd;
  Vec            localX;
  PetscScalar    *xarr;
  Pipe           pipe;
  Junction       junction;
  const PetscInt *cone;
  const PetscScalar *xarray;

  PetscFunctionBegin;
  ierr = VecSet(X,0.0);CHKERRQ(ierr);
  ierr = DMGetLocalVector(networkdm,&localX);CHKERRQ(ierr);
  ierr = VecGetArray(localX,&xarr);CHKERRQ(ierr);

  /* Edge */
  ierr = DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);CHKERRQ(ierr);
  for (e=eStart; e<eEnd; e++) {
    ierr = DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset);CHKERRQ(ierr);
    ierr = DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);CHKERRQ(ierr);

    /* set initial values for this pipe */
    ierr = PipeComputeSteadyState(pipe,wash->Q0,wash->H0);CHKERRQ(ierr);
    ierr = VecGetSize(pipe->x,&nx);CHKERRQ(ierr);

    ierr = VecGetArrayRead(pipe->x,&xarray);CHKERRQ(ierr);
    /* copy pipe->x (xarray) into this edge's segment of the network vector */
    for (k=0; k<nx; k++) {
      (xarr+varoffset)[k] = xarray[k];
    }

    /* set boundary values into vfrom and vto */
    ierr = DMNetworkGetConnectedVertices(networkdm,e,&cone);CHKERRQ(ierr);
    vfrom = cone[0]; /* local ordering */
    vto   = cone[1];
    ierr = DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom);CHKERRQ(ierr);
    ierr = DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto);CHKERRQ(ierr);

    /* if vfrom is a head vertex: fixed reservoir head */
    ierr = DMNetworkGetComponent(networkdm,vfrom,0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
    if (junction->type == RESERVOIR) {
      (xarr+offsetfrom)[1] = wash->H0; /* 1st H */
    }

    /* if vto is an end vertex: valve flow rate */
    ierr = DMNetworkGetComponent(networkdm,vto,0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
    if (junction->type == VALVE) {
      (xarr+offsetto)[0] = wash->QL; /* last Q */
    }
    ierr = VecRestoreArrayRead(pipe->x,&xarray);CHKERRQ(ierr);
  }

  ierr = VecRestoreArray(localX,&xarr);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(networkdm,localX,ADD_VALUES,X);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(networkdm,localX,ADD_VALUES,X);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(networkdm,&localX);CHKERRQ(ierr);

#if 0
  PetscInt N;
  ierr = VecGetSize(X,&N);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N);CHKERRQ(ierr);
  ierr = VecView(X,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
298 
299 PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context)
300 {
301   PetscErrorCode     ierr;
302   DMNetworkMonitor   monitor;
303 
304   PetscFunctionBegin;
305   monitor = (DMNetworkMonitor)context;
306   ierr = DMNetworkMonitorView(monitor,x);CHKERRQ(ierr);
307   PetscFunctionReturn(0);
308 }
309 
/*
  PipesView - demonstrates DMNetworkCreateIS() by scattering selected pipe
  variables out of the global vector X and viewing them: first the q-variable,
  then the h-variable, then all pipe variables. KeyPipe is the component key
  registered for pipes.
*/
PetscErrorCode PipesView(DM networkdm,PetscInt KeyPipe,Vec X)
{
  PetscErrorCode ierr;
  PetscInt       i,numkeys=1,*blocksize,*numselectedvariable,**selectedvariables,n;
  IS             isfrom_q,isfrom_h,isfrom;
  Vec            Xto;
  VecScatter     ctx;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)networkdm,&comm);CHKERRQ(ierr);

  /* 1. Create isfrom_q for q-variable of pipes (variable 0 of blocksize-2 components) */
  ierr = PetscMalloc3(numkeys,&blocksize,numkeys,&numselectedvariable,numkeys,&selectedvariables);CHKERRQ(ierr);
  for (i=0; i<numkeys; i++) {
    blocksize[i]           = 2;
    numselectedvariable[i] = 1;
    ierr = PetscMalloc1(numselectedvariable[i],&selectedvariables[i]);CHKERRQ(ierr);
    selectedvariables[i][0] = 0; /* q-variable */
  }
  ierr = DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_q);CHKERRQ(ierr);

  /* 2. Create Xto and isto; Xto inherits the local sizes of the IS so NULL isto works */
  ierr = ISGetLocalSize(isfrom_q, &n);CHKERRQ(ierr);
  ierr = VecCreate(comm,&Xto);CHKERRQ(ierr);
  ierr = VecSetSizes(Xto,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(Xto);CHKERRQ(ierr);
  ierr = VecSet(Xto,0.0);CHKERRQ(ierr);

  /* 3. Create scatter */
  ierr = VecScatterCreate(X,isfrom_q,Xto,NULL,&ctx);CHKERRQ(ierr);

  /* 4. Scatter to Xq */
  ierr = VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  ierr = ISDestroy(&isfrom_q);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"Xq:\n");CHKERRQ(ierr);
  ierr = VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh
     (Xto is reused: the h-selection has the same local sizes as the q-selection) */
  for (i=0; i<numkeys; i++) {
    selectedvariables[i][0] = 1; /* h-variable */
  }
  ierr = DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_h);CHKERRQ(ierr);

  ierr = VecScatterCreate(X,isfrom_h,Xto,NULL,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  ierr = ISDestroy(&isfrom_h);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"Xh:\n");CHKERRQ(ierr);
  ierr = VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(&Xto);CHKERRQ(ierr);

  /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes.
     NOTE(review): the first DMNetworkCreateIS() call (blocksize = -1) is
     destroyed immediately and recreated with NULL arguments -- presumably this
     deliberately exercises both "select all variables" API paths of
     DMNetworkCreateIS() in this test example; confirm before removing. */
  for (i=0; i<numkeys; i++) {
    blocksize[i] = -1; /* select all the variables of the i-th component */
  }
  ierr = DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,NULL,NULL,&isfrom);CHKERRQ(ierr);
  ierr = ISDestroy(&isfrom);CHKERRQ(ierr);
  ierr = DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,NULL,NULL,NULL,&isfrom);CHKERRQ(ierr);

  ierr = ISGetLocalSize(isfrom, &n);CHKERRQ(ierr);
  ierr = VecCreate(comm,&Xto);CHKERRQ(ierr);
  ierr = VecSetSizes(Xto,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(Xto);CHKERRQ(ierr);
  ierr = VecSet(Xto,0.0);CHKERRQ(ierr);

  ierr = VecScatterCreate(X,isfrom,Xto,NULL,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  ierr = ISDestroy(&isfrom);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"Xpipes:\n");CHKERRQ(ierr);
  ierr = VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* 7. Free spaces */
  for (i=0; i<numkeys; i++) {
    ierr = PetscFree(selectedvariables[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree3(blocksize,numselectedvariable,selectedvariables);CHKERRQ(ierr);
  ierr = VecDestroy(&Xto);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
398 
399 PetscErrorCode ISJunctionsView(DM networkdm,PetscInt KeyJunc)
400 {
401   PetscErrorCode ierr;
402   PetscInt       numkeys=1;
403   IS             isfrom;
404   MPI_Comm       comm;
405   PetscMPIInt    rank;
406 
407   PetscFunctionBegin;
408   ierr = PetscObjectGetComm((PetscObject)networkdm,&comm);CHKERRQ(ierr);
409   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
410 
411   /* Create a global isfrom for all junction variables */
412   ierr = DMNetworkCreateIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom);CHKERRQ(ierr);
413   ierr = PetscPrintf(PETSC_COMM_WORLD,"ISJunctions:\n");CHKERRQ(ierr);
414   ierr = ISView(isfrom,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
415   ierr = ISDestroy(&isfrom);CHKERRQ(ierr);
416 
417   /* Create a local isfrom for all junction variables */
418   ierr = DMNetworkCreateLocalIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom);CHKERRQ(ierr);
419   if (!rank) {
420     ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] ISLocalJunctions:\n",rank);CHKERRQ(ierr);
421     ierr = ISView(isfrom,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
422   }
423   ierr = ISDestroy(&isfrom);CHKERRQ(ierr);
424   PetscFunctionReturn(0);
425 }
426 
427 PetscErrorCode WashNetworkCleanUp(Wash wash)
428 {
429   PetscErrorCode ierr;
430   PetscMPIInt    rank;
431 
432   PetscFunctionBegin;
433   ierr = MPI_Comm_rank(wash->comm,&rank);CHKERRMPI(ierr);
434   ierr = PetscFree(wash->edgelist);CHKERRQ(ierr);
435   ierr = PetscFree(wash->vtype);CHKERRQ(ierr);
436   if (rank == 0) {
437     ierr = PetscFree2(wash->junction,wash->pipe);CHKERRQ(ierr);
438   }
439   PetscFunctionReturn(0);
440 }
441 
/*
  WashNetworkCreate - allocates the wash context and, on proc[0] only, builds
  the full sequential network for the selected test case (edgelist, junction
  types, pipes). If -wash_distribute is true (default) the network is then
  distributed via WashNetworkDistribute().

   Input Parameters:
.  comm      - MPI communicator
.  pipesCase - 0: line of npipes pipes (reservoir -> valve)
               1: Y-branch with one reservoir and two valves
               2: Y-branch with two reservoirs and one valve

   Output Parameter:
.  wash_ptr - newly allocated wash context; caller frees via WashNetworkCleanUp()
*/
PetscErrorCode WashNetworkCreate(MPI_Comm comm,PetscInt pipesCase,Wash *wash_ptr)
{
  PetscErrorCode ierr;
  PetscInt       npipes;
  PetscMPIInt    rank;
  Wash           wash=NULL;
  PetscInt       i,numVertices,numEdges,*vtype;
  PetscInt       *edgelist;
  Junction       junctions=NULL;
  Pipe           pipes=NULL;
  PetscBool      washdist=PETSC_TRUE;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);

  /* Physical parameters used by the IFunction and initial solution */
  ierr = PetscCalloc1(1,&wash);CHKERRQ(ierr);
  wash->comm = comm;
  *wash_ptr  = wash;
  wash->Q0   = 0.477432; /* RESERVOIR */
  wash->H0   = 150.0;
  wash->HL   = 143.488;  /* VALVE */
  wash->QL   = 0.0;
  wash->nnodes_loc = 0;

  /* Non-root ranks keep empty networks until WashNetworkDistribute() runs */
  numVertices = 0;
  numEdges    = 0;
  edgelist    = NULL;

  /* proc[0] creates a sequential wash and edgelist */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Setup pipesCase %D\n",pipesCase);CHKERRQ(ierr);

  /* Set global number of pipes, edges, and junctions */
  /*-------------------------------------------------*/
  switch (pipesCase) {
  case 0:
    /* pipeCase 0: */
    /* =================================================
    (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
    ====================================================  */
    npipes = 3;
    ierr = PetscOptionsGetInt(NULL,NULL, "-npipes", &npipes, NULL);CHKERRQ(ierr);
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    numVertices = 0;
    numEdges    = 0;
    edgelist    = NULL;
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      /* chain: edge i connects vertex i to vertex i+1 */
      ierr = PetscCalloc1(2*numEdges,&edgelist);CHKERRQ(ierr);
      for (i=0; i<numEdges; i++) {
        edgelist[2*i] = i; edgelist[2*i+1] = i+1;
      }

      /* Add network components */
      /*------------------------*/
      ierr = PetscCalloc2(numVertices,&junctions,numEdges,&pipes);CHKERRQ(ierr);

      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type             = RESERVOIR;
      junctions[numVertices-1].type = VALVE;
    }
    break;
  case 1:
    /* pipeCase 1: */
    /* ==========================
                v2 (VALVE)
                ^
                |
               E2
                |
    v0 --E0--> v3--E1--> v1
  (RESERVOIR)            (RESERVOIR)
    =============================  */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      ierr = PetscCalloc1(2*numEdges,&edgelist);CHKERRQ(ierr);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 3; edgelist[5] = 2;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      ierr = PetscCalloc2(numVertices,&junctions,numEdges,&pipes);CHKERRQ(ierr);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = VALVE;
    }
    break;
  case 2:
    /* pipeCase 2: */
    /* ==========================
    (RESERVOIR)  v2--> E2
                       |
            v0 --E0--> v3--E1--> v1
    (RESERVOIR)               (VALVE)
    =============================  */

    /* Set application parameters -- to be used in function evalutions */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      ierr = PetscCalloc1(2*numEdges,&edgelist);CHKERRQ(ierr);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 2; edgelist[5] = 3;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      ierr = PetscCalloc2(numVertices,&junctions,numEdges,&pipes);CHKERRQ(ierr);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = RESERVOIR;
    }
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"not done yet");
  }

  /* set edge global id; numEdges is 0 on ranks != 0, so this is a no-op there */
  for (i=0; i<numEdges; i++) pipes[i].id = i;

  if (rank == 0) { /* set vtype for proc[0]: junction type of each edge endpoint */
    PetscInt v;
    ierr = PetscMalloc1(2*numEdges,&vtype);CHKERRQ(ierr);
    for (i=0; i<2*numEdges; i++) {
      v        = edgelist[i];
      vtype[i] = junctions[v].type;
    }
    wash->vtype = vtype;
  }

  /* nedge/nvertex now hold the local counts (0 on ranks != 0 until distribution) */
  *wash_ptr      = wash;
  wash->nedge    = numEdges;
  wash->nvertex  = numVertices;
  wash->edgelist = edgelist;
  wash->junction = junctions;
  wash->pipe     = pipes;

  /* Distribute edgelist to other processors */
  ierr = PetscOptionsGetBool(NULL,NULL,"-wash_distribute",&washdist,NULL);CHKERRQ(ierr);
  if (washdist) {
    /*
     ierr = PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n");CHKERRQ(ierr);
     */
    ierr = WashNetworkDistribute(comm,wash);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
624 
625 /* ------------------------------------------------------- */
626 int main(int argc,char ** argv)
627 {
628   PetscErrorCode    ierr;
629   Wash              wash;
630   Junction          junctions,junction;
631   Pipe              pipe,pipes;
632   PetscInt          KeyPipe,KeyJunction,*edgelist = NULL,*vtype = NULL;
633   PetscInt          i,e,v,eStart,eEnd,vStart,vEnd,key,vkey,type;
634   PetscInt          steps=1,nedges,nnodes=6;
635   const PetscInt    *cone;
636   DM                networkdm;
637   PetscMPIInt       size,rank;
638   PetscReal         ftime;
639   Vec               X;
640   TS                ts;
641   TSConvergedReason reason;
642   PetscBool         viewpipes,viewjuncs,monipipes=PETSC_FALSE,userJac=PETSC_TRUE,viewdm=PETSC_FALSE,viewX=PETSC_FALSE;
643   PetscInt          pipesCase=0;
644   DMNetworkMonitor  monitor;
645   MPI_Comm          comm;
646 
647   ierr = PetscInitialize(&argc,&argv,"pOption",help);if (ierr) return ierr;
648 
649   /* Read runtime options */
650   ierr = PetscOptionsGetInt(NULL,NULL, "-case", &pipesCase, NULL);CHKERRQ(ierr);
651   ierr = PetscOptionsGetBool(NULL,NULL,"-user_Jac",&userJac,NULL);CHKERRQ(ierr);
652   ierr = PetscOptionsGetBool(NULL,NULL,"-pipe_monitor",&monipipes,NULL);CHKERRQ(ierr);
653   ierr = PetscOptionsGetBool(NULL,NULL,"-viewdm",&viewdm,NULL);CHKERRQ(ierr);
654   ierr = PetscOptionsGetBool(NULL,NULL,"-viewX",&viewX,NULL);CHKERRQ(ierr);
655   ierr = PetscOptionsGetInt(NULL,NULL, "-npipenodes", &nnodes, NULL);CHKERRQ(ierr);
656 
657   /* Create networkdm */
658   /*------------------*/
659   ierr = DMNetworkCreate(PETSC_COMM_WORLD,&networkdm);CHKERRQ(ierr);
660   ierr = PetscObjectGetComm((PetscObject)networkdm,&comm);CHKERRQ(ierr);
661   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
662   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
663 
664   if (size == 1 && monipipes) {
665     ierr = DMNetworkMonitorCreate(networkdm,&monitor);CHKERRQ(ierr);
666   }
667 
668   /* Register the components in the network */
669   ierr = DMNetworkRegisterComponent(networkdm,"junctionstruct",sizeof(struct _p_Junction),&KeyJunction);CHKERRQ(ierr);
670   ierr = DMNetworkRegisterComponent(networkdm,"pipestruct",sizeof(struct _p_Pipe),&KeyPipe);CHKERRQ(ierr);
671 
672   /* Create a distributed wash network (user-specific) */
673   ierr = WashNetworkCreate(comm,pipesCase,&wash);CHKERRQ(ierr);
674   nedges      = wash->nedge;
675   edgelist    = wash->edgelist;
676   vtype       = wash->vtype;
677   junctions   = wash->junction;
678   pipes       = wash->pipe;
679 
680   /* Set up the network layout */
681   ierr = DMNetworkSetNumSubNetworks(networkdm,PETSC_DECIDE,1);CHKERRQ(ierr);
682   ierr = DMNetworkAddSubnetwork(networkdm,NULL,nedges,edgelist,NULL);CHKERRQ(ierr);
683 
684   ierr = DMNetworkLayoutSetUp(networkdm);CHKERRQ(ierr);
685 
686   ierr = DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);CHKERRQ(ierr);
687   ierr = DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);CHKERRQ(ierr);
688   /* ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd);CHKERRQ(ierr); */
689 
690   if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
691     /* vEnd - vStart = nvertices + number of ghost vertices! */
692     ierr = PetscCalloc2(vEnd - vStart,&junctions,nedges,&pipes);CHKERRQ(ierr);
693   }
694 
695   /* Add Pipe component and number of variables to all local edges */
696   for (e = eStart; e < eEnd; e++) {
697     pipes[e-eStart].nnodes = nnodes;
698     ierr = DMNetworkAddComponent(networkdm,e,KeyPipe,&pipes[e-eStart],2*pipes[e-eStart].nnodes);CHKERRQ(ierr);
699 
700     if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
701       pipes[e-eStart].length = 600.0;
702       ierr = DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e-eStart].nnodes, 0, 2, 0.0,pipes[e-eStart].length, -0.8, 0.8, PETSC_TRUE);CHKERRQ(ierr);
703       ierr = DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e-eStart].nnodes, 1, 2, 0.0,pipes[e-eStart].length, -400.0, 800.0, PETSC_TRUE);CHKERRQ(ierr);
704     }
705   }
706 
707   /* Add Junction component and number of variables to all local vertices, including ghost vertices! (current implementation requires setting the same number of variables at ghost points */
708   for (v = vStart; v < vEnd; v++) {
709     ierr = DMNetworkAddComponent(networkdm,v,KeyJunction,&junctions[v-vStart],2);CHKERRQ(ierr);
710   }
711 
712   if (size > 1) {  /* must be called before DMSetUp()???. Other partitioners do not work yet??? -- cause crash in proc[0]! */
713     DM               plexdm;
714     PetscPartitioner part;
715     ierr = DMNetworkGetPlex(networkdm,&plexdm);CHKERRQ(ierr);
716     ierr = DMPlexGetPartitioner(plexdm, &part);CHKERRQ(ierr);
717     ierr = PetscPartitionerSetType(part,PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
718     ierr = PetscOptionsSetValue(NULL,"-dm_plex_csr_via_mat","true");CHKERRQ(ierr); /* for parmetis */
719   }
720 
721   /* Set up DM for use */
722   ierr = DMSetUp(networkdm);CHKERRQ(ierr);
723   if (viewdm) {
724     ierr = PetscPrintf(PETSC_COMM_WORLD,"\nOriginal networkdm, DMView:\n");CHKERRQ(ierr);
725     ierr = DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
726   }
727 
728   /* Set user physical parameters to the components */
729   for (e = eStart; e < eEnd; e++) {
730     ierr = DMNetworkGetConnectedVertices(networkdm,e,&cone);CHKERRQ(ierr);
731     /* vfrom */
732     ierr = DMNetworkGetComponent(networkdm,cone[0],0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
733     junction->type = (VertexType)vtype[2*e];
734 
735     /* vto */
736     ierr = DMNetworkGetComponent(networkdm,cone[1],0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
737     junction->type = (VertexType)vtype[2*e+1];
738   }
739 
740   ierr = WashNetworkCleanUp(wash);CHKERRQ(ierr);
741 
742   /* Network partitioning and distribution of data */
743   ierr = DMNetworkDistribute(&networkdm,0);CHKERRQ(ierr);
744   if (viewdm) {
745     PetscPrintf(PETSC_COMM_WORLD,"\nAfter DMNetworkDistribute, DMView:\n");CHKERRQ(ierr);
746     ierr = DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
747   }
748 
749   /* create vectors */
750   ierr = DMCreateGlobalVector(networkdm,&X);CHKERRQ(ierr);
751   ierr = DMCreateLocalVector(networkdm,&wash->localX);CHKERRQ(ierr);
752   ierr = DMCreateLocalVector(networkdm,&wash->localXdot);CHKERRQ(ierr);
753 
754   /* PipeSetUp -- each process only sets its own pipes */
755   /*---------------------------------------------------*/
756   ierr = DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);CHKERRQ(ierr);
757 
758   userJac = PETSC_TRUE;
759   ierr = DMNetworkHasJacobian(networkdm,userJac,userJac);CHKERRQ(ierr);
760   ierr = DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);CHKERRQ(ierr);
761   for (e=eStart; e<eEnd; e++) { /* each edge has only one component, pipe */
762     ierr = DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);CHKERRQ(ierr);
763 
764     wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, will be used by PipesView() */
765     ierr = PipeSetParameters(pipe,
766                              600.0,          /* length */
767                              0.5,            /* diameter */
768                              1200.0,         /* a */
769                              0.018);CHKERRQ(ierr);    /* friction */
770     ierr = PipeSetUp(pipe);CHKERRQ(ierr);
771 
772     if (userJac) {
773       /* Create Jacobian matrix structures for a Pipe */
774       Mat            *J;
775       ierr = PipeCreateJacobian(pipe,NULL,&J);CHKERRQ(ierr);
776       ierr = DMNetworkEdgeSetMatrix(networkdm,e,J);CHKERRQ(ierr);
777     }
778   }
779 
780   if (userJac) {
781     ierr = DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);CHKERRQ(ierr);
782     for (v=vStart; v<vEnd; v++) {
783       Mat            *J;
784       ierr = JunctionCreateJacobian(networkdm,v,NULL,&J);CHKERRQ(ierr);
785       ierr = DMNetworkVertexSetMatrix(networkdm,v,J);CHKERRQ(ierr);
786 
787       ierr = DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
788       junction->jacobian = J;
789     }
790   }
791 
792   /* Setup solver                                           */
793   /*--------------------------------------------------------*/
794   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
795 
796   ierr = TSSetDM(ts,(DM)networkdm);CHKERRQ(ierr);
797   ierr = TSSetIFunction(ts,NULL,WASHIFunction,wash);CHKERRQ(ierr);
798 
799   ierr = TSSetMaxSteps(ts,steps);CHKERRQ(ierr);
800   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);CHKERRQ(ierr);
801   ierr = TSSetTimeStep(ts,0.1);CHKERRQ(ierr);
802   ierr = TSSetType(ts,TSBEULER);CHKERRQ(ierr);
803   if (size == 1 && monipipes) {
804     ierr = TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL);CHKERRQ(ierr);
805   }
806   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
807 
808   ierr = WASHSetInitialSolution(networkdm,X,wash);CHKERRQ(ierr);
809 
810   ierr = TSSolve(ts,X);CHKERRQ(ierr);
811 
812   ierr = TSGetSolveTime(ts,&ftime);CHKERRQ(ierr);
813   ierr = TSGetStepNumber(ts,&steps);CHKERRQ(ierr);
814   ierr = TSGetConvergedReason(ts,&reason);CHKERRQ(ierr);
815   ierr = PetscPrintf(PETSC_COMM_WORLD,"%s at time %g after %D steps\n",TSConvergedReasons[reason],(double)ftime,steps);CHKERRQ(ierr);
816   if (viewX) {
817     ierr = VecView(X,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
818   }
819 
820   viewpipes = PETSC_FALSE;
821   ierr = PetscOptionsGetBool(NULL,NULL, "-Jac_view", &viewpipes,NULL);CHKERRQ(ierr);
822   if (viewpipes) {
823     SNES snes;
824     Mat  Jac;
825     ierr = TSGetSNES(ts,&snes);CHKERRQ(ierr);
826     ierr = SNESGetJacobian(snes,&Jac,NULL,NULL,NULL);CHKERRQ(ierr);
827     ierr = MatView(Jac,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);
828   }
829 
830   /* View solutions */
831   /* -------------- */
832   viewpipes = PETSC_FALSE;
833   ierr = PetscOptionsGetBool(NULL,NULL, "-pipe_view", &viewpipes,NULL);CHKERRQ(ierr);
834   if (viewpipes) {
835     ierr = PipesView(networkdm,KeyPipe,X);CHKERRQ(ierr);
836   }
837 
838   /* Test IS */
839   viewjuncs = PETSC_FALSE;
840   ierr = PetscOptionsGetBool(NULL,NULL, "-isjunc_view", &viewjuncs,NULL);CHKERRQ(ierr);
841   if (viewjuncs) {
842     ierr = ISJunctionsView(networkdm,KeyJunction);CHKERRQ(ierr);
843   }
844 
845   /* Free spaces */
846   /* ----------- */
847   ierr = TSDestroy(&ts);CHKERRQ(ierr);
848   ierr = VecDestroy(&X);CHKERRQ(ierr);
849   ierr = VecDestroy(&wash->localX);CHKERRQ(ierr);
850   ierr = VecDestroy(&wash->localXdot);CHKERRQ(ierr);
851 
852   /* Destroy objects from each pipe that are created in PipeSetUp() */
853   ierr = DMNetworkGetEdgeRange(networkdm,&eStart, &eEnd);CHKERRQ(ierr);
854   for (i = eStart; i < eEnd; i++) {
855     ierr = DMNetworkGetComponent(networkdm,i,0,&key,(void**)&pipe,NULL);CHKERRQ(ierr);
856     ierr = PipeDestroy(&pipe);CHKERRQ(ierr);
857   }
858   if (userJac) {
859     for (v=vStart; v<vEnd; v++) {
860       ierr = DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL);CHKERRQ(ierr);
861       ierr = JunctionDestroyJacobian(networkdm,v,junction);CHKERRQ(ierr);
862     }
863   }
864 
865   if (size == 1 && monipipes) {
866     ierr = DMNetworkMonitorDestroy(&monitor);CHKERRQ(ierr);
867   }
868   ierr = DMDestroy(&networkdm);CHKERRQ(ierr);
869   ierr = PetscFree(wash);CHKERRQ(ierr);
870 
871   if (rank) {
872     ierr = PetscFree2(junctions,pipes);CHKERRQ(ierr);
873   }
874   ierr = PetscFinalize();
875   return ierr;
876 }
877 
878 /*TEST
879 
880    build:
881      depends: pipeInterface.c pipeImpls.c
882      requires: mumps
883 
884    test:
885       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
886       localrunfiles: pOption
887       output_file: output/pipes_1.out
888 
889    test:
890       suffix: 2
891       nsize: 2
892       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
893       localrunfiles: pOption
894       output_file: output/pipes_2.out
895 
896    test:
897       suffix: 3
898       nsize: 2
899       args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
900       localrunfiles: pOption
901       output_file: output/pipes_3.out
902 
903    test:
904       suffix: 4
905       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
906       localrunfiles: pOption
907       output_file: output/pipes_4.out
908 
909    test:
910       suffix: 5
911       nsize: 3
912       args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
913       localrunfiles: pOption
914       output_file: output/pipes_5.out
915 
916    test:
917       suffix: 6
918       nsize: 2
919       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
920       localrunfiles: pOption
921       output_file: output/pipes_6.out
922 
923    test:
924       suffix: 7
925       nsize: 2
926       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
927       localrunfiles: pOption
928       output_file: output/pipes_7.out
929 
930    test:
931       suffix: 8
932       nsize: 2
933       requires: parmetis
934       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1
935       localrunfiles: pOption
936       output_file: output/pipes_8.out
937 
938    test:
939       suffix: 9
940       nsize: 2
941       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view
942       localrunfiles: pOption
943       output_file: output/pipes_9.out
944 
945    test:
946       suffix: 10
947       nsize: 2
948       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view
949       localrunfiles: pOption
950       output_file: output/pipes_10.out
951 
952 TEST*/
953