xref: /petsc/src/ts/tutorials/network/pipes.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
1 static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\\n";
2 /*
3   Example: mpiexec -n <np> ./pipes -ts_max_steps 10
4 */
5 
6 #include "wash.h"
7 
8 /*
9   WashNetworkDistribute - proc[0] distributes sequential wash object
10    Input Parameters:
11 .  comm - MPI communicator
12 .  wash - wash context with all network data in proc[0]
13 
14    Output Parameter:
15 .  wash - wash context with nedge, nvertex and edgelist distributed
16 
17    Note: The routine is used for testing parallel generation of dmnetwork, then redistribute.
18 */
19 PetscErrorCode WashNetworkDistribute(MPI_Comm comm,Wash wash)
20 {
21   PetscMPIInt    rank,size,tag=0;
22   PetscInt       i,e,v,numEdges,numVertices,nedges,*eowners=NULL,estart,eend,*vtype=NULL,nvertices;
23   PetscInt       *edgelist = wash->edgelist,*nvtx=NULL,*vtxDone=NULL;
24 
25   PetscFunctionBegin;
26   PetscCallMPI(MPI_Comm_size(comm,&size));
27   if (size == 1) PetscFunctionReturn(0);
28 
29   PetscCallMPI(MPI_Comm_rank(comm,&rank));
30   numEdges    = wash->nedge;
31   numVertices = wash->nvertex;
32 
33   /* (1) all processes get global and local number of edges */
34   PetscCallMPI(MPI_Bcast(&numEdges,1,MPIU_INT,0,comm));
35   nedges = numEdges/size; /* local nedges */
36   if (rank == 0) {
37     nedges += numEdges - size*(numEdges/size);
38   }
39   wash->Nedge = numEdges;
40   wash->nedge = nedges;
41   /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges)); */
42 
43   PetscCall(PetscCalloc3(size+1,&eowners,size,&nvtx,numVertices,&vtxDone));
44   PetscCallMPI(MPI_Allgather(&nedges,1,MPIU_INT,eowners+1,1,MPIU_INT,PETSC_COMM_WORLD));
45   eowners[0] = 0;
46   for (i=2; i<=size; i++) {
47     eowners[i] += eowners[i-1];
48   }
49 
50   estart = eowners[rank];
51   eend   = eowners[rank+1];
52   /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend)); */
53 
54   /* (2) distribute row block edgelist to all processors */
55   if (rank == 0) {
56     vtype = wash->vtype;
57     for (i=1; i<size; i++) {
58       /* proc[0] sends edgelist to proc[i] */
59       PetscCallMPI(MPI_Send(edgelist+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm));
60 
61       /* proc[0] sends vtype to proc[i] */
62       PetscCallMPI(MPI_Send(vtype+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm));
63     }
64   } else {
65     MPI_Status      status;
66     PetscCall(PetscMalloc1(2*(eend-estart),&vtype));
67     PetscCall(PetscMalloc1(2*(eend-estart),&edgelist));
68 
69     PetscCallMPI(MPI_Recv(edgelist,2*(eend-estart),MPIU_INT,0,tag,comm,&status));
70     PetscCallMPI(MPI_Recv(vtype,2*(eend-estart),MPIU_INT,0,tag,comm,&status));
71   }
72 
73   wash->edgelist = edgelist;
74 
75   /* (3) all processes get global and local number of vertices, without ghost vertices */
76   if (rank == 0) {
77     for (i=0; i<size; i++) {
78       for (e=eowners[i]; e<eowners[i+1]; e++) {
79         v = edgelist[2*e];
80         if (!vtxDone[v]) {
81           nvtx[i]++; vtxDone[v] = 1;
82         }
83         v = edgelist[2*e+1];
84         if (!vtxDone[v]) {
85           nvtx[i]++; vtxDone[v] = 1;
86         }
87       }
88     }
89   }
90   PetscCallMPI(MPI_Bcast(&numVertices,1,MPIU_INT,0,PETSC_COMM_WORLD));
91   PetscCallMPI(MPI_Scatter(nvtx,1,MPIU_INT,&nvertices,1,MPIU_INT,0,PETSC_COMM_WORLD));
92   PetscCall(PetscFree3(eowners,nvtx,vtxDone));
93 
94   wash->Nvertex = numVertices;
95   wash->nvertex = nvertices;
96   wash->vtype   = vtype;
97   PetscFunctionReturn(0);
98 }
99 
100 PetscErrorCode WASHIFunction(TS ts,PetscReal t,Vec X,Vec Xdot,Vec F,void* ctx)
101 {
102   Wash           wash=(Wash)ctx;
103   DM             networkdm;
104   Vec            localX,localXdot,localF, localXold;
105   const PetscInt *cone;
106   PetscInt       vfrom,vto,offsetfrom,offsetto,varoffset;
107   PetscInt       v,vStart,vEnd,e,eStart,eEnd;
108   PetscInt       nend,type;
109   PetscBool      ghost;
110   PetscScalar    *farr,*juncf, *pipef;
111   PetscReal      dt;
112   Pipe           pipe;
113   PipeField      *pipex,*pipexdot,*juncx;
114   Junction       junction;
115   DMDALocalInfo  info;
116   const PetscScalar *xarr,*xdotarr, *xoldarr;
117 
118   PetscFunctionBegin;
119   localX    = wash->localX;
120   localXdot = wash->localXdot;
121 
122   PetscCall(TSGetSolution(ts,&localXold));
123   PetscCall(TSGetDM(ts,&networkdm));
124   PetscCall(TSGetTimeStep(ts,&dt));
125   PetscCall(DMGetLocalVector(networkdm,&localF));
126 
127   /* Set F and localF as zero */
128   PetscCall(VecSet(F,0.0));
129   PetscCall(VecSet(localF,0.0));
130 
131   /* Update ghost values of locaX and locaXdot */
132   PetscCall(DMGlobalToLocalBegin(networkdm,X,INSERT_VALUES,localX));
133   PetscCall(DMGlobalToLocalEnd(networkdm,X,INSERT_VALUES,localX));
134 
135   PetscCall(DMGlobalToLocalBegin(networkdm,Xdot,INSERT_VALUES,localXdot));
136   PetscCall(DMGlobalToLocalEnd(networkdm,Xdot,INSERT_VALUES,localXdot));
137 
138   PetscCall(VecGetArrayRead(localX,&xarr));
139   PetscCall(VecGetArrayRead(localXdot,&xdotarr));
140   PetscCall(VecGetArrayRead(localXold,&xoldarr));
141   PetscCall(VecGetArray(localF,&farr));
142 
143    /* junction->type == JUNCTION:
144            juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout)
145        junction->type == RESERVOIR (upper stream):
146            juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout)
147        junction->type == VALVE (down stream):
148            juncf[0] =  -qJ + sum(qin); juncf[1] = qJ
149   */
150   /* Vertex/junction initialization */
151   PetscCall(DMNetworkGetVertexRange(networkdm,&vStart,&vEnd));
152   for (v=vStart; v<vEnd; v++) {
153     PetscCall(DMNetworkIsGhostVertex(networkdm,v,&ghost));
154     if (ghost) continue;
155 
156     PetscCall(DMNetworkGetComponent(networkdm,v,0,&type,(void**)&junction,NULL));
157     PetscCall(DMNetworkGetLocalVecOffset(networkdm,v,ALL_COMPONENTS,&varoffset));
158     juncx      = (PipeField*)(xarr + varoffset);
159     juncf      = (PetscScalar*)(farr + varoffset);
160 
161     juncf[0] = -juncx[0].q;
162     juncf[1] =  juncx[0].q;
163 
164     if (junction->type == RESERVOIR) { /* upstream reservoir */
165       juncf[0] = juncx[0].h - wash->H0;
166     }
167   }
168 
169   /* Edge/pipe */
170   PetscCall(DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd));
171   for (e=eStart; e<eEnd; e++) {
172     PetscCall(DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL));
173     PetscCall(DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset));
174     pipex    = (PipeField*)(xarr + varoffset);
175     pipexdot = (PipeField*)(xdotarr + varoffset);
176     pipef    = (PetscScalar*)(farr + varoffset);
177 
178     /* Get some data into the pipe structure: note, some of these operations
179      * might be redundant. Will it consume too much time? */
180     pipe->dt   = dt;
181     pipe->xold = (PipeField*)(xoldarr + varoffset);
182 
183     /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */
184     PetscCall(DMDAGetLocalInfo(pipe->da,&info));
185     PetscCall(PipeIFunctionLocal_Lax(&info,t,pipex,pipexdot,pipef,pipe));
186 
187     /* Get boundary values from connected vertices */
188     PetscCall(DMNetworkGetConnectedVertices(networkdm,e,&cone));
189     vfrom = cone[0]; /* local ordering */
190     vto   = cone[1];
191     PetscCall(DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom));
192     PetscCall(DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto));
193 
194     /* Evaluate upstream boundary */
195     PetscCall(DMNetworkGetComponent(networkdm,vfrom,0,&type,(void**)&junction,NULL));
196     PetscCheck(junction->type == JUNCTION || junction->type == RESERVOIR,PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
197     juncx = (PipeField*)(xarr + offsetfrom);
198     juncf = (PetscScalar*)(farr + offsetfrom);
199 
200     pipef[0] = pipex[0].h - juncx[0].h;
201     juncf[1] -= pipex[0].q;
202 
203     /* Evaluate downstream boundary */
204     PetscCall(DMNetworkGetComponent(networkdm,vto,0,&type,(void**)&junction,NULL));
205     PetscCheck(junction->type == JUNCTION || junction->type == VALVE,PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
206     juncx = (PipeField*)(xarr + offsetto);
207     juncf = (PetscScalar*)(farr + offsetto);
208     nend  = pipe->nnodes - 1;
209 
210     pipef[2*nend + 1] = pipex[nend].h - juncx[0].h;
211     juncf[0] += pipex[nend].q;
212   }
213 
214   PetscCall(VecRestoreArrayRead(localX,&xarr));
215   PetscCall(VecRestoreArrayRead(localXdot,&xdotarr));
216   PetscCall(VecRestoreArray(localF,&farr));
217 
218   PetscCall(DMLocalToGlobalBegin(networkdm,localF,ADD_VALUES,F));
219   PetscCall(DMLocalToGlobalEnd(networkdm,localF,ADD_VALUES,F));
220   PetscCall(DMRestoreLocalVector(networkdm,&localF));
221   /*
222    PetscCall(PetscPrintf(PETSC_COMM_WORLD("F:\n"));
223    PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD));
224    */
225   PetscFunctionReturn(0);
226 }
227 
/*
  WASHSetInitialSolution - fill the global vector X with a steady-state
  initial guess computed pipe-by-pipe, plus boundary values at reservoir
  and valve vertices.

  Uses ADD_VALUES in the local-to-global scatter, so shared vertex entries
  receive the sum of the contributions written by the connected edges.
*/
PetscErrorCode WASHSetInitialSolution(DM networkdm,Vec X,Wash wash)
{
  PetscInt       k,nx,vkey,vfrom,vto,offsetfrom,offsetto;
  PetscInt       type,varoffset;
  PetscInt       e,eStart,eEnd;
  Vec            localX;
  PetscScalar    *xarr;
  Pipe           pipe;
  Junction       junction;
  const PetscInt *cone;
  const PetscScalar *xarray;

  PetscFunctionBegin;
  PetscCall(VecSet(X,0.0));
  PetscCall(DMGetLocalVector(networkdm,&localX));
  PetscCall(VecGetArray(localX,&xarr));

  /* Edge */
  PetscCall(DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd));
  for (e=eStart; e<eEnd; e++) {
    PetscCall(DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset));
    PetscCall(DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL));

    /* set initial values for this pipe */
    PetscCall(PipeComputeSteadyState(pipe,wash->Q0,wash->H0));
    PetscCall(VecGetSize(pipe->x,&nx));

    PetscCall(VecGetArrayRead(pipe->x,&xarray));
    /* copy pipe->x to xarray */
    for (k=0; k<nx; k++) {
      (xarr+varoffset)[k] = xarray[k];
    }

    /* set boundary values into vfrom and vto */
    PetscCall(DMNetworkGetConnectedVertices(networkdm,e,&cone));
    vfrom = cone[0]; /* local ordering */
    vto   = cone[1];
    PetscCall(DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom));
    PetscCall(DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto));

    /* if vform is a head vertex: */
    PetscCall(DMNetworkGetComponent(networkdm,vfrom,0,&vkey,(void**)&junction,NULL));
    if (junction->type == RESERVOIR) {
      (xarr+offsetfrom)[1] = wash->H0; /* 1st H */
    }

    /* if vto is an end vertex: */
    PetscCall(DMNetworkGetComponent(networkdm,vto,0,&vkey,(void**)&junction,NULL));
    if (junction->type == VALVE) {
      (xarr+offsetto)[0] = wash->QL; /* last Q */
    }
    PetscCall(VecRestoreArrayRead(pipe->x,&xarray));
  }

  PetscCall(VecRestoreArray(localX,&xarr));
  /* ADD_VALUES: vertex entries shared by several edges accumulate */
  PetscCall(DMLocalToGlobalBegin(networkdm,localX,ADD_VALUES,X));
  PetscCall(DMLocalToGlobalEnd(networkdm,localX,ADD_VALUES,X));
  PetscCall(DMRestoreLocalVector(networkdm,&localX));

#if 0
  PetscInt N;
  PetscCall(VecGetSize(X,&N));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N));
  PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD));
#endif
  PetscFunctionReturn(0);
}
295 
296 PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context)
297 {
298   DMNetworkMonitor   monitor;
299 
300   PetscFunctionBegin;
301   monitor = (DMNetworkMonitor)context;
302   PetscCall(DMNetworkMonitorView(monitor,x));
303   PetscFunctionReturn(0);
304 }
305 
/*
  PipesView - gather and print selected pipe variables from the distributed
  solution X: first the q-variables, then the h-variables, then all pipe
  variables, each via a DMNetworkCreateIS + VecScatter round trip.
*/
PetscErrorCode PipesView(DM networkdm,PetscInt KeyPipe,Vec X)
{
  PetscInt       i,numkeys=1,*blocksize,*numselectedvariable,**selectedvariables,n;
  IS             isfrom_q,isfrom_h,isfrom;
  Vec            Xto;
  VecScatter     ctx;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm,&comm));

  /* 1. Create isfrom_q for q-variable of pipes */
  PetscCall(PetscMalloc3(numkeys,&blocksize,numkeys,&numselectedvariable,numkeys,&selectedvariables));
  for (i=0; i<numkeys; i++) {
    blocksize[i]           = 2;  /* each pipe node has (q,h) */
    numselectedvariable[i] = 1;
    PetscCall(PetscMalloc1(numselectedvariable[i],&selectedvariables[i]));
    selectedvariables[i][0] = 0; /* q-variable */
  }
  PetscCall(DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_q));

  /* 2. Create Xto and isto */
  PetscCall(ISGetLocalSize(isfrom_q, &n));
  PetscCall(VecCreate(comm,&Xto));
  PetscCall(VecSetSizes(Xto,n,PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto,0.0));

  /* 3. Create scatter */
  PetscCall(VecScatterCreate(X,isfrom_q,Xto,NULL,&ctx));

  /* 4. Scatter to Xq */
  PetscCall(VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_q));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Xq:\n"));
  PetscCall(VecView(Xto,PETSC_VIEWER_STDOUT_WORLD));

  /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh.
     Xto is reused: q and h selections have the same local size n. */
  for (i=0; i<numkeys; i++) {
    selectedvariables[i][0] = 1; /* h-variable */
  }
  PetscCall(DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_h));

  PetscCall(VecScatterCreate(X,isfrom_h,Xto,NULL,&ctx));
  PetscCall(VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_h));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Xh:\n"));
  PetscCall(VecView(Xto,PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&Xto));

  /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes */
  for (i=0; i<numkeys; i++) {
    blocksize[i] = -1; /* select all the variables of the i-th component */
  }
  /* NOTE(review): the IS is created twice on purpose, apparently to exercise
     both the blocksize=-1 and the NULL-argument code paths of
     DMNetworkCreateIS; only the second IS is used below. */
  PetscCall(DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,NULL,NULL,&isfrom));
  PetscCall(ISDestroy(&isfrom));
  PetscCall(DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,NULL,NULL,NULL,&isfrom));

  PetscCall(ISGetLocalSize(isfrom, &n));
  PetscCall(VecCreate(comm,&Xto));
  PetscCall(VecSetSizes(Xto,n,PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto,0.0));

  PetscCall(VecScatterCreate(X,isfrom,Xto,NULL,&ctx));
  PetscCall(VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Xpipes:\n"));
  PetscCall(VecView(Xto,PETSC_VIEWER_STDOUT_WORLD));

  /* 7. Free spaces */
  for (i=0; i<numkeys; i++) {
    PetscCall(PetscFree(selectedvariables[i]));
  }
  PetscCall(PetscFree3(blocksize,numselectedvariable,selectedvariables));
  PetscCall(VecDestroy(&Xto));
  PetscFunctionReturn(0);
}
393 
394 PetscErrorCode ISJunctionsView(DM networkdm,PetscInt KeyJunc)
395 {
396   PetscInt       numkeys=1;
397   IS             isfrom;
398   MPI_Comm       comm;
399   PetscMPIInt    rank;
400 
401   PetscFunctionBegin;
402   PetscCall(PetscObjectGetComm((PetscObject)networkdm,&comm));
403   PetscCallMPI(MPI_Comm_rank(comm,&rank));
404 
405   /* Create a global isfrom for all junction variables */
406   PetscCall(DMNetworkCreateIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom));
407   PetscCall(PetscPrintf(PETSC_COMM_WORLD,"ISJunctions:\n"));
408   PetscCall(ISView(isfrom,PETSC_VIEWER_STDOUT_WORLD));
409   PetscCall(ISDestroy(&isfrom));
410 
411   /* Create a local isfrom for all junction variables */
412   PetscCall(DMNetworkCreateLocalIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom));
413   if (!rank) {
414     PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] ISLocalJunctions:\n",rank));
415     PetscCall(ISView(isfrom,PETSC_VIEWER_STDOUT_SELF));
416   }
417   PetscCall(ISDestroy(&isfrom));
418   PetscFunctionReturn(0);
419 }
420 
421 PetscErrorCode WashNetworkCleanUp(Wash wash)
422 {
423   PetscMPIInt    rank;
424 
425   PetscFunctionBegin;
426   PetscCallMPI(MPI_Comm_rank(wash->comm,&rank));
427   PetscCall(PetscFree(wash->edgelist));
428   PetscCall(PetscFree(wash->vtype));
429   if (rank == 0) {
430     PetscCall(PetscFree2(wash->junction,wash->pipe));
431   }
432   PetscFunctionReturn(0);
433 }
434 
435 PetscErrorCode WashNetworkCreate(MPI_Comm comm,PetscInt pipesCase,Wash *wash_ptr)
436 {
437   PetscInt       npipes;
438   PetscMPIInt    rank;
439   Wash           wash=NULL;
440   PetscInt       i,numVertices,numEdges,*vtype;
441   PetscInt       *edgelist;
442   Junction       junctions=NULL;
443   Pipe           pipes=NULL;
444   PetscBool      washdist=PETSC_TRUE;
445 
446   PetscFunctionBegin;
447   PetscCallMPI(MPI_Comm_rank(comm,&rank));
448 
449   PetscCall(PetscCalloc1(1,&wash));
450   wash->comm = comm;
451   *wash_ptr  = wash;
452   wash->Q0   = 0.477432; /* RESERVOIR */
453   wash->H0   = 150.0;
454   wash->HL   = 143.488;  /* VALVE */
455   wash->QL   = 0.0;
456   wash->nnodes_loc = 0;
457 
458   numVertices = 0;
459   numEdges    = 0;
460   edgelist    = NULL;
461 
462   /* proc[0] creates a sequential wash and edgelist */
463   PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Setup pipesCase %" PetscInt_FMT "\n",pipesCase));
464 
465   /* Set global number of pipes, edges, and junctions */
466   /*-------------------------------------------------*/
467   switch (pipesCase) {
468   case 0:
469     /* pipeCase 0: */
470     /* =================================================
471     (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
472     ====================================================  */
473     npipes = 3;
474     PetscCall(PetscOptionsGetInt(NULL,NULL, "-npipes", &npipes, NULL));
475     wash->nedge   = npipes;
476     wash->nvertex = npipes + 1;
477 
478     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
479     numVertices = 0;
480     numEdges    = 0;
481     edgelist    = NULL;
482     if (rank == 0) {
483       numVertices = wash->nvertex;
484       numEdges    = wash->nedge;
485 
486       PetscCall(PetscCalloc1(2*numEdges,&edgelist));
487       for (i=0; i<numEdges; i++) {
488         edgelist[2*i] = i; edgelist[2*i+1] = i+1;
489       }
490 
491       /* Add network components */
492       /*------------------------*/
493       PetscCall(PetscCalloc2(numVertices,&junctions,numEdges,&pipes));
494 
495       /* vertex */
496       for (i=0; i<numVertices; i++) {
497         junctions[i].id = i;
498         junctions[i].type = JUNCTION;
499       }
500 
501       junctions[0].type             = RESERVOIR;
502       junctions[numVertices-1].type = VALVE;
503     }
504     break;
505   case 1:
506     /* pipeCase 1: */
507     /* ==========================
508                 v2 (VALVE)
509                 ^
510                 |
511                E2
512                 |
513     v0 --E0--> v3--E1--> v1
514   (RESERVOIR)            (RESERVOIR)
515     =============================  */
516     npipes = 3;
517     wash->nedge   = npipes;
518     wash->nvertex = npipes + 1;
519 
520     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
521     if (rank == 0) {
522       numVertices = wash->nvertex;
523       numEdges    = wash->nedge;
524 
525       PetscCall(PetscCalloc1(2*numEdges,&edgelist));
526       edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
527       edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
528       edgelist[4] = 3; edgelist[5] = 2;  /* edge[2] */
529 
530       /* Add network components */
531       /*------------------------*/
532       PetscCall(PetscCalloc2(numVertices,&junctions,numEdges,&pipes));
533       /* vertex */
534       for (i=0; i<numVertices; i++) {
535         junctions[i].id   = i;
536         junctions[i].type = JUNCTION;
537       }
538 
539       junctions[0].type = RESERVOIR;
540       junctions[1].type = VALVE;
541       junctions[2].type = VALVE;
542     }
543     break;
544   case 2:
545     /* pipeCase 2: */
546     /* ==========================
547     (RESERVOIR)  v2--> E2
548                        |
549             v0 --E0--> v3--E1--> v1
550     (RESERVOIR)               (VALVE)
551     =============================  */
552 
553     /* Set application parameters -- to be used in function evalutions */
554     npipes = 3;
555     wash->nedge   = npipes;
556     wash->nvertex = npipes + 1;
557 
558     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
559     if (rank == 0) {
560       numVertices = wash->nvertex;
561       numEdges    = wash->nedge;
562 
563       PetscCall(PetscCalloc1(2*numEdges,&edgelist));
564       edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
565       edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
566       edgelist[4] = 2; edgelist[5] = 3;  /* edge[2] */
567 
568       /* Add network components */
569       /*------------------------*/
570       PetscCall(PetscCalloc2(numVertices,&junctions,numEdges,&pipes));
571       /* vertex */
572       for (i=0; i<numVertices; i++) {
573         junctions[i].id = i;
574         junctions[i].type = JUNCTION;
575       }
576 
577       junctions[0].type = RESERVOIR;
578       junctions[1].type = VALVE;
579       junctions[2].type = RESERVOIR;
580     }
581     break;
582   default:
583     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"not done yet");
584   }
585 
586   /* set edge global id */
587   for (i=0; i<numEdges; i++) pipes[i].id = i;
588 
589   if (rank == 0) { /* set vtype for proc[0] */
590     PetscInt v;
591     PetscCall(PetscMalloc1(2*numEdges,&vtype));
592     for (i=0; i<2*numEdges; i++) {
593       v        = edgelist[i];
594       vtype[i] = junctions[v].type;
595     }
596     wash->vtype = vtype;
597   }
598 
599   *wash_ptr      = wash;
600   wash->nedge    = numEdges;
601   wash->nvertex  = numVertices;
602   wash->edgelist = edgelist;
603   wash->junction = junctions;
604   wash->pipe     = pipes;
605 
606   /* Distribute edgelist to other processors */
607   PetscCall(PetscOptionsGetBool(NULL,NULL,"-wash_distribute",&washdist,NULL));
608   if (washdist) {
609     /*
610      PetscCall(PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n"));
611      */
612     PetscCall(WashNetworkDistribute(comm,wash));
613   }
614   PetscFunctionReturn(0);
615 }
616 
617 /* ------------------------------------------------------- */
/*
  main - build a distributed wash network, set up the DMNetwork, and run an
  implicit TS (backward Euler) water-hammer simulation over the pipes.
*/
int main(int argc,char ** argv)
{
  Wash              wash;
  Junction          junctions,junction;
  Pipe              pipe,pipes;
  PetscInt          KeyPipe,KeyJunction,*edgelist = NULL,*vtype = NULL;
  PetscInt          i,e,v,eStart,eEnd,vStart,vEnd,key,vkey,type;
  PetscInt          steps=1,nedges,nnodes=6;
  const PetscInt    *cone;
  DM                networkdm;
  PetscMPIInt       size,rank;
  PetscReal         ftime;
  Vec               X;
  TS                ts;
  TSConvergedReason reason;
  PetscBool         viewpipes,viewjuncs,monipipes=PETSC_FALSE,userJac=PETSC_TRUE,viewdm=PETSC_FALSE,viewX=PETSC_FALSE;
  PetscInt          pipesCase=0;
  DMNetworkMonitor  monitor;
  MPI_Comm          comm;

  PetscCall(PetscInitialize(&argc,&argv,"pOption",help));

  /* Read runtime options */
  PetscCall(PetscOptionsGetInt(NULL,NULL, "-case", &pipesCase, NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-user_Jac",&userJac,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-pipe_monitor",&monipipes,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-viewdm",&viewdm,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-viewX",&viewX,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL, "-npipenodes", &nnodes, NULL));

  /* Create networkdm */
  /*------------------*/
  PetscCall(DMNetworkCreate(PETSC_COMM_WORLD,&networkdm));
  PetscCall(PetscObjectGetComm((PetscObject)networkdm,&comm));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCallMPI(MPI_Comm_size(comm,&size));

  /* graphical monitoring is only supported for the sequential case */
  if (size == 1 && monipipes) {
    PetscCall(DMNetworkMonitorCreate(networkdm,&monitor));
  }

  /* Register the components in the network */
  PetscCall(DMNetworkRegisterComponent(networkdm,"junctionstruct",sizeof(struct _p_Junction),&KeyJunction));
  PetscCall(DMNetworkRegisterComponent(networkdm,"pipestruct",sizeof(struct _p_Pipe),&KeyPipe));

  /* Create a distributed wash network (user-specific) */
  PetscCall(WashNetworkCreate(comm,pipesCase,&wash));
  nedges      = wash->nedge;
  edgelist    = wash->edgelist;
  vtype       = wash->vtype;
  junctions   = wash->junction;
  pipes       = wash->pipe;

  /* Set up the network layout: one subnetwork containing all edges */
  PetscCall(DMNetworkSetNumSubNetworks(networkdm,PETSC_DECIDE,1));
  PetscCall(DMNetworkAddSubnetwork(networkdm,NULL,nedges,edgelist,NULL));

  PetscCall(DMNetworkLayoutSetUp(networkdm));

  PetscCall(DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd));
  PetscCall(DMNetworkGetVertexRange(networkdm,&vStart,&vEnd));
  /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd)); */

  if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
    /* vEnd - vStart = nvertices + number of ghost vertices! */
    PetscCall(PetscCalloc2(vEnd - vStart,&junctions,nedges,&pipes));
  }

  /* Add Pipe component and number of variables to all local edges */
  for (e = eStart; e < eEnd; e++) {
    pipes[e-eStart].nnodes = nnodes;
    PetscCall(DMNetworkAddComponent(networkdm,e,KeyPipe,&pipes[e-eStart],2*pipes[e-eStart].nnodes));

    if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
      pipes[e-eStart].length = 600.0;
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e-eStart].nnodes, 0, 2, 0.0,pipes[e-eStart].length, -0.8, 0.8, PETSC_TRUE));
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e-eStart].nnodes, 1, 2, 0.0,pipes[e-eStart].length, -400.0, 800.0, PETSC_TRUE));
    }
  }

  /* Add Junction component and number of variables to all local vertices */
  for (v = vStart; v < vEnd; v++) {
    PetscCall(DMNetworkAddComponent(networkdm,v,KeyJunction,&junctions[v-vStart],2));
  }

  if (size > 1) {  /* must be called before DMSetUp()???. Other partitioners do not work yet??? -- cause crash in proc[0]! */
    DM               plexdm;
    PetscPartitioner part;
    PetscCall(DMNetworkGetPlex(networkdm,&plexdm));
    PetscCall(DMPlexGetPartitioner(plexdm, &part));
    PetscCall(PetscPartitionerSetType(part,PETSCPARTITIONERSIMPLE));
    PetscCall(PetscOptionsSetValue(NULL,"-dm_plex_csr_alg","mat")); /* for parmetis */
  }

  /* Set up DM for use */
  PetscCall(DMSetUp(networkdm));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nOriginal networkdm, DMView:\n"));
    PetscCall(DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Set user physical parameters to the components:
     copy the junction types recorded in vtype[] (per edge endpoint) into the
     junction components attached to the DM */
  for (e = eStart; e < eEnd; e++) {
    PetscCall(DMNetworkGetConnectedVertices(networkdm,e,&cone));
    /* vfrom */
    PetscCall(DMNetworkGetComponent(networkdm,cone[0],0,&vkey,(void**)&junction,NULL));
    junction->type = (VertexType)vtype[2*e];

    /* vto */
    PetscCall(DMNetworkGetComponent(networkdm,cone[1],0,&vkey,(void**)&junction,NULL));
    junction->type = (VertexType)vtype[2*e+1];
  }

  /* setup arrays have been copied into DM components; free them */
  PetscCall(WashNetworkCleanUp(wash));

  /* Network partitioning and distribution of data */
  PetscCall(DMNetworkDistribute(&networkdm,0));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nAfter DMNetworkDistribute, DMView:\n"));
    PetscCall(DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD));
  }

  /* create vectors */
  PetscCall(DMCreateGlobalVector(networkdm,&X));
  PetscCall(DMCreateLocalVector(networkdm,&wash->localX));
  PetscCall(DMCreateLocalVector(networkdm,&wash->localXdot));

  /* PipeSetUp -- each process only sets its own pipes */
  /*---------------------------------------------------*/
  PetscCall(DMNetworkGetVertexRange(networkdm,&vStart,&vEnd));

  userJac = PETSC_TRUE;
  PetscCall(DMNetworkHasJacobian(networkdm,userJac,userJac));
  PetscCall(DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd));
  for (e=eStart; e<eEnd; e++) { /* each edge has only one component, pipe */
    PetscCall(DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL));

    wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, will be used by PipesView() */
    PetscCall(PipeSetParameters(pipe,
                              600.0,   /* length   */
                              0.5,     /* diameter */
                              1200.0,  /* a        */
                              0.018)); /* friction */
    PetscCall(PipeSetUp(pipe));

    if (userJac) {
      /* Create Jacobian matrix structures for a Pipe */
      Mat            *J;
      PetscCall(PipeCreateJacobian(pipe,NULL,&J));
      PetscCall(DMNetworkEdgeSetMatrix(networkdm,e,J));
    }
  }

  if (userJac) {
    PetscCall(DMNetworkGetVertexRange(networkdm,&vStart,&vEnd));
    for (v=vStart; v<vEnd; v++) {
      Mat            *J;
      PetscCall(JunctionCreateJacobian(networkdm,v,NULL,&J));
      PetscCall(DMNetworkVertexSetMatrix(networkdm,v,J));

      /* keep a handle on J so JunctionDestroyJacobian() can free it below */
      PetscCall(DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL));
      junction->jacobian = J;
    }
  }

  /* Setup solver                                           */
  /*--------------------------------------------------------*/
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));

  PetscCall(TSSetDM(ts,(DM)networkdm));
  PetscCall(TSSetIFunction(ts,NULL,WASHIFunction,wash));

  PetscCall(TSSetMaxSteps(ts,steps));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetTimeStep(ts,0.1));
  PetscCall(TSSetType(ts,TSBEULER));
  if (size == 1 && monipipes) PetscCall(TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL));
  PetscCall(TSSetFromOptions(ts));

  PetscCall(WASHSetInitialSolution(networkdm,X,wash));

  PetscCall(TSSolve(ts,X));

  PetscCall(TSGetSolveTime(ts,&ftime));
  PetscCall(TSGetStepNumber(ts,&steps));
  PetscCall(TSGetConvergedReason(ts,&reason));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%s at time %g after %" PetscInt_FMT " steps\n",TSConvergedReasons[reason],(double)ftime,steps));
  if (viewX) {
    PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD));
  }

  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL, "-Jac_view", &viewpipes,NULL));
  if (viewpipes) {
    SNES snes;
    Mat  Jac;
    PetscCall(TSGetSNES(ts,&snes));
    PetscCall(SNESGetJacobian(snes,&Jac,NULL,NULL,NULL));
    PetscCall(MatView(Jac,PETSC_VIEWER_DRAW_WORLD));
  }

  /* View solutions */
  /* -------------- */
  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL, "-pipe_view", &viewpipes,NULL));
  if (viewpipes) {
    PetscCall(PipesView(networkdm,KeyPipe,X));
  }

  /* Test IS */
  viewjuncs = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL, "-isjunc_view", &viewjuncs,NULL));
  if (viewjuncs) {
    PetscCall(ISJunctionsView(networkdm,KeyJunction));
  }

  /* Free spaces */
  /* ----------- */
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&wash->localX));
  PetscCall(VecDestroy(&wash->localXdot));

  /* Destroy objects from each pipe that are created in PipeSetUp() */
  PetscCall(DMNetworkGetEdgeRange(networkdm,&eStart, &eEnd));
  for (i = eStart; i < eEnd; i++) {
    PetscCall(DMNetworkGetComponent(networkdm,i,0,&key,(void**)&pipe,NULL));
    PetscCall(PipeDestroy(&pipe));
  }
  if (userJac) {
    for (v=vStart; v<vEnd; v++) {
      PetscCall(DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL));
      PetscCall(JunctionDestroyJacobian(networkdm,v,junction));
    }
  }

  if (size == 1 && monipipes) {
    PetscCall(DMNetworkMonitorDestroy(&monitor));
  }
  PetscCall(DMDestroy(&networkdm));
  PetscCall(PetscFree(wash));

  /* non-root ranks allocated their own junctions/pipes arrays above */
  if (rank) {
    PetscCall(PetscFree2(junctions,pipes));
  }
  PetscCall(PetscFinalize());
  return 0;
}
866 
867 /*TEST
868 
869    build:
870      depends: pipeInterface.c pipeImpls.c
871      requires: mumps
872 
873    test:
874       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
875       localrunfiles: pOption
876       output_file: output/pipes_1.out
877 
878    test:
879       suffix: 2
880       nsize: 2
881       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
882       localrunfiles: pOption
883       output_file: output/pipes_2.out
884 
885    test:
886       suffix: 3
887       nsize: 2
888       args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
889       localrunfiles: pOption
890       output_file: output/pipes_3.out
891 
892    test:
893       suffix: 4
894       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
895       localrunfiles: pOption
896       output_file: output/pipes_4.out
897 
898    test:
899       suffix: 5
900       nsize: 3
901       args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
902       localrunfiles: pOption
903       output_file: output/pipes_5.out
904 
905    test:
906       suffix: 6
907       nsize: 2
908       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
909       localrunfiles: pOption
910       output_file: output/pipes_6.out
911 
912    test:
913       suffix: 7
914       nsize: 2
915       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
916       localrunfiles: pOption
917       output_file: output/pipes_7.out
918 
919    test:
920       suffix: 8
921       nsize: 2
922       requires: parmetis
923       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1
924       localrunfiles: pOption
925       output_file: output/pipes_8.out
926 
927    test:
928       suffix: 9
929       nsize: 2
930       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view
931       localrunfiles: pOption
932       output_file: output/pipes_9.out
933 
934    test:
935       suffix: 10
936       nsize: 2
937       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view
938       localrunfiles: pOption
939       output_file: output/pipes_10.out
940 
941 TEST*/
942