1 static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\\n"; 2 /* 3 Example: mpiexec -n <np> ./pipes -ts_max_steps 10 4 */ 5 6 #include "wash.h" 7 8 /* 9 WashNetworkDistribute - proc[0] distributes sequential wash object 10 Input Parameters: 11 . comm - MPI communicator 12 . wash - wash context with all network data in proc[0] 13 14 Output Parameter: 15 . wash - wash context with nedge, nvertex and edgelist distributed 16 17 Note: The routine is used for testing parallel generation of dmnetwork, then redistribute. 18 */ 19 PetscErrorCode WashNetworkDistribute(MPI_Comm comm, Wash wash) { 20 PetscMPIInt rank, size, tag = 0; 21 PetscInt i, e, v, numEdges, numVertices, nedges, *eowners = NULL, estart, eend, *vtype = NULL, nvertices; 22 PetscInt *edgelist = wash->edgelist, *nvtx = NULL, *vtxDone = NULL; 23 24 PetscFunctionBegin; 25 PetscCallMPI(MPI_Comm_size(comm, &size)); 26 if (size == 1) PetscFunctionReturn(0); 27 28 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 29 numEdges = wash->nedge; 30 numVertices = wash->nvertex; 31 32 /* (1) all processes get global and local number of edges */ 33 PetscCallMPI(MPI_Bcast(&numEdges, 1, MPIU_INT, 0, comm)); 34 nedges = numEdges / size; /* local nedges */ 35 if (rank == 0) nedges += numEdges - size * (numEdges / size); 36 wash->Nedge = numEdges; 37 wash->nedge = nedges; 38 /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges)); */ 39 40 PetscCall(PetscCalloc3(size + 1, &eowners, size, &nvtx, numVertices, &vtxDone)); 41 PetscCallMPI(MPI_Allgather(&nedges, 1, MPIU_INT, eowners + 1, 1, MPIU_INT, PETSC_COMM_WORLD)); 42 eowners[0] = 0; 43 for (i = 2; i <= size; i++) eowners[i] += eowners[i - 1]; 44 45 estart = eowners[rank]; 46 eend = eowners[rank + 1]; 47 /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend)); */ 48 49 /* (2) distribute row block edgelist to all processors */ 50 
if (rank == 0) { 51 vtype = wash->vtype; 52 for (i = 1; i < size; i++) { 53 /* proc[0] sends edgelist to proc[i] */ 54 PetscCallMPI(MPI_Send(edgelist + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm)); 55 56 /* proc[0] sends vtype to proc[i] */ 57 PetscCallMPI(MPI_Send(vtype + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm)); 58 } 59 } else { 60 MPI_Status status; 61 PetscCall(PetscMalloc1(2 * (eend - estart), &vtype)); 62 PetscCall(PetscMalloc1(2 * (eend - estart), &edgelist)); 63 64 PetscCallMPI(MPI_Recv(edgelist, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status)); 65 PetscCallMPI(MPI_Recv(vtype, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status)); 66 } 67 68 wash->edgelist = edgelist; 69 70 /* (3) all processes get global and local number of vertices, without ghost vertices */ 71 if (rank == 0) { 72 for (i = 0; i < size; i++) { 73 for (e = eowners[i]; e < eowners[i + 1]; e++) { 74 v = edgelist[2 * e]; 75 if (!vtxDone[v]) { 76 nvtx[i]++; 77 vtxDone[v] = 1; 78 } 79 v = edgelist[2 * e + 1]; 80 if (!vtxDone[v]) { 81 nvtx[i]++; 82 vtxDone[v] = 1; 83 } 84 } 85 } 86 } 87 PetscCallMPI(MPI_Bcast(&numVertices, 1, MPIU_INT, 0, PETSC_COMM_WORLD)); 88 PetscCallMPI(MPI_Scatter(nvtx, 1, MPIU_INT, &nvertices, 1, MPIU_INT, 0, PETSC_COMM_WORLD)); 89 PetscCall(PetscFree3(eowners, nvtx, vtxDone)); 90 91 wash->Nvertex = numVertices; 92 wash->nvertex = nvertices; 93 wash->vtype = vtype; 94 PetscFunctionReturn(0); 95 } 96 97 PetscErrorCode WASHIFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx) { 98 Wash wash = (Wash)ctx; 99 DM networkdm; 100 Vec localX, localXdot, localF, localXold; 101 const PetscInt *cone; 102 PetscInt vfrom, vto, offsetfrom, offsetto, varoffset; 103 PetscInt v, vStart, vEnd, e, eStart, eEnd; 104 PetscInt nend, type; 105 PetscBool ghost; 106 PetscScalar *farr, *juncf, *pipef; 107 PetscReal dt; 108 Pipe pipe; 109 PipeField *pipex, *pipexdot, *juncx; 110 Junction junction; 111 DMDALocalInfo 
info; 112 const PetscScalar *xarr, *xdotarr, *xoldarr; 113 114 PetscFunctionBegin; 115 localX = wash->localX; 116 localXdot = wash->localXdot; 117 118 PetscCall(TSGetSolution(ts, &localXold)); 119 PetscCall(TSGetDM(ts, &networkdm)); 120 PetscCall(TSGetTimeStep(ts, &dt)); 121 PetscCall(DMGetLocalVector(networkdm, &localF)); 122 123 /* Set F and localF as zero */ 124 PetscCall(VecSet(F, 0.0)); 125 PetscCall(VecSet(localF, 0.0)); 126 127 /* Update ghost values of locaX and locaXdot */ 128 PetscCall(DMGlobalToLocalBegin(networkdm, X, INSERT_VALUES, localX)); 129 PetscCall(DMGlobalToLocalEnd(networkdm, X, INSERT_VALUES, localX)); 130 131 PetscCall(DMGlobalToLocalBegin(networkdm, Xdot, INSERT_VALUES, localXdot)); 132 PetscCall(DMGlobalToLocalEnd(networkdm, Xdot, INSERT_VALUES, localXdot)); 133 134 PetscCall(VecGetArrayRead(localX, &xarr)); 135 PetscCall(VecGetArrayRead(localXdot, &xdotarr)); 136 PetscCall(VecGetArrayRead(localXold, &xoldarr)); 137 PetscCall(VecGetArray(localF, &farr)); 138 139 /* junction->type == JUNCTION: 140 juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout) 141 junction->type == RESERVOIR (upper stream): 142 juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout) 143 junction->type == VALVE (down stream): 144 juncf[0] = -qJ + sum(qin); juncf[1] = qJ 145 */ 146 /* Vertex/junction initialization */ 147 PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd)); 148 for (v = vStart; v < vEnd; v++) { 149 PetscCall(DMNetworkIsGhostVertex(networkdm, v, &ghost)); 150 if (ghost) continue; 151 152 PetscCall(DMNetworkGetComponent(networkdm, v, 0, &type, (void **)&junction, NULL)); 153 PetscCall(DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &varoffset)); 154 juncx = (PipeField *)(xarr + varoffset); 155 juncf = (PetscScalar *)(farr + varoffset); 156 157 juncf[0] = -juncx[0].q; 158 juncf[1] = juncx[0].q; 159 160 if (junction->type == RESERVOIR) { /* upstream reservoir */ 161 juncf[0] = juncx[0].h - wash->H0; 162 } 163 } 164 165 /* Edge/pipe */ 166 
PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd)); 167 for (e = eStart; e < eEnd; e++) { 168 PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL)); 169 PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset)); 170 pipex = (PipeField *)(xarr + varoffset); 171 pipexdot = (PipeField *)(xdotarr + varoffset); 172 pipef = (PetscScalar *)(farr + varoffset); 173 174 /* Get some data into the pipe structure: note, some of these operations 175 * might be redundant. Will it consume too much time? */ 176 pipe->dt = dt; 177 pipe->xold = (PipeField *)(xoldarr + varoffset); 178 179 /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */ 180 PetscCall(DMDAGetLocalInfo(pipe->da, &info)); 181 PetscCall(PipeIFunctionLocal_Lax(&info, t, pipex, pipexdot, pipef, pipe)); 182 183 /* Get boundary values from connected vertices */ 184 PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone)); 185 vfrom = cone[0]; /* local ordering */ 186 vto = cone[1]; 187 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vfrom, ALL_COMPONENTS, &offsetfrom)); 188 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto)); 189 190 /* Evaluate upstream boundary */ 191 PetscCall(DMNetworkGetComponent(networkdm, vfrom, 0, &type, (void **)&junction, NULL)); 192 PetscCheck(junction->type == JUNCTION || junction->type == RESERVOIR, PETSC_COMM_SELF, PETSC_ERR_SUP, "junction type is not supported"); 193 juncx = (PipeField *)(xarr + offsetfrom); 194 juncf = (PetscScalar *)(farr + offsetfrom); 195 196 pipef[0] = pipex[0].h - juncx[0].h; 197 juncf[1] -= pipex[0].q; 198 199 /* Evaluate downstream boundary */ 200 PetscCall(DMNetworkGetComponent(networkdm, vto, 0, &type, (void **)&junction, NULL)); 201 PetscCheck(junction->type == JUNCTION || junction->type == VALVE, PETSC_COMM_SELF, PETSC_ERR_SUP, "junction type is not supported"); 202 juncx = (PipeField *)(xarr + offsetto); 203 juncf = (PetscScalar *)(farr + offsetto); 204 nend = 
pipe->nnodes - 1; 205 206 pipef[2 * nend + 1] = pipex[nend].h - juncx[0].h; 207 juncf[0] += pipex[nend].q; 208 } 209 210 PetscCall(VecRestoreArrayRead(localX, &xarr)); 211 PetscCall(VecRestoreArrayRead(localXdot, &xdotarr)); 212 PetscCall(VecRestoreArray(localF, &farr)); 213 214 PetscCall(DMLocalToGlobalBegin(networkdm, localF, ADD_VALUES, F)); 215 PetscCall(DMLocalToGlobalEnd(networkdm, localF, ADD_VALUES, F)); 216 PetscCall(DMRestoreLocalVector(networkdm, &localF)); 217 /* 218 PetscCall(PetscPrintf(PETSC_COMM_WORLD("F:\n")); 219 PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD)); 220 */ 221 PetscFunctionReturn(0); 222 } 223 224 PetscErrorCode WASHSetInitialSolution(DM networkdm, Vec X, Wash wash) { 225 PetscInt k, nx, vkey, vfrom, vto, offsetfrom, offsetto; 226 PetscInt type, varoffset; 227 PetscInt e, eStart, eEnd; 228 Vec localX; 229 PetscScalar *xarr; 230 Pipe pipe; 231 Junction junction; 232 const PetscInt *cone; 233 const PetscScalar *xarray; 234 235 PetscFunctionBegin; 236 PetscCall(VecSet(X, 0.0)); 237 PetscCall(DMGetLocalVector(networkdm, &localX)); 238 PetscCall(VecGetArray(localX, &xarr)); 239 240 /* Edge */ 241 PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd)); 242 for (e = eStart; e < eEnd; e++) { 243 PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset)); 244 PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL)); 245 246 /* set initial values for this pipe */ 247 PetscCall(PipeComputeSteadyState(pipe, wash->Q0, wash->H0)); 248 PetscCall(VecGetSize(pipe->x, &nx)); 249 250 PetscCall(VecGetArrayRead(pipe->x, &xarray)); 251 /* copy pipe->x to xarray */ 252 for (k = 0; k < nx; k++) (xarr + varoffset)[k] = xarray[k]; 253 254 /* set boundary values into vfrom and vto */ 255 PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone)); 256 vfrom = cone[0]; /* local ordering */ 257 vto = cone[1]; 258 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vfrom, ALL_COMPONENTS, &offsetfrom)); 259 
    PetscCall(DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto));

    /* if vfrom is a head vertex: */
    PetscCall(DMNetworkGetComponent(networkdm, vfrom, 0, &vkey, (void **)&junction, NULL));
    if (junction->type == RESERVOIR) { (xarr + offsetfrom)[1] = wash->H0; /* 1st H */ }

    /* if vto is an end vertex: */
    PetscCall(DMNetworkGetComponent(networkdm, vto, 0, &vkey, (void **)&junction, NULL));
    if (junction->type == VALVE) { (xarr + offsetto)[0] = wash->QL; /* last Q */ }
    PetscCall(VecRestoreArrayRead(pipe->x, &xarray));
  }

  PetscCall(VecRestoreArray(localX, &xarr));
  /* ADD_VALUES: ghost-vertex contributions from different ranks are summed */
  PetscCall(DMLocalToGlobalBegin(networkdm, localX, ADD_VALUES, X));
  PetscCall(DMLocalToGlobalEnd(networkdm, localX, ADD_VALUES, X));
  PetscCall(DMRestoreLocalVector(networkdm, &localX));

#if 0
  PetscInt N;
  PetscCall(VecGetSize(X,&N));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N));
  PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD));
#endif
  PetscFunctionReturn(0);
}

/* TSDMNetworkMonitor - TS monitor callback; forwards the solution to the DMNetwork monitor */
PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context) {
  DMNetworkMonitor monitor;

  PetscFunctionBegin;
  monitor = (DMNetworkMonitor)context;
  PetscCall(DMNetworkMonitorView(monitor, x));
  PetscFunctionReturn(0);
}

/*
  PipesView - test DMNetworkCreateIS by extracting and viewing the q-variables,
  h-variables, and all variables of the pipe components from the solution X.
*/
PetscErrorCode PipesView(DM networkdm, PetscInt KeyPipe, Vec X) {
  PetscInt   i, numkeys = 1, *blocksize, *numselectedvariable, **selectedvariables, n;
  IS         isfrom_q, isfrom_h, isfrom;
  Vec        Xto;
  VecScatter ctx;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));

  /* 1. Create isfrom_q for q-variable of pipes */
  PetscCall(PetscMalloc3(numkeys, &blocksize, numkeys, &numselectedvariable, numkeys, &selectedvariables));
  for (i = 0; i < numkeys; i++) {
    blocksize[i]           = 2;
    numselectedvariable[i] = 1;
    PetscCall(PetscMalloc1(numselectedvariable[i], &selectedvariables[i]));
    selectedvariables[i][0] = 0; /* q-variable */
  }
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_q));

  /* 2. Create Xto and isto */
  PetscCall(ISGetLocalSize(isfrom_q, &n));
  PetscCall(VecCreate(comm, &Xto));
  PetscCall(VecSetSizes(Xto, n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto, 0.0));

  /* 3. Create scatter */
  PetscCall(VecScatterCreate(X, isfrom_q, Xto, NULL, &ctx));

  /* 4. Scatter to Xq */
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_q));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xq:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));

  /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh */
  for (i = 0; i < numkeys; i++) { selectedvariables[i][0] = 1; /* h-variable */ }
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_h));

  PetscCall(VecScatterCreate(X, isfrom_h, Xto, NULL, &ctx));
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_h));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xh:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&Xto));

  /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes */
  for (i = 0; i < numkeys; i++) { blocksize[i] = -1; /* select all the variables of the i-th component */ }
  /* exercise both calling conventions: blocksize=-1 and all-NULL arguments */
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, NULL, NULL, &isfrom));
  PetscCall(ISDestroy(&isfrom));
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, NULL, NULL, NULL, &isfrom));

  PetscCall(ISGetLocalSize(isfrom, &n));
  PetscCall(VecCreate(comm, &Xto));
  PetscCall(VecSetSizes(Xto, n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto, 0.0));

  PetscCall(VecScatterCreate(X, isfrom, Xto, NULL, &ctx));
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xpipes:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));

  /* 7. Free spaces */
  for (i = 0; i < numkeys; i++) PetscCall(PetscFree(selectedvariables[i]));
  PetscCall(PetscFree3(blocksize, numselectedvariable, selectedvariables));
  PetscCall(VecDestroy(&Xto));
  PetscFunctionReturn(0);
}

/* ISJunctionsView - test DMNetworkCreateIS/DMNetworkCreateLocalIS on junction variables */
PetscErrorCode ISJunctionsView(DM networkdm, PetscInt KeyJunc) {
  PetscInt    numkeys = 1;
  IS          isfrom;
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* Create a global isfrom for all junction variables */
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "ISJunctions:\n"));
  PetscCall(ISView(isfrom, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&isfrom));

  /* Create a local isfrom for all junction variables */
  PetscCall(DMNetworkCreateLocalIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom));
  if (rank == 0) { /* only rank 0 views, to keep test output deterministic */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] ISLocalJunctions:\n", rank));
    PetscCall(ISView(isfrom, PETSC_VIEWER_STDOUT_SELF));
  }
  PetscCall(ISDestroy(&isfrom));
  PetscFunctionReturn(0);
}

/* WashNetworkCleanUp - free the setup arrays owned by the wash context */
PetscErrorCode WashNetworkCleanUp(Wash wash) {
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(wash->comm, &rank));
  PetscCall(PetscFree(wash->edgelist));
  PetscCall(PetscFree(wash->vtype));
  /* junction[]/pipe[] were allocated by proc[0] only (see WashNetworkCreate) */
  if (rank == 0) PetscCall(PetscFree2(wash->junction, wash->pipe));
  PetscFunctionReturn(0);
}

/*
  WashNetworkCreate - proc[0] builds the sequential network for the selected
  pipesCase, then optionally distributes it (-wash_distribute).
*/
PetscErrorCode WashNetworkCreate(MPI_Comm comm, PetscInt pipesCase, Wash *wash_ptr) {
  PetscInt    npipes;
  PetscMPIInt rank;
  Wash        wash = NULL;
  PetscInt    i, numVertices, numEdges, *vtype;
  PetscInt   *edgelist;
  Junction    junctions = NULL;
  Pipe        pipes     = NULL;
  PetscBool   washdist  = PETSC_TRUE;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  PetscCall(PetscCalloc1(1, &wash));
  wash->comm = comm;
  *wash_ptr  = wash;
  wash->Q0   = 0.477432; /* RESERVOIR */
  wash->H0   = 150.0;
  wash->HL   = 143.488; /* VALVE */
  wash->QL   = 0.0;
  wash->nnodes_loc = 0;

  numVertices = 0;
  numEdges    = 0;
  edgelist    = NULL;

  /* proc[0] creates a sequential wash and edgelist */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Setup pipesCase %" PetscInt_FMT "\n", pipesCase));

  /* Set global number of pipes, edges, and junctions */
  /*-------------------------------------------------*/
  switch (pipesCase) {
  case 0:
    /* pipeCase 0: */
    /* =================================================
    (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
    ==================================================== */
    npipes = 3;
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-npipes", &npipes, NULL));
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    numVertices = 0;
    numEdges    = 0;
    edgelist    = NULL;
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      /* simple chain: edge i connects vertex i to vertex i+1 */
      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      for (i = 0; i < numEdges; i++) {
        edgelist[2 * i]     = i;
        edgelist[2 * i + 1] = i + 1;
      }

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));

      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type               = RESERVOIR;
      junctions[numVertices - 1].type = VALVE;
    }
    break;
  case 1:
    /* pipeCase 1: */
    /* ==========================
                  v2 (VALVE)
                  ^
                  |
                  E2
                  |
      v0 --E0--> v3 --E1--> v1
    (RESERVOIR)            (RESERVOIR)
    ============================= */
    npipes        = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      edgelist[0] = 0;
      edgelist[1] = 3; /* edge[0] */
      edgelist[2] = 3;
      edgelist[3] = 1; /* edge[1] */
      edgelist[4] = 3;
      edgelist[5] = 2; /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));
      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = VALVE;
    }
    break;
  case 2:
    /* pipeCase 2: */
    /* ==========================
      (RESERVOIR) v2 --> E2
                         |
      v0 --E0--> v3 --E1--> v1
    (RESERVOIR)            (VALVE)
    ============================= */

    /* Set application parameters -- to be used in function evaluations */
    npipes        = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      edgelist[0] = 0;
      edgelist[1] = 3; /* edge[0] */
      edgelist[2] = 3;
      edgelist[3] = 1; /* edge[1] */
      edgelist[4] = 2;
      edgelist[5] = 3; /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));
      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = RESERVOIR;
    }
    break;
  default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "not done yet");
  }

  /* set edge global id (no-op on ranks != 0 where numEdges == 0) */
  for (i = 0; i < numEdges; i++) pipes[i].id = i;

  if (rank == 0) { /* set vtype for proc[0] */
    PetscInt v;
    PetscCall(PetscMalloc1(2 * numEdges, &vtype));
    for (i = 0; i < 2 * numEdges; i++) {
      v        = edgelist[i];
      vtype[i] = junctions[v].type;
    }
    wash->vtype = vtype;
  }

  *wash_ptr      = wash;
  wash->nedge    = numEdges;
  wash->nvertex  = numVertices;
  wash->edgelist = edgelist;
  wash->junction = junctions;
  wash->pipe     = pipes;

  /* Distribute edgelist to other processors */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-wash_distribute", &washdist, NULL));
  if (washdist) {
    /*
     PetscCall(PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n"));
    */
    PetscCall(WashNetworkDistribute(comm, wash));
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------- */
int main(int argc, char **argv) {
  Wash              wash;
  Junction          junctions, junction;
  Pipe              pipe, pipes;
  PetscInt          KeyPipe, KeyJunction, *edgelist = NULL, *vtype = NULL;
  PetscInt          i, e, v, eStart, eEnd, vStart, vEnd, key, vkey, type;
  PetscInt          steps = 1, nedges, nnodes = 6;
  const PetscInt   *cone;
  DM                networkdm;
  PetscMPIInt       size, rank;
  PetscReal         ftime;
  Vec               X;
  TS                ts;
  TSConvergedReason reason;
  PetscBool         viewpipes, viewjuncs, monipipes = PETSC_FALSE, userJac = PETSC_TRUE, viewdm = PETSC_FALSE, viewX = PETSC_FALSE;
  PetscInt          pipesCase = 0;
  DMNetworkMonitor  monitor;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, "pOption", help));

  /* Read runtime options */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-case", &pipesCase, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_Jac", &userJac, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pipe_monitor", &monipipes, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewdm", &viewdm, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewX", &viewX, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-npipenodes", &nnodes, NULL));

  /* Create networkdm */
  /*------------------*/
  PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &networkdm));
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* graphical monitor is sequential-only */
  if (size == 1 && monipipes) PetscCall(DMNetworkMonitorCreate(networkdm, &monitor));

  /* Register the components in the network */
  PetscCall(DMNetworkRegisterComponent(networkdm, "junctionstruct", sizeof(struct _p_Junction), &KeyJunction));
  PetscCall(DMNetworkRegisterComponent(networkdm, "pipestruct", sizeof(struct _p_Pipe), &KeyPipe));

  /* Create a distributed wash network (user-specific) */
  PetscCall(WashNetworkCreate(comm, pipesCase, &wash));
  nedges    = wash->nedge;
  edgelist  = wash->edgelist;
  vtype     = wash->vtype;
  junctions = wash->junction;
  pipes     = wash->pipe;

  /* Set up the network layout */
  PetscCall(DMNetworkSetNumSubNetworks(networkdm, PETSC_DECIDE, 1));
  PetscCall(DMNetworkAddSubnetwork(networkdm, NULL, nedges, edgelist, NULL));

  PetscCall(DMNetworkLayoutSetUp(networkdm));

  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));
  /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd)); */

  if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
    /* vEnd - vStart = nvertices + number of ghost vertices! */
    PetscCall(PetscCalloc2(vEnd - vStart, &junctions, nedges, &pipes));
  }

  /* Add Pipe component and number of variables to all local edges */
  for (e = eStart; e < eEnd; e++) {
    pipes[e - eStart].nnodes = nnodes;
    PetscCall(DMNetworkAddComponent(networkdm, e, KeyPipe, &pipes[e - eStart], 2 * pipes[e - eStart].nnodes));

    if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
      pipes[e - eStart].length = 600.0;
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e - eStart].nnodes, 0, 2, 0.0, pipes[e - eStart].length, -0.8, 0.8, PETSC_TRUE));
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e - eStart].nnodes, 1, 2, 0.0, pipes[e - eStart].length, -400.0, 800.0, PETSC_TRUE));
    }
  }

  /* Add Junction component and number of variables to all local vertices */
  for (v = vStart; v < vEnd; v++) PetscCall(DMNetworkAddComponent(networkdm, v, KeyJunction, &junctions[v - vStart], 2));

  if (size > 1) { /* must be called before DMSetUp()???. Other partitioners do not work yet??? -- cause crash in proc[0]! */
    DM               plexdm;
    PetscPartitioner part;
    PetscCall(DMNetworkGetPlex(networkdm, &plexdm));
    PetscCall(DMPlexGetPartitioner(plexdm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_csr_alg", "mat")); /* for parmetis */
  }

  /* Set up DM for use */
  PetscCall(DMSetUp(networkdm));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nOriginal networkdm, DMView:\n"));
    PetscCall(DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Set user physical parameters to the components */
  for (e = eStart; e < eEnd; e++) {
    PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone));
    /* vfrom */
    PetscCall(DMNetworkGetComponent(networkdm, cone[0], 0, &vkey, (void **)&junction, NULL));
    junction->type = (VertexType)vtype[2 * e];

    /* vto */
    PetscCall(DMNetworkGetComponent(networkdm, cone[1], 0, &vkey, (void **)&junction, NULL));
    junction->type = (VertexType)vtype[2 * e + 1];
  }

  PetscCall(WashNetworkCleanUp(wash));

  /* Network partitioning and distribution of data */
  PetscCall(DMNetworkDistribute(&networkdm, 0));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nAfter DMNetworkDistribute, DMView:\n"));
    PetscCall(DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* create vectors */
  PetscCall(DMCreateGlobalVector(networkdm, &X));
  PetscCall(DMCreateLocalVector(networkdm, &wash->localX));
  PetscCall(DMCreateLocalVector(networkdm, &wash->localXdot));

  /* PipeSetUp -- each process only sets its own pipes */
  /*---------------------------------------------------*/
  PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));

  userJac = PETSC_TRUE;
  PetscCall(DMNetworkHasJacobian(networkdm, userJac, userJac));
  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  for (e = eStart; e < eEnd; e++) { /* each edge has only one component, pipe */
    PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL));

    wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, will be used by PipesView() */
    PetscCall(PipeSetParameters(pipe, 600.0, /* length */
                                0.5,         /* diameter */
                                1200.0,      /* a */
                                0.018));     /* friction */
    PetscCall(PipeSetUp(pipe));

    if (userJac) {
      /* Create Jacobian matrix structures for a Pipe */
      Mat *J;
      PetscCall(PipeCreateJacobian(pipe, NULL, &J));
      PetscCall(DMNetworkEdgeSetMatrix(networkdm, e, J));
    }
  }

  if (userJac) {
    PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));
    for (v = vStart; v < vEnd; v++) {
      Mat *J;
      PetscCall(JunctionCreateJacobian(networkdm, v, NULL, &J));
      PetscCall(DMNetworkVertexSetMatrix(networkdm, v, J));

      /* keep a handle on J so it can be freed in the cleanup phase below */
      PetscCall(DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL));
      junction->jacobian = J;
    }
  }

  /* Setup solver */
  /*--------------------------------------------------------*/
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  PetscCall(TSSetDM(ts, (DM)networkdm));
  PetscCall(TSSetIFunction(ts, NULL, WASHIFunction, wash));

  PetscCall(TSSetMaxSteps(ts, steps));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetTimeStep(ts, 0.1));
  PetscCall(TSSetType(ts, TSBEULER));
  if (size == 1 && monipipes) PetscCall(TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL));
  PetscCall(TSSetFromOptions(ts));

  PetscCall(WASHSetInitialSolution(networkdm, X, wash));

  PetscCall(TSSolve(ts, X));

  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s at time %g after %" PetscInt_FMT " steps\n", TSConvergedReasons[reason], (double)ftime, steps));
  if (viewX) PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD));

  /* Optionally draw the assembled Jacobian */
  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-Jac_view", &viewpipes, NULL));
  if (viewpipes) {
    SNES snes;
    Mat  Jac;
    PetscCall(TSGetSNES(ts, &snes));
    PetscCall(SNESGetJacobian(snes, &Jac, NULL, NULL, NULL));
    PetscCall(MatView(Jac, PETSC_VIEWER_DRAW_WORLD));
  }

  /* View solutions */
  /* -------------- */
  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pipe_view", &viewpipes, NULL));
  if (viewpipes) PetscCall(PipesView(networkdm, KeyPipe, X));

  /* Test IS */
  viewjuncs = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-isjunc_view", &viewjuncs, NULL));
  if (viewjuncs) PetscCall(ISJunctionsView(networkdm, KeyJunction));

  /* Free spaces */
  /* ----------- */
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&wash->localX));
  PetscCall(VecDestroy(&wash->localXdot));

  /* Destroy objects from each pipe that are created in PipeSetUp() */
  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  for (i = eStart; i < eEnd; i++) {
    PetscCall(DMNetworkGetComponent(networkdm, i, 0, &key, (void **)&pipe, NULL));
    PetscCall(PipeDestroy(&pipe));
  }
  if (userJac) {
    for (v = vStart; v < vEnd; v++) {
      PetscCall(DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL));
      PetscCall(JunctionDestroyJacobian(networkdm, v, junction));
    }
  }

  if (size == 1 && monipipes) PetscCall(DMNetworkMonitorDestroy(&monitor));
  PetscCall(DMDestroy(&networkdm));
  PetscCall(PetscFree(wash));

  /* ranks != 0 allocated their own junctions[]/pipes[] before DMSetUp() */
  if (rank) PetscCall(PetscFree2(junctions, pipes));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  build:
    depends: pipeInterface.c pipeImpls.c
    requires: mumps

  test:
    args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
    localrunfiles: pOption
    output_file: output/pipes_1.out

  test:
    suffix: 2
    nsize: 2
    args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
    localrunfiles: pOption
    output_file: output/pipes_2.out

  test:
    suffix: 3
    nsize: 2
    args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
    localrunfiles: pOption
    output_file: output/pipes_3.out

  test:
    suffix: 4
    args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
    localrunfiles: pOption
    output_file: output/pipes_4.out

  test:
    suffix: 5
    nsize: 3
    args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
    localrunfiles: pOption
    output_file: output/pipes_5.out

  test:
    suffix: 6
    nsize: 2
    args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
    localrunfiles: pOption
    output_file: output/pipes_6.out

  test:
    suffix: 7
    nsize: 2
    args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
    localrunfiles: pOption
    output_file: output/pipes_7.out

  test:
    suffix: 8
    nsize: 2
    requires: parmetis
    args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1
    localrunfiles: pOption
    output_file: output/pipes_8.out

  test:
    suffix: 9
    nsize: 2
    args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view
    localrunfiles: pOption
    output_file: output/pipes_9.out

  test:
    suffix: 10
    nsize: 2
    args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view
    localrunfiles: pOption
    output_file: output/pipes_10.out

TEST*/