1 static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\\n"; 2 /* 3 Example: mpiexec -n <np> ./pipes -ts_max_steps 10 4 */ 5 6 #include "wash.h" 7 8 /* 9 WashNetworkDistribute - proc[0] distributes sequential wash object 10 Input Parameters: 11 . comm - MPI communicator 12 . wash - wash context with all network data in proc[0] 13 14 Output Parameter: 15 . wash - wash context with nedge, nvertex and edgelist distributed 16 17 Note: The routine is used for testing parallel generation of dmnetwork, then redistribute. 18 */ 19 PetscErrorCode WashNetworkDistribute(MPI_Comm comm, Wash wash) 20 { 21 PetscMPIInt rank, size, tag = 0; 22 PetscInt i, e, v, numEdges, numVertices, nedges, *eowners = NULL, estart, eend, *vtype = NULL, nvertices; 23 PetscInt *edgelist = wash->edgelist, *nvtx = NULL, *vtxDone = NULL; 24 25 PetscFunctionBegin; 26 PetscCallMPI(MPI_Comm_size(comm, &size)); 27 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS); 28 29 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 30 numEdges = wash->nedge; 31 numVertices = wash->nvertex; 32 33 /* (1) all processes get global and local number of edges */ 34 PetscCallMPI(MPI_Bcast(&numEdges, 1, MPIU_INT, 0, comm)); 35 nedges = numEdges / size; /* local nedges */ 36 if (rank == 0) nedges += numEdges - size * (numEdges / size); 37 wash->Nedge = numEdges; 38 wash->nedge = nedges; 39 /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges)); */ 40 41 PetscCall(PetscCalloc3(size + 1, &eowners, size, &nvtx, numVertices, &vtxDone)); 42 PetscCallMPI(MPI_Allgather(&nedges, 1, MPIU_INT, eowners + 1, 1, MPIU_INT, PETSC_COMM_WORLD)); 43 eowners[0] = 0; 44 for (i = 2; i <= size; i++) eowners[i] += eowners[i - 1]; 45 46 estart = eowners[rank]; 47 eend = eowners[rank + 1]; 48 /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend)); */ 49 50 /* (2) distribute row block edgelist to all 
processors */ 51 if (rank == 0) { 52 vtype = wash->vtype; 53 for (i = 1; i < size; i++) { 54 /* proc[0] sends edgelist to proc[i] */ 55 PetscCallMPI(MPI_Send(edgelist + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm)); 56 57 /* proc[0] sends vtype to proc[i] */ 58 PetscCallMPI(MPI_Send(vtype + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm)); 59 } 60 } else { 61 MPI_Status status; 62 PetscCall(PetscMalloc1(2 * (eend - estart), &vtype)); 63 PetscCall(PetscMalloc1(2 * (eend - estart), &edgelist)); 64 65 PetscCallMPI(MPI_Recv(edgelist, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status)); 66 PetscCallMPI(MPI_Recv(vtype, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status)); 67 } 68 69 wash->edgelist = edgelist; 70 71 /* (3) all processes get global and local number of vertices, without ghost vertices */ 72 if (rank == 0) { 73 for (i = 0; i < size; i++) { 74 for (e = eowners[i]; e < eowners[i + 1]; e++) { 75 v = edgelist[2 * e]; 76 if (!vtxDone[v]) { 77 nvtx[i]++; 78 vtxDone[v] = 1; 79 } 80 v = edgelist[2 * e + 1]; 81 if (!vtxDone[v]) { 82 nvtx[i]++; 83 vtxDone[v] = 1; 84 } 85 } 86 } 87 } 88 PetscCallMPI(MPI_Bcast(&numVertices, 1, MPIU_INT, 0, PETSC_COMM_WORLD)); 89 PetscCallMPI(MPI_Scatter(nvtx, 1, MPIU_INT, &nvertices, 1, MPIU_INT, 0, PETSC_COMM_WORLD)); 90 PetscCall(PetscFree3(eowners, nvtx, vtxDone)); 91 92 wash->Nvertex = numVertices; 93 wash->nvertex = nvertices; 94 wash->vtype = vtype; 95 PetscFunctionReturn(PETSC_SUCCESS); 96 } 97 98 PetscErrorCode WASHIFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx) 99 { 100 Wash wash = (Wash)ctx; 101 DM networkdm; 102 Vec localX, localXdot, localF, localXold; 103 const PetscInt *cone; 104 PetscInt vfrom, vto, offsetfrom, offsetto, varoffset; 105 PetscInt v, vStart, vEnd, e, eStart, eEnd; 106 PetscInt nend, type; 107 PetscBool ghost; 108 PetscScalar *farr, *juncf, *pipef; 109 PetscReal dt; 110 Pipe pipe; 111 PipeField *pipex, *pipexdot, *juncx; 112 
Junction junction; 113 DMDALocalInfo info; 114 const PetscScalar *xarr, *xdotarr, *xoldarr; 115 116 PetscFunctionBegin; 117 localX = wash->localX; 118 localXdot = wash->localXdot; 119 120 PetscCall(TSGetSolution(ts, &localXold)); 121 PetscCall(TSGetDM(ts, &networkdm)); 122 PetscCall(TSGetTimeStep(ts, &dt)); 123 PetscCall(DMGetLocalVector(networkdm, &localF)); 124 125 /* Set F and localF as zero */ 126 PetscCall(VecSet(F, 0.0)); 127 PetscCall(VecSet(localF, 0.0)); 128 129 /* Update ghost values of locaX and locaXdot */ 130 PetscCall(DMGlobalToLocalBegin(networkdm, X, INSERT_VALUES, localX)); 131 PetscCall(DMGlobalToLocalEnd(networkdm, X, INSERT_VALUES, localX)); 132 133 PetscCall(DMGlobalToLocalBegin(networkdm, Xdot, INSERT_VALUES, localXdot)); 134 PetscCall(DMGlobalToLocalEnd(networkdm, Xdot, INSERT_VALUES, localXdot)); 135 136 PetscCall(VecGetArrayRead(localX, &xarr)); 137 PetscCall(VecGetArrayRead(localXdot, &xdotarr)); 138 PetscCall(VecGetArrayRead(localXold, &xoldarr)); 139 PetscCall(VecGetArray(localF, &farr)); 140 141 /* junction->type == JUNCTION: 142 juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout) 143 junction->type == RESERVOIR (upper stream): 144 juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout) 145 junction->type == VALVE (down stream): 146 juncf[0] = -qJ + sum(qin); juncf[1] = qJ 147 */ 148 /* Vertex/junction initialization */ 149 PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd)); 150 for (v = vStart; v < vEnd; v++) { 151 PetscCall(DMNetworkIsGhostVertex(networkdm, v, &ghost)); 152 if (ghost) continue; 153 154 PetscCall(DMNetworkGetComponent(networkdm, v, 0, &type, (void **)&junction, NULL)); 155 PetscCall(DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &varoffset)); 156 juncx = (PipeField *)(xarr + varoffset); 157 juncf = (PetscScalar *)(farr + varoffset); 158 159 juncf[0] = -juncx[0].q; 160 juncf[1] = juncx[0].q; 161 162 if (junction->type == RESERVOIR) { /* upstream reservoir */ 163 juncf[0] = juncx[0].h - wash->H0; 164 } 165 } 
166 167 /* Edge/pipe */ 168 PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd)); 169 for (e = eStart; e < eEnd; e++) { 170 PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL)); 171 PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset)); 172 pipex = (PipeField *)(xarr + varoffset); 173 pipexdot = (PipeField *)(xdotarr + varoffset); 174 pipef = (PetscScalar *)(farr + varoffset); 175 176 /* Get some data into the pipe structure: note, some of these operations 177 * might be redundant. Will it consume too much time? */ 178 pipe->dt = dt; 179 pipe->xold = (PipeField *)(xoldarr + varoffset); 180 181 /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */ 182 PetscCall(DMDAGetLocalInfo(pipe->da, &info)); 183 PetscCall(PipeIFunctionLocal_Lax(&info, t, pipex, pipexdot, pipef, pipe)); 184 185 /* Get boundary values from connected vertices */ 186 PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone)); 187 vfrom = cone[0]; /* local ordering */ 188 vto = cone[1]; 189 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vfrom, ALL_COMPONENTS, &offsetfrom)); 190 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto)); 191 192 /* Evaluate upstream boundary */ 193 PetscCall(DMNetworkGetComponent(networkdm, vfrom, 0, &type, (void **)&junction, NULL)); 194 PetscCheck(junction->type == JUNCTION || junction->type == RESERVOIR, PETSC_COMM_SELF, PETSC_ERR_SUP, "junction type is not supported"); 195 juncx = (PipeField *)(xarr + offsetfrom); 196 juncf = (PetscScalar *)(farr + offsetfrom); 197 198 pipef[0] = pipex[0].h - juncx[0].h; 199 juncf[1] -= pipex[0].q; 200 201 /* Evaluate downstream boundary */ 202 PetscCall(DMNetworkGetComponent(networkdm, vto, 0, &type, (void **)&junction, NULL)); 203 PetscCheck(junction->type == JUNCTION || junction->type == VALVE, PETSC_COMM_SELF, PETSC_ERR_SUP, "junction type is not supported"); 204 juncx = (PipeField *)(xarr + offsetto); 205 juncf = (PetscScalar 
*)(farr + offsetto); 206 nend = pipe->nnodes - 1; 207 208 pipef[2 * nend + 1] = pipex[nend].h - juncx[0].h; 209 juncf[0] += pipex[nend].q; 210 } 211 212 PetscCall(VecRestoreArrayRead(localX, &xarr)); 213 PetscCall(VecRestoreArrayRead(localXdot, &xdotarr)); 214 PetscCall(VecRestoreArray(localF, &farr)); 215 216 PetscCall(DMLocalToGlobalBegin(networkdm, localF, ADD_VALUES, F)); 217 PetscCall(DMLocalToGlobalEnd(networkdm, localF, ADD_VALUES, F)); 218 PetscCall(DMRestoreLocalVector(networkdm, &localF)); 219 /* 220 PetscCall(PetscPrintf(PETSC_COMM_WORLD("F:\n")); 221 PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD)); 222 */ 223 PetscFunctionReturn(PETSC_SUCCESS); 224 } 225 226 PetscErrorCode WASHSetInitialSolution(DM networkdm, Vec X, Wash wash) 227 { 228 PetscInt k, nx, vkey, vfrom, vto, offsetfrom, offsetto; 229 PetscInt type, varoffset; 230 PetscInt e, eStart, eEnd; 231 Vec localX; 232 PetscScalar *xarr; 233 Pipe pipe; 234 Junction junction; 235 const PetscInt *cone; 236 const PetscScalar *xarray; 237 238 PetscFunctionBegin; 239 PetscCall(VecSet(X, 0.0)); 240 PetscCall(DMGetLocalVector(networkdm, &localX)); 241 PetscCall(VecGetArray(localX, &xarr)); 242 243 /* Edge */ 244 PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd)); 245 for (e = eStart; e < eEnd; e++) { 246 PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset)); 247 PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL)); 248 249 /* set initial values for this pipe */ 250 PetscCall(PipeComputeSteadyState(pipe, wash->Q0, wash->H0)); 251 PetscCall(VecGetSize(pipe->x, &nx)); 252 253 PetscCall(VecGetArrayRead(pipe->x, &xarray)); 254 /* copy pipe->x to xarray */ 255 for (k = 0; k < nx; k++) (xarr + varoffset)[k] = xarray[k]; 256 257 /* set boundary values into vfrom and vto */ 258 PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone)); 259 vfrom = cone[0]; /* local ordering */ 260 vto = cone[1]; 261 PetscCall(DMNetworkGetLocalVecOffset(networkdm, vfrom, 
                                         ALL_COMPONENTS, &offsetfrom));
    PetscCall(DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto));

    /* if vfrom is a head vertex: */
    PetscCall(DMNetworkGetComponent(networkdm, vfrom, 0, &vkey, (void **)&junction, NULL));
    if (junction->type == RESERVOIR) (xarr + offsetfrom)[1] = wash->H0; /* 1st H */

    /* if vto is an end vertex: */
    PetscCall(DMNetworkGetComponent(networkdm, vto, 0, &vkey, (void **)&junction, NULL));
    if (junction->type == VALVE) (xarr + offsetto)[0] = wash->QL; /* last Q */
    PetscCall(VecRestoreArrayRead(pipe->x, &xarray));
  }

  PetscCall(VecRestoreArray(localX, &xarr));
  /* ADD_VALUES: each owned entry is written by exactly one rank, ghosts sum into owners */
  PetscCall(DMLocalToGlobalBegin(networkdm, localX, ADD_VALUES, X));
  PetscCall(DMLocalToGlobalEnd(networkdm, localX, ADD_VALUES, X));
  PetscCall(DMRestoreLocalVector(networkdm, &localX));

#if 0
  PetscInt N;
  PetscCall(VecGetSize(X,&N));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N));
  PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* TSDMNetworkMonitor - TS monitor callback: forwards the solution to the DMNetwork monitor.
   step and t are unused; the context is the DMNetworkMonitor created in main(). */
PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context)
{
  DMNetworkMonitor monitor;

  PetscFunctionBegin;
  monitor = (DMNetworkMonitor)context;
  PetscCall(DMNetworkMonitorView(monitor, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  PipesView - view the pipe variables of X three ways, exercising DMNetworkCreateIS:
  (1) only the q-variable, (2) only the h-variable, (3) all pipe variables.
  Input: networkdm, the component key of the pipes, and the global solution X.
*/
PetscErrorCode PipesView(DM networkdm, PetscInt KeyPipe, Vec X)
{
  PetscInt   i, numkeys = 1, *blocksize, *numselectedvariable, **selectedvariables, n;
  IS         isfrom_q, isfrom_h, isfrom;
  Vec        Xto;
  VecScatter ctx;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));

  /* 1. Create isfrom_q for q-variable of pipes */
  PetscCall(PetscMalloc3(numkeys, &blocksize, numkeys, &numselectedvariable, numkeys, &selectedvariables));
  for (i = 0; i < numkeys; i++) {
    blocksize[i]           = 2; /* each pipe node carries (q,h) */
    numselectedvariable[i] = 1;
    PetscCall(PetscMalloc1(numselectedvariable[i], &selectedvariables[i]));
    selectedvariables[i][0] = 0; /* q-variable */
  }
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_q));

  /* 2. Create Xto and isto */
  PetscCall(ISGetLocalSize(isfrom_q, &n));
  PetscCall(VecCreate(comm, &Xto));
  PetscCall(VecSetSizes(Xto, n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto, 0.0));

  /* 3. Create scatter (NULL isto = natural order in Xto) */
  PetscCall(VecScatterCreate(X, isfrom_q, Xto, NULL, &ctx));

  /* 4. Scatter to Xq */
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_q));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xq:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));

  /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh
     (Xto is reused: q and h index sets have the same local size) */
  for (i = 0; i < numkeys; i++) selectedvariables[i][0] = 1; /* h-variable */
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_h));

  PetscCall(VecScatterCreate(X, isfrom_h, Xto, NULL, &ctx));
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom_h));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xh:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&Xto));

  /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes.
     Both the blocksize=-1 path and the NULL-arguments path are exercised deliberately. */
  for (i = 0; i < numkeys; i++) blocksize[i] = -1; /* select all the variables of the i-th component */
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, NULL, NULL, &isfrom));
  PetscCall(ISDestroy(&isfrom));
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, NULL, NULL, NULL, &isfrom));

  PetscCall(ISGetLocalSize(isfrom, &n));
  PetscCall(VecCreate(comm, &Xto));
  PetscCall(VecSetSizes(Xto, n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(Xto));
  PetscCall(VecSet(Xto, 0.0));

  PetscCall(VecScatterCreate(X, isfrom, Xto, NULL, &ctx));
  PetscCall(VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&ctx));
  PetscCall(ISDestroy(&isfrom));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Xpipes:\n"));
  PetscCall(VecView(Xto, PETSC_VIEWER_STDOUT_WORLD));

  /* 7. Free spaces */
  for (i = 0; i < numkeys; i++) PetscCall(PetscFree(selectedvariables[i]));
  PetscCall(PetscFree3(blocksize, numselectedvariable, selectedvariables));
  PetscCall(VecDestroy(&Xto));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ISJunctionsView - view the global and (on rank 0) local index sets of all
   junction variables, exercising DMNetworkCreateIS/DMNetworkCreateLocalIS. */
PetscErrorCode ISJunctionsView(DM networkdm, PetscInt KeyJunc)
{
  PetscInt    numkeys = 1;
  IS          isfrom;
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* Create a global isfrom for all junction variables */
  PetscCall(DMNetworkCreateIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "ISJunctions:\n"));
  PetscCall(ISView(isfrom, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&isfrom));

  /* Create a local isfrom for all junction variables; viewed only on rank 0 */
  PetscCall(DMNetworkCreateLocalIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom));
  if (rank == 0) {
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] ISLocalJunctions:\n", rank));
    PetscCall(ISView(isfrom, PETSC_VIEWER_STDOUT_SELF));
  }
  PetscCall(ISDestroy(&isfrom));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* WashNetworkCleanUp - free the edge/vertex setup data owned by the wash context;
   junction[]/pipe[] arrays exist only on rank 0 (allocated in WashNetworkCreate). */
PetscErrorCode WashNetworkCleanUp(Wash wash)
{
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(wash->comm, &rank));
  PetscCall(PetscFree(wash->edgelist));
  PetscCall(PetscFree(wash->vtype));
  if (rank == 0) PetscCall(PetscFree2(wash->junction, wash->pipe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  WashNetworkCreate - rank 0 builds the sequential network for the selected
  pipesCase (edge list, junction types), then optionally distributes it
  (-wash_distribute) via WashNetworkDistribute.
*/
PetscErrorCode WashNetworkCreate(MPI_Comm comm, PetscInt pipesCase, Wash *wash_ptr)
{
  PetscInt    npipes;
  PetscMPIInt rank;
  Wash        wash = NULL;
  PetscInt    i, numVertices, numEdges, *vtype;
  PetscInt   *edgelist;
  Junction    junctions = NULL;
  Pipe        pipes     = NULL;
  PetscBool   washdist  = PETSC_TRUE;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  PetscCall(PetscCalloc1(1, &wash));
  wash->comm       = comm;
  *wash_ptr        = wash;
  wash->Q0         = 0.477432; /* RESERVOIR */
  wash->H0         = 150.0;
  wash->HL         = 143.488; /* VALVE */
  wash->QL         = 0.0;
  wash->nnodes_loc = 0;

  /* non-root ranks keep zero edges/vertices until distribution */
  numVertices = 0;
  numEdges    = 0;
  edgelist    = NULL;

  /* proc[0] creates a sequential wash and edgelist */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Setup pipesCase %" PetscInt_FMT "\n", pipesCase));

  /* Set global number of pipes, edges, and junctions */
  /*-------------------------------------------------*/
  switch (pipesCase) {
  case 0:
    /* pipeCase 0: */
    /* =================================================
    (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
    ==================================================== */
    npipes = 3;
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-npipes", &npipes, NULL));
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    numVertices = 0;
    numEdges    = 0;
    edgelist    = NULL;
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      /* chain topology: edge i connects vertex i to vertex i+1 */
      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      for (i = 0; i < numEdges; i++) {
        edgelist[2 * i]     = i;
        edgelist[2 * i + 1] = i + 1;
      }

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));

      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type               = RESERVOIR;
      junctions[numVertices - 1].type = VALVE;
    }
    break;
  case 1:
    /* pipeCase 1: */
    /* ==========================
                 v2 (VALVE)
                 ^
                 |
                 E2
                 |
     v0 --E0--> v3--E1--> v1
  (RESERVOIR)              (RESERVOIR)
    ============================= */
    npipes        = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      edgelist[0] = 0;
      edgelist[1] = 3; /* edge[0] */
      edgelist[2] = 3;
      edgelist[3] = 1; /* edge[1] */
      edgelist[4] = 3;
      edgelist[5] = 2; /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));
      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = VALVE;
    }
    break;
  case 2:
    /* pipeCase 2: */
    /* ==========================
    (RESERVOIR) v2--> E2
                       |
      v0 --E0--> v3--E1--> v1
    (RESERVOIR)          (VALVE)
    ============================= */

    /* Set application parameters -- to be used in function evaluations */
    npipes        = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCall(PetscCalloc1(2 * numEdges, &edgelist));
      edgelist[0] = 0;
      edgelist[1] = 3; /* edge[0] */
      edgelist[2] = 3;
      edgelist[3] = 1; /* edge[1] */
      edgelist[4] = 2;
      edgelist[5] = 3; /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCall(PetscCalloc2(numVertices, &junctions, numEdges, &pipes));
      /* vertex */
      for (i = 0; i < numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = RESERVOIR;
    }
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "not done yet");
  }

  /* set edge global id (no-op on ranks != 0 since numEdges stays 0 there) */
  for (i = 0; i < numEdges; i++) pipes[i].id = i;

  if (rank == 0) { /* set vtype for proc[0]: one junction-type entry per edge endpoint */
    PetscInt v;
    PetscCall(PetscMalloc1(2 * numEdges, &vtype));
    for (i = 0; i < 2 * numEdges; i++) {
      v        = edgelist[i];
      vtype[i] = junctions[v].type;
    }
    wash->vtype = vtype;
  }

  *wash_ptr      = wash;
  wash->nedge    = numEdges;
  wash->nvertex  = numVertices;
  wash->edgelist = edgelist;
  wash->junction = junctions;
  wash->pipe     = pipes;

  /* Distribute edgelist to other processors */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-wash_distribute", &washdist, NULL));
  if (washdist) {
    /*
     PetscCall(PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n"));
     */
    PetscCall(WashNetworkDistribute(comm, wash));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------- */
int main(int argc, char **argv)
{
  Wash              wash;
  Junction          junctions, junction;
  Pipe              pipe, pipes;
  PetscInt          KeyPipe, KeyJunction, *edgelist = NULL, *vtype = NULL;
  PetscInt          i, e, v, eStart, eEnd, vStart, vEnd, key, vkey, type;
  PetscInt          steps = 1, nedges, nnodes = 6;
  const PetscInt   *cone;
  DM                networkdm;
  PetscMPIInt       size, rank;
  PetscReal         ftime;
  Vec               X;
  TS                ts;
  TSConvergedReason reason;
  PetscBool         viewpipes, viewjuncs, monipipes = PETSC_FALSE, userJac = PETSC_TRUE, viewdm = PETSC_FALSE, viewX = PETSC_FALSE;
  PetscInt          pipesCase = 0;
  DMNetworkMonitor  monitor;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, "pOption", help));

  /* Read runtime options */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-case", &pipesCase, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-user_Jac", &userJac, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pipe_monitor", &monipipes, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewdm", &viewdm, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewX", &viewX, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-npipenodes", &nnodes, NULL));

  /* Create networkdm */
  /*------------------*/
  PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &networkdm));
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* graphical monitor is sequential-only */
  if (size == 1 && monipipes) PetscCall(DMNetworkMonitorCreate(networkdm, &monitor));

  /* Register the components in the network */
  PetscCall(DMNetworkRegisterComponent(networkdm, "junctionstruct", sizeof(struct _p_Junction), &KeyJunction));
  PetscCall(DMNetworkRegisterComponent(networkdm, "pipestruct", sizeof(struct _p_Pipe), &KeyPipe));

  /* Create a distributed wash network (user-specific) */
  PetscCall(WashNetworkCreate(comm, pipesCase, &wash));
  nedges    = wash->nedge;
  edgelist  = wash->edgelist;
  vtype     = wash->vtype;
  junctions = wash->junction;
  pipes     = wash->pipe;

  /* Set up the network layout */
  PetscCall(DMNetworkSetNumSubNetworks(networkdm, PETSC_DECIDE, 1));
  PetscCall(DMNetworkAddSubnetwork(networkdm, NULL, nedges, edgelist, NULL));

  PetscCall(DMNetworkLayoutSetUp(networkdm));

  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));
  /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd)); */

  if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
    /* vEnd - vStart = nvertices + number of ghost vertices! */
    PetscCall(PetscCalloc2(vEnd - vStart, &junctions, nedges, &pipes));
  }

  /* Add Pipe component and number of variables to all local edges */
  for (e = eStart; e < eEnd; e++) {
    pipes[e - eStart].nnodes = nnodes;
    /* 2 dofs (q,h) per pipe node */
    PetscCall(DMNetworkAddComponent(networkdm, e, KeyPipe, &pipes[e - eStart], 2 * pipes[e - eStart].nnodes));

    if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
      pipes[e - eStart].length = 600.0;
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e - eStart].nnodes, 0, 2, 0.0, pipes[e - eStart].length, -0.8, 0.8, PETSC_TRUE));
      PetscCall(DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e - eStart].nnodes, 1, 2, 0.0, pipes[e - eStart].length, -400.0, 800.0, PETSC_TRUE));
    }
  }

  /* Add Junction component and number of variables (q,h) to all local vertices */
  for (v = vStart; v < vEnd; v++) PetscCall(DMNetworkAddComponent(networkdm, v, KeyJunction, &junctions[v - vStart], 2));

  if (size > 1) { /* must be called before DMSetUp()???. Other partitioners do not work yet??? -- cause crash in proc[0]! */
    DM               plexdm;
    PetscPartitioner part;
    PetscCall(DMNetworkGetPlex(networkdm, &plexdm));
    PetscCall(DMPlexGetPartitioner(plexdm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_csr_alg", "mat")); /* for parmetis */
  }

  /* Set up DM for use */
  PetscCall(DMSetUp(networkdm));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nOriginal networkdm, DMView:\n"));
    PetscCall(DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Set user physical parameters to the components: junction types come from
     the per-endpoint vtype[] built in WashNetworkCreate/Distribute */
  for (e = eStart; e < eEnd; e++) {
    PetscCall(DMNetworkGetConnectedVertices(networkdm, e, &cone));
    /* vfrom */
    PetscCall(DMNetworkGetComponent(networkdm, cone[0], 0, &vkey, (void **)&junction, NULL));
    junction->type = (VertexType)vtype[2 * e];

    /* vto */
    PetscCall(DMNetworkGetComponent(networkdm, cone[1], 0, &vkey, (void **)&junction, NULL));
    junction->type = (VertexType)vtype[2 * e + 1];
  }

  PetscCall(WashNetworkCleanUp(wash));

  /* Network partitioning and distribution of data */
  PetscCall(DMNetworkDistribute(&networkdm, 0));
  if (viewdm) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nAfter DMNetworkDistribute, DMView:\n"));
    PetscCall(DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* create vectors */
  PetscCall(DMCreateGlobalVector(networkdm, &X));
  PetscCall(DMCreateLocalVector(networkdm, &wash->localX));
  PetscCall(DMCreateLocalVector(networkdm, &wash->localXdot));

  /* PipeSetUp -- each process only sets its own pipes */
  /*---------------------------------------------------*/
  PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));

  userJac = PETSC_TRUE;
  PetscCall(DMNetworkHasJacobian(networkdm, userJac, userJac));
  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  for (e = eStart; e < eEnd; e++) { /* each edge has only one component, pipe */
    PetscCall(DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL));

    wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, will be used by PipesView() */
    PetscCall(PipeSetParameters(pipe, 600.0, /* length */
                                0.5,         /* diameter */
                                1200.0,      /* a */
                                0.018));     /* friction */
    PetscCall(PipeSetUp(pipe));

    if (userJac) {
      /* Create Jacobian matrix nonzero structures for a Pipe */
      Mat *J;
      PetscCall(PipeCreateJacobian(pipe, NULL, &J));
      PetscCall(DMNetworkEdgeSetMatrix(networkdm, e, J));
    }
  }

  if (userJac) {
    PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));
    for (v = vStart; v < vEnd; v++) {
      Mat *J;
      PetscCall(JunctionCreateJacobian(networkdm, v, NULL, &J));
      PetscCall(DMNetworkVertexSetMatrix(networkdm, v, J));

      /* keep the pointer on the junction so it can be freed in the cleanup loop below */
      PetscCall(DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL));
      junction->jacobian = J;
    }
  }

  /* Setup solver */
  /*--------------------------------------------------------*/
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  PetscCall(TSSetDM(ts, (DM)networkdm));
  PetscCall(TSSetIFunction(ts, NULL, WASHIFunction, wash));

  PetscCall(TSSetMaxSteps(ts, steps));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetTimeStep(ts, 0.1));
  PetscCall(TSSetType(ts, TSBEULER));
  if (size == 1 && monipipes) PetscCall(TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL));
  PetscCall(TSSetFromOptions(ts));

  PetscCall(WASHSetInitialSolution(networkdm, X, wash));

  PetscCall(TSSolve(ts, X));

  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s at time %g after %" PetscInt_FMT " steps\n", TSConvergedReasons[reason], (double)ftime, steps));
  if (viewX) PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD));

  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-Jac_view", &viewpipes, NULL));
  if (viewpipes) {
    SNES snes;
    Mat  Jac;
    PetscCall(TSGetSNES(ts, &snes));
    PetscCall(SNESGetJacobian(snes, &Jac, NULL, NULL, NULL));
    PetscCall(MatView(Jac, PETSC_VIEWER_DRAW_WORLD));
  }

  /* View solutions */
  /* -------------- */
  viewpipes = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pipe_view", &viewpipes, NULL));
  if (viewpipes) PetscCall(PipesView(networkdm, KeyPipe, X));

  /* Test IS */
  viewjuncs = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-isjunc_view", &viewjuncs, NULL));
  if (viewjuncs) PetscCall(ISJunctionsView(networkdm, KeyJunction));

  /* Free spaces */
  /* ----------- */
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&wash->localX));
  PetscCall(VecDestroy(&wash->localXdot));

  /* Destroy objects from each pipe that are created in PipeSetUp() */
  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  for (i = eStart; i < eEnd; i++) {
    PetscCall(DMNetworkGetComponent(networkdm, i, 0, &key, (void **)&pipe, NULL));
    PetscCall(PipeDestroy(&pipe));
  }
  if (userJac) {
    for (v = vStart; v < vEnd; v++) {
      PetscCall(DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL));
      PetscCall(JunctionDestroyJacobian(networkdm, v, junction));
    }
  }

  if (size == 1 && monipipes) PetscCall(DMNetworkMonitorDestroy(&monitor));
  PetscCall(DMDestroy(&networkdm));
  PetscCall(PetscFree(wash));

  /* non-root ranks allocated these locally after DMNetworkLayoutSetUp() */
  if (rank) PetscCall(PetscFree2(junctions, pipes));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  build:
    depends: pipeInterface.c pipeImpls.c
    requires: mumps

  test:
    args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
    localrunfiles: pOption
853 output_file: output/pipes_1.out 854 855 test: 856 suffix: 2 857 nsize: 2 858 args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX 859 localrunfiles: pOption 860 output_file: output/pipes_2.out 861 862 test: 863 suffix: 3 864 nsize: 2 865 args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX 866 localrunfiles: pOption 867 output_file: output/pipes_3.out 868 869 test: 870 suffix: 4 871 args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX 872 localrunfiles: pOption 873 output_file: output/pipes_4.out 874 875 test: 876 suffix: 5 877 nsize: 3 878 args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX 879 localrunfiles: pOption 880 output_file: output/pipes_5.out 881 882 test: 883 suffix: 6 884 nsize: 2 885 args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX 886 localrunfiles: pOption 887 output_file: output/pipes_6.out 888 889 test: 890 suffix: 7 891 nsize: 2 892 args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX 893 localrunfiles: pOption 894 output_file: output/pipes_7.out 895 896 test: 897 suffix: 8 898 nsize: 2 899 requires: parmetis 900 args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1 901 localrunfiles: pOption 902 output_file: output/pipes_8.out 903 904 test: 905 suffix: 9 906 nsize: 2 907 args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view 908 localrunfiles: pOption 909 output_file: output/pipes_9.out 910 911 test: 912 
suffix: 10 913 nsize: 2 914 args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view 915 localrunfiles: pOption 916 output_file: output/pipes_10.out 917 918 TEST*/ 919