#include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
#include <petscdmda.h>
#include <petscviewer.h>

/* Class id and logging events for the Characteristic package; registered in
   CharacteristicInitializePackage() (defined elsewhere) */
PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

/* Helpers for locating neighboring subdomain ranks of a 2D DMDA (definitions below) */
static PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
static PetscMPIInt    DMDAGetNeighborRelative(DM, PetscReal, PetscReal);

/* Heap sort used to order queued characteristic points by owning process */
static PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
static PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

/* Views a Characteristic object; only ASCII viewers are handled specially
   (note: delegates to the type-specific view method only for NON-ASCII viewers,
   mirroring the original control flow) */
static PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool isascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is given */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(c, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (!isascii) PetscTryTypeMethod(c, view, viewer);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicDestroy - Destroys a `Characteristic` context created with `CharacteristicCreate()`

  Collective

  Input Parameter:
. 
c - the `Characteristic` context

  Level: beginner

.seealso: `Characteristic`, `CharacteristicCreate()`
@*/
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  /* Only destroy when the last reference goes away */
  if (--((PetscObject)*c)->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Give the implementation a chance to free its private data first */
  PetscTryTypeMethod(*c, destroy);
  /* NOTE(review): itemType is freed unconditionally; if a context is destroyed before
     setup ever committed the MPI datatype this may hand an invalid handle to
     MPI_Type_free() -- confirm setup always precedes destroy in supported usage */
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  /* Frees the header and sets *c to NULL */
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicCreate - Creates a `Characteristic` context for use with the Method of Characteristics

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. 
c - the `Characteristic` context

  Level: beginner

.seealso: `Characteristic`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  PetscFunctionBegin;
  PetscAssertPointer(c, 2);
  *c = NULL;
  /* Make sure the class id, events, and registered types exist */
  PetscCall(CharacteristicInitializePackage());

  PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
  *c = newC;

  /* Explicitly zero/NULL every field; the setters and SetUp fill these in later */
  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicSetType - Builds Characteristic for a particular solver.

  Logically Collective

  Input Parameters:
+ c    - the method of characteristics context
- type - a known method

  Options Database Key:
. 
-characteristic_type <method> - Sets the method; use -help for a list
   of available methods

  Level: intermediate

  Note:
  See "include/petsccharacteristic.h" for available methods

.seealso: [](ch_ts), `CharacteristicType`
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  PetscAssertPointer(type, 2);

  /* No-op if the requested type is already set */
  PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscUseTypeMethod(c, destroy);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  /* Look up the constructor for the requested type in the registry */
  PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  /* Changing type invalidates any previous setup */
  c->setupcalled = PETSC_FALSE;
  PetscCall((*r)(c));
  PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicSetUp - Sets up the internal data structures for the
  later use of a `Characteristic`.

  Collective

  Input Parameter:
. 
c - context obtained from CharacteristicCreate() 191 192 Level: developer 193 194 .seealso: [](ch_ts), `Characteristic`, `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()` 195 @*/ 196 PetscErrorCode CharacteristicSetUp(Characteristic c) 197 { 198 PetscFunctionBegin; 199 PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1); 200 201 if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA)); 202 203 if (c->setupcalled) PetscFunctionReturn(PETSC_SUCCESS); 204 205 PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL)); 206 if (!c->setupcalled) PetscUseTypeMethod(c, setup); 207 PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL)); 208 c->setupcalled = PETSC_TRUE; 209 PetscFunctionReturn(PETSC_SUCCESS); 210 } 211 212 /*@C 213 CharacteristicRegister - Adds an approarch to the method of characteristics package. 214 215 Not Collective, No Fortran Support 216 217 Input Parameters: 218 + sname - name of a new approach 219 - function - routine to create method context 220 221 Level: advanced 222 223 Example Usage: 224 .vb 225 CharacteristicRegister("my_char", MyCharCreate); 226 .ve 227 228 Then, your Characteristic type can be chosen with the procedural interface via 229 .vb 230 CharacteristicCreate(MPI_Comm, Characteristic* &char); 231 CharacteristicSetType(char,"my_char"); 232 .ve 233 or at runtime via the option 234 .vb 235 -characteristic_type my_char 236 .ve 237 238 Notes: 239 `CharacteristicRegister()` may be called multiple times to add several approaches. 

.seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  PetscCall(CharacteristicInitializePackage());
  PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Sets the velocity field, DMDA, component selection, and interpolation callback
   (global-vector variant) used by CharacteristicSolve().
   NOTE(review): no PetscValidHeaderSpecific/pointer checks are performed here,
   unlike the other public entry points -- confirm this is intentional */
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Same as CharacteristicSetVelocityInterpolation() but the callback receives a raw
   local array (obtained from DMDAVecGetArray) instead of a global Vec */
PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Sets the advected field, DMDA, component selection, and interpolation callback
   (global-vector variant) used by CharacteristicSolve() */
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
#if 0
  PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Same as CharacteristicSetFieldInterpolation() but the callback receives a raw
   local array (obtained from DMDAVecGetArray) instead of a global Vec */
PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
#if 0
  PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicSolve - Apply the Method of Characteristics solver

  Collective

  Input Parameters:
+ c - context obtained from `CharacteristicCreate()`
. 
dt - the time-step
- solution - vector holding the solution

  Level: developer

.seealso: [](ch_ts), `Characteristic`, `CharacteristicCreate()`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;                                /* working characteristic point */
  DM                      da = c->velocityDA;                /* grid for the velocity field */
  Vec                     velocityLocal, velocityLocalOld;   /* ghosted local velocity vectors (local-interp path only) */
  Vec                     fieldLocal;                        /* ghosted local field vector (local-interp path only) */
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;                     /* (x,y) coordinates handed to interpolation callbacks */
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];                      /* ranks of the 3x3 subdomain neighborhood */
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscCall(PetscMalloc1(dim, &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /*
    PART 1, AT t-dt/2
  */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    /* Scatter ghosted copies of the current and previous velocity fields */
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  /* One characteristic point per owned grid node: trace back half a step using
     the velocity at the node itself (explicit Euler predictor) */
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

  /* Ship the non-local half-step positions to their owning ranks */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
  /* Calculate velocity at t_n+1/2 (local values); the result overwrites the
     point's (x,y) with the time-averaged velocity at the foot of the characteristic */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* Average the new and old velocity fields at the half-step point */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  /* Return the filled remote requests back to the requesting ranks */
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /*
    PART 2, AT t-dt
  */

  /* GET POSITION AT t_n (local values): take the full step using the half-time
     velocity stored in (Qi.x, Qi.y) by Part 1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Records the (9-entry) neighborhood rank table used to route characteristic points;
   replaces any previously stored table */
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  PetscFunctionBegin;
  PetscCall(PetscMPIIntCast(numNeighbors, &c->numNeighbors));
  PetscCall(PetscFree(c->neighbors));
  PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
  PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Appends a point to the queue; errors if the queue (sized by the type's setup) is full */
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  PetscFunctionBegin;
  PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
  c->queue[c->queueSize++] = *point;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* First half of the coordinate exchange: sorts the queue by owning process, exchanges
   per-neighbor counts, then posts receives for remote requests and sends this rank's
   requests. Completed by CharacteristicSendCoordinatesEnd(). */
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  /* Group queued points by destination so each neighbor gets one contiguous send */
  PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
  PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  /* Neighbor 0 is this rank itself, so count exchange starts at n = 1 */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&c->fillCount[n], 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&c->needCount[n], 1, MPIU_INT, 
c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0] = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  /* Prefix sums: remoteOffsets/localOffsets give each neighbor's slot in the
     remote/local queues */
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax += c->fillCount[n];
    c->localOffsets[n] = c->queueLocalMax;
    c->queueLocalMax += c->needCount[n];
  }
  /* HACK BEGIN: shift past the self-owned points that sort to the front of the queue */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
  else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
    PetscCallMPI(MPIU_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
    PetscCallMPI(MPIU_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Completes the point exchange started by CharacteristicSendCoordinatesBegin();
   after this returns, c->queueRemote holds the points other ranks want evaluated here */
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt n;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
#if 0
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  for (n = 0; n < c->queueRemoteSize; n++) PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* First half of the reply exchange: posts receives for this rank's outstanding
   requests and sends back the values filled into c->queueRemote */
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Completes the reply exchange and releases the remote request buffer */
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Free queue of requests from other procs */
  PetscCall(PetscFree(c->queueRemote));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  In-place heap sort of the queue by the .proc field (owning-neighbor index).
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
static PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  PetscFunctionBegin;
  if (0) { /* Check the order of the queue before sorting */
    PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
    for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
  }

  /* SORTING PHASE */
  for (n = (size / 2) - 1; n >= 0; n--) PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size */
  for (n = size - 1; n >= 1; n--) {
    /* Move the current max to the end and restore the heap property on the rest */
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    PetscCall(CharacteristicSiftDown(c, 
queue, 0, n - 1)); 676 } 677 if (0) { /* Check the order of the queue after sorting */ 678 PetscCall(PetscInfo(NULL, "Avter Heap sort\n")); 679 for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc)); 680 } 681 PetscFunctionReturn(PETSC_SUCCESS); 682 } 683 684 /* 685 Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html 686 */ 687 static PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom) 688 { 689 PetscBool done = PETSC_FALSE; 690 PetscInt maxChild; 691 CharacteristicPointDA2D temp; 692 693 PetscFunctionBegin; 694 while ((root * 2 <= bottom) && (!done)) { 695 if (root * 2 == bottom) maxChild = root * 2; 696 else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2; 697 else maxChild = root * 2 + 1; 698 699 if (queue[root].proc < queue[maxChild].proc) { 700 temp = queue[root]; 701 queue[root] = queue[maxChild]; 702 queue[maxChild] = temp; 703 root = maxChild; 704 } else done = PETSC_TRUE; 705 } 706 PetscFunctionReturn(PETSC_SUCCESS); 707 } 708 709 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */ 710 static PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[]) 711 { 712 DMBoundaryType bx, by; 713 PetscBool IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE; 714 MPI_Comm comm; 715 PetscMPIInt rank; 716 PetscMPIInt **procs, pi, pj, pim, pip, pjm, pjp, PIi, PJi; 717 PetscInt PI, PJ; 718 719 PetscFunctionBegin; 720 PetscCall(PetscObjectGetComm((PetscObject)da, &comm)); 721 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 722 PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL)); 723 PetscCall(PetscMPIIntCast(PI, &PIi)); 724 PetscCall(PetscMPIIntCast(PJ, &PJi)); 725 if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE; 726 if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE; 727 728 neighbors[0] = rank; 729 rank = 0; 730 PetscCall(PetscMalloc1(PJ, 
&procs)); 731 for (pj = 0; pj < PJ; pj++) { 732 PetscCall(PetscMalloc1(PI, &procs[pj])); 733 for (pi = 0; pi < PI; pi++) { 734 procs[pj][pi] = rank; 735 rank++; 736 } 737 } 738 739 pi = neighbors[0] % PI; 740 pj = neighbors[0] / PI; 741 pim = pi - 1; 742 if (pim < 0) pim = PIi - 1; 743 pip = (pi + 1) % PIi; 744 pjm = pj - 1; 745 if (pjm < 0) pjm = PJi - 1; 746 pjp = (pj + 1) % PJi; 747 748 neighbors[1] = procs[pj][pim]; 749 neighbors[2] = procs[pjp][pim]; 750 neighbors[3] = procs[pjp][pi]; 751 neighbors[4] = procs[pjp][pip]; 752 neighbors[5] = procs[pj][pip]; 753 neighbors[6] = procs[pjm][pip]; 754 neighbors[7] = procs[pjm][pi]; 755 neighbors[8] = procs[pjm][pim]; 756 757 if (!IPeriodic) { 758 if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0]; 759 if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0]; 760 } 761 762 if (!JPeriodic) { 763 if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0]; 764 if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0]; 765 } 766 767 for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj])); 768 PetscCall(PetscFree(procs)); 769 PetscFunctionReturn(PETSC_SUCCESS); 770 } 771 772 /* 773 SUBDOMAIN NEIGHBORHOOD PROCESS MAP: 774 2 | 3 | 4 775 __|___|__ 776 1 | 0 | 5 777 __|___|__ 778 8 | 7 | 6 779 | | 780 */ 781 static PetscMPIInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr) 782 { 783 DMDALocalInfo info; 784 PetscReal is, ie, js, je; 785 786 PetscCallAbort(PETSC_COMM_SELF, DMDAGetLocalInfo(da, &info)); 787 is = (PetscReal)info.xs - 0.5; 788 ie = (PetscReal)info.xs + info.xm - 0.5; 789 js = (PetscReal)info.ys - 0.5; 790 je = (PetscReal)info.ys + info.ym - 0.5; 791 792 if (ir >= is && ir <= ie) { /* center column */ 793 if (jr >= js && jr <= je) return 0; 794 else if (jr < js) return 7; 795 else return 3; 796 } else if (ir < is) { /* left column */ 797 if (jr >= js && jr <= je) return 1; 798 else if (jr < js) return 8; 799 else return 2; 800 } else 
{ /* right column */ 801 if (jr >= js && jr <= je) return 5; 802 else if (jr < js) return 6; 803 else return 4; 804 } 805 } 806