#include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

static PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
static PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);

static PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
static PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

/*
   CharacteristicView - Views a Characteristic context.

   Only non-ASCII viewers are forwarded to the type-specific view method;
   an ASCII viewer currently produces no output here.
*/
static PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(c, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (!iascii) PetscTryTypeMethod(c, view, viewer);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicDestroy - Destroys a Characteristic context created with CharacteristicCreate().

   Reference counted: only the last reference actually frees the object and its work arrays.
*/
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

  if ((*c)->ops->destroy) PetscCall((*(*c)->ops->destroy)((*c)));
  /* NOTE(review): itemType is freed unconditionally; assumes setup committed the datatype —
     confirm behavior when the object was created but never set up */
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicCreate - Creates a Characteristic context with every field
   explicitly zeroed/NULLed; the type (and hence the actual solver) is set later
   with CharacteristicSetType().
*/
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  PetscFunctionBegin;
  PetscAssertPointer(c, 2);
  *c = NULL;
  PetscCall(CharacteristicInitializePackage());

  PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  CharacteristicSetType - Builds Characteristic for a particular solver.

  Logically Collective

  Input Parameters:
+ c    - the method of characteristics context
- type - a known method

  Options Database Key:
. -characteristic_type <method> - Sets the method; use -help for a list
  of available methods

  Level: intermediate

  Notes:
  See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many different methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database.  This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application is taking responsibility for
  choosing the appropriate method.  In other words, this routine is
  not for beginners.

.seealso: [](ch_ts), `CharacteristicType`
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  PetscAssertPointer(type, 2);

  /* No-op when the object already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscUseTypeMethod(c, destroy);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  /* Changing the type invalidates any previous setup */
  c->setupcalled = 0;
  PetscCall((*r)(c));
  PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  CharacteristicSetUp - Sets up the internal data structures for the
  later use of an iterative solver.

  Collective

  Input Parameter:
. c - iterative context obtained from CharacteristicCreate()

  Level: developer

.seealso: [](ch_ts), `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);

  /* Default to the DMDA-based implementation when no type was chosen */
  if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

  /* setupcalled == 2 means setup is fully complete; nothing to do */
  if (c->setupcalled == 2) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  if (!c->setupcalled) PetscUseTypeMethod(c, setup);
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  c->setupcalled = 2;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  CharacteristicRegister - Adds a solver to the method of characteristics package.

  Not Collective

  Input Parameters:
+ sname    - name of a new user-defined solver
- function - routine to create method context

  Level: advanced

  Example Usage:
.vb
  CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
  CharacteristicCreate(MPI_Comm, Characteristic* &char);
  CharacteristicSetType(char,"my_char");
.ve
  or at runtime via the option
.vb
  -characteristic_type my_char
.ve

  Notes:
  CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  PetscCall(CharacteristicInitializePackage());
  PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSetVelocityInterpolation - Registers the DMDA, vectors and callback used
   to interpolate the velocity field from global vectors.

   Note: only pointers are cached (including the components[] array); the caller's
   data must remain valid for the lifetime of the solve.
*/
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSetVelocityInterpolationLocal - Same as CharacteristicSetVelocityInterpolation(),
   but the callback receives a raw local array (from DMDAVecGetArray) instead of a Vec.
   When set, CharacteristicSolve() takes the ghosted local-vector path.
*/
PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSetFieldInterpolation - Registers the DMDA, vector and callback used
   to interpolate the transported field. Pointers are cached, not copied.
*/
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
#if 0
  PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSetFieldInterpolationLocal - Local-array variant of
   CharacteristicSetFieldInterpolation(); see that routine.
*/
PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  PetscFunctionBegin;
#if 0
  PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSolve - Advects the registered field backwards along the characteristics
   of the registered velocity over one time step dt, writing the result into solution.

   Two-phase algorithm on a 2D DMDA:
     Part 1: trace back dt/2, interpolate a time-averaged velocity there
             (local points directly, off-process points via MPI exchange);
     Part 2: trace back the full dt with that velocity and interpolate the field
             at the departure points, again exchanging off-process requests.

   NOTE(review): Qi.x/Qi.y are reused to carry the averaged *velocity* between
   Part 1 and Part 2 — they are not always coordinates.
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  /* Reset the request queue; every solve rebuilds it from scratch */
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscCall(PetscMalloc1(dim, &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    /* Local interpolation needs ghosted local vectors and raw arrays */
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      /* Euler step backwards by half a time step */
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
  /* Calculate velocity at t_n+1/2 (local values) */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* From here on Qi.x/Qi.y carry the time-averaged velocity, not a position */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    /* Qi.x/Qi.y currently hold the averaged velocity; convert back to a position */
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSetNeighbors - Stores a private copy of the neighbor rank table
   (index 0 is this rank; see the neighborhood map above DMDAGetNeighborsRank()).
*/
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  PetscFunctionBegin;
  c->numNeighbors = numNeighbors;
  PetscCall(PetscFree(c->neighbors));
  PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
  PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicAddPoint - Appends a point to the request queue; errors if the
   queue capacity established at setup is exceeded.
*/
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  PetscFunctionBegin;
  PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
  c->queue[c->queueSize++] = *point;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CharacteristicSendCoordinatesBegin - Sorts the queue by owning neighbor, exchanges
   request counts with all neighbors, and posts the coordinate sends/receives.
   Completed by CharacteristicSendCoordinatesEnd().
*/
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  /* Group points by destination so each neighbor's slice of the queue is contiguous */
  PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
  PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  /* Exchange counts: how many points each neighbor will request from us */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0] = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax += c->fillCount[n];
    c->localOffsets[n] = c->queueLocalMax;
    c->queueLocalMax +=
c->needCount[n]; 568 } 569 /* HACK BEGIN */ 570 for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0]; 571 c->needCount[0] = 0; 572 /* HACK END */ 573 if (c->queueRemoteMax) { 574 PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote)); 575 } else c->queueRemote = NULL; 576 c->queueRemoteSize = c->queueRemoteMax; 577 578 /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */ 579 for (n = 1; n < c->numNeighbors; n++) { 580 PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n])); 581 PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]))); 582 } 583 for (n = 1; n < c->numNeighbors; n++) { 584 PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n])); 585 PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c))); 586 } 587 PetscFunctionReturn(PETSC_SUCCESS); 588 } 589 590 PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c) 591 { 592 #if 0 593 PetscMPIInt rank; 594 PetscInt n; 595 #endif 596 597 PetscFunctionBegin; 598 PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status)); 599 #if 0 600 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank)); 601 for (n = 0; n < c->queueRemoteSize; n++) { 602 PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc); 603 } 604 #endif 605 PetscFunctionReturn(PETSC_SUCCESS); 606 } 607 608 PetscErrorCode CharacteristicGetValuesBegin(Characteristic c) 609 { 610 PetscMPIInt tag = 121; 611 PetscInt n; 612 613 PetscFunctionBegin; 614 /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */ 615 for (n = 
1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]))); 616 for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c))); 617 PetscFunctionReturn(PETSC_SUCCESS); 618 } 619 620 PetscErrorCode CharacteristicGetValuesEnd(Characteristic c) 621 { 622 PetscFunctionBegin; 623 PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status)); 624 /* Free queue of requests from other procs */ 625 PetscCall(PetscFree(c->queueRemote)); 626 PetscFunctionReturn(PETSC_SUCCESS); 627 } 628 629 /*---------------------------------------------------------------------*/ 630 /* 631 Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html 632 */ 633 static PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size) 634 /*---------------------------------------------------------------------*/ 635 { 636 CharacteristicPointDA2D temp; 637 PetscInt n; 638 639 PetscFunctionBegin; 640 if (0) { /* Check the order of the queue before sorting */ 641 PetscCall(PetscInfo(NULL, "Before Heap sort\n")); 642 for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc)); 643 } 644 645 /* SORTING PHASE */ 646 for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ } 647 for (n = size - 1; n >= 1; n--) { 648 temp = queue[0]; 649 queue[0] = queue[n]; 650 queue[n] = temp; 651 PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1)); 652 } 653 if (0) { /* Check the order of the queue after sorting */ 654 PetscCall(PetscInfo(NULL, "Avter Heap sort\n")); 655 for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc)); 656 } 657 
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
static PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  PetscFunctionBegin;
  /* Restore the max-heap property (keyed on .proc) for the subtree rooted at `root`,
     using the child-index convention root*2 and root*2 + 1 */
  while ((root * 2 <= bottom) && (!done)) {
    if (root * 2 == bottom) maxChild = root * 2; /* only one child exists */
    else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE; /* heap property holds; stop */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
static PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank         = 0; /* reused below as a running counter to fill the process-grid table */
  PetscCall(PetscMalloc1(PJ, &procs));
  for (pj = 0; pj < PJ; pj++) {
    PetscCall(PetscMalloc1(PI, &(procs[pj])));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  /* This rank's position in the PI x PJ process grid; assumes row-major rank
     ordering, matching the table built above — TODO confirm against the DMDA layout */
  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  /* Wrap-around neighbor indices (periodic by construction; overridden below if not) */
  pim = pi - 1;
  if (pim < 0) pim = PI - 1;
  pip = (pi + 1) % PI;
  pjm = pj - 1;
  if (pjm < 0) pjm = PJ - 1;
  pjp = (pj + 1) % PJ;

  neighbors[1] = procs[pj][pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj][pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  /* Non-periodic boundaries: neighbors past the edge collapse onto this rank itself */
  if (!IPeriodic) {
    if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
  PetscCall(PetscFree(procs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |

  Classifies point (ir,jr) against this rank's owned patch, widened by half a
  cell on each side, and returns the neighbor index (0 = local) per the map above.
  Note: returns a PetscInt classification, not an error code, so it deliberately
  has no PetscFunctionBegin/Return pair for the value path.
*/
static PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  PetscCall(DMDAGetLocalInfo(da, &info));
  /* Half-cell margin around the owned index range */
  is = (PetscReal)info.xs - 0.5;
  ie = (PetscReal)info.xs + info.xm - 0.5;
  js = (PetscReal)info.ys - 0.5;
  je = (PetscReal)info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js) return 7;
    else return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js) return 8;
    else return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js) return 6;
    else return 4;
  }
}