1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 /*@C 19 ISGetPointRange - Returns a description of the points in an IS suitable for traversal 20 21 Not collective 22 23 Input Parameter: 24 . pointIS - The IS object 25 26 Output Parameters: 27 + pStart - The first index, see notes 28 . pEnd - One past the last index, see notes 29 - points - The indices, see notes 30 31 Notes: 32 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 33 $ ISGetPointRange(is, &pStart, &pEnd, &points); 34 $ for (p = pStart; p < pEnd; ++p) { 35 $ const PetscInt point = points ? 
points[p] : p; 36 $ } 37 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 38 39 Level: intermediate 40 41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 42 @*/ 43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 44 { 45 PetscInt numCells, step = 1; 46 PetscBool isStride; 47 48 PetscFunctionBeginHot; 49 *pStart = 0; 50 *points = NULL; 51 PetscCall(ISGetLocalSize(pointIS, &numCells)); 52 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 53 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 54 *pEnd = *pStart + numCells; 55 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 56 PetscFunctionReturn(0); 57 } 58 59 /*@C 60 ISRestorePointRange - Destroys the traversal description 61 62 Not collective 63 64 Input Parameters: 65 + pointIS - The IS object 66 . pStart - The first index, from ISGetPointRange() 67 . pEnd - One past the last index, from ISGetPointRange() 68 - points - The indices, from ISGetPointRange() 69 70 Notes: 71 If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 72 $ ISGetPointRange(is, &pStart, &pEnd, &points); 73 $ for (p = pStart; p < pEnd; ++p) { 74 $ const PetscInt point = points ? 
points[p] : p; 75 $ } 76 $ ISRestorePointRange(is, &pstart, &pEnd, &points); 77 78 Level: intermediate 79 80 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 81 @*/ 82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 83 { 84 PetscInt step = 1; 85 PetscBool isStride; 86 87 PetscFunctionBeginHot; 88 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 89 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 90 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 91 PetscFunctionReturn(0); 92 } 93 94 /*@C 95 ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given 96 97 Not collective 98 99 Input Parameters: 100 + subpointIS - The IS object to be configured 101 . pStar t - The first index of the subrange 102 . pEnd - One past the last index for the subrange 103 - points - The indices for the entire range, from ISGetPointRange() 104 105 Output Parameters: 106 . subpointIS - The IS object now configured to be a subrange 107 108 Notes: 109 The input IS will now respond properly to calls to ISGetPointRange() and return the subrange. 
110 111 Level: intermediate 112 113 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()` 114 @*/ 115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points) 116 { 117 PetscFunctionBeginHot; 118 if (points) { 119 PetscCall(ISSetType(subpointIS, ISGENERAL)); 120 PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER)); 121 } else { 122 PetscCall(ISSetType(subpointIS, ISSTRIDE)); 123 PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1)); 124 } 125 PetscFunctionReturn(0); 126 } 127 128 /* -----------------------------------------------------------------------------------------*/ 129 130 /* 131 Creates the global mapping information in the ISLocalToGlobalMapping structure 132 133 If the user has not selected how to handle the global to local mapping then use HASH for "large" problems 134 */ 135 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping) 136 { 137 PetscInt i, *idx = mapping->indices, n = mapping->n, end, start; 138 139 PetscFunctionBegin; 140 if (mapping->data) PetscFunctionReturn(0); 141 end = 0; 142 start = PETSC_MAX_INT; 143 144 for (i = 0; i < n; i++) { 145 if (idx[i] < 0) continue; 146 if (idx[i] < start) start = idx[i]; 147 if (idx[i] > end) end = idx[i]; 148 } 149 if (start > end) { 150 start = 0; 151 end = -1; 152 } 153 mapping->globalstart = start; 154 mapping->globalend = end; 155 if (!((PetscObject)mapping)->type_name) { 156 if ((end - start) > PetscMax(4 * n, 1000000)) { 157 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH)); 158 } else { 159 PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC)); 160 } 161 } 162 PetscTryTypeMethod(mapping, globaltolocalmappingsetup); 163 PetscFunctionReturn(0); 164 } 165 166 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping) 167 { 168 PetscInt i, *idx = 
mapping->indices, n = mapping->n, end, start, *globals; 169 ISLocalToGlobalMapping_Basic *map; 170 171 PetscFunctionBegin; 172 start = mapping->globalstart; 173 end = mapping->globalend; 174 PetscCall(PetscNew(&map)); 175 PetscCall(PetscMalloc1(end - start + 2, &globals)); 176 map->globals = globals; 177 for (i = 0; i < end - start + 1; i++) globals[i] = -1; 178 for (i = 0; i < n; i++) { 179 if (idx[i] < 0) continue; 180 globals[idx[i] - start] = i; 181 } 182 mapping->data = (void *)map; 183 PetscFunctionReturn(0); 184 } 185 186 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping) 187 { 188 PetscInt i, *idx = mapping->indices, n = mapping->n; 189 ISLocalToGlobalMapping_Hash *map; 190 191 PetscFunctionBegin; 192 PetscCall(PetscNew(&map)); 193 PetscCall(PetscHMapICreate(&map->globalht)); 194 for (i = 0; i < n; i++) { 195 if (idx[i] < 0) continue; 196 PetscCall(PetscHMapISet(map->globalht, idx[i], i)); 197 } 198 mapping->data = (void *)map; 199 PetscFunctionReturn(0); 200 } 201 202 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping) 203 { 204 ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data; 205 206 PetscFunctionBegin; 207 if (!map) PetscFunctionReturn(0); 208 PetscCall(PetscFree(map->globals)); 209 PetscCall(PetscFree(mapping->data)); 210 PetscFunctionReturn(0); 211 } 212 213 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping) 214 { 215 ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data; 216 217 PetscFunctionBegin; 218 if (!map) PetscFunctionReturn(0); 219 PetscCall(PetscHMapIDestroy(&map->globalht)); 220 PetscCall(PetscFree(mapping->data)); 221 PetscFunctionReturn(0); 222 } 223 224 #define GTOLTYPE _Basic 225 #define GTOLNAME _Basic 226 #define GTOLBS mapping->bs 227 #define GTOL(g, local) \ 228 do { \ 229 local = map->globals[g / bs - start]; \ 230 if (local >= 0) local = bs * local + (g % bs); \ 
231 } while (0) 232 233 #include <../src/vec/is/utils/isltog.h> 234 235 #define GTOLTYPE _Basic 236 #define GTOLNAME Block_Basic 237 #define GTOLBS 1 238 #define GTOL(g, local) \ 239 do { \ 240 local = map->globals[g - start]; \ 241 } while (0) 242 #include <../src/vec/is/utils/isltog.h> 243 244 #define GTOLTYPE _Hash 245 #define GTOLNAME _Hash 246 #define GTOLBS mapping->bs 247 #define GTOL(g, local) \ 248 do { \ 249 (void)PetscHMapIGet(map->globalht, g / bs, &local); \ 250 if (local >= 0) local = bs * local + (g % bs); \ 251 } while (0) 252 #include <../src/vec/is/utils/isltog.h> 253 254 #define GTOLTYPE _Hash 255 #define GTOLNAME Block_Hash 256 #define GTOLBS 1 257 #define GTOL(g, local) \ 258 do { \ 259 (void)PetscHMapIGet(map->globalht, g, &local); \ 260 } while (0) 261 #include <../src/vec/is/utils/isltog.h> 262 263 /*@ 264 ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object 265 266 Not Collective 267 268 Input Parameter: 269 . ltog - local to global mapping 270 271 Output Parameter: 272 . nltog - the duplicated local to global mapping 273 274 Level: advanced 275 276 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 277 @*/ 278 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog) 279 { 280 ISLocalToGlobalMappingType l2gtype; 281 282 PetscFunctionBegin; 283 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 284 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog)); 285 PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype)); 286 PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype)); 287 PetscFunctionReturn(0); 288 } 289 290 /*@ 291 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 292 293 Not Collective 294 295 Input Parameter: 296 . ltog - local to global mapping 297 298 Output Parameter: 299 . 
n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length 300 301 Level: advanced 302 303 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 304 @*/ 305 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n) 306 { 307 PetscFunctionBegin; 308 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 309 PetscValidIntPointer(n, 2); 310 *n = mapping->bs * mapping->n; 311 PetscFunctionReturn(0); 312 } 313 314 /*@C 315 ISLocalToGlobalMappingViewFromOptions - View from Options 316 317 Collective on ISLocalToGlobalMapping 318 319 Input Parameters: 320 + A - the local to global mapping object 321 . obj - Optional object 322 - name - command line option 323 324 Level: intermediate 325 .seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()` 326 @*/ 327 PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[]) 328 { 329 PetscFunctionBegin; 330 PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1); 331 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 332 PetscFunctionReturn(0); 333 } 334 335 /*@C 336 ISLocalToGlobalMappingView - View a local to global mapping 337 338 Not Collective 339 340 Input Parameters: 341 + ltog - local to global mapping 342 - viewer - viewer 343 344 Level: advanced 345 346 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 347 @*/ 348 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer) 349 { 350 PetscInt i; 351 PetscMPIInt rank; 352 PetscBool iascii; 353 354 PetscFunctionBegin; 355 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 356 if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer)); 357 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 358 359 
PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank)); 360 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 361 if (iascii) { 362 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer)); 363 PetscCall(PetscViewerASCIIPushSynchronized(viewer)); 364 for (i = 0; i < mapping->n; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, mapping->indices[i])); 365 PetscCall(PetscViewerFlush(viewer)); 366 PetscCall(PetscViewerASCIIPopSynchronized(viewer)); 367 } 368 PetscFunctionReturn(0); 369 } 370 371 /*@ 372 ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n) 373 ordering and a global parallel ordering. 374 375 Not collective 376 377 Input Parameter: 378 . is - index set containing the global numbers for each local number 379 380 Output Parameter: 381 . mapping - new mapping data structure 382 383 Notes: 384 the block size of the IS determines the block size of the mapping 385 Level: advanced 386 387 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()` 388 @*/ 389 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping) 390 { 391 PetscInt n, bs; 392 const PetscInt *indices; 393 MPI_Comm comm; 394 PetscBool isblock; 395 396 PetscFunctionBegin; 397 PetscValidHeaderSpecific(is, IS_CLASSID, 1); 398 PetscValidPointer(mapping, 2); 399 400 PetscCall(PetscObjectGetComm((PetscObject)is, &comm)); 401 PetscCall(ISGetLocalSize(is, &n)); 402 PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock)); 403 if (!isblock) { 404 PetscCall(ISGetIndices(is, &indices)); 405 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping)); 406 PetscCall(ISRestoreIndices(is, &indices)); 407 } else { 408 PetscCall(ISGetBlockSize(is, &bs)); 409 PetscCall(ISBlockGetIndices(is, &indices)); 410 
PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping)); 411 PetscCall(ISBlockRestoreIndices(is, &indices)); 412 } 413 PetscFunctionReturn(0); 414 } 415 416 /*@C 417 ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n) 418 ordering and a global parallel ordering. 419 420 Collective 421 422 Input Parameters: 423 + sf - star forest mapping contiguous local indices to (rank, offset) 424 - start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically 425 426 Output Parameter: 427 . mapping - new mapping data structure 428 429 Level: advanced 430 431 Notes: 432 If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang. 433 434 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()` 435 @*/ 436 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping) 437 { 438 PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog; 439 MPI_Comm comm; 440 441 PetscFunctionBegin; 442 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 443 PetscValidPointer(mapping, 3); 444 PetscCall(PetscObjectGetComm((PetscObject)sf, &comm)); 445 PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL)); 446 if (start == PETSC_DECIDE) { 447 start = 0; 448 PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm)); 449 } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE"); 450 PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal)); 451 ++maxlocal; 452 PetscCall(PetscMalloc1(nroots, &globals)); 453 PetscCall(PetscMalloc1(maxlocal, <og)); 454 for (i = 0; i < nroots; i++) globals[i] = start + i; 455 for (i = 0; i < maxlocal; i++) ltog[i] = -1; 456 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 457 
PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE)); 458 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping)); 459 PetscCall(PetscFree(globals)); 460 PetscFunctionReturn(0); 461 } 462 463 /*@ 464 ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping 465 466 Not collective 467 468 Input Parameters: 469 + mapping - mapping data structure 470 - bs - the blocksize 471 472 Level: advanced 473 474 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 475 @*/ 476 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs) 477 { 478 PetscInt *nid; 479 const PetscInt *oid; 480 PetscInt i, cn, on, obs, nn; 481 482 PetscFunctionBegin; 483 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 484 PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs); 485 if (bs == mapping->bs) PetscFunctionReturn(0); 486 on = mapping->n; 487 obs = mapping->bs; 488 oid = mapping->indices; 489 nn = (on * obs) / bs; 490 PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on); 491 492 PetscCall(PetscMalloc1(nn, &nid)); 493 PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid)); 494 for (i = 0; i < nn; i++) { 495 PetscInt j; 496 for (j = 0, cn = 0; j < bs - 1; j++) { 497 if (oid[i * bs + j] < 0) { 498 cn++; 499 continue; 500 } 501 PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]); 502 } 503 if (oid[i * bs + j] < 0) cn++; 504 if (cn) { 505 PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " 
and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn); 506 nid[i] = -1; 507 } else { 508 nid[i] = oid[i * bs] / bs; 509 } 510 } 511 PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid)); 512 513 mapping->n = nn; 514 mapping->bs = bs; 515 PetscCall(PetscFree(mapping->indices)); 516 mapping->indices = nid; 517 mapping->globalstart = 0; 518 mapping->globalend = 0; 519 520 /* reset the cached information */ 521 PetscCall(PetscFree(mapping->info_procs)); 522 PetscCall(PetscFree(mapping->info_numprocs)); 523 if (mapping->info_indices) { 524 PetscInt i; 525 526 PetscCall(PetscFree((mapping->info_indices)[0])); 527 for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i])); 528 PetscCall(PetscFree(mapping->info_indices)); 529 } 530 mapping->info_cached = PETSC_FALSE; 531 532 PetscTryTypeMethod(mapping, destroy); 533 PetscFunctionReturn(0); 534 } 535 536 /*@ 537 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 538 ordering and a global parallel ordering. 539 540 Not Collective 541 542 Input Parameters: 543 . mapping - mapping data structure 544 545 Output Parameter: 546 . bs - the blocksize 547 548 Level: advanced 549 550 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 551 @*/ 552 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs) 553 { 554 PetscFunctionBegin; 555 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 556 *bs = mapping->bs; 557 PetscFunctionReturn(0); 558 } 559 560 /*@ 561 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 562 ordering and a global parallel ordering. 563 564 Not Collective, but communicator may have more than one process 565 566 Input Parameters: 567 + comm - MPI communicator 568 . bs - the block size 569 . 
n - the number of local elements divided by the block size, or equivalently the number of block indices 570 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 571 - mode - see PetscCopyMode 572 573 Output Parameter: 574 . mapping - new mapping data structure 575 576 Notes: 577 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 578 579 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 580 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 581 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 
582 583 Level: advanced 584 585 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH` 586 `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType` 587 @*/ 588 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping) 589 { 590 PetscInt *in; 591 592 PetscFunctionBegin; 593 if (n) PetscValidIntPointer(indices, 4); 594 PetscValidPointer(mapping, 6); 595 596 *mapping = NULL; 597 PetscCall(ISInitializePackage()); 598 599 PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView)); 600 (*mapping)->n = n; 601 (*mapping)->bs = bs; 602 if (mode == PETSC_COPY_VALUES) { 603 PetscCall(PetscMalloc1(n, &in)); 604 PetscCall(PetscArraycpy(in, indices, n)); 605 (*mapping)->indices = in; 606 (*mapping)->dealloc_indices = PETSC_TRUE; 607 } else if (mode == PETSC_OWN_POINTER) { 608 (*mapping)->indices = (PetscInt *)indices; 609 (*mapping)->dealloc_indices = PETSC_TRUE; 610 } else if (mode == PETSC_USE_POINTER) { 611 (*mapping)->indices = (PetscInt *)indices; 612 } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode); 613 PetscFunctionReturn(0); 614 } 615 616 PetscFunctionList ISLocalToGlobalMappingList = NULL; 617 618 /*@ 619 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 620 621 Not collective 622 623 Input Parameters: 624 . 
mapping - mapping data structure 625 626 Level: advanced 627 628 @*/ 629 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 630 { 631 char type[256]; 632 ISLocalToGlobalMappingType defaulttype = "Not set"; 633 PetscBool flg; 634 635 PetscFunctionBegin; 636 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 637 PetscCall(ISLocalToGlobalMappingRegisterAll()); 638 PetscObjectOptionsBegin((PetscObject)mapping); 639 PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg)); 640 if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type)); 641 PetscOptionsEnd(); 642 PetscFunctionReturn(0); 643 } 644 645 /*@ 646 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 647 ordering and a global parallel ordering. 648 649 Note Collective 650 651 Input Parameters: 652 . 
mapping - mapping data structure 653 654 Level: advanced 655 656 .seealso: `ISLocalToGlobalMappingCreate()` 657 @*/ 658 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping) 659 { 660 PetscFunctionBegin; 661 if (!*mapping) PetscFunctionReturn(0); 662 PetscValidHeaderSpecific((*mapping), IS_LTOGM_CLASSID, 1); 663 if (--((PetscObject)(*mapping))->refct > 0) { 664 *mapping = NULL; 665 PetscFunctionReturn(0); 666 } 667 if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices)); 668 PetscCall(PetscFree((*mapping)->info_procs)); 669 PetscCall(PetscFree((*mapping)->info_numprocs)); 670 if ((*mapping)->info_indices) { 671 PetscInt i; 672 673 PetscCall(PetscFree(((*mapping)->info_indices)[0])); 674 for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i])); 675 PetscCall(PetscFree((*mapping)->info_indices)); 676 } 677 if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0])); 678 PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei)); 679 if ((*mapping)->ops->destroy) PetscCall((*(*mapping)->ops->destroy)(*mapping)); 680 PetscCall(PetscHeaderDestroy(mapping)); 681 *mapping = NULL; 682 PetscFunctionReturn(0); 683 } 684 685 /*@ 686 ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering 687 a new index set using the global numbering defined in an ISLocalToGlobalMapping 688 context. 689 690 Collective on is 691 692 Input Parameters: 693 + mapping - mapping between local and global numbering 694 - is - index set in local numbering 695 696 Output Parameters: 697 . newis - index set in global numbering 698 699 Notes: 700 The output IS will have the same communicator of the input IS. 
701 702 Level: advanced 703 704 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 705 `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()` 706 @*/ 707 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis) 708 { 709 PetscInt n, *idxout; 710 const PetscInt *idxin; 711 712 PetscFunctionBegin; 713 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 714 PetscValidHeaderSpecific(is, IS_CLASSID, 2); 715 PetscValidPointer(newis, 3); 716 717 PetscCall(ISGetLocalSize(is, &n)); 718 PetscCall(ISGetIndices(is, &idxin)); 719 PetscCall(PetscMalloc1(n, &idxout)); 720 PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout)); 721 PetscCall(ISRestoreIndices(is, &idxin)); 722 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis)); 723 PetscFunctionReturn(0); 724 } 725 726 /*@ 727 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 728 and converts them to the global numbering. 729 730 Not collective 731 732 Input Parameters: 733 + mapping - the local to global mapping context 734 . N - number of integers 735 - in - input indices in local numbering 736 737 Output Parameter: 738 . out - indices in global numbering 739 740 Notes: 741 The in and out array parameters may be identical. 
742 743 Level: advanced 744 745 .seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 746 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 747 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 748 749 @*/ 750 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) 751 { 752 PetscInt i, bs, Nmax; 753 754 PetscFunctionBegin; 755 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 756 bs = mapping->bs; 757 Nmax = bs * mapping->n; 758 if (bs == 1) { 759 const PetscInt *idx = mapping->indices; 760 for (i = 0; i < N; i++) { 761 if (in[i] < 0) { 762 out[i] = in[i]; 763 continue; 764 } 765 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 766 out[i] = idx[in[i]]; 767 } 768 } else { 769 const PetscInt *idx = mapping->indices; 770 for (i = 0; i < N; i++) { 771 if (in[i] < 0) { 772 out[i] = in[i]; 773 continue; 774 } 775 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 776 out[i] = idx[in[i] / bs] * bs + (in[i] % bs); 777 } 778 } 779 PetscFunctionReturn(0); 780 } 781 782 /*@ 783 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 784 785 Not collective 786 787 Input Parameters: 788 + mapping - the local to global mapping context 789 . N - number of integers 790 - in - input indices in local block numbering 791 792 Output Parameter: 793 . out - indices in global block numbering 794 795 Notes: 796 The in and out array parameters may be identical. 
797 798 Example: 799 If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0 800 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 801 802 Level: advanced 803 804 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 805 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 806 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 807 808 @*/ 809 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) 810 { 811 PetscInt i, Nmax; 812 const PetscInt *idx; 813 814 PetscFunctionBegin; 815 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 816 Nmax = mapping->n; 817 idx = mapping->indices; 818 for (i = 0; i < N; i++) { 819 if (in[i] < 0) { 820 out[i] = in[i]; 821 continue; 822 } 823 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 824 out[i] = idx[in[i]]; 825 } 826 PetscFunctionReturn(0); 827 } 828 829 /*@ 830 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 831 specified with a global numbering. 832 833 Not collective 834 835 Input Parameters: 836 + mapping - mapping between local and global numbering 837 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 838 IS_GTOLM_DROP - drops the indices with no local value from the output list 839 . n - number of global indices to map 840 - idx - global indices to map 841 842 Output Parameters: 843 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 844 - idxout - local index of each global index, one must pass in an array long enough 845 to hold all the indices. 
You can call ISGlobalToLocalMappingApply() with 846 idxout == NULL to determine the required length (returned in nout) 847 and then allocate the required space and call ISGlobalToLocalMappingApply() 848 a second time to set the values. 849 850 Notes: 851 Either nout or idxout may be NULL. idx and idxout may be identical. 852 853 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 854 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 855 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 856 857 Level: advanced 858 859 Developer Note: The manual page states that idx and idxout may be identical but the calling 860 sequence declares idx as const so it cannot be the same as idxout. 861 862 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, 863 `ISLocalToGlobalMappingDestroy()` 864 @*/ 865 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[]) 866 { 867 PetscFunctionBegin; 868 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 869 if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping)); 870 PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout); 871 PetscFunctionReturn(0); 872 } 873 874 /*@ 875 ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering 876 a new index set using the local numbering defined in an ISLocalToGlobalMapping 877 context. 878 879 Not collective 880 881 Input Parameters: 882 + mapping - mapping between local and global numbering 883 . 
type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 884 IS_GTOLM_DROP - drops the indices with no local value from the output list 885 - is - index set in global numbering 886 887 Output Parameters: 888 . newis - index set in local numbering 889 890 Notes: 891 The output IS will be sequential, as it encodes a purely local operation 892 893 Level: advanced 894 895 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 896 `ISLocalToGlobalMappingDestroy()` 897 @*/ 898 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis) 899 { 900 PetscInt n, nout, *idxout; 901 const PetscInt *idxin; 902 903 PetscFunctionBegin; 904 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 905 PetscValidHeaderSpecific(is, IS_CLASSID, 3); 906 PetscValidPointer(newis, 4); 907 908 PetscCall(ISGetLocalSize(is, &n)); 909 PetscCall(ISGetIndices(is, &idxin)); 910 if (type == IS_GTOLM_MASK) { 911 PetscCall(PetscMalloc1(n, &idxout)); 912 } else { 913 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL)); 914 PetscCall(PetscMalloc1(nout, &idxout)); 915 } 916 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout)); 917 PetscCall(ISRestoreIndices(is, &idxin)); 918 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis)); 919 PetscFunctionReturn(0); 920 } 921 922 /*@ 923 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 924 specified with a block global numbering. 925 926 Not collective 927 928 Input Parameters: 929 + mapping - mapping between local and global numbering 930 . type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them) 931 IS_GTOLM_DROP - drops the indices with no local value from the output list 932 . 
n - number of global indices to map 933 - idx - global indices to map 934 935 Output Parameters: 936 + nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n) 937 - idxout - local index of each global index, one must pass in an array long enough 938 to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with 939 idxout == NULL to determine the required length (returned in nout) 940 and then allocate the required space and call ISGlobalToLocalMappingApplyBlock() 941 a second time to set the values. 942 943 Notes: 944 Either nout or idxout may be NULL. idx and idxout may be identical. 945 946 For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used; 947 this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable. 948 Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used. 949 950 Level: advanced 951 952 Developer Note: The manual page states that idx and idxout may be identical but the calling 953 sequence declares idx as const so it cannot be the same as idxout. 

.seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* lazily build the global-to-local lookup data structure on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific block implementation */
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective on ISLocalToGlobalMapping

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. proc - neighboring processors
. numproc - number of indices for each subdomain (processor)
- indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

  Level: advanced

  Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
         PetscInt indices[nproc][numprocmax],ierr)
         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_cached) {
    /* the parallel computation was done before; hand back the cached arrays */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    /* the private routine fills the cache as a side effect */
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(0);
}

/* Computes, via a three-phase MPI rendezvous, which other ranks share each local block
   index and caches the result on the mapping. Phase 1: each rank sends its (global index,
   local index) pairs to the rank that "globally owns" that index under a uniform blocked
   partition of [0,Ng). Phase 2: each owner counts, for every owned index, how many ranks
   sent it, and sends back to each contributor the list of sharing ranks for every index
   shared by more than one rank. Phase 3: the contributor regroups the replies by
   neighboring rank and sorts each neighbor's index list by global number. */
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry arrays so Restore can free uniformly */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  /* Ng = 1 + (global maximum index): size of the conceptual global index space */
  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* uniform blocked ownership: rank r owns global indices [scale*r, scale*(r+1)) */
  scale = Ng / size + 1;
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of (global index, local index) pairs to owners */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* rebuild starts (the packing loop above advanced them past each segment) */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2; /* messages are (global,local) pairs */
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs)); /* bprocs: rank -> compressed position in *procs */
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt)); /* reused below as per-neighbor fill counters */
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

  Collective on ISLocalToGlobalMapping

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. proc - neighboring processors
. numproc - number of indices for each processor
- indices - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_free) {
    /* info_free is set when GetInfo() returned bs-expanded copies rather than the cached arrays */
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
      PetscCall(PetscFree(*indices));
    }
  }
  /* cached arrays stay owned by the mapping; only reset the caller's view */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective on ISLocalToGlobalMapping

  Input Parameter:
.
mapping - the mapping from local to global indexing 1451 1452 Output Parameters: 1453 + nproc - number of processors that are connected to this one 1454 . proc - neighboring processors 1455 . numproc - number of indices for each subdomain (processor) 1456 - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering) 1457 1458 Level: advanced 1459 1460 Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed. 1461 1462 Fortran Usage: 1463 $ ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by 1464 $ ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc], 1465 PetscInt indices[nproc][numprocmax],ierr) 1466 There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and 1467 indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough. 
1468 1469 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`, 1470 `ISLocalToGlobalMappingRestoreInfo()` 1471 @*/ 1472 PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[]) 1473 { 1474 PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k; 1475 1476 PetscFunctionBegin; 1477 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 1478 bs = mapping->bs; 1479 PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices)); 1480 if (bs > 1) { /* we need to expand the cached info */ 1481 PetscCall(PetscCalloc1(*nproc, &*indices)); 1482 PetscCall(PetscCalloc1(*nproc, &*numprocs)); 1483 for (i = 0; i < *nproc; i++) { 1484 PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i])); 1485 for (j = 0; j < bnumprocs[i]; j++) { 1486 for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k; 1487 } 1488 (*numprocs)[i] = bnumprocs[i] * bs; 1489 } 1490 mapping->info_free = PETSC_TRUE; 1491 } else { 1492 *numprocs = bnumprocs; 1493 *indices = bindices; 1494 } 1495 PetscFunctionReturn(0); 1496 } 1497 1498 /*@C 1499 ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo() 1500 1501 Collective on ISLocalToGlobalMapping 1502 1503 Input Parameter: 1504 . mapping - the mapping from local to global indexing 1505 1506 Output Parameters: 1507 + nproc - number of processors that are connected to this one 1508 . proc - neighboring processors 1509 . 
numproc - number of indices for each processor
- indices - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  /* point info is either the cached block info (bs == 1) or a bs-expanded copy
     flagged via mapping->info_free; RestoreBlockInfo handles both cases */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

  Collective on ISLocalToGlobalMapping

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
. count - number of neighboring processors per node
- indices - indices of processes sharing the node (sorted)

  Level: advanced

  Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) {
    /* build (and cache on the mapping) a CSR-like structure:
       info_nodec[i] = number of ranks sharing node i, info_nodei[i] = their (sorted) ranks */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* first pass: count entries per node (every node lists this rank, neigh[0], once) */
    for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
    m                      = n;
    mapping->info_nodec[n] = 0;
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all rows; set up the row pointers */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    /* second pass: reset counts and fill in the sharing ranks */
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      mapping->info_nodei[i][0] = neigh[0];
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

  Collective on ISLocalToGlobalMapping

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nnodes - number of local nodes
. count - number of neighboring processors per node
- indices - indices of processes sharing the node (sorted)

  Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the node info arrays remain cached on the mapping; only reset the caller's view */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
.
array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize() 1629 1630 Level: advanced 1631 1632 Notes: 1633 ISLocalToGlobalMappingGetSize() returns the length the this array 1634 1635 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1636 @*/ 1637 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1638 { 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1641 PetscValidPointer(array, 2); 1642 if (ltog->bs == 1) { 1643 *array = ltog->indices; 1644 } else { 1645 PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs; 1646 const PetscInt *ii; 1647 1648 PetscCall(PetscMalloc1(bs * n, &jj)); 1649 *array = jj; 1650 k = 0; 1651 ii = ltog->indices; 1652 for (i = 0; i < n; i++) 1653 for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j; 1654 } 1655 PetscFunctionReturn(0); 1656 } 1657 1658 /*@C 1659 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices() 1660 1661 Not Collective 1662 1663 Input Parameters: 1664 + ltog - local to global mapping 1665 - array - array of indices 1666 1667 Level: advanced 1668 1669 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1670 @*/ 1671 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1672 { 1673 PetscFunctionBegin; 1674 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1675 PetscValidPointer(array, 2); 1676 PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1677 1678 if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array)); 1679 PetscFunctionReturn(0); 1680 } 1681 1682 /*@C 1683 ISLocalToGlobalMappingGetBlockIndices - Get 
global indices for every local block 1684 1685 Not Collective 1686 1687 Input Parameter: 1688 . ltog - local to global mapping 1689 1690 Output Parameter: 1691 . array - array of indices 1692 1693 Level: advanced 1694 1695 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1696 @*/ 1697 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1698 { 1699 PetscFunctionBegin; 1700 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1701 PetscValidPointer(array, 2); 1702 *array = ltog->indices; 1703 PetscFunctionReturn(0); 1704 } 1705 1706 /*@C 1707 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices() 1708 1709 Not Collective 1710 1711 Input Parameters: 1712 + ltog - local to global mapping 1713 - array - array of indices 1714 1715 Level: advanced 1716 1717 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1718 @*/ 1719 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1720 { 1721 PetscFunctionBegin; 1722 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1723 PetscValidPointer(array, 2); 1724 PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1725 *array = NULL; 1726 PetscFunctionReturn(0); 1727 } 1728 1729 /*@C 1730 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1731 1732 Not Collective 1733 1734 Input Parameters: 1735 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1736 . n - number of mappings to concatenate 1737 - ltogs - local to global mappings 1738 1739 Output Parameter: 1740 . 
ltogcat - new mapping 1741 1742 Note: this currently always returns a mapping with block size of 1 1743 1744 Developer Note: If all the input mapping have the same block size we could easily handle that as a special case 1745 1746 Level: advanced 1747 1748 .seealso: `ISLocalToGlobalMappingCreate()` 1749 @*/ 1750 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat) 1751 { 1752 PetscInt i, cnt, m, *idx; 1753 1754 PetscFunctionBegin; 1755 PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n); 1756 if (n > 0) PetscValidPointer(ltogs, 3); 1757 for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3); 1758 PetscValidPointer(ltogcat, 4); 1759 for (cnt = 0, i = 0; i < n; i++) { 1760 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1761 cnt += m; 1762 } 1763 PetscCall(PetscMalloc1(cnt, &idx)); 1764 for (cnt = 0, i = 0; i < n; i++) { 1765 const PetscInt *subidx; 1766 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1767 PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx)); 1768 PetscCall(PetscArraycpy(&idx[cnt], subidx, m)); 1769 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx)); 1770 cnt += m; 1771 } 1772 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat)); 1773 PetscFunctionReturn(0); 1774 } 1775 1776 /*MC 1777 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1778 used this is good for only small and moderate size problems. 1779 1780 Options Database Keys: 1781 . 
-islocaltoglobalmapping_type basic - select this method 1782 1783 Level: beginner 1784 1785 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH` 1786 M*/ 1787 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) 1788 { 1789 PetscFunctionBegin; 1790 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic; 1791 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic; 1792 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic; 1793 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic; 1794 PetscFunctionReturn(0); 1795 } 1796 1797 /*MC 1798 ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is 1799 used this is good for large memory problems. 1800 1801 Options Database Keys: 1802 . -islocaltoglobalmapping_type hash - select this method 1803 1804 Notes: 1805 This is selected automatically for large problems if the user does not set the type. 
1806 1807 Level: beginner 1808 1809 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH` 1810 M*/ 1811 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) 1812 { 1813 PetscFunctionBegin; 1814 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash; 1815 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash; 1816 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash; 1817 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash; 1818 PetscFunctionReturn(0); 1819 } 1820 1821 /*@C 1822 ISLocalToGlobalMappingRegister - Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping 1823 1824 Not Collective 1825 1826 Input Parameters: 1827 + sname - name of a new method 1828 - routine_create - routine to create method context 1829 1830 Notes: 1831 ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings. 1832 1833 Sample usage: 1834 .vb 1835 ISLocalToGlobalMappingRegister("my_mapper",MyCreate); 1836 .ve 1837 1838 Then, your mapping can be chosen with the procedural interface via 1839 $ ISLocalToGlobalMappingSetType(ltog,"my_mapper") 1840 or at runtime via the option 1841 $ -islocaltoglobalmapping_type my_mapper 1842 1843 Level: advanced 1844 1845 .seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH` 1846 1847 @*/ 1848 PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping)) 1849 { 1850 PetscFunctionBegin; 1851 PetscCall(ISInitializePackage()); 1852 PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function)); 1853 PetscFunctionReturn(0); 1854 } 1855 1856 /*@C 1857 ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach. 
1858 1859 Logically Collective on ISLocalToGlobalMapping 1860 1861 Input Parameters: 1862 + ltog - the ISLocalToGlobalMapping object 1863 - type - a known method 1864 1865 Options Database Key: 1866 . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list 1867 of available methods (for instance, basic or hash) 1868 1869 Notes: 1870 See "petsc/include/petscis.h" for available methods 1871 1872 Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and 1873 then set the ISLocalToGlobalMapping type from the options database rather than by using 1874 this routine. 1875 1876 Level: intermediate 1877 1878 Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they 1879 are accessed by ISLocalToGlobalMappingSetType(). 1880 1881 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()` 1882 @*/ 1883 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1884 { 1885 PetscBool match; 1886 PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL; 1887 1888 PetscFunctionBegin; 1889 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1890 if (type) PetscValidCharPointer(type, 2); 1891 1892 PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match)); 1893 if (match) PetscFunctionReturn(0); 1894 1895 /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */ 1896 if (type) { 1897 PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r)); 1898 PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type); 1899 } 1900 /* Destroy the previous private LTOG context */ 1901 PetscTryTypeMethod(ltog, destroy); 1902 ltog->ops->destroy = NULL; 1903 1904 PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type)); 1905 if (r) 
PetscCall((*r)(ltog)); 1906 PetscFunctionReturn(0); 1907 } 1908 1909 /*@C 1910 ISLocalToGlobalMappingGetType - Get the type of the l2g map 1911 1912 Not Collective 1913 1914 Input Parameter: 1915 . ltog - the ISLocalToGlobalMapping object 1916 1917 Output Parameter: 1918 . type - the type 1919 1920 Level: intermediate 1921 1922 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()` 1923 @*/ 1924 PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type) 1925 { 1926 PetscFunctionBegin; 1927 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1928 PetscValidPointer(type, 2); 1929 *type = ((PetscObject)ltog)->type_name; 1930 PetscFunctionReturn(0); 1931 } 1932 1933 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 1934 1935 /*@C 1936 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package. 1937 1938 Not Collective 1939 1940 Level: advanced 1941 1942 .seealso: `ISRegister()`, `ISLocalToGlobalRegister()` 1943 @*/ 1944 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) 1945 { 1946 PetscFunctionBegin; 1947 if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0); 1948 ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE; 1949 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic)); 1950 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash)); 1951 PetscFunctionReturn(0); 1952 } 1953