1 2 #include <petsc/private/isimpl.h> /*I "petscis.h" I*/ 3 #include <petsc/private/hashmapi.h> 4 #include <petscsf.h> 5 #include <petscviewer.h> 6 7 PetscClassId IS_LTOGM_CLASSID; 8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***); 9 10 typedef struct { 11 PetscInt *globals; 12 } ISLocalToGlobalMapping_Basic; 13 14 typedef struct { 15 PetscHMapI globalht; 16 } ISLocalToGlobalMapping_Hash; 17 18 /*@C 19 ISGetPointRange - Returns a description of the points in an `IS` suitable for traversal 20 21 Not collective 22 23 Input Parameter: 24 . pointIS - The `IS` object 25 26 Output Parameters: 27 + pStart - The first index, see notes 28 . pEnd - One past the last index, see notes 29 - points - The indices, see notes 30 31 Level: intermediate 32 33 Notes: 34 If the `IS` contains contiguous indices in an `ISSTRIDE`, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern 35 .vb 36 ISGetPointRange(is, &pStart, &pEnd, &points); 37 for (p = pStart; p < pEnd; ++p) { 38 const PetscInt point = points ? 
points[p] : p; 39 } 40 ISRestorePointRange(is, &pstart, &pEnd, &points); 41 .ve 42 43 .seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 44 @*/ 45 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 46 { 47 PetscInt numCells, step = 1; 48 PetscBool isStride; 49 50 PetscFunctionBeginHot; 51 *pStart = 0; 52 *points = NULL; 53 PetscCall(ISGetLocalSize(pointIS, &numCells)); 54 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 55 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 56 *pEnd = *pStart + numCells; 57 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 58 PetscFunctionReturn(0); 59 } 60 61 /*@C 62 ISRestorePointRange - Destroys the traversal description created with `ISGetPointRange()` 63 64 Not collective 65 66 Input Parameters: 67 + pointIS - The `IS` object 68 . pStart - The first index, from `ISGetPointRange()` 69 . pEnd - One past the last index, from `ISGetPointRange()` 70 - points - The indices, from `ISGetPointRange()` 71 72 Level: intermediate 73 74 .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 75 @*/ 76 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 77 { 78 PetscInt step = 1; 79 PetscBool isStride; 80 81 PetscFunctionBeginHot; 82 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 83 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 84 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 85 PetscFunctionReturn(0); 86 } 87 88 /*@C 89 ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given 90 91 Not collective 92 93 Input Parameters: 94 + subpointIS - The `IS` object to be configured 95 . pStar t - The first index of the subrange 96 . 
 pEnd - One past the last index for the subrange
- points - The indices for the entire range, from `ISGetPointRange()`

  Output Parameters:
. subpointIS - The `IS` object now configured to be a subrange

  Level: intermediate

  Note:
  The input `IS` will now respond properly to calls to `ISGetPointRange()` and return the subrange.

.seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
{
  PetscFunctionBeginHot;
  if (points) {
    /* general indices: reference the caller's array directly (no copy) */
    PetscCall(ISSetType(subpointIS, ISGENERAL));
    PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER));
  } else {
    /* contiguous range: represent as a unit stride */
    PetscCall(ISSetType(subpointIS, ISSTRIDE));
    PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1));
  }
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------------*/

/*
  Creates the global mapping information in the ISLocalToGlobalMapping structure

  If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt i, *idx = mapping->indices, n = mapping->n, end, start;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0); /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* compute the range [start, end] spanned by the nonnegative global indices */
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end) end = idx[i];
  }
  if (start > end) { /* no valid (nonnegative) indices at all */
    start = 0;
    end   = -1;
  }
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    /* heuristic: a dense table (BASIC) is affordable only when the global range
       is not much larger than the number of local indices; otherwise hash */
    if ((end - start) > PetscMax(4 * n, 1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  PetscTryTypeMethod(mapping, globaltolocalmappingsetup);
  PetscFunctionReturn(0);
}

/* Build the dense lookup table used by the BASIC global-to-local implementation */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscInt                      i, *idx = mapping->indices, n = mapping->n, end, start, *globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start = mapping->globalstart;
  end   = mapping->globalend;
  PetscCall(PetscNew(&map));
  PetscCall(PetscMalloc1(end - start + 2, &globals));
  map->globals = globals;
  for (i = 0; i < end - start + 1; i++) globals[i] = -1; /* -1 marks "no local index" */
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(0);
}

/* Build the hash table used by the HASH global-to-local implementation */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscInt                     i, *idx = mapping->indices, n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  PetscCall(PetscNew(&map));
  PetscCall(PetscHMapICreate(&map->globalht));
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    PetscCall(PetscHMapISet(map->globalht, idx[i], i));
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(0);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(0);
  PetscCall(PetscFree(map->globals));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(0);
  PetscCall(PetscHMapIDestroy(&map->globalht));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(0);
}

/* The four inclusions of isltog.h below instantiate the (block and non-block)
   global-to-local apply routines for the BASIC and HASH implementations;
   GTOL defines how a single global index g is translated to a local index */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    local = map->globals[g / bs - start]; \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    local = map->globals[g - start]; \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g / bs, &local); \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g, &local); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/*@
  ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
.
 nltog - the duplicated local to global mapping

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog)
{
  ISLocalToGlobalMappingType l2gtype;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  /* deep copy of the indices, then replicate the type so the duplicate uses the same global-to-local strategy */
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog));
  PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype));
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
. n - the number of entries in the local mapping, `ISLocalToGlobalMappingGetIndices()` returns an array of this length

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscValidIntPointer(n, 2);
  *n = mapping->bs * mapping->n; /* unblocked size: indices per block times number of blocks */
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database

  Collective on A

  Input Parameters:
+ A - the local to global mapping object
. obj - Optional object
- name - command line option

  Level: intermediate

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingView - View a local to global mapping

  Not Collective

  Input Parameters:
+ ltog - local to global mapping
- viewer - viewer

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer)
{
  PetscInt    i;
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer));
    /* synchronized printing so each rank's "[rank] local global" lines appear in order */
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < mapping->n; i++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, mapping->indices[i]));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingCreateIS -
Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not collective

  Input Parameter:
. is - index set containing the global numbers for each local number

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  the block size of the `IS` determines the block size of the mapping

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping)
{
  PetscInt        n, bs;
  const PetscInt *indices;
  MPI_Comm        comm;
  PetscBool       isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID, 1);
  PetscValidPointer(mapping, 2);

  PetscCall(PetscObjectGetComm((PetscObject)is, &comm));
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock));
  if (!isblock) {
    /* plain IS: block size 1, one mapping entry per index */
    PetscCall(ISGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISRestoreIndices(is, &indices));
  } else {
    /* blocked IS: inherit its block size; n is the unblocked local size */
    PetscCall(ISGetBlockSize(is, &bs));
    PetscCall(ISBlockGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISBlockRestoreIndices(is, &indices));
  }
  PetscFunctionReturn(0);
}

/*@C
  ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Collective

  Input Parameters:
+ sf - star forest mapping contiguous local indices to (rank, offset)
- start - first global index on this process, or `PETSC_DECIDE` to compute contiguous global numbering automatically

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Notes:
  If any processor calls this with start = `PETSC_DECIDE` then all processors must, otherwise the program will hang.

.seealso: [](sec_scatter), `PetscSF`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping)
{
  PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
  PetscValidPointer(mapping, 3);
  PetscCall(PetscObjectGetComm((PetscObject)sf, &comm));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL));
  if (start == PETSC_DECIDE) {
    /* contiguous global numbering: prefix-sum of root counts over the ranks */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm));
  } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal; /* range is inclusive; we need one-past-the-end count */
  PetscCall(PetscMalloc1(nroots, &globals));
  PetscCall(PetscMalloc1(maxlocal, &ltog));
  for (i = 0; i < nroots; i++) globals[i] = start + i;
  for (i = 0; i < maxlocal; i++) ltog[i] = -1; /* leaves not present in the SF map to -1 */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  /* ltog ownership transfers to the mapping; globals was only a broadcast source */
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

  Not collective

  Input Parameters:
+ mapping - mapping data structure
- bs - the blocksize

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`,
`ISLocalToGlobalMappingCreateIS()` 471 @*/ 472 PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs) 473 { 474 PetscInt *nid; 475 const PetscInt *oid; 476 PetscInt i, cn, on, obs, nn; 477 478 PetscFunctionBegin; 479 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 480 PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs); 481 if (bs == mapping->bs) PetscFunctionReturn(0); 482 on = mapping->n; 483 obs = mapping->bs; 484 oid = mapping->indices; 485 nn = (on * obs) / bs; 486 PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on); 487 488 PetscCall(PetscMalloc1(nn, &nid)); 489 PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid)); 490 for (i = 0; i < nn; i++) { 491 PetscInt j; 492 for (j = 0, cn = 0; j < bs - 1; j++) { 493 if (oid[i * bs + j] < 0) { 494 cn++; 495 continue; 496 } 497 PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]); 498 } 499 if (oid[i * bs + j] < 0) cn++; 500 if (cn) { 501 PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn); 502 nid[i] = -1; 503 } else { 504 nid[i] = oid[i * bs] / bs; 505 } 506 } 507 PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid)); 508 509 mapping->n = nn; 510 mapping->bs = bs; 511 PetscCall(PetscFree(mapping->indices)); 512 mapping->indices = nid; 513 mapping->globalstart = 0; 514 mapping->globalend = 0; 515 516 /* reset the cached information */ 517 
PetscCall(PetscFree(mapping->info_procs)); 518 PetscCall(PetscFree(mapping->info_numprocs)); 519 if (mapping->info_indices) { 520 PetscInt i; 521 522 PetscCall(PetscFree((mapping->info_indices)[0])); 523 for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i])); 524 PetscCall(PetscFree(mapping->info_indices)); 525 } 526 mapping->info_cached = PETSC_FALSE; 527 528 PetscTryTypeMethod(mapping, destroy); 529 PetscFunctionReturn(0); 530 } 531 532 /*@ 533 ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping 534 ordering and a global parallel ordering. 535 536 Not Collective 537 538 Input Parameters: 539 . mapping - mapping data structure 540 541 Output Parameter: 542 . bs - the blocksize 543 544 Level: advanced 545 546 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()` 547 @*/ 548 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs) 549 { 550 PetscFunctionBegin; 551 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 552 *bs = mapping->bs; 553 PetscFunctionReturn(0); 554 } 555 556 /*@ 557 ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n) 558 ordering and a global parallel ordering. 559 560 Not Collective, but communicator may have more than one process 561 562 Input Parameters: 563 + comm - MPI communicator 564 . bs - the block size 565 . n - the number of local elements divided by the block size, or equivalently the number of block indices 566 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 567 - mode - see PetscCopyMode 568 569 Output Parameter: 570 . 
 mapping - new mapping data structure

  Level: advanced

  Notes:
  There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType`
  of `ISLOCALTOGLOBALMAPPINGBASIC` will be used; this uses more memory but is faster; this approach is not scalable for extremely large mappings.

  For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping)
{
  PetscInt *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices, 4);
  PetscValidPointer(mapping, 6);

  *mapping = NULL;
  PetscCall(ISInitializePackage());

  PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView));
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    /* private copy; the mapping owns (and later frees) the duplicate */
    PetscCall(PetscMalloc1(n, &in));
    PetscCall(PetscArraycpy(in, indices, n));
    (*mapping)->indices         = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; we free it on destroy */
    (*mapping)->indices         = (PetscInt *)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_USE_POINTER) {
    /* borrow the caller's array; caller keeps ownership */
    (*mapping)->indices = (PetscInt *)indices;
  } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode);
  PetscFunctionReturn(0);
}

PetscFunctionList ISLocalToGlobalMappingList = NULL;

/*@
  ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

  Not collective

  Input Parameters:
. mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  PetscObjectOptionsBegin((PetscObject)mapping);
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg));
  if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type));
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameters:
.
 mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping), IS_LTOGM_CLASSID, 1);
  if (--((PetscObject)(*mapping))->refct > 0) {
    /* other references remain; just drop the caller's handle */
    *mapping = NULL;
    PetscFunctionReturn(0);
  }
  if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices));
  /* free the cached neighbor information */
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei));
  if ((*mapping)->ops->destroy) PetscCall((*(*mapping)->ops->destroy)(*mapping));
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingApplyIS - Creates from an `IS` in the local numbering
  a new index set using the global numbering defined in an `ISLocalToGlobalMapping`
  context.

  Collective on mapping

  Input Parameters:
+ mapping - mapping between local and global numbering
- is - index set in local numbering

  Output Parameter:
. newis - index set in global numbering

  Level: advanced

  Note:
  The output `IS` will have the same communicator of the input `IS`.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis)
{
  PetscInt        n, *idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscValidHeaderSpecific(is, IS_CLASSID, 2);
  PetscValidPointer(newis, 3);

  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &idxin));
  PetscCall(PetscMalloc1(n, &idxout));
  PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout));
  PetscCall(ISRestoreIndices(is, &idxin));
  /* idxout ownership transfers to the new IS */
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
  and converts them to the global numbering.

  Not collective

  Input Parameters:
+ mapping - the local to global mapping context
. N - number of integers
- in - input indices in local numbering

  Output Parameter:
. out - indices in global numbering

  Level: advanced

  Note:
  The in and out array parameters may be identical.
.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
{
  PetscInt i, bs, Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  bs   = mapping->bs;
  Nmax = bs * mapping->n; /* number of valid (unblocked) local indices */
  if (bs == 1) {
    /* fast path: no block arithmetic needed */
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      if (in[i] < 0) { /* negative indices pass through unchanged */
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      out[i] = idx[in[i]];
    }
  } else {
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      /* idx holds one entry per block: global = block index * bs + offset within block */
      out[i] = idx[in[i] / bs] * bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}

/*@
  ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering

  Not collective

  Input Parameters:
+ mapping - the local to global mapping context
. N - number of integers
- in - input indices in local block numbering

  Output Parameter:
. out - indices in global block numbering

  Level: advanced

  Note:
  The in and out array parameters may be identical.

  Example:
  If the index values are {0,1,6,7} set with a call to `ISLocalToGlobalMappingCreate`(`PETSC_COMM_SELF`,2,2,{0,3}) then the mapping applied to 0
  (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
{
  PetscInt        i, Nmax;
  const PetscInt *idx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  Nmax = mapping->n; /* number of valid local block indices */
  idx  = mapping->indices;
  for (i = 0; i < N; i++) {
    if (in[i] < 0) { /* negative indices pass through unchanged */
      out[i] = in[i];
      continue;
    }
    PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
    out[i] = idx[in[i]];
  }
  PetscFunctionReturn(0);
}

/*@
  ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
  specified with a global numbering.

  Not collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
         `IS_GTOLM_DROP` - drops the indices with no local value from the output list
. n - number of global indices to map
- idx - global indices to map

  Output Parameters:
+ nout - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
           to hold all the indices.
You can call `ISGlobalToLocalMappingApply()` with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call `ISGlobalToLocalMappingApply()`
           a second time to set the values.

  Level: advanced

  Notes:
  Either nout or idxout may be NULL. idx and idxout may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Note:
  The manual page states that idx and idxout may be identical but the calling
  sequence declares idx as const so it cannot be the same as idxout.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* build the (type-dependent) global-to-local data structure lazily on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout);
  PetscFunctionReturn(0);
}

/*@
  ISGlobalToLocalMappingApplyIS - Creates from an `IS` in the global numbering
  a new index set using the local numbering defined in an `ISLocalToGlobalMapping`
  context.

  Not collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
         `IS_GTOLM_DROP` - drops the indices with no local value from the output list
- is - index set in global numbering

  Output Parameters:
. newis - index set in local numbering

  Level: advanced

  Note:
  The output `IS` will be sequential, as it encodes a purely local operation

.seealso: [](sec_scatter), `ISGlobalToLocalMapping`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis)
{
  PetscInt        n, nout, *idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscValidHeaderSpecific(is, IS_CLASSID, 3);
  PetscValidPointer(newis, 4);

  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &idxin));
  if (type == IS_GTOLM_MASK) {
    /* MASK keeps all n entries, so the output size is known up front */
    PetscCall(PetscMalloc1(n, &idxout));
  } else {
    /* DROP: first pass with idxout == NULL just counts the surviving entries */
    PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL));
    PetscCall(PetscMalloc1(nout, &idxout));
  }
  PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout));
  PetscCall(ISRestoreIndices(is, &idxin));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}

/*@
  ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
  specified with a block global numbering.

  Not collective

  Input Parameters:
+ mapping - mapping between local and global numbering
.
type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
          `IS_GTOLM_DROP` - drops the indices with no local value from the output list
.  n - number of global indices to map
-  idx - global indices to map

   Output Parameters:
+  nout - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
-  idxout - local index of each global index, one must pass in an array long enough
            to hold all the indices. You can call `ISGlobalToLocalMappingApplyBlock()` with
            idxout == NULL to determine the required length (returned in nout)
            and then allocate the required space and call `ISGlobalToLocalMappingApplyBlock()`
            a second time to set the values.

   Level: advanced

   Notes:
   Either nout or idxout may be NULL. idx and idxout may be identical.

   For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
   this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
   Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

   Developer Note:
   The manual page states that idx and idxout may be identical but the calling
   sequence declares idx as const so it cannot be the same as idxout.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* lazily build the type-specific reverse-lookup data on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* same as ISGlobalToLocalMappingApply() but indices are interpreted in units of blocks */
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
   each index shared by more than one processor

   Collective on mapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each subdomain (processor)
-  indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

   Level: advanced

   Fortran Usage:
.vb
        PetscInt indices[nproc][numprocmax],ierr)
        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
.ve

   Fortran Note:
   There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
   indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_cached) {
    /* a previous call already computed (and stored) the neighbor info; hand out the cached arrays */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    /* first call: compute the info collectively and cache it inside the mapping */
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(0);
}

/*
   Computes, via a three-phase nonblocking MPI exchange, which ranks share each
   locally known (block) index, and caches the result on the mapping.

   Phase 1: each rank sends its (global index, local position) pairs to the rank
            that "globally owns" the index under a uniform partition of [0, Ng).
   Phase 2: owners count how many ranks touch each owned index and send back,
            for every shared index, the list of sharing ranks.
   Phase 3: receivers tally, per neighbor rank, the shared local indices,
            sort them by global number, and move this rank's own entry to slot 0.

   NOTE(review): several temporaries (nprocs, cnt, nt) are freed and reallocated /
   reused with different meanings across phases; statement order is load-bearing.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry arrays so callers can index [0] */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
    numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
    boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
    local subdomain
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  /* Ng = 1 + global maximum index, i.e. the size of the global index space */
  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* uniform block partition of [0,Ng): rank r owns [r*scale, (r+1)*scale), last rank gets the remainder */
  scale = Ng / size + 1;
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of local nodes to owners; each pair is (global index, local position) */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* rebuild starts[] since the packing loop above advanced it */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives; sources arrive in any order, so record who sent each message */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2; /* messages are (index, position) pairs */
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced above; rebuild it for printing */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  /* prefix-sum the lengths into receive offsets; nt = total payload size */
  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs is reused here as a per-rank tally */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs)); /* bprocs maps a rank to its slot in the compressed neighbor list */
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt)); /* reused as insertion cursor per neighbor */
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetBlockInfo()`

   Collective on mapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* only free when the arrays were freshly expanded (info_free set by ISLocalToGlobalMappingGetInfo()
     for bs > 1); otherwise the pointers belong to the cache and are freed at mapping destruction */
  if (mapping->info_free) {
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
      PetscCall(PetscFree(*indices));
    }
  }
  /* always null out the caller's pointers so stale use is caught */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
   each index shared by more than one processor

   Collective on mapping

   Input Parameter:
. 
mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each subdomain (processor)
-  indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

   Level: advanced

   Note:
   The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.

   Fortran Usage:
.vb
        PetscInt indices[nproc][numprocmax],ierr)
        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
.ve

   Fortran Note:
   There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
   indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  bs = mapping->bs;
  /* the block-level info is computed (or fetched from cache) first */
  PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices));
  if (bs > 1) { /* we need to expand the cached info */
    /* each shared block b becomes the bs point indices b*bs .. b*bs+bs-1 */
    PetscCall(PetscCalloc1(*nproc, &*indices));
    PetscCall(PetscCalloc1(*nproc, &*numprocs));
    for (i = 0; i < *nproc; i++) {
      PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i]));
      for (j = 0; j < bnumprocs[i]; j++) {
        for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k;
      }
      (*numprocs)[i] = bnumprocs[i] * bs;
    }
    /* mark the expanded arrays as owned by the caller so RestoreBlockInfo() frees them */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: point info and block info coincide; hand out the cached arrays */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetInfo()`

   Collective on mapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nproc - number of processors that are connected to this one
.  proc - neighboring processors
.  numproc - number of indices for each processor
-  indices - indices of local nodes shared with neighbor (sorted by global numbering)

   Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  /* the block variant knows (via mapping->info_free) whether the arrays must be freed */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each MPI rank

   Collective on mapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nnodes - number of local nodes (same `ISLocalToGlobalMappingGetSize()`)
.  count - number of neighboring processors per node
-  indices - indices of processes sharing the node (sorted)

   Level: advanced

   Note:
   The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) {
    /* first call: build a CSR-like per-node table (info_nodec = counts, info_nodei = row pointers
       into one contiguous allocation) from the per-neighbor info, then cache it on the mapping */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* pass 1: count entries per node (every node gets at least one, for this rank itself) */
    for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
    m                      = n;
    mapping->info_nodec[n] = 0;
    for (i = 1; i < n_neigh; i++) { /* entry 0 of the neighbor info is this rank itself, so start at 1 */
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all rows; row i starts where row i-1 ends */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    /* pass 2: reset the counts and fill the rows */
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      mapping->info_nodei[i][0] = neigh[0]; /* neigh[0] is this rank (moved to slot 0 by GetBlockInfo) */
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetNodeInfo()`

   Collective on mapping

   Input Parameter:
.  mapping - the mapping from local to global indexing

   Output Parameters:
+  nnodes - number of local nodes
.  count - number of neighboring processors per node
-  indices - indices of processes sharing the node (sorted)

   Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the node info remains cached on the mapping; only the caller's views are cleared here */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
. 
array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()` 1639 1640 Level: advanced 1641 1642 Note: 1643 `ISLocalToGlobalMappingGetSize()` returns the length the this array 1644 1645 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1646 @*/ 1647 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1648 { 1649 PetscFunctionBegin; 1650 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1651 PetscValidPointer(array, 2); 1652 if (ltog->bs == 1) { 1653 *array = ltog->indices; 1654 } else { 1655 PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs; 1656 const PetscInt *ii; 1657 1658 PetscCall(PetscMalloc1(bs * n, &jj)); 1659 *array = jj; 1660 k = 0; 1661 ii = ltog->indices; 1662 for (i = 0; i < n; i++) 1663 for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j; 1664 } 1665 PetscFunctionReturn(0); 1666 } 1667 1668 /*@C 1669 ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with `ISLocalToGlobalMappingGetIndices()` 1670 1671 Not Collective 1672 1673 Input Parameters: 1674 + ltog - local to global mapping 1675 - array - array of indices 1676 1677 Level: advanced 1678 1679 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1680 @*/ 1681 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1682 { 1683 PetscFunctionBegin; 1684 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1685 PetscValidPointer(array, 2); 1686 PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1687 1688 if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array)); 1689 PetscFunctionReturn(0); 1690 } 1691 1692 /*@C 1693 
ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Parameter:
.  ltog - local to global mapping

   Output Parameter:
.  array - array of indices

   Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(array, 2);
  /* hand out the internal block-index array directly; no copy is made, so the
     caller must return the same pointer via RestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with `ISLocalToGlobalMappingGetBlockIndices()`

   Not Collective

   Input Parameters:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(array, 2);
  /* GetBlockIndices() returned a borrowed pointer, so the exact same pointer must come back */
  PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Parameters:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Parameter:
. 
ltogcat - new mapping 1751 1752 Level: advanced 1753 1754 Note: 1755 This currently always returns a mapping with block size of 1 1756 1757 Developer Note: 1758 If all the input mapping have the same block size we could easily handle that as a special case 1759 1760 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()` 1761 @*/ 1762 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat) 1763 { 1764 PetscInt i, cnt, m, *idx; 1765 1766 PetscFunctionBegin; 1767 PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n); 1768 if (n > 0) PetscValidPointer(ltogs, 3); 1769 for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3); 1770 PetscValidPointer(ltogcat, 4); 1771 for (cnt = 0, i = 0; i < n; i++) { 1772 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1773 cnt += m; 1774 } 1775 PetscCall(PetscMalloc1(cnt, &idx)); 1776 for (cnt = 0, i = 0; i < n; i++) { 1777 const PetscInt *subidx; 1778 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1779 PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx)); 1780 PetscCall(PetscArraycpy(&idx[cnt], subidx, m)); 1781 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx)); 1782 cnt += m; 1783 } 1784 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat)); 1785 PetscFunctionReturn(0); 1786 } 1787 1788 /*MC 1789 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is 1790 used this is good for only small and moderate size problems. 1791 1792 Options Database Keys: 1793 . -islocaltoglobalmapping_type basic - select this method 1794 1795 Level: beginner 1796 1797 Developer Note: 1798 This stores all the mapping information on each MPI rank. 
.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the Basic implementation's global-to-local kernels and destructor on this mapping */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}

/*MC
   ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
   used this is good for large memory problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method

   Level: beginner

   Note:
   This is selected automatically for large problems if the user does not set the type.
.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the Hash implementation's global-to-local kernels and destructor on this mapping */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingRegister - Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping`

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  function - routine to create method context

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

   Note:
   `ISLocalToGlobalMappingRegister()` may be called multiple times to add several user-defined mappings.
.seealso: [](sec_scatter), `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`,
          `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscFunctionBegin;
  /* ensure the IS package (and hence the function-list registry) exists before adding to it */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function));
  PetscFunctionReturn(0);
}

/*@C
   ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use

   Logically Collective on ltog

   Input Parameters:
+  ltog - the `ISLocalToGlobalMapping` object
-  type - a known method

   Options Database Key:
.  -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list of available methods (for instance, basic or hash)

   Level: intermediate

   Notes:
   See "petsc/include/petscis.h" for available methods

   Normally, it is best to use the `ISLocalToGlobalMappingSetFromOptions()` command and
   then set the `ISLocalToGlobalMappingType` from the options database rather than by using
   this routine.

   Developer Note:
   `ISLocalToGlobalMappingRegister()` is used to add new types to `ISLocalToGlobalMappingList` from which they
   are accessed by `ISLocalToGlobalMappingSetType()`.
1895 1896 .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()` 1897 @*/ 1898 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1899 { 1900 PetscBool match; 1901 PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL; 1902 1903 PetscFunctionBegin; 1904 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1905 if (type) PetscValidCharPointer(type, 2); 1906 1907 PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match)); 1908 if (match) PetscFunctionReturn(0); 1909 1910 /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */ 1911 if (type) { 1912 PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r)); 1913 PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type); 1914 } 1915 /* Destroy the previous private LTOG context */ 1916 PetscTryTypeMethod(ltog, destroy); 1917 ltog->ops->destroy = NULL; 1918 1919 PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type)); 1920 if (r) PetscCall((*r)(ltog)); 1921 PetscFunctionReturn(0); 1922 } 1923 1924 /*@C 1925 ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping` 1926 1927 Not Collective 1928 1929 Input Parameter: 1930 . ltog - the `ISLocalToGlobalMapping` object 1931 1932 Output Parameter: 1933 . 
type - the type

   Level: intermediate

.seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscValidPointer(type, 2);
  /* the type name lives on the base PetscObject; it may be NULL if no type was ever set */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(0);
}

/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

/*@C
   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the `IS` package.

   Not Collective

   Level: advanced

.seealso: [](sec_scatter), `ISRegister()`, `ISLocalToGlobalMappingRegister()`
@*/
PetscErrorCode ISLocalToGlobalMappingRegisterAll(void)
{
  PetscFunctionBegin;
  /* idempotent: only the first call performs the registration */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
  PetscFunctionReturn(0);
}