#include <petsc/private/isimpl.h> /*I "petscis.h" I*/
#include <petsc/private/hashmapi.h>
#include <petscsf.h>
#include <petscviewer.h>

/* Class id for ISLocalToGlobalMapping objects (defined here, registered in ISInitializePackage()) */
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***);

/* Data for the BASIC global-to-local implementation: a dense lookup table covering
   the global index range [globalstart, globalend] (see ISGlobalToLocalMappingSetUp_Basic()) */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Hash;

typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Data for the HASH global-to-local implementation: a hash map from global index to
   local index (see ISGlobalToLocalMappingSetUp_Hash()) */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;

/*@C
  ISGetPointRange - Returns a description of the points in an `IS` suitable for traversal

  Not Collective

  Input Parameter:
. pointIS - The `IS` object

  Output Parameters:
+ pStart - The first index, see notes
. pEnd - One past the last index, see notes
- points - The indices, see notes

  Level: intermediate

  Notes:
  If the `IS` contains contiguous indices in an `ISSTRIDE`, then the indices are contained in [pStart, pEnd) and points = `NULL`.
  Otherwise, `pStart = 0`, `pEnd = numIndices`, and points is an array of the indices. This supports the following pattern
.vb
  ISGetPointRange(is, &pStart, &pEnd, &points);
  for (p = pStart; p < pEnd; ++p) {
    const PetscInt point = points ? 
points[p] : p; 39 // use point 40 } 41 ISRestorePointRange(is, &pstart, &pEnd, &points); 42 .ve 43 Hence the same code can be written for `pointIS` being a `ISSTRIDE` or `ISGENERAL` 44 45 .seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 46 @*/ 47 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 48 { 49 PetscInt numCells, step = 1; 50 PetscBool isStride; 51 52 PetscFunctionBeginHot; 53 *pStart = 0; 54 *points = NULL; 55 PetscCall(ISGetLocalSize(pointIS, &numCells)); 56 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 57 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 58 *pEnd = *pStart + numCells; 59 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 60 PetscFunctionReturn(PETSC_SUCCESS); 61 } 62 63 /*@C 64 ISRestorePointRange - Destroys the traversal description created with `ISGetPointRange()` 65 66 Not Collective 67 68 Input Parameters: 69 + pointIS - The `IS` object 70 . pStart - The first index, from `ISGetPointRange()` 71 . 
pEnd - One past the last index, from `ISGetPointRange()` 72 - points - The indices, from `ISGetPointRange()` 73 74 Level: intermediate 75 76 .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()` 77 @*/ 78 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points) 79 { 80 PetscInt step = 1; 81 PetscBool isStride; 82 83 PetscFunctionBeginHot; 84 PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride)); 85 if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step)); 86 if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points)); 87 PetscFunctionReturn(PETSC_SUCCESS); 88 } 89 90 /*@C 91 ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given 92 93 Not Collective 94 95 Input Parameters: 96 + subpointIS - The `IS` object to be configured 97 . pStart - The first index of the subrange 98 . pEnd - One past the last index for the subrange 99 - points - The indices for the entire range, from `ISGetPointRange()` 100 101 Output Parameters: 102 . subpointIS - The `IS` object now configured to be a subrange 103 104 Level: intermediate 105 106 Note: 107 The input `IS` will now respond properly to calls to `ISGetPointRange()` and return the subrange. 

.seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
{
  PetscFunctionBeginHot;
  if (points) {
    /* Explicit index list: expose the subrange as a general IS borrowing the caller's array */
    PetscCall(ISSetType(subpointIS, ISGENERAL));
    PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER));
  } else {
    /* Contiguous range: a unit-stride IS starting at pStart suffices */
    PetscCall(ISSetType(subpointIS, ISSTRIDE));
    PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* -----------------------------------------------------------------------------------------*/

/*
    Creates the global mapping information in the ISLocalToGlobalMapping structure

    If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt i, *idx = mapping->indices, n = mapping->n, end, start;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(PETSC_SUCCESS); /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* Find the [start, end] range of the nonnegative global indices; negative entries mean "no global value" */
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end) end = idx[i];
  }
  /* No valid indices at all: record an empty range */
  if (start > end) {
    start = 0;
    end   = -1;
  }
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    /* The dense table costs memory proportional to (end - start); fall back to a hash
       when the covered range is much larger than the number of indices */
    if ((end - start) > PetscMax(4 * n, 1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  PetscTryTypeMethod(mapping, globaltolocalmappingsetup);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Builds the dense global->local lookup table covering [globalstart, globalend] */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscInt                      i, *idx = mapping->indices, n = mapping->n, end, start, *globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start = mapping->globalstart;
  end   = mapping->globalend;
  PetscCall(PetscNew(&map));
  PetscCall(PetscMalloc1(end - start + 2, &globals));
  map->globals = globals;
  /* -1 marks global indices with no local counterpart */
  for (i = 0; i < end - start + 1; i++) globals[i] = -1;
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Builds the hash-based global->local lookup (scalable alternative to the dense table) */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscInt                     i, *idx = mapping->indices, n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  PetscCall(PetscNew(&map));
  PetscCall(PetscHMapICreate(&map->globalht));
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue; /* negative entries have no global value */
    PetscCall(PetscHMapISet(map->globalht, idx[i], i));
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscFree(map->globals));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
{
  ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data;

  PetscFunctionBegin;
  if (!map) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscHMapIDestroy(&map->globalht));
  PetscCall(PetscFree(mapping->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* The four GTOLTYPE/GTOLNAME/GTOLBS/GTOL groups below each instantiate, via the template
   header isltog.h, a global-to-local apply routine: scalar and block flavors for the
   Basic (dense table) and Hash implementations. GTOL(g, local) maps one global index g
   to its local index; the template header #undef's the macros after each inclusion. */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    local = map->globals[g / bs - start]; \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    local = map->globals[g - start]; \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g / bs, &local); \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g, &local); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/*@
  ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
.
nltog - the duplicated local to global mapping 269 270 Level: advanced 271 272 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 273 @*/ 274 PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog) 275 { 276 ISLocalToGlobalMappingType l2gtype; 277 278 PetscFunctionBegin; 279 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 280 PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog)); 281 PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype)); 282 PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype)); 283 PetscFunctionReturn(PETSC_SUCCESS); 284 } 285 286 /*@ 287 ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping 288 289 Not Collective 290 291 Input Parameter: 292 . mapping - local to global mapping 293 294 Output Parameter: 295 . n - the number of entries in the local mapping, `ISLocalToGlobalMappingGetIndices()` returns an array of this length 296 297 Level: advanced 298 299 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()` 300 @*/ 301 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n) 302 { 303 PetscFunctionBegin; 304 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 305 PetscAssertPointer(n, 2); 306 *n = mapping->bs * mapping->n; 307 PetscFunctionReturn(PETSC_SUCCESS); 308 } 309 310 /*@C 311 ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database 312 313 Collective 314 315 Input Parameters: 316 + A - the local to global mapping object 317 . 
obj - Optional object that provides the options prefix used for the options database query
- name - command line option

  Level: intermediate

  Note:
  See `PetscObjectViewFromOptions()` for the available `PetscViewer` and `PetscViewerFormat`

.seealso: [](sec_scatter), `PetscViewer`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingView - View a local to global mapping

  Not Collective

  Input Parameters:
+ mapping - local to global mapping
- viewer - viewer

  Level: advanced

.seealso: [](sec_scatter), `PetscViewer`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer)
{
  PetscInt    i;
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* Default to stdout on the mapping's communicator */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < mapping->n; i++) {
      PetscInt bs = mapping->bs, g = mapping->indices[i];
      /* For bs > 1 print the local and global entry ranges covered by each block */
      if (bs == 1) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, g));
      else PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT ":%" PetscInt_FMT " %" PetscInt_FMT ":%" PetscInt_FMT "\n", rank, i * bs, (i + 1) * bs, g * bs, (g + 1) * bs));
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. is - index set containing the global numbers for each local number

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  the block size of the `IS` determines the block size of the mapping

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping)
{
  PetscInt        n, bs;
  const PetscInt *indices;
  MPI_Comm        comm;
  PetscBool       isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID, 1);
  PetscAssertPointer(mapping, 2);

  PetscCall(PetscObjectGetComm((PetscObject)is, &comm));
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock));
  if (!isblock) {
    PetscCall(ISGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISRestoreIndices(is, &indices));
  } else {
    /* An ISBLOCK holds one index per block; n counts entries, so there are n/bs blocks */
    PetscCall(ISGetBlockSize(is, &bs));
    PetscCall(ISBlockGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISBlockRestoreIndices(is, &indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Collective

  Input Parameters:
+ sf - star forest mapping contiguous local indices to (rank, offset)
- start - first global index on this process, or `PETSC_DECIDE` to compute contiguous global numbering automatically

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  If any processor calls this with `start` = `PETSC_DECIDE` then all processors must, otherwise the program will hang.

.seealso: [](sec_scatter), `PetscSF`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping)
{
  PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
  PetscAssertPointer(mapping, 3);
  PetscCall(PetscObjectGetComm((PetscObject)sf, &comm));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL));
  if (start == PETSC_DECIDE) {
    /* Exclusive prefix sum of the root counts yields this rank's first global index */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm));
  } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal;
  PetscCall(PetscMalloc1(nroots, &globals));
  PetscCall(PetscMalloc1(maxlocal, &ltog)); /* NOTE(review): restored "&ltog" here; the text had been mangled to "<og" by HTML escaping */
  for (i = 0; i < nroots; i++) globals[i] = start + i;
  for (i = 0; i < maxlocal; i++) ltog[i] = -1; /* leaves not reached by the SF keep -1 (no global value) */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  /* The mapping takes ownership of ltog (PETSC_OWN_POINTER); globals is only a scratch buffer */
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

  Not Collective

  Input Parameters:
+ mapping - mapping data structure
- bs - the blocksize

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt        i, cn, on, obs, nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs);
  if (bs == mapping->bs) PetscFunctionReturn(PETSC_SUCCESS);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* The total number of entries on*obs must be divisible by the new block size */
  nn  = (on * obs) / bs;
  PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on);

  PetscCall(PetscMalloc1(nn, &nid));
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid));
  for (i = 0; i < nn; i++) {
    PetscInt j;
    /* Each new block must consist of bs consecutive global entries, or be entirely negative */
    for (j = 0, cn = 0; j < bs - 1; j++) {
      if (oid[i * bs + j] < 0) {
        cn++;
        continue;
      }
      PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]);
    }
    if (oid[i * bs + j] < 0) cn++;
    if (cn) {
      /* A block containing any negative entry must be all-negative; it maps to -1 */
      PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i * bs] / bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid));

  mapping->n  = nn;
  mapping->bs = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    PetscCall(PetscFree(mapping->info_indices[0]));
    for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i]));
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard the type-specific global-to-local data; it is rebuilt lazily on next use */
  PetscTryTypeMethod(mapping, destroy);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Output Parameter:
. bs - the blocksize

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  *bs = mapping->bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective, but communicator may have more than one process

  Input Parameters:
+ comm - MPI communicator
. bs - the block size
.
n - the number of local elements divided by the block size, or equivalently the number of block indices 575 . indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs 576 - mode - see PetscCopyMode 577 578 Output Parameter: 579 . mapping - new mapping data structure 580 581 Level: advanced 582 583 Notes: 584 There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1 585 586 For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` 587 of `ISLOCALTOGLOBALMAPPINGBASIC` will be used; this uses more memory but is faster; this approach is not scalable for extremely large mappings. 588 589 For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable. 590 Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option 591 `-islocaltoglobalmapping_type` <`basic`,`hash`> to control which is used. 
592 593 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, 594 `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH` 595 `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType` 596 @*/ 597 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping) 598 { 599 PetscInt *in; 600 601 PetscFunctionBegin; 602 if (n) PetscAssertPointer(indices, 4); 603 PetscAssertPointer(mapping, 6); 604 605 *mapping = NULL; 606 PetscCall(ISInitializePackage()); 607 608 PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView)); 609 (*mapping)->n = n; 610 (*mapping)->bs = bs; 611 if (mode == PETSC_COPY_VALUES) { 612 PetscCall(PetscMalloc1(n, &in)); 613 PetscCall(PetscArraycpy(in, indices, n)); 614 (*mapping)->indices = in; 615 (*mapping)->dealloc_indices = PETSC_TRUE; 616 } else if (mode == PETSC_OWN_POINTER) { 617 (*mapping)->indices = (PetscInt *)indices; 618 (*mapping)->dealloc_indices = PETSC_TRUE; 619 } else if (mode == PETSC_USE_POINTER) { 620 (*mapping)->indices = (PetscInt *)indices; 621 } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode); 622 PetscFunctionReturn(PETSC_SUCCESS); 623 } 624 625 PetscFunctionList ISLocalToGlobalMappingList = NULL; 626 627 /*@ 628 ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database. 629 630 Not Collective 631 632 Input Parameter: 633 . mapping - mapping data structure 634 635 Options Database Key: 636 . 
-islocaltoglobalmapping_type - <basic,hash> nonscalable and scalable versions 637 638 Level: advanced 639 640 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, 641 `ISLocalToGlobalMappingCreateIS()`, `ISLOCALTOGLOBALMAPPINGBASIC`, 642 `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType` 643 @*/ 644 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping) 645 { 646 char type[256]; 647 ISLocalToGlobalMappingType defaulttype = "Not set"; 648 PetscBool flg; 649 650 PetscFunctionBegin; 651 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 652 PetscCall(ISLocalToGlobalMappingRegisterAll()); 653 PetscObjectOptionsBegin((PetscObject)mapping); 654 PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg)); 655 if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type)); 656 PetscOptionsEnd(); 657 PetscFunctionReturn(PETSC_SUCCESS); 658 } 659 660 /*@ 661 ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n) 662 ordering and a global parallel ordering. 663 664 Not Collective 665 666 Input Parameter: 667 . 
mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*mapping, IS_LTOGM_CLASSID, 1);
  /* Only the last reference actually destroys the object */
  if (--((PetscObject)*mapping)->refct > 0) {
    *mapping = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Free the indices only if this mapping owns them (created with PETSC_COPY_VALUES or PETSC_OWN_POINTER) */
  if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices));
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  /* NOTE(review): entry 0 of info_indices appears to be allocated separately from entries
     1..info_nproc-1, matching the asymmetric free pattern below — confirm against the
     routine that fills this cache */
  if ((*mapping)->info_indices) {
    PetscInt i;

    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei));
  /* Let the type-specific implementation release its global-to-local data */
  PetscTryTypeMethod(*mapping, destroy);
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISLocalToGlobalMappingApplyIS - Creates from an `IS` in the local numbering
  a new index set using the global numbering defined in an `ISLocalToGlobalMapping`
  context.

  Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
- is - index set in local numbering

  Output Parameter:
. newis - index set in global numbering

  Level: advanced

  Note:
  The output `IS` will have the same communicator as the input `IS`.
718 719 .seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 720 `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()` 721 @*/ 722 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis) 723 { 724 PetscInt n, *idxout; 725 const PetscInt *idxin; 726 727 PetscFunctionBegin; 728 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 729 PetscValidHeaderSpecific(is, IS_CLASSID, 2); 730 PetscAssertPointer(newis, 3); 731 732 PetscCall(ISGetLocalSize(is, &n)); 733 PetscCall(ISGetIndices(is, &idxin)); 734 PetscCall(PetscMalloc1(n, &idxout)); 735 PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout)); 736 PetscCall(ISRestoreIndices(is, &idxin)); 737 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis)); 738 PetscFunctionReturn(PETSC_SUCCESS); 739 } 740 741 /*@ 742 ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering 743 and converts them to the global numbering. 744 745 Not Collective 746 747 Input Parameters: 748 + mapping - the local to global mapping context 749 . N - number of integers 750 - in - input indices in local numbering 751 752 Output Parameter: 753 . out - indices in global numbering 754 755 Level: advanced 756 757 Note: 758 The `in` and `out` array parameters may be identical. 
759 760 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 761 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 762 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 763 @*/ 764 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) 765 { 766 PetscInt i, bs, Nmax; 767 768 PetscFunctionBegin; 769 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 770 bs = mapping->bs; 771 Nmax = bs * mapping->n; 772 if (bs == 1) { 773 const PetscInt *idx = mapping->indices; 774 for (i = 0; i < N; i++) { 775 if (in[i] < 0) { 776 out[i] = in[i]; 777 continue; 778 } 779 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 780 out[i] = idx[in[i]]; 781 } 782 } else { 783 const PetscInt *idx = mapping->indices; 784 for (i = 0; i < N; i++) { 785 if (in[i] < 0) { 786 out[i] = in[i]; 787 continue; 788 } 789 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 790 out[i] = idx[in[i] / bs] * bs + (in[i] % bs); 791 } 792 } 793 PetscFunctionReturn(PETSC_SUCCESS); 794 } 795 796 /*@ 797 ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering 798 799 Not Collective 800 801 Input Parameters: 802 + mapping - the local to global mapping context 803 . N - number of integers 804 - in - input indices in local block numbering 805 806 Output Parameter: 807 . 
out - indices in global block numbering 808 809 Example: 810 If the index values are {0,1,6,7} set with a call to `ISLocalToGlobalMappingCreate`(`PETSC_COMM_SELF`,2,2,{0,3}) then the mapping applied to 0 811 (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3. 812 813 Level: advanced 814 815 Note: 816 The `in` and `out` array parameters may be identical. 817 818 .seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`, 819 `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`, 820 `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()` 821 @*/ 822 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[]) 823 { 824 PetscInt i, Nmax; 825 const PetscInt *idx; 826 827 PetscFunctionBegin; 828 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 829 Nmax = mapping->n; 830 idx = mapping->indices; 831 for (i = 0; i < N; i++) { 832 if (in[i] < 0) { 833 out[i] = in[i]; 834 continue; 835 } 836 PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i); 837 out[i] = idx[in[i]]; 838 } 839 PetscFunctionReturn(PETSC_SUCCESS); 840 } 841 842 /*@ 843 ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers 844 specified with a global numbering. 845 846 Not Collective 847 848 Input Parameters: 849 + mapping - mapping between local and global numbering 850 . type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them) 851 `IS_GTOLM_DROP` - drops the indices with no local value from the output list 852 . 
  n - number of global indices to map
- idx - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
           to hold all the indices. You can call `ISGlobalToLocalMappingApply()` with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call `ISGlobalToLocalMappingApply()`
           a second time to set the values.

  Level: advanced

  Notes:
  Either `nout` or `idxout` may be `NULL`. `idx` and `idxout` may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of
  `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Notes:
  The manual page states that `idx` and `idxout` may be identical but the calling
  sequence declares `idx` as const so it cannot be the same as `idxout`.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the global-to-local lookup structure (basic table or hash) is built lazily on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific implementation (basic or hash) */
  PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  ISGlobalToLocalMappingApplyIS - Creates from an `IS` in the global numbering
  a new index set using the local numbering defined in an `ISLocalToGlobalMapping`
  context.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
   `IS_GTOLM_DROP` - drops the indices with no local value from the output list
- is - index set in global numbering

  Output Parameter:
.
newis - index set in local numbering 904 905 Level: advanced 906 907 Note: 908 The output `IS` will be sequential, as it encodes a purely local operation 909 910 .seealso: [](sec_scatter), `ISGlobalToLocalMapping`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`, 911 `ISLocalToGlobalMappingDestroy()` 912 @*/ 913 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis) 914 { 915 PetscInt n, nout, *idxout; 916 const PetscInt *idxin; 917 918 PetscFunctionBegin; 919 PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1); 920 PetscValidHeaderSpecific(is, IS_CLASSID, 3); 921 PetscAssertPointer(newis, 4); 922 923 PetscCall(ISGetLocalSize(is, &n)); 924 PetscCall(ISGetIndices(is, &idxin)); 925 if (type == IS_GTOLM_MASK) { 926 PetscCall(PetscMalloc1(n, &idxout)); 927 } else { 928 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL)); 929 PetscCall(PetscMalloc1(nout, &idxout)); 930 } 931 PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout)); 932 PetscCall(ISRestoreIndices(is, &idxin)); 933 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis)); 934 PetscFunctionReturn(PETSC_SUCCESS); 935 } 936 937 /*@ 938 ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers 939 specified with a block global numbering. 940 941 Not Collective 942 943 Input Parameters: 944 + mapping - mapping between local and global numbering 945 . type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them) 946 `IS_GTOLM_DROP` - drops the indices with no local value from the output list 947 . 
  n - number of global indices to map
- idx - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
           to hold all the indices. You can call `ISGlobalToLocalMappingApplyBlock()` with
           idxout == NULL to determine the required length (returned in nout)
           and then allocate the required space and call `ISGlobalToLocalMappingApplyBlock()`
           a second time to set the values.

  Level: advanced

  Notes:
  Either `nout` or `idxout` may be `NULL`. `idx` and `idxout` may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of
  `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Notes:
  The manual page states that `idx` and `idxout` may be identical but the calling
  sequence declares `idx` as const so it cannot be the same as `idxout`.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the global-to-local lookup structure (basic table or hash) is built lazily on first use */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific implementation (basic or hash) */
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. procs - neighboring processors
. numprocs - number of indices for each subdomain (processor)
- indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

  Level: advanced

  Fortran Notes:
  There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that
  `procs`[], `numprocs`[] and `indices`[][] are large enough arrays, either by allocating them
  dynamically or defining static ones large enough.
.vb
   PetscInt indices[nproc][numprocmax],ierr)
   ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
   ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
.ve

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_cached) {
    /* the neighbor information was computed by an earlier call and cached on the mapping */
    *nproc    = mapping->info_nproc;
    *procs    = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices  = mapping->info_indices;
  } else {
    PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Computes the neighbor information for the mapping with a rendezvous algorithm:
   the global index space is partitioned into equal-sized bins (one per rank), each rank
   sends its local indices to the bin owners, the bin owners determine which indices have
   more than one local owner and send the sharing-rank lists back. The results are cached
   on the mapping (info_cached) so subsequent calls are free; the cache is returned by
   ISLocalToGlobalMappingGetBlockInfo() above.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry arrays so Restore can free them uniformly */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++; /* Ng = one past the largest global index anywhere */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  scale = Ng / size + 1; /* each rank owns the contiguous bin [rank*scale, rank*scale + ng) of global indices */
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1);
  ng     = PetscMax(1, ng);
  rstart = scale * rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of local nodes to owners; each entry is a (global index, local index) pair */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* the packing loop advanced starts[]; rebuild the offsets for the sends below */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2; /* number of (global,local) pairs received */
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  PetscCall(PetscMalloc1(size, &bprocs)); /* bprocs[rank] = position of rank in the compressed neighbor list */
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetBlockInfo()`

  Collective

  Input Parameter:
.
 mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. procs - neighboring processors
. numprocs - number of indices for each processor
- indices - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when it expanded the cached block
       info for bs > 1: numprocs and the per-neighbor index arrays were freshly allocated
       there and must be freed here; *procs still aliases the cached info_procs and is NOT freed */
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
      PetscCall(PetscFree(*indices));
    }
  }
  /* always reset the caller's pointers; cached arrays stay owned by the mapping */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
  each index shared by more than one processor

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. procs - neighboring processors
. numprocs - number of indices for each subdomain (processor)
- indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

  Level: advanced

  Note:
  The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.
  Fortran Notes:
  There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that
  `procs`[], `numprocs`[] and `indices`[][] are large enough arrays, either by allocating them
  dynamically or defining static ones large enough.

.vb
   PetscInt indices[nproc][numprocmax],ierr)
   ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
   ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
.ve

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingRestoreInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  bs = mapping->bs;
  /* the neighbor information is computed (and cached) in block units */
  PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices));
  if (bs > 1) { /* we need to expand the cached info: each shared block becomes bs shared point indices */
    PetscCall(PetscCalloc1(*nproc, &*indices));
    PetscCall(PetscCalloc1(*nproc, &*numprocs));
    for (i = 0; i < *nproc; i++) {
      PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i]));
      for (j = 0; j < bnumprocs[i]; j++) {
        for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k;
      }
      (*numprocs)[i] = bnumprocs[i] * bs;
    }
    mapping->info_free = PETSC_TRUE; /* tells ISLocalToGlobalMappingRestoreBlockInfo() to free these expanded arrays */
  } else {
    /* bs == 1: block info and point info coincide; hand out the cached arrays directly */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetInfo()`

  Collective

  Input Parameter:
.
 mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc - number of processors that are connected to this one
. procs - neighboring processors
. numprocs - number of indices for each processor
- indices - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  /* the block restore frees the expanded (bs > 1) arrays when mapping->info_free is set
     and resets the caller's pointers; cached arrays remain owned by the mapping */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each MPI rank

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nnodes - number of local nodes (same `ISLocalToGlobalMappingGetSize()`)
. count - number of neighboring processors per node
- indices - indices of processes sharing the node (sorted)

  Level: advanced

  Note:
  The user needs to call `ISLocalToGlobalMappingRestoreNodeInfo()` when the data is no longer needed.
.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) {
    /* first call: invert the per-neighbor info from GetInfo() into per-node CSR-like
       arrays info_nodec (counts) / info_nodei (rank lists), cached on the mapping */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* first pass: size each node's list -- 1 slot for self plus one per sharing neighbor */
    for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
    m                      = n;
    mapping->info_nodec[n] = 0;
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one contiguous allocation; info_nodei[i] point into it at the prefix-sum offsets */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    /* second pass: fill the lists, starting with the self entry neigh[0] */
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      mapping->info_nodei[i][0] = neigh[0];
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetNodeInfo()`

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nnodes - number of local nodes
. count - number of neighboring processors per node
- indices - indices of processes sharing the node (sorted)

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* the count/indices arrays are cached on the mapping (info_nodec/info_nodei);
     only the caller's pointers are reset here */
  if (nnodes) *nnodes = 0;
  if (count) *count = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  ISLocalToGlobalMappingGetIndicesF90 - Get global indices for every local point that is mapped

  Synopsis:
  ISLocalToGlobalMappingGetIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:)}, integer ierr)

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
.
 array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`

  Level: advanced

  Note:
  Use `ISLocalToGlobalMappingRestoreIndicesF90()` when you no longer need access to the data

.seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`,
          `ISLocalToGlobalMappingRestoreIndicesF90()`
M*/

/*MC
  ISLocalToGlobalMappingRestoreIndicesF90 - restores the global indices for every local point that is mapped obtained with `ISLocalToGlobalMappingGetIndicesF90()`

  Synopsis:
  ISLocalToGlobalMappingRestoreIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:)}, integer ierr)

  Not Collective

  Input Parameters:
+ A - the matrix
- array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`

  Level: advanced

.seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`,
          `ISLocalToGlobalMappingGetIndicesF90()`
M*/

/*@C
  ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
. array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`

  Level: advanced

  Note:
  `ISLocalToGlobalMappingGetSize()` returns the length of this array

.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`,
          `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscAssertPointer(array, 2);
  if (ltog->bs == 1) {
    /* block size 1: hand out the internal array directly (no copy, nothing to free) */
    *array = ltog->indices;
  } else {
    /* bs > 1: expand each block index into bs consecutive point indices;
       this allocation is freed by ISLocalToGlobalMappingRestoreIndices() */
    PetscInt       *jj, k, i, j, n = ltog->n, bs = ltog->bs;
    const PetscInt *ii;

    PetscCall(PetscMalloc1(bs * n, &jj));
    *array = jj;
    k      = 0;
    ii     = ltog->indices;
    for (i = 0; i < n; i++)
      for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with `ISLocalToGlobalMappingGetIndices()`

  Not Collective

  Input Parameters:
+ ltog - local to global mapping
- array - array of indices

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscAssertPointer(array, 2);
  PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");

  if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array));
  PetscFunctionReturn(PETSC_SUCCESS);
1748 } 1749 1750 /*@C 1751 ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block 1752 1753 Not Collective 1754 1755 Input Parameter: 1756 . ltog - local to global mapping 1757 1758 Output Parameter: 1759 . array - array of indices 1760 1761 Level: advanced 1762 1763 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()` 1764 @*/ 1765 PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1766 { 1767 PetscFunctionBegin; 1768 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1769 PetscAssertPointer(array, 2); 1770 *array = ltog->indices; 1771 PetscFunctionReturn(PETSC_SUCCESS); 1772 } 1773 1774 /*@C 1775 ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with `ISLocalToGlobalMappingGetBlockIndices()` 1776 1777 Not Collective 1778 1779 Input Parameters: 1780 + ltog - local to global mapping 1781 - array - array of indices 1782 1783 Level: advanced 1784 1785 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()` 1786 @*/ 1787 PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array) 1788 { 1789 PetscFunctionBegin; 1790 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1791 PetscAssertPointer(array, 2); 1792 PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer"); 1793 *array = NULL; 1794 PetscFunctionReturn(PETSC_SUCCESS); 1795 } 1796 1797 /*@C 1798 ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings 1799 1800 Not Collective 1801 1802 Input Parameters: 1803 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate 1804 . 
n - number of mappings to concatenate 1805 - ltogs - local to global mappings 1806 1807 Output Parameter: 1808 . ltogcat - new mapping 1809 1810 Level: advanced 1811 1812 Note: 1813 This currently always returns a mapping with block size of 1 1814 1815 Developer Notes: 1816 If all the input mapping have the same block size we could easily handle that as a special case 1817 1818 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()` 1819 @*/ 1820 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat) 1821 { 1822 PetscInt i, cnt, m, *idx; 1823 1824 PetscFunctionBegin; 1825 PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n); 1826 if (n > 0) PetscAssertPointer(ltogs, 3); 1827 for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3); 1828 PetscAssertPointer(ltogcat, 4); 1829 for (cnt = 0, i = 0; i < n; i++) { 1830 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1831 cnt += m; 1832 } 1833 PetscCall(PetscMalloc1(cnt, &idx)); 1834 for (cnt = 0, i = 0; i < n; i++) { 1835 const PetscInt *subidx; 1836 PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m)); 1837 PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx)); 1838 PetscCall(PetscArraycpy(&idx[cnt], subidx, m)); 1839 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx)); 1840 cnt += m; 1841 } 1842 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat)); 1843 PetscFunctionReturn(PETSC_SUCCESS); 1844 } 1845 1846 /*MC 1847 ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is 1848 used this is good for only small and moderate size problems. 1849 1850 Options Database Key: 1851 . 
-islocaltoglobalmapping_type basic - select this method 1852 1853 Level: beginner 1854 1855 Developer Note: 1856 This stores all the mapping information on each MPI rank. 1857 1858 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH` 1859 M*/ 1860 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog) 1861 { 1862 PetscFunctionBegin; 1863 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic; 1864 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic; 1865 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic; 1866 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic; 1867 PetscFunctionReturn(PETSC_SUCCESS); 1868 } 1869 1870 /*MC 1871 ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is 1872 used this is good for large memory problems. 1873 1874 Options Database Key: 1875 . -islocaltoglobalmapping_type hash - select this method 1876 1877 Level: beginner 1878 1879 Note: 1880 This is selected automatically for large problems if the user does not set the type. 
1881 1882 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC` 1883 M*/ 1884 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog) 1885 { 1886 PetscFunctionBegin; 1887 ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash; 1888 ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash; 1889 ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash; 1890 ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash; 1891 PetscFunctionReturn(PETSC_SUCCESS); 1892 } 1893 1894 /*@C 1895 ISLocalToGlobalMappingRegister - Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping` 1896 1897 Not Collective 1898 1899 Input Parameters: 1900 + sname - name of a new method 1901 - function - routine to create method context 1902 1903 Example Usage: 1904 .vb 1905 ISLocalToGlobalMappingRegister("my_mapper", MyCreate); 1906 .ve 1907 1908 Then, your mapping can be chosen with the procedural interface via 1909 $ ISLocalToGlobalMappingSetType(ltog, "my_mapper") 1910 or at runtime via the option 1911 $ -islocaltoglobalmapping_type my_mapper 1912 1913 Level: advanced 1914 1915 Note: 1916 `ISLocalToGlobalMappingRegister()` may be called multiple times to add several user-defined mappings. 
1917 1918 .seealso: [](sec_scatter), `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, 1919 `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()` 1920 @*/ 1921 PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping)) 1922 { 1923 PetscFunctionBegin; 1924 PetscCall(ISInitializePackage()); 1925 PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function)); 1926 PetscFunctionReturn(PETSC_SUCCESS); 1927 } 1928 1929 /*@C 1930 ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use 1931 1932 Logically Collective 1933 1934 Input Parameters: 1935 + ltog - the `ISLocalToGlobalMapping` object 1936 - type - a known method 1937 1938 Options Database Key: 1939 . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list of available methods (for instance, basic or hash) 1940 1941 Level: intermediate 1942 1943 Notes: 1944 See `ISLocalToGlobalMappingType` for available methods 1945 1946 Normally, it is best to use the `ISLocalToGlobalMappingSetFromOptions()` command and 1947 then set the `ISLocalToGlobalMappingType` from the options database rather than by using 1948 this routine. 1949 1950 Developer Notes: 1951 `ISLocalToGlobalMappingRegister()` is used to add new types to `ISLocalToGlobalMappingList` from which they 1952 are accessed by `ISLocalToGlobalMappingSetType()`. 
1953 1954 .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()` 1955 @*/ 1956 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type) 1957 { 1958 PetscBool match; 1959 PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL; 1960 1961 PetscFunctionBegin; 1962 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 1963 if (type) PetscAssertPointer(type, 2); 1964 1965 PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match)); 1966 if (match) PetscFunctionReturn(PETSC_SUCCESS); 1967 1968 /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */ 1969 if (type) { 1970 PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r)); 1971 PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type); 1972 } 1973 /* Destroy the previous private LTOG context */ 1974 PetscTryTypeMethod(ltog, destroy); 1975 ltog->ops->destroy = NULL; 1976 1977 PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type)); 1978 if (r) PetscCall((*r)(ltog)); 1979 PetscFunctionReturn(PETSC_SUCCESS); 1980 } 1981 1982 /*@C 1983 ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping` 1984 1985 Not Collective 1986 1987 Input Parameter: 1988 . ltog - the `ISLocalToGlobalMapping` object 1989 1990 Output Parameter: 1991 . 
type - the type 1992 1993 Level: intermediate 1994 1995 .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()` 1996 @*/ 1997 PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type) 1998 { 1999 PetscFunctionBegin; 2000 PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1); 2001 PetscAssertPointer(type, 2); 2002 *type = ((PetscObject)ltog)->type_name; 2003 PetscFunctionReturn(PETSC_SUCCESS); 2004 } 2005 2006 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; 2007 2008 /*@C 2009 ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the `IS` package. 2010 2011 Not Collective 2012 2013 Level: advanced 2014 2015 .seealso: [](sec_scatter), `ISRegister()`, `ISLocalToGlobalRegister()` 2016 @*/ 2017 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void) 2018 { 2019 PetscFunctionBegin; 2020 if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS); 2021 ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE; 2022 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic)); 2023 PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash)); 2024 PetscFunctionReturn(PETSC_SUCCESS); 2025 } 2026